Skip to content

Commit 84a5381

Browse files
author
Sherin Thomas
committed
Merge branch 'bugfix/autoscale-batching' of github.com:Lightning-AI/lightning into bugfix/autoscale-batching
2 parents adf33d1 + 7e6e35c commit 84a5381

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

src/lightning_app/components/serve/auto_scaler.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -185,13 +185,13 @@ async def send_batch(self, batch: List[Tuple[str, _BatchRequestModel]]):
185185
async def consumer(self):
186186
while True:
187187
await asyncio.sleep(0.05)
188-
batch = self._batch[:self.max_batch_size]
188+
batch = self._batch[: self.max_batch_size]
189189
is_batch_ready = len(batch) == self.max_batch_size
190190
is_batch_timeout = time.time() - self._last_batch_sent > self.timeout_batching
191191
if batch and (is_batch_ready or is_batch_timeout):
192192
asyncio.create_task(self.send_batch(batch))
193193
# resetting the batch array, TODO - not locking the array
194-
self._batch = self._batch[len(batch):]
194+
self._batch = self._batch[len(batch) :]
195195
self._last_batch_sent = time.time()
196196

197197
async def process_request(self, data: BaseModel):

0 commit comments

Comments (0)