fix: do not allow client to change server batch size (#787)
ZiniuYu authored Jul 28, 2022
1 parent 87928a7 commit 1db43b4
Showing 3 changed files with 6 additions and 10 deletions.
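For context, the pattern this commit changes: each executor's encode() previously resolved the minibatch size from the request parameters, falling back to the server's configured value, which let a client override it per request. After this change the server-side value is always used. Below is a minimal sketch of the before/after logic; the class is illustrative only, not the real executor, and only the attribute and parameter names visible in the diff are taken from the source.

from typing import Dict

from docarray import DocumentArray


class BatchSizeSketch:
    """Illustrative sketch of the batching pattern changed by this commit."""

    def __init__(self, minibatch_size: int = 32):
        # Configured once, when the server starts the executor.
        self._minibatch_size = minibatch_size

    async def encode(self, docs: 'DocumentArray', parameters: Dict = {}, **kwargs):
        # Before this commit: a request could carry {'minibatch_size': ...}
        # and silently override the server's setting:
        #   minibatch_size = parameters.get('minibatch_size', self._minibatch_size)

        # After this commit: the server-side value always wins.
        for minibatch in docs.batch(batch_size=self._minibatch_size):
            ...  # preprocess and run the model on each minibatch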
6 changes: 2 additions & 4 deletions server/clip_server/executors/clip_onnx.py
@@ -105,9 +105,7 @@ async def rank(self, docs: 'DocumentArray', parameters: Dict, **kwargs):
 
     @requests
     async def encode(self, docs: 'DocumentArray', parameters: Dict = {}, **kwargs):
-
         traversal_paths = parameters.get('traversal_paths', self._traversal_paths)
-        minibatch_size = parameters.get('minibatch_size', self._minibatch_size)
 
         _img_da = DocumentArray()
         _txt_da = DocumentArray()
@@ -118,7 +116,7 @@ async def encode(self, docs: 'DocumentArray', parameters: Dict = {}, **kwargs):
         if _img_da:
             for minibatch, batch_data in _img_da.map_batch(
                 self._preproc_images,
-                batch_size=minibatch_size,
+                batch_size=self._minibatch_size,
                 pool=self._pool,
             ):
                 with self.monitor(
@@ -131,7 +129,7 @@ async def encode(self, docs: 'DocumentArray', parameters: Dict = {}, **kwargs):
         if _txt_da:
             for minibatch, batch_data in _txt_da.map_batch(
                 self._preproc_texts,
-                batch_size=minibatch_size,
+                batch_size=self._minibatch_size,
                 pool=self._pool,
             ):
                 with self.monitor(
5 changes: 2 additions & 3 deletions server/clip_server/executors/clip_tensorrt.py
@@ -81,7 +81,6 @@ async def rank(self, docs: 'DocumentArray', parameters: Dict, **kwargs):
     @requests
     async def encode(self, docs: 'DocumentArray', parameters: Dict = {}, **kwargs):
         traversal_paths = parameters.get('traversal_paths', self._traversal_paths)
-        minibatch_size = parameters.get('minibatch_size', self._minibatch_size)
 
         _img_da = DocumentArray()
         _txt_da = DocumentArray()
@@ -92,7 +91,7 @@ async def encode(self, docs: 'DocumentArray', parameters: Dict = {}, **kwargs):
         if _img_da:
             for minibatch, batch_data in _img_da.map_batch(
                 self._preproc_images,
-                batch_size=minibatch_size,
+                batch_size=self._minibatch_size,
                 pool=self._pool,
             ):
                 with self.monitor(
@@ -111,7 +110,7 @@ async def encode(self, docs: 'DocumentArray', parameters: Dict = {}, **kwargs):
         if _txt_da:
             for minibatch, batch_data in _txt_da.map_batch(
                 self._preproc_texts,
-                batch_size=minibatch_size,
+                batch_size=self._minibatch_size,
                 pool=self._pool,
             ):
                 with self.monitor(
5 changes: 2 additions & 3 deletions server/clip_server/executors/clip_torch.py
@@ -91,7 +91,6 @@ async def rank(self, docs: 'DocumentArray', parameters: Dict, **kwargs):
     @requests
     async def encode(self, docs: 'DocumentArray', parameters: Dict = {}, **kwargs):
         traversal_paths = parameters.get('traversal_paths', self._traversal_paths)
-        minibatch_size = parameters.get('minibatch_size', self._minibatch_size)
 
         _img_da = DocumentArray()
         _txt_da = DocumentArray()
@@ -103,7 +102,7 @@ async def encode(self, docs: 'DocumentArray', parameters: Dict = {}, **kwargs):
         if _img_da:
             for minibatch, batch_data in _img_da.map_batch(
                 self._preproc_images,
-                batch_size=minibatch_size,
+                batch_size=self._minibatch_size,
                 pool=self._pool,
             ):
                 with self.monitor(
@@ -121,7 +120,7 @@ async def encode(self, docs: 'DocumentArray', parameters: Dict = {}, **kwargs):
         if _txt_da:
             for minibatch, batch_data in _txt_da.map_batch(
                 self._preproc_texts,
-                batch_size=minibatch_size,
+                batch_size=self._minibatch_size,
                 pool=self._pool,
             ):
                 with self.monitor(
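With the per-request override gone, the batch size can only be set where the executor is instantiated, for example through the server configuration or Flow arguments. A hedged sketch of setting it at startup, assuming the executor class is CLIPEncoder in clip_server.executors.clip_torch and that minibatch_size is accepted by its constructor (both are assumptions inferred from this diff, not shown in it):

from jina import Flow

from clip_server.executors.clip_torch import CLIPEncoder  # assumed import path and class name

# minibatch_size is passed once at startup; after this commit, request
# parameters can no longer change it.
f = Flow(port=51000).add(
    uses=CLIPEncoder,
    uses_with={'minibatch_size': 64},  # assumed constructor argument
)

with f:
    f.block()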
