JoeArmani committed on
Commit 775baf9 · 1 Parent(s): 9decf80

batch sizing - search

Files changed (1)
  1. chatbot_model.py +3 -3
chatbot_model.py CHANGED
@@ -1419,13 +1419,13 @@ class StreamingDataPipeline:
         # TODO: use GPU/strategy
         if len(response_pool) < 100:
             self.embedding_batch_size = 16
-            self.search_batch_size = 8
+            self.search_batch_size = 16
             self.max_batch_size = 32
             self.min_batch_size = 4
         else:
             self.embedding_batch_size = 64
-            self.search_batch_size = 32
-            self.min_batch_size = max(8, batch_size // 4)
+            self.search_batch_size = 64
+            self.min_batch_size = 8
             self.max_batch_size = 64
 
     def save_cache(self, cache_dir: Path) -> None:
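For context, below is a minimal sketch of how these adaptive sizes might be consumed when embedding the response pool in chunks. Only the attribute names and values set in the diff (embedding_batch_size, search_batch_size, min_batch_size, max_batch_size) and the response_pool threshold come from the commit; the class name, chunks helper, and encode callable are hypothetical stand-ins, not code from this repository.

# Minimal sketch (assumed context, not repository code): the batch sizes are
# chosen from the pool size exactly as in the commit above; everything else
# is a hypothetical stand-in for the real pipeline.
from typing import Callable, Iterator, List


def chunks(items: List[str], size: int) -> Iterator[List[str]]:
    # Yield consecutive slices of `items`, each at most `size` long.
    for start in range(0, len(items), size):
        yield items[start:start + size]


class BatchSizingSketch:
    def __init__(self, response_pool: List[str]) -> None:
        self.response_pool = response_pool
        # Same thresholds and values as the committed change.
        if len(response_pool) < 100:
            self.embedding_batch_size = 16
            self.search_batch_size = 16
            self.max_batch_size = 32
            self.min_batch_size = 4
        else:
            self.embedding_batch_size = 64
            self.search_batch_size = 64
            self.min_batch_size = 8
            self.max_batch_size = 64

    def embed_pool(
        self, encode: Callable[[List[str]], List[List[float]]]
    ) -> List[List[float]]:
        # `encode` stands in for whatever encoder the real pipeline calls;
        # the pool is processed embedding_batch_size items at a time.
        vectors: List[List[float]] = []
        for batch in chunks(self.response_pool, self.embedding_batch_size):
            vectors.extend(encode(batch))
        return vectors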