davanstrien (HF Staff) committed
Commit 0f7b9e2 · 1 Parent(s): af9c7a0

Add flashinfer-python dependency

Files changed (1):
generate-responses.py (+2, -1)
generate-responses.py CHANGED

@@ -2,6 +2,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "datasets",
+#     "flashinfer-python",
 #     "huggingface-hub[hf_transfer]",
 #     "torch",
 #     "transformers",
@@ -223,7 +224,7 @@ def main(
     if max_model_len is not None:
         vllm_kwargs["max_model_len"] = max_model_len
         logger.info(f"Using max_model_len={max_model_len}")
-
+
     llm = LLM(**vllm_kwargs)
 
     # Load tokenizer for chat template
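
The `# dependencies = [...]` comment lines touched by this commit are PEP 723 inline script metadata: a PEP 723-aware runner such as uv reads the block and installs the listed packages before executing the script, which is how flashinfer-python becomes available to vLLM at runtime. A minimal, self-contained sketch of the pattern (a generic file, not the repo's script; the version check is illustrative only):

# /// script
# requires-python = ">=3.10"
# dependencies = [
#     "flashinfer-python",
# ]
# ///
# Sketch of PEP 723 inline metadata. Assumption: the file is launched
# with a PEP 723-aware runner, e.g. `uv run sketch.py`, which installs
# the dependency block above before running the code below.
from importlib.metadata import version

# Confirm the runner installed the package this commit adds.
print("flashinfer-python", version("flashinfer-python"))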