metastable-void committed on
Commit 245e479 · unverified · 1 parent: b665055

nonsensical change

Files changed (1): app.py (+1, -0)
app.py CHANGED
@@ -18,6 +18,7 @@ MAX_MAX_NEW_TOKENS = 2048
 DEFAULT_MAX_NEW_TOKENS = 1024
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "32768"))
 
+
 if torch.cuda.is_available():
     model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava7"
     my_pipeline=pipeline(
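
The change itself is a single blank line added before the torch.cuda.is_available() check; the pipeline( call is cut off by the diff context and is left as-is above. For orientation only, the following is a minimal sketch of how such a setup typically continues, assuming the standard transformers.pipeline text-generation API; the task name, dtype, and device_map choices are assumptions and are not taken from the actual app.py.

import os

import torch
from transformers import pipeline

MAX_MAX_NEW_TOKENS = 2048
DEFAULT_MAX_NEW_TOKENS = 1024
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "32768"))

if torch.cuda.is_available():
    model_id = "vericava/llm-jp-3-1.8b-instruct-lora-vericava7"
    # Hypothetical completion of the truncated pipeline(...) call:
    # a text-generation pipeline placed on the GPU. The task name,
    # dtype, and device_map below are assumed, not from the commit.
    my_pipeline = pipeline(
        "text-generation",
        model=model_id,
        torch_dtype=torch.bfloat16,
        device_map="auto",
    )

Under these assumptions, a call such as my_pipeline(prompt, max_new_tokens=DEFAULT_MAX_NEW_TOKENS) would generate text within the configured token limits.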