svjack committed on
Commit
cb77792
·
verified ·
1 Parent(s): f581b20

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -866,7 +866,7 @@ from llama_cpp import Llama
866
  llama = Llama(model_file_path,
867
  n_ctx = 4090,
868
  n_threads=2, # The number of CPU threads to use, tailor to your system and the resulting performance
869
- n_gpu_layers=-1
870
  )
871
  print("load {}".format(model_file_path))
872
 
 
866
  llama = Llama(model_file_path,
867
  n_ctx = 4090,
868
  n_threads=2, # The number of CPU threads to use, tailor to your system and the resulting performance
869
+ #n_gpu_layers=-1
870
  )
871
  print("load {}".format(model_file_path))
872