Manofem committed on
Commit b14ade8 · 1 Parent(s): f6b5f89

Fixing a bug

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -1,7 +1,7 @@
 from fastapi import FastAPI, HTTPException, Request
 from fastapi.responses import StreamingResponse
 from llama_cpp import Llama
-
+##
 app = FastAPI()
 
 llm = Llama(model_path="llama-2-7b-chat.Q3_K_S.gguf", n_ctx=2048, n_batch=512, use_mlock=True, n_threads=8)
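
For context, the imports (StreamingResponse) and the Llama setup in this file suggest a token-streaming endpoint. Below is a minimal sketch of how the llm instance could be wired into FastAPI; the /generate route, request shape, and max_tokens value are illustrative assumptions, not part of this commit.

# Sketch only: route name, request body, and generation parameters are assumptions.
from fastapi import FastAPI, Request
from fastapi.responses import StreamingResponse
from llama_cpp import Llama

app = FastAPI()
llm = Llama(model_path="llama-2-7b-chat.Q3_K_S.gguf", n_ctx=2048, n_batch=512, use_mlock=True, n_threads=8)

@app.post("/generate")
async def generate(request: Request):
    body = await request.json()
    prompt = body.get("prompt", "")

    def token_stream():
        # llama-cpp-python yields completion chunks when stream=True
        for chunk in llm(prompt, max_tokens=256, stream=True):
            yield chunk["choices"][0]["text"]

    return StreamingResponse(token_stream(), media_type="text/plain")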