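# FastAPI app that streams chat completions from a local llama.cpp model
# (Llama 2 7B Chat, Q3_K_S quantization) through a single /chat endpoint.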
import os

from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import StreamingResponse
from llama_cpp import Llama

app = FastAPI()

# Load the quantized Llama 2 chat model once at startup.
llm = Llama(model_path="llama-2-7b-chat.Q3_K_S.gguf", n_ctx=2048, n_batch=512, use_mlock=True, n_threads=8)
def generate_output_stream(prompt):
    # Stream the completion from llama.cpp, yielding each generated text chunk as it arrives.
    output_stream = llm(prompt, max_tokens=1024, echo=False, temperature=0.2, top_p=0.1, stream=True)
    for chunk in output_stream:
        if chunk.get("choices") and chunk["choices"][0].get("text"):
            yield f"{chunk['choices'][0]['text']}\n"
def clear_screen():
    # Clear the server console ("cls" on Windows, "clear" elsewhere).
    os.system("cls" if os.name == "nt" else "clear")
@app.post("/chat")
async def chat(request: Request):
try:
data = await request.json()
user_input = data.get("user_input")
if not user_input:
raise HTTPException(status_code=400, detail="Missing 'user_input' field in the request JSON.")
ask = user_input
prompt = f"Llama-2-Chat [INST] <<SYS>>You're an assistant named Tusti. You are Developed by Aritra Roy. Don't share any false information.<</SYS>> {ask} [/INST]"
if ask == 'clear':
clear_screen()
return StreamingResponse(generate_output_stream(prompt), media_type="text/plain")
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
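
# Example client call (a sketch, assuming the server is reachable at localhost:7860 as configured below):
#
#   import requests
#   with requests.post("http://localhost:7860/chat",
#                      json={"user_input": "Hello!"}, stream=True) as resp:
#       for line in resp.iter_lines(decode_unicode=True):
#           if line:
#               print(line)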
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)