Spaces: Runtime error

Update app.py

app.py CHANGED
@@ -1,5 +1,4 @@
-from fastapi import FastAPI, HTTPException, Request
-from fastapi.responses import EventSourceResponse
+from fastapi import FastAPI, HTTPException, Request, StreamingResponse
 from llama_cpp import Llama
 import time
 import os
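Review note: the new import line is the most likely source of the Space's "Runtime error" badge. FastAPI does not re-export StreamingResponse from the package root, so `from fastapi import ..., StreamingResponse` raises ImportError at startup; the class lives in fastapi.responses (re-exported from Starlette). The removed EventSourceResponse import had a similar problem: that class comes from the third-party sse-starlette package, not from fastapi.responses. A minimal corrected import, assuming nothing else changes:

from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import StreamingResponse  # not importable from the fastapi package root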
@@ -14,6 +13,19 @@ def typewrite(text, delay=0.01):
         time.sleep(delay)
     print(end='', flush=True)  # Print newline to move to the next line
 
+async def generate_text_stream(ask):
+    prompt = f"Llama-2-Chat [INST] <<SYS>>You're an assistant named Tusti. You are Developed by Aritra Roy. Don't share any false information.<</SYS>> {ask} [/INST]"
+
+    output_stream = llm(prompt, max_tokens=1024, echo=False, temperature=0.2, top_p=0.1, stream=True)
+
+    while True:
+        try:
+            chunk = next(output_stream)
+            if chunk.get('choices') and chunk['choices'][0].get('text'):
+                yield chunk['choices'][0]['text']
+        except StopIteration:
+            break
+
 @app.post("/chat")
 async def chat(request: Request):
     try:
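Review note: with stream=True, llama-cpp-python returns an ordinary synchronous generator, so the while/next/StopIteration loop can be a plain for loop that ends on its own. A minimal sketch of the same generator under that assumption (system prompt abbreviated):

async def generate_text_stream(ask):
    # Same prompt template as in the diff, shortened here for readability.
    prompt = f"Llama-2-Chat [INST] <<SYS>>...<</SYS>> {ask} [/INST]"
    # Each streamed chunk is a dict like {'choices': [{'text': '...'}]}.
    for chunk in llm(prompt, max_tokens=1024, echo=False, temperature=0.2, top_p=0.1, stream=True):
        text = chunk['choices'][0].get('text')
        if text:
            yield text

Worth noting that the llm(...) call is synchronous: inside an async generator it blocks the event loop while each token is produced, which is tolerable for a single-user demo Space but would need offloading to a thread under concurrent load.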
@@ -28,31 +40,13 @@ async def chat(request: Request):
 
         ask = user_input
 
-        prompt = f"Llama-2-Chat [INST] <<SYS>>You're an assistant named Tusti. You are Developed by Aritra Roy. Don't share any false information.<</SYS>> {ask} [/INST]"
-
-        output_stream = llm(prompt, max_tokens=1024, echo=False, temperature=0.2, top_p=0.1, stream=True)
-
-        async def generate_chunks():
-            nonlocal output_stream
-            response_text_chunk = ''
-            try:
-                while True:
-                    chunk = next(output_stream)
-                    if chunk.get('choices') and chunk['choices'][0].get('text'):
-                        response_text_chunk += chunk['choices'][0]['text']
-                        typewrite(response_text_chunk, delay=0.00)
-                        yield f"data: {response_text_chunk}\n\n"
-            except StopIteration:
-                pass
-
         if ask == 'clear':
             os.system("cls")
 
-        return
+        return StreamingResponse(generate_text_stream(ask))
 
     except Exception as e:
-
-        raise HTTPException(status_code=500, detail="Internal Server Error")
+        raise HTTPException(status_code=500, detail=str(e))
 
 if __name__ == "__main__":
     import uvicorn
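Review note: a client consumes the new endpoint by reading the response body incrementally. A hypothetical sketch with requests; the JSON body shape ({"user_input": ...}) is an assumption, since the diff does not show how user_input is parsed from the request:

import requests

with requests.post("http://localhost:8000/chat", json={"user_input": "Hello"}, stream=True) as r:
    r.raise_for_status()
    r.encoding = "utf-8"  # the endpoint sends no charset, so set one before decoding
    for piece in r.iter_content(chunk_size=None, decode_unicode=True):
        print(piece, end="", flush=True)  # tokens print as they arrive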
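One last review note: the removed generate_chunks wrapped each chunk in SSE framing (data: ...\n\n), while the new endpoint streams raw text and passes no media_type, so the response carries no explicit Content-Type. If the frontend still expects server-sent events, the framing and content type have to come back, e.g.

return StreamingResponse(generate_text_stream(ask), media_type="text/event-stream")

with each yielded chunk formatted as "data: {text}\n\n"; for plain incremental text, media_type="text/plain" is the usual choice. Separately, os.system("cls") is Windows-only: in the Linux container behind a Space it is a no-op (the Linux command is clear), and clearing the server console from a web handler has no effect on the client in any case.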