Manofem committed on
Commit
8c77d9c
·
1 Parent(s): f76a4c5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -7
app.py CHANGED
@@ -1,6 +1,4 @@
1
-
2
-
3
- from fastapi import FastAPI, HTTPException
4
  from llama_cpp import Llama
5
  import time
6
  import os
@@ -16,8 +14,14 @@ def typewrite(text, delay=0.01):
16
  print(end='', flush=True) # Print newline to move to the next line
17
 
18
  @app.post("/chat")
19
- async def chat(user_input: str):
20
  try:
 
 
 
 
 
 
21
  os.system("cls")
22
  print("Chatbot by Aritra Roy & DVLH")
23
 
@@ -25,7 +29,6 @@ async def chat(user_input: str):
25
 
26
  prompt = f"Llama-2-Chat [INST] <<SYS>>You're an assistant named Tusti. You are Developed by Aritra Roy. Don't share any false information.<</SYS>> {ask} [/INST]"
27
 
28
- # Send the request with stream=True
29
  output_stream = llm(prompt, max_tokens=1024, echo=False, temperature=0.2, top_p=0.1, stream=True)
30
 
31
  response_text_chunk = ''
@@ -35,7 +38,7 @@ async def chat(user_input: str):
35
  chunk = next(output_stream)
36
  if chunk.get('choices') and chunk['choices'][0].get('text'):
37
  response_text_chunk += chunk['choices'][0]['text']
38
- typewrite(response_text_chunk, delay=0.00) # live response
39
  except StopIteration:
40
  break
41
 
@@ -49,4 +52,4 @@ async def chat(user_input: str):
49
 
50
  if __name__ == "__main__":
51
  import uvicorn
52
- uvicorn.run(app, host="0.0.0.0", port=7860)
 
1
+ from fastapi import FastAPI, HTTPException, Request
 
 
2
  from llama_cpp import Llama
3
  import time
4
  import os
 
14
  print(end='', flush=True) # Print newline to move to the next line
15
 
16
  @app.post("/chat")
17
+ async def chat(request: Request):
18
  try:
19
+ data = await request.json()
20
+ user_input = data.get("user_input")
21
+
22
+ if not user_input:
23
+ raise HTTPException(status_code=400, detail="Missing 'user_input' field in the request JSON.")
24
+
25
  os.system("cls")
26
  print("Chatbot by Aritra Roy & DVLH")
27
 
 
29
 
30
  prompt = f"Llama-2-Chat [INST] <<SYS>>You're an assistant named Tusti. You are Developed by Aritra Roy. Don't share any false information.<</SYS>> {ask} [/INST]"
31
 
 
32
  output_stream = llm(prompt, max_tokens=1024, echo=False, temperature=0.2, top_p=0.1, stream=True)
33
 
34
  response_text_chunk = ''
 
38
  chunk = next(output_stream)
39
  if chunk.get('choices') and chunk['choices'][0].get('text'):
40
  response_text_chunk += chunk['choices'][0]['text']
41
+ typewrite(response_text_chunk, delay=0.00)
42
  except StopIteration:
43
  break
44
 
 
52
 
53
  if __name__ == "__main__":
54
  import uvicorn
55
+ uvicorn.run(app, host="0.0.0.0", port=7860)