abdullahalioo committed
Commit f294c26 · verified · 1 Parent(s): cbb83bf

Update main.py

Files changed (1)
  1. main.py +21 -20
main.py CHANGED
@@ -1,4 +1,5 @@
 from fastapi import FastAPI, HTTPException
+from fastapi.responses import StreamingResponse
 from pydantic import BaseModel
 from hugchat import hugchat
 from hugchat.login import Login
@@ -53,34 +54,34 @@ async def startup_event():
 @app.post("/generate")
 async def generate_response(request: QuestionRequest):
     """
-    Generates a response from the AI based on the provided question.
+    Generates a streaming response from the AI based on the provided question.
 
     Args:
         request (QuestionRequest): JSON body containing the question.
 
     Returns:
-        dict: A dictionary containing the AI's response or an error message.
+        StreamingResponse: A streaming response with the AI's reply.
     """
     global chatbot
     if chatbot is None:
         raise HTTPException(status_code=500, detail="Chatbot not initialized. Please try again later.")
 
-    try:
-        # Generate response (non-streaming for simplicity)
-        response_data = chatbot.chat(request.question, stream=False)
-
-        # Extract the actual response text
-        # The response may be a dictionary; check for 'gen' or other keys
-        if isinstance(response_data, dict):
-            # Assuming 'gen' contains the response text (list of strings)
-            response_text = "".join(response_data.get("gen", [])) if response_data.get("gen") else "Here's what we can do: Let's discuss your vision!"
-            if not response_text:
-                # Fallback to a default premium response if 'gen' is empty
-                response_text = f"Welcome, valued client! How can Abdullah Ali and our premium team bring your vision to life with a custom website or AI chatbot?"
-        else:
-            response_text = response_data  # Direct string response (if hugchat returns string)
-
-        return {"response": response_text}
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Failed to generate response: {str(e)}")
+    async def generate():
+        try:
+            # Use streaming to match client expectations
+            for token in chatbot.chat(request.question, stream=True):
+                # Handle hugchat response (token may be dict or string)
+                if isinstance(token, dict):
+                    # Extract token from dictionary (e.g., {"type": "stream", "token": "text"})
+                    token_text = token.get("token", "")
+                else:
+                    token_text = str(token)
+
+                # Encode and yield the token
+                yield token_text.encode('utf-8')
+        except Exception as e:
+            error_message = f"Error: Failed to generate response: {str(e)}"
+            yield error_message.encode('utf-8')
+
+    return StreamingResponse(generate(), media_type="text/plain")
 
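
For reference, a minimal client-side sketch of how the updated streaming endpoint might be consumed. It assumes the app is served at http://localhost:8000 (hypothetical host and port) and that QuestionRequest exposes a single question field, as the handler's request.question access suggests; adjust both to the actual deployment.

# Client sketch for the streaming /generate endpoint.
# Assumptions: app served at http://localhost:8000 (hypothetical),
# request body is {"question": "..."} per the QuestionRequest model.
import requests

def ask(question: str) -> None:
    with requests.post(
        "http://localhost:8000/generate",
        json={"question": question},
        stream=True,
    ) as resp:
        resp.raise_for_status()
        # Print each chunk as it arrives instead of waiting for the full reply
        for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):
            if chunk:
                print(chunk, end="", flush=True)
    print()

if __name__ == "__main__":
    ask("What services do you offer?")

Reading with stream=True and iter_content(chunk_size=None) surfaces each flushed chunk as soon as it arrives, which is the point of switching the endpoint from a single JSON reply to StreamingResponse.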