Spaces:
Sleeping
Sleeping
Ganesh Chintalapati
committed on
Commit
·
a64da77
1
Parent(s):
fac7191
Fix syntax
Browse files
app.py
CHANGED
@@ -46,7 +46,7 @@ async def ask_openai(query: str, history: List[Dict[str, str]]) -> AsyncGenerato
|
|
46 |
async with httpx.AsyncClient() as client:
|
47 |
async with client.stream("POST", "https://api.openai.com/v1/chat/completions", headers=headers, json=payload) as response:
|
48 |
response.raise_for_status()
|
49 |
-
async for chunk in response.
|
50 |
if chunk:
|
51 |
# Parse the streaming chunk (JSON lines)
|
52 |
lines = chunk.splitlines()
|
@@ -177,14 +177,29 @@ async def query_model(query: str, provider: str, history: List[Dict[str, str]])
|
|
177 |
|
178 |
async def submit_query(query: str, provider: str, history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]], List[Dict[str, str]]], None]:
|
179 |
if not query.strip():
|
180 |
-
yield "",
|
181 |
return
|
182 |
|
183 |
response = ""
|
184 |
async for response_chunk, updated_history in query_model(query, provider, history):
|
185 |
response += response_chunk
|
186 |
-
|
187 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
188 |
|
189 |
# Gradio interface
|
190 |
def clear_history():
|
|
|
46 |
async with httpx.AsyncClient() as client:
|
47 |
async with client.stream("POST", "https://api.openai.com/v1/chat/completions", headers=headers, json=payload) as response:
|
48 |
response.raise_for_status()
|
49 |
+
async for chunk in response.aiter_text():
|
50 |
if chunk:
|
51 |
# Parse the streaming chunk (JSON lines)
|
52 |
lines = chunk.splitlines()
|
|
|
177 |
|
178 |
async def submit_query(query: str, provider: str, history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]], List[Dict[str, str]]], None]:
|
179 |
if not query.strip():
|
180 |
+
yield "", [{"role": "assistant", "content": "Please enter a query."}], history
|
181 |
return
|
182 |
|
183 |
response = ""
|
184 |
async for response_chunk, updated_history in query_model(query, provider, history):
|
185 |
response += response_chunk
|
186 |
+
# Convert history to chatbot messages format
|
187 |
+
chatbot_messages = []
|
188 |
+
for msg in updated_history:
|
189 |
+
chatbot_messages.append({"role": "user", "content": msg["user"]})
|
190 |
+
if msg["bot"]:
|
191 |
+
chatbot_messages.append({"role": "assistant", "content": msg["bot"]})
|
192 |
+
if response and provider == "openai":
|
193 |
+
# For streaming, show partial response
|
194 |
+
chatbot_messages[-1] = {"role": "assistant", "content": response}
|
195 |
+
yield "", chatbot_messages, updated_history # Yield intermediate updates
|
196 |
+
# Final yield with complete response
|
197 |
+
chatbot_messages = []
|
198 |
+
for msg in updated_history:
|
199 |
+
chatbot_messages.append({"role": "user", "content": msg["user"]})
|
200 |
+
if msg["bot"]:
|
201 |
+
chatbot_messages.append({"role": "assistant", "content": msg["bot"]})
|
202 |
+
yield "", chatbot_messages, updated_history
|
203 |
|
204 |
# Gradio interface
|
205 |
def clear_history():
|