Ganesh Chintalapati committed
Commit · fac7191
Parent(s): 128373b

Fix syntaxerror
app.py CHANGED

@@ -46,7 +46,7 @@ async def ask_openai(query: str, history: List[Dict[str, str]]) -> AsyncGenerator
     async with httpx.AsyncClient() as client:
         async with client.stream("POST", "https://api.openai.com/v1/chat/completions", headers=headers, json=payload) as response:
             response.raise_for_status()
-            async for chunk in response.
+            async for chunk in response.aiter_text():
                 if chunk:
                     # Parse the streaming chunk (JSON lines)
                     lines = chunk.splitlines()
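The fixed line iterates the streamed response body instead of the dangling `response.` that caused the syntax error. For reference, below is a minimal, self-contained sketch of the same pattern, assuming the standard httpx aiter_text() API and OpenAI's "data: ..." server-sent-event line format; the stream_chat name, the model name, and the OPENAI_API_KEY handling are illustrative and not taken from app.py.

import json
import os
from typing import AsyncGenerator

import httpx

OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")  # assumed env var name

async def stream_chat(query: str) -> AsyncGenerator[str, None]:
    # Illustrative streaming client; mirrors the request shape shown in the diff above.
    headers = {"Authorization": f"Bearer {OPENAI_API_KEY}"}
    payload = {
        "model": "gpt-3.5-turbo",  # assumed model; app.py builds its own payload
        "messages": [{"role": "user", "content": query}],
        "stream": True,
    }
    async with httpx.AsyncClient() as client:
        async with client.stream(
            "POST",
            "https://api.openai.com/v1/chat/completions",
            headers=headers,
            json=payload,
        ) as response:
            response.raise_for_status()
            async for chunk in response.aiter_text():
                if not chunk:
                    continue
                # Each chunk may carry several SSE lines: "data: {...}" or "data: [DONE]".
                for line in chunk.splitlines():
                    line = line.strip()
                    if not line.startswith("data: ") or line == "data: [DONE]":
                        continue
                    data = json.loads(line[len("data: "):])
                    delta = data["choices"][0]["delta"].get("content")
                    if delta:
                        yield delta

A production version would also buffer lines that are split across chunks before calling json.loads.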
@@ -172,17 +172,19 @@ async def query_model(query: str, provider: str, history: List[Dict[str, str]])
 
     # Update history with the new query and response
     updated_history = history + [{"user": query, "bot": response}]
+    logger.info(f"Updated history: {updated_history}")
     yield response, updated_history  # Final yield with updated history
 
-async def submit_query(query: str, provider: str, history: List[Dict[str, str]]) -> Tuple[str, List[Dict[str, str]], List[Dict[str, str]]]:
+async def submit_query(query: str, provider: str, history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]], List[Dict[str, str]]], None]:
     if not query.strip():
-
+        yield "", history, history
+        return
 
     response = ""
     async for response_chunk, updated_history in query_model(query, provider, history):
         response += response_chunk
         yield "", updated_history, updated_history  # Yield intermediate updates for streaming
-
+    yield "", updated_history, updated_history  # Final yield with cleared query
 
 # Gradio interface
 def clear_history():
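The rewritten submit_query is an async generator, which Gradio can consume directly for streaming: each yield updates the components bound as outputs of the event. Below is a minimal sketch of that wiring; the echo_model stand-in, the pair-style chatbot history, and the Blocks layout are assumptions for illustration (app.py stores history as dicts with "user"/"bot" keys and takes a provider argument).

import asyncio
from typing import AsyncGenerator, List, Tuple

import gradio as gr

async def echo_model(query: str) -> AsyncGenerator[str, None]:
    # Stand-in for query_model(): emits the reply a few characters at a time.
    reply = f"You said: {query}"
    for i in range(4, len(reply) + 4, 4):
        await asyncio.sleep(0.05)
        yield reply[:i]

async def submit_query(
    query: str, history: List[Tuple[str, str]]
) -> AsyncGenerator[Tuple[str, List[Tuple[str, str]], List[Tuple[str, str]]], None]:
    if not query.strip():
        yield "", history, history  # nothing to do; leave the UI unchanged
        return
    async for partial in echo_model(query):
        updated = history + [(query, partial)]
        yield "", updated, updated  # clear the box, stream chatbot and state

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    state = gr.State([])
    box = gr.Textbox(label="Query")
    box.submit(submit_query, inputs=[box, state], outputs=[box, chatbot, state])

if __name__ == "__main__":
    demo.launch()

Pair tuples are used here because the classic gr.Chatbot value is a list of (user, bot) pairs, which keeps the sketch renderable without the dict-to-pairs conversion the real app would need.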