Ganesh Chintalapati committed on
Commit
75e738f
·
1 Parent(s): da5b2ad

adding streaming fix error

Browse files
Files changed (1) hide show
  1. app.py +155 -5
app.py CHANGED
@@ -49,6 +49,7 @@ async def ask_openai(query: str, history: List[Dict[str, str]]) -> AsyncGenerato
49
  response.raise_for_status()
50
  async for chunk in response.aiter_text():
51
  if chunk:
 
52
  lines = chunk.splitlines()
53
  for line in lines:
54
  if line.startswith("data: "):
@@ -58,7 +59,7 @@ async def ask_openai(query: str, history: List[Dict[str, str]]) -> AsyncGenerato
58
  if not data.strip():
59
  continue
60
  try:
61
- json_data = json.loads(data)
62
  if "choices" in json_data and json_data["choices"]:
63
  delta = json_data["choices"][0].get("delta", {})
64
  if "content" in delta and delta["content"] is not None:
@@ -77,12 +78,161 @@ async def ask_openai(query: str, history: List[Dict[str, str]]) -> AsyncGenerato
77
  logger.error(f"OpenAI Error: {str(e)}")
78
  yield f"Error: OpenAI Error: {str(e)}"
79
 
80
- async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> AsyncGenerator[str, None]:
81
- # Previous implementation of ask_anthropic function (without streaming)
 
 
 
82
 
83
- # Other functions (ask_gemini, query_model, submit_query, clear_history) remain unchanged
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
84
 
85
- # Gradio interface (unchanged)
 
 
 
 
 
 
 
 
 
86
 
87
  # Launch the Gradio app
88
  demo.launch()
 
49
  response.raise_for_status()
50
  async for chunk in response.aiter_text():
51
  if chunk:
52
+ # Parse the streaming chunk (JSON lines)
53
  lines = chunk.splitlines()
54
  for line in lines:
55
  if line.startswith("data: "):
 
59
  if not data.strip():
60
  continue
61
  try:
62
+ json_data = json.loads(data) # Safely parse JSON
63
  if "choices" in json_data and json_data["choices"]:
64
  delta = json_data["choices"][0].get("delta", {})
65
  if "content" in delta and delta["content"] is not None:
 
78
  logger.error(f"OpenAI Error: {str(e)}")
79
  yield f"Error: OpenAI Error: {str(e)}"
80
 
81
async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> str:
    """Send *query* (plus prior turns) to the Anthropic Messages API.

    Args:
        query: The new user message.
        history: Prior turns as dicts with "user" and "bot" keys.

    Returns:
        The assistant's reply text, or a human-readable "Error: ..." string.
    """
    anthropic_api_key = os.getenv("ANTHROPIC_API_KEY")
    if not anthropic_api_key:
        logger.error("Anthropic API key not provided")
        return "Error: Anthropic API key not provided."

    # Rebuild the alternating user/assistant message list from history.
    messages = []
    for msg in history:
        messages.append({"role": "user", "content": msg["user"]})
        if msg["bot"]:
            messages.append({"role": "assistant", "content": msg["bot"]})
    messages.append({"role": "user", "content": query})

    headers = {
        "x-api-key": anthropic_api_key,
        "anthropic-version": "2023-06-01",
        "Content-Type": "application/json"
    }

    payload = {
        "model": "claude-3-5-sonnet-20241022",
        "max_tokens": 1024,
        "messages": messages
    }

    try:
        # Fix: httpx's default 5 s timeout is too short for LLM generation;
        # allow up to 60 s for the full response.
        async with httpx.AsyncClient(timeout=60.0) as client:
            logger.info(f"Sending Anthropic request: {payload}")
            response = await client.post("https://api.anthropic.com/v1/messages", headers=headers, json=payload)

            response.raise_for_status()
            # Fix: parse the response body once instead of calling .json() twice.
            body = response.json()
            logger.info(f"Anthropic response: {body}")
            return body['content'][0]['text']

    except httpx.HTTPStatusError as e:
        logger.error(f"Anthropic HTTP Status Error: {e.response.status_code}, {e.response.text}")
        return f"Error: Anthropic HTTP Status Error: {e.response.status_code}, {e.response.text}"
    except Exception as e:
        logger.error(f"Anthropic Error: {str(e)}")
        return f"Error: Anthropic Error: {str(e)}"
122
+
123
async def ask_gemini(query: str, history: List[Dict[str, str]]) -> str:
    """Send *query* (plus prior turns) to the Google Gemini generateContent API.

    Args:
        query: The new user message.
        history: Prior turns as dicts with "user" and "bot" keys.

    Returns:
        The generated text, or a human-readable "Error: ..." string.
    """
    gemini_api_key = os.getenv("GEMINI_API_KEY")
    if not gemini_api_key:
        logger.error("Gemini API key not provided")
        return "Error: Gemini API key not provided."

    # Gemini doesn't natively support chat history in the same way, so we
    # concatenate history as text. Join once instead of quadratic `+=`.
    turns = []
    for msg in history:
        if msg["bot"]:
            turns.append(f"User: {msg['user']}\nAssistant: {msg['bot']}\n")
        else:
            turns.append(f"User: {msg['user']}\n")
    full_query = "".join(turns) + f"User: {query}\n"

    headers = {
        "Content-Type": "application/json"
    }

    payload = {
        "contents": [{"parts": [{"text": full_query}]}]
    }

    try:
        # Fix: httpx's default 5 s timeout is too short for LLM generation;
        # allow up to 60 s for the full response.
        async with httpx.AsyncClient(timeout=60.0) as client:
            response = await client.post(
                f"https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent?key={gemini_api_key}",
                headers=headers,
                json=payload
            )

            response.raise_for_status()
            return response.json()['candidates'][0]['content']['parts'][0]['text']

    except httpx.HTTPStatusError as e:
        logger.error(f"Gemini HTTP Status Error: {e.response.status_code}, {e.response.text}")
        return f"Error: Gemini HTTP Status Error: {e.response.status_code}, {e.response.text}"
    except Exception as e:
        logger.error(f"Gemini Error: {str(e)}")
        return f"Error: Gemini Error: {str(e)}"
160
+
161
async def query_model(query: str, provider: str, history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]]], None]:
    """Dispatch *query* to the selected provider and stream (chunk, history) pairs.

    Yields one (text_chunk, history) pair per piece of the response. The final
    yield carries an EMPTY chunk together with the updated history, so that a
    consumer which accumulates chunks does not receive the response twice.
    """
    provider = provider.lower()
    response = ""

    if provider == "openai":
        # OpenAI streams; forward each delta as it arrives.
        async for chunk in ask_openai(query, history):
            response += chunk
            yield chunk, history  # Yield partial response for streaming
    elif provider == "anthropic":
        response = await ask_anthropic(query, history)
        yield response, history
    elif provider == "gemini":
        response = await ask_gemini(query, history)
        yield response, history
    else:
        response = f"Error: Unknown provider: {provider}"
        yield response, history

    # Record the completed turn in the conversation history.
    updated_history = history + [{"user": query, "bot": response}]
    logger.info(f"Updated history: {updated_history}")
    # Fix: yield an empty chunk here — the full text was already yielded above,
    # so re-yielding it would double-count in accumulating consumers.
    yield "", updated_history
183
+
184
async def submit_query(query: str, provider: str, history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]], List[Dict[str, str]]], None]:
    """Gradio handler: stream (textbox_value, chatbot_messages, history) updates.

    Yields a cleared textbox, the transcript in Gradio "messages" format, and
    the running history state after each chunk from query_model.
    """
    if not query.strip():
        yield "", [{"role": "assistant", "content": "Please enter a query."}], history
        return

    def _to_messages(turns: List[Dict[str, str]]) -> List[Dict[str, str]]:
        # Convert {"user": ..., "bot": ...} turns into Gradio message dicts.
        msgs = []
        for msg in turns:
            msgs.append({"role": "user", "content": msg["user"]})
            if msg["bot"]:
                msgs.append({"role": "assistant", "content": msg["bot"]})
        return msgs

    response = ""
    updated_history = history
    async for response_chunk, updated_history in query_model(query, provider, history):
        response += response_chunk
        chatbot_messages = _to_messages(updated_history)
        # Fix: compare case-insensitively — the dropdown supplies "OpenAI",
        # so the original `provider == "openai"` branch never ran.
        if response and provider.lower() == "openai":
            if updated_history and updated_history[-1].get("user") == query and updated_history[-1].get("bot"):
                # Turn already recorded: overwrite its assistant message with
                # the accumulated stream.
                chatbot_messages[-1] = {"role": "assistant", "content": response}
            else:
                # Fix: while streaming, history does not yet contain this turn;
                # append it instead of clobbering (or IndexError-ing on) the
                # previous message.
                chatbot_messages.append({"role": "user", "content": query})
                chatbot_messages.append({"role": "assistant", "content": response})
        yield "", chatbot_messages, updated_history  # Yield intermediate updates
    # Final yield built solely from the recorded history.
    yield "", _to_messages(updated_history), updated_history
209
+
210
+ # Gradio interface
211
def clear_history():
    """Reset the chatbot transcript and the stored conversation state."""
    fresh_transcript: list = []
    fresh_state: list = []
    return fresh_transcript, fresh_state
213
+
214
# Build the Gradio UI: provider picker, transcript, query box, and buttons.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# Multi-Model Chat")
    gr.Markdown("Chat with OpenAI, Anthropic, or Gemini. Select a provider and start typing!")

    provider = gr.Dropdown(choices=["OpenAI", "Anthropic", "Gemini"], label="Select Provider", value="OpenAI")
    history_state = gr.State(value=[])
    chatbot = gr.Chatbot(label="Conversation", type="messages")
    query = gr.Textbox(label="Enter your query", placeholder="e.g., What is the capital of the United States?")
    submit_button = gr.Button("Submit")
    clear_button = gr.Button("Clear History")

    # Wire events: submit streams transcript updates; clear resets both the
    # visible transcript and the stored history state.
    submit_button.click(fn=submit_query, inputs=[query, provider, history_state], outputs=[query, chatbot, history_state])
    clear_button.click(fn=clear_history, inputs=[], outputs=[chatbot, history_state])

# Launch the Gradio app
demo.launch()