Ganesh Chintalapati committed
Commit 7521d2f · 1 Parent(s): 90da489

Fix SyntaxError in ask_openai function

Files changed (1)
  1. app.py +34 -70
app.py CHANGED
@@ -3,7 +3,6 @@ import logging
 import httpx
 from dotenv import load_dotenv
 import gradio as gr
-from typing import AsyncGenerator, List, Dict
 
 # Configure logging
 logging.basicConfig(level=logging.INFO)
@@ -16,20 +15,11 @@ logger.info(f"OPENAI_API_KEY present: {'OPENAI_API_KEY' in os.environ}")
 logger.info(f"ANTHROPIC_API_KEY present: {'ANTHROPIC_API_KEY' in os.environ}")
 logger.info(f"GEMINI_API_KEY present: {'GEMINI_API_KEY' in os.environ}")
 
-async def ask_openai(query: str, history: List[Dict[str, str]]) -> AsyncGenerator[str, None]:
+async def ask_openai(query: str):
     openai_api_key = os.getenv("OPENAI_API_KEY")
     if not openai_api_key:
         logger.error("OpenAI API key not provided")
-        yield "Error: OpenAI API key not provided."
-        return
-
-    # Build message history
-    messages = []
-    for msg in history:
-        messages.append({"role": "user", "content": msg["user"]})
-        if msg["bot"]:
-            messages.append({"role": "assistant", "content": msg["bot"]})
-    messages.append({"role": "user", "content": query})
+        return "Error: OpenAI API key not provided."
 
     headers = {
         "Authorization": f"Bearer {openai_api_key}",
@@ -38,54 +28,30 @@ async def ask_openai(query: str, history: List[Dict[str, str]]) -> AsyncGenerator[str, None]:
 
     payload = {
         "model": "gpt-3.5-turbo",
-        "messages": messages,
-        "stream": True
+        "messages": [{"role": "user", "content": query}]
     }
 
     try:
         async with httpx.AsyncClient() as client:
-            async with client.stream("POST", "https://api.openai.com/v1/chat/completions", headers=headers, json=payload) as response:
-                response.raise_for_status()
-                async for chunk in response.aiter_text():
-                    if chunk:
-                        # Parse the streaming chunk (JSON lines)
-                        lines = chunk.splitlines()
-                        for line in lines:
-                            if line.startswith("data: "):
-                                data = line[6:]  # Remove "data: " prefix
-                                if data == "[DONE]":
-                                    break
-                                try:
-                                    json_data = eval(data)  # Safely parse JSON
-                                    if "choices" inㅇ json_data and json_data["choices"]:
-                                        delta = json_data["choices"][0].get("delta", {})
-                                        if "content" in delta:
-                                            yield delta["content"]
-                                except Exception as e:
-                                    logger.error(f"Error parsing OpenAI stream chunk: {str(e)}")
-                                    yield f"Error parsing stream: {str(e)}"
+            response = await client.post("https://api.openai.com/v1/chat/completions", headers=headers, json=payload)
+
+            response.raise_for_status()
+            answer = response.json()['choices'][0]['message']['content']
+            return answer
 
     except httpx.HTTPStatusError as e:
         logger.error(f"OpenAI HTTP Status Error: {e.response.status_code}, {e.response.text}")
-        yield f"Error: OpenAI HTTP Status Error: {e.response.status_code}, {e.response.text}"
+        return f"Error: OpenAI HTTP Status Error: {e.response.status_code}, {e.response.text}"
     except Exception as e:
         logger.error(f"OpenAI Error: {str(e)}")
-        yield f"Error: OpenAI Error: {str(e)}"
+        return f"Error: OpenAI Error: {str(e)}"
 
-async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> str:
+async def ask_anthropic(query: str):
     anthropic_api_key = os.getenv("ANTHROPIC_API_KEY")
     if not anthropic_api_key:
         logger.error("Anthropic API key not provided")
         return "Error: Anthropic API key not provided."
 
-    # Build message history
-    messages = []
-    for msg in history:
-        messages.append({"role": "user", "content": msg["user"]})
-        if msg["bot"]:
-            messages.append({"role": "assistant", "content": msg["bot"]})
-    messages.append({"role": "user", "content": query})
-
     headers = {
         "x-api-key": anthropic_api_key,
         "anthropic-version": "2023-06-01",
@@ -95,7 +61,7 @@ async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> str:
     payload = {
         "model": "claude-3-5-sonnet-20241022",
         "max_tokens": 1024,
-        "messages": messages
+        "messages": [{"role": "user", "content": query}]
     }
 
     try:
@@ -105,7 +71,8 @@ async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> str:
 
             response.raise_for_status()
             logger.info(f"Anthropic response: {response.json()}")
-            return response.json()['content'][0]['text']
+            answer = response.json()['content'][0]['text']
+            return answer
 
     except httpx.HTTPStatusError as e:
         logger.error(f"Anthropic HTTP Status Error: {e.response.status_code}, {e.response.text}")
@@ -114,24 +81,18 @@ async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> str:
         logger.error(f"Anthropic Error: {str(e)}")
         return f"Error: Anthropic Error: {str(e)}"
 
-async def ask_gemini(query: str, history: List[Dict[str, str]]) -> str:
+async def ask_gemini(query: str):
     gemini_api_key = os.getenv("GEMINI_API_KEY")
     if not gemini_api_key:
         logger.error("Gemini API key not provided")
         return "Error: Gemini API key not provided."
 
-    # Gemini doesn't natively support chat history in the same way, so we concatenate history as text
-    history_text = ""
-    for msg in history:
-        history_text += f"User: {msg['user']}\nAssistant: {msg['bot']}\n" if msg["bot"] else f"User: {msg['user']}\n"
-    full_query = history_text + f"User: {query}\n"
-
     headers = {
         "Content-Type": "application/json"
     }
 
     payload = {
-        "contents": [{"parts": [{"text": full_query}]}]
+        "contents": [{"parts": [{"text": query}]}]
    }
 
     try:
@@ -143,7 +104,8 @@ async def ask_gemini(query: str, history: List[Dict[str, str]]) -> str:
             )
 
             response.raise_for_status()
-            return response.json()['candidates'][0]['content']['parts'][0]['text']
+            answer = response.json()['candidates'][0]['content']['parts'][0]['text']
+            return answer
 
     except httpx.HTTPStatusError as e:
         logger.error(f"Gemini HTTP Status Error: {e.response.status_code}, {e.response.text}")
@@ -152,29 +114,31 @@ async def ask_gemini(query: str, history: List[Dict[str, str]]) -> str:
         logger.error(f"Gemini Error: {str(e)}")
         return f"Error: Gemini Error: {str(e)}"
 
-async def query_model(query: str, provider: str, history: List[Dict[str, str]]):
+async def query_model(query: str, provider: str):
     provider = provider.lower()
     if provider == "openai":
-        async for chunk in ask_openai(query, history):
-            yield chunk
+        return await ask_openai(query)
     elif provider == "anthropic":
-        yield await ask_anthropic(query, history)
+        return await ask_anthropic(query)
     elif provider == "gemini":
-        yield await ask_gemini(query, history)
+        return await ask_gemini(query)
     else:
-        yield f"Error: Unknown provider: {provider}"
+        return f"Error: Unknown provider: {provider}"
 
 # Gradio interface
-with gr.Blocks(theme=gr.themes.Soft()) as demo:
-    gr.Markdown("# Multi-Model Chat")
-    gr.Markdown("Chat with OpenAI, Anthropic, or Gemini. Select a provider and start typing!")
+with gr.Blocks() as demo:
+    gr.Markdown("# Multi-Model Selector")
+    gr.Markdown("Select a provider and enter a query to get a response from the chosen AI model.")
+
+    provider = gr.Dropdown(choices=["OpenAI", "Anthropic", "Gemini"], label="Select Provider")
+    query = gr.Textbox(label="Enter your query", placeholder="e.g., What is the capital of the United States?")
+    submit_button = gr.Button("Submit")
+    output = gr.Textbox(label="Response", interactive=False)
 
-    provider = gr.Dropdown(choices=["OpenAI", "Anthropic", "Gemini"], label="Select Provider", value="OpenAI")
-    chatbot = gr.ChatInterface(
+    submit_button.click(
         fn=query_model,
-        additional_inputs=[provider],
-        title="",
-        description=""
+        inputs=[query, provider],
+        outputs=output
     )
 
 # Launch the Gradio app
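For a quick check outside the Gradio UI, the reworked query_model coroutine can be driven directly with asyncio. The sketch below is illustrative only and not part of the commit: it assumes app.py is importable without side effects (i.e. the demo.launch() call at the bottom of the file, not shown in this diff, is guarded by an if __name__ == "__main__" check) and that the API key for the chosen provider is set in the environment.

# Minimal sketch: call the new query_model(query, provider) coroutine directly.
# Assumes OPENAI_API_KEY (or the key for whichever provider you pick) is exported,
# and that importing app does not immediately start the Gradio server.
import asyncio

from app import query_model  # hypothetical import path; adjust to where app.py lives


async def main() -> None:
    # query_model now takes only the query text and a provider name;
    # the history parameter from the previous version has been dropped.
    answer = await query_model("What is the capital of the United States?", "OpenAI")
    print(answer)


if __name__ == "__main__":
    asyncio.run(main())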