Ganesh Chintalapati
committed
Commit · 2cee09f
1 Parent(s): 7a83934
OpenAI Anthropic Gemini works
app.py
CHANGED
@@ -5,6 +5,7 @@ import json
 from dotenv import load_dotenv
 import gradio as gr
 from typing import AsyncGenerator, List, Dict, Tuple
+import traceback
 
 # Configure logging
 logging.basicConfig(level=logging.INFO)
@@ -108,18 +109,31 @@ async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> str:
     }
 
     try:
-        async with httpx.AsyncClient() as client:
+        async with httpx.AsyncClient(timeout=30.0) as client:
             logger.info(f"Sending Anthropic request: {payload}")
             response = await client.post("https://api.anthropic.com/v1/messages", headers=headers, json=payload)
 
             response.raise_for_status()
-
-
+            response_json = response.json()
+            logger.info(f"Anthropic response: {response_json}")
+
+            # Validate response structure
+            if not isinstance(response_json, dict) or "content" not in response_json or not response_json["content"]:
+                logger.error(f"Invalid Anthropic response structure: {response_json}")
+                return f"Error: Invalid Anthropic response structure"
+
+            content = response_json["content"]
+            if not isinstance(content, list) or not content or "text" not in content[0]:
+                logger.error(f"Invalid Anthropic content format: {content}")
+                return f"Error: Invalid Anthropic content format"
+
+            return response_json["content"][0]["text"]
     except httpx.HTTPStatusError as e:
-
-
+        response_text = await e.response.aread()
+        logger.error(f"Anthropic HTTP Status Error: {e.response.status_code}, {response_text.decode('utf-8')}")
+        return f"Error: Anthropic HTTP Status Error: {e.response.status_code}, {response_text.decode('utf-8')}"
     except Exception as e:
-        logger.error(f"Anthropic Error: {str(e)}")
+        logger.error(f"Anthropic Error: {str(e)}\nStack trace: {traceback.format_exc()}")
         return f"Error: Anthropic Error: {str(e)}"
 
 async def ask_gemini(query: str, history: List[Dict[str, str]]) -> str:
@@ -183,7 +197,7 @@ async def query_model(query: str, providers: List[str], history: List[Dict[str,
     # Handle Anthropic (non-streaming)
     if "Anthropic" in providers:
         response = await ask_anthropic(query, history)
-        if response.strip():
+        if response.strip() and not response.startswith("Error:"):
             responses.append(f"[Anthropic]: {response}")
 
     # Handle Gemini (non-streaming)
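The hunk above only trusts response_json["content"][0]["text"] after two shape checks. Below is a hypothetical, standalone sketch of that same validation, exercised against hand-built dicts instead of a live API call, so it runs without an API key; extract_text and the sample payloads are illustrative assumptions, not part of app.py.

# Hypothetical helper mirroring the structure checks added to ask_anthropic in this commit.
def extract_text(response_json) -> str:
    # Reject anything that is not a dict with a non-empty "content" field.
    if not isinstance(response_json, dict) or "content" not in response_json or not response_json["content"]:
        return "Error: Invalid Anthropic response structure"
    content = response_json["content"]
    # Expect a list whose first block carries a "text" key.
    if not isinstance(content, list) or not content or "text" not in content[0]:
        return "Error: Invalid Anthropic content format"
    return content[0]["text"]

print(extract_text({"content": [{"type": "text", "text": "Hello"}]}))   # Hello
print(extract_text({"content": []}))                                    # structure error
print(extract_text({"content": [{"type": "tool_use"}]}))                # content format error

Because query_model now skips any response that starts with "Error:", these sentinel strings keep a failed Anthropic call out of the combined responses list rather than showing it to the user.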