Update genesis/providers.py
genesis/providers.py  CHANGED  (+37 -29)
@@ -2,27 +2,19 @@
 import os
 import requests
 from typing import List, Dict
-from openai import OpenAI
 
-# Load API keys
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
-HF_TOKEN = os.getenv("HF_TOKEN")
+HF_TOKEN = os.getenv("HF_TOKEN")
 GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
 NCBI_API_KEY = os.getenv("NCBI_API_KEY")
 NCBI_EMAIL = os.getenv("NCBI_EMAIL")
-DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
-
-client = OpenAI(api_key=OPENAI_API_KEY)
 
 # -------- DeepSeek Summary --------
 def run_deepseek_summary(prompt: str) -> str:
     """Run a dense scientific summary using DeepSeek API."""
-    if not DEEPSEEK_API_KEY:
-        print("[DeepSeek] API key not set, skipping.")
-        return prompt
     try:
         url = "https://api.deepseek.com/v1/chat/completions"
-        headers = {"Authorization": f"Bearer {DEEPSEEK_API_KEY}", "Content-Type": "application/json"}
+        headers = {"Authorization": f"Bearer {HF_TOKEN}", "Content-Type": "application/json"}
         payload = {
             "model": "deepseek-science",
             "messages": [{"role": "user", "content": prompt}],
@@ -43,9 +35,7 @@ def run_gemini_polish(text: str) -> str:
         return text
     try:
         url = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key={GEMINI_API_KEY}"
-        payload = {
-            "contents": [{"parts": [{"text": f"Polish and clarify this research report for scientists:\n\n{text}"}]}]
-        }
+        payload = {"contents": [{"parts": [{"text": f"Polish and clarify this research report:\n\n{text}"}]}]}
         r = requests.post(url, json=payload, timeout=30)
         r.raise_for_status()
         data = r.json()
@@ -54,22 +44,40 @@ def run_gemini_polish(text: str) -> str:
         print(f"[Gemini] Failed: {e}")
         return text
 
-# -------- OpenAI Image Generation
-def
-    """Generate a research diagram
+# -------- OpenAI + HuggingFace Image Generation --------
+def run_gemini_image(query: str) -> str:
+    """Generate a research diagram — OpenAI first, Hugging Face fallback."""
+    # Try OpenAI first
+    if OPENAI_API_KEY:
+        try:
+            url = "https://api.openai.com/v1/images/generations"
+            headers = {"Authorization": f"Bearer {OPENAI_API_KEY}"}
+            payload = {"model": "gpt-image-1", "prompt": f"Scientific diagram about: {query}", "size": "1024x1024"}
+            r = requests.post(url, headers=headers, json=payload, timeout=60)
+            r.raise_for_status()
+            data = r.json()
+            return data["data"][0]["url"]
+        except Exception as e:
+            print(f"[OpenAI Image] Failed: {e}")
+
+    # Hugging Face Fallback
+    if HF_TOKEN:
+        try:
+            print("[Image] Falling back to Hugging Face Stable Diffusion...")
+            url = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-xl-base-1.0"
+            headers = {"Authorization": f"Bearer {HF_TOKEN}"}
+            payload = {"inputs": f"highly detailed scientific diagram about: {query}"}
+            r = requests.post(url, headers=headers, json=payload, timeout=60)
+            r.raise_for_status()
+            # Save to local file
+            img_path = f"/tmp/{query.replace(' ', '_')}.png"
+            with open(img_path, "wb") as f:
+                f.write(r.content)
+            return img_path
+        except Exception as e:
+            print(f"[HF Image] Failed: {e}")
+
+    return None
 
 # -------- PubMed Fallback --------
 def pubmed_fallback_search(query: str, api_key: str, email: str) -> List[Dict]:
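For reference, a minimal usage sketch of the provider helpers touched by this commit. It is not part of the change itself: it assumes the module is importable as genesis.providers and that OPENAI_API_KEY, HF_TOKEN, GEMINI_API_KEY, NCBI_API_KEY, and NCBI_EMAIL are set in the environment; the example topic and the calling order are illustrative only.

import os

from genesis.providers import (
    run_deepseek_summary,
    run_gemini_polish,
    run_gemini_image,
    pubmed_fallback_search,
)

query = "CRISPR off-target effects"  # hypothetical example topic

# Dense summary via the DeepSeek endpoint (authenticated with HF_TOKEN after this change).
summary = run_deepseek_summary(f"Summarize recent findings on {query}.")

# Polish the summary with Gemini; the helper returns its input unchanged on failure.
report = run_gemini_polish(summary)

# Diagram generation: OpenAI images API first, Hugging Face Stable Diffusion fallback.
# Returns an image URL, a local /tmp path, or None if both providers fail.
image = run_gemini_image(query)

# Literature lookup through the PubMed fallback helper.
papers = pubmed_fallback_search(query, os.getenv("NCBI_API_KEY"), os.getenv("NCBI_EMAIL"))

print(report)
print(image)
print(f"{len(papers)} PubMed records")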