Update genesis/providers.py
genesis/providers.py  +92 -42  CHANGED
Removed lines (old version, partial):

@@ -1,81 +1,110 @@
-        return f"[DeepSeek] Failed: {str(e)}"
-        return text
-        return f"[Gemini] Failed: {str(e)}"
-            json={"prompt": prompt}
-        return None

@@ -83,31 +112,52 @@ def run_openai_image(prompt: str) -> Optional[str]:
-            json={"model": "gpt-image-1", "prompt": prompt, "size": "1024x1024"}
-# === Hugging Face Fallback Image
-            json={"inputs": prompt}
-        # HF returns binary image; you need to save or serve it yourself
New version of genesis/providers.py:

 import os
 import requests
+import logging
+import time
 from typing import Optional

+# Setup logging
+logging.basicConfig(level=logging.INFO, format="[%(levelname)s] %(message)s")
+
 # Load API keys from environment
 DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
 GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
 HF_API_KEY = os.getenv("HF_API_KEY")

+# Retry decorator
+def retry_request(func):
+    def wrapper(*args, **kwargs):
+        retries = 3
+        delay = 2
+        for attempt in range(1, retries + 1):
+            try:
+                return func(*args, **kwargs)
+            except Exception as e:
+                logging.warning(f"{func.__name__} failed (attempt {attempt}): {e}")
+                if attempt < retries:
+                    time.sleep(delay)
+        return None
+    return wrapper
+
+
 # === DeepSeek Summarization ===
+@retry_request
 def run_deepseek_summary(prompt: str) -> str:
     if not DEEPSEEK_API_KEY:
+        logging.error("Missing DeepSeek API key.")
         return "[DeepSeek] API key missing."
+
+    resp = requests.post(
+        "https://api.deepseek.com/v1/chat/completions",
+        headers={
+            "Authorization": f"Bearer {DEEPSEEK_API_KEY}",
+            "Content-Type": "application/json"
+        },
+        json={
+            "model": "deepseek-chat",
+            "messages": [{"role": "user", "content": prompt}],
+            "temperature": 0.3
+        },
+        timeout=60
+    )
+    resp.raise_for_status()
+    return resp.json()["choices"][0]["message"]["content"].strip()


 # === Gemini Polishing ===
+@retry_request
 def run_gemini_polish(text: str) -> str:
     if not GEMINI_API_KEY:
+        logging.warning("No Gemini API key found, skipping polish.")
         return text
+
+    resp = requests.post(
+        f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key={GEMINI_API_KEY}",
+        headers={"Content-Type": "application/json"},
+        json={
+            "contents": [{"parts": [{"text": f"Polish and refine this scientific text:\n\n{text}"}]}]
+        },
+        timeout=60
+    )
+    resp.raise_for_status()
+    candidates = resp.json().get("candidates", [])
+    if candidates and "content" in candidates[0]:
+        return candidates[0]["content"]["parts"][0]["text"].strip()
+    return text


 # === Gemini Image Generation ===
+@retry_request
 def run_gemini_image(prompt: str) -> Optional[str]:
     if not GEMINI_API_KEY:
+        logging.warning("No Gemini API key found, skipping image generation.")
         return None
+
     try:
         resp = requests.post(
             f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro-vision:generateImage?key={GEMINI_API_KEY}",
             headers={"Content-Type": "application/json"},
+            json={"prompt": prompt},
+            timeout=60
         )
         resp.raise_for_status()
         data = resp.json()
         if "imageUrl" in data:
             return data["imageUrl"]
     except Exception as e:
+        logging.error(f"[Gemini Image] Failed: {e}")
+    return None


 # === OpenAI Image Generation ===
+@retry_request
 def run_openai_image(prompt: str) -> Optional[str]:
     if not OPENAI_API_KEY:
+        logging.warning("No OpenAI API key found, skipping OpenAI image.")
         return None
+
     try:
         resp = requests.post(
             "https://api.openai.com/v1/images/generations",
             headers={
                 "Authorization": f"Bearer {OPENAI_API_KEY}",
                 "Content-Type": "application/json"
             },
+            json={"model": "gpt-image-1", "prompt": prompt, "size": "1024x1024"},
+            timeout=60
         )
         resp.raise_for_status()
         return resp.json()["data"][0]["url"]
     except Exception as e:
+        logging.error(f"[OpenAI Image] Failed: {e}")
         return None


+# === Hugging Face Fallback Image ===
+@retry_request
 def run_hf_image(prompt: str) -> Optional[str]:
     if not HF_API_KEY:
+        logging.warning("No Hugging Face API key found, skipping HF image.")
         return None
+
     try:
         resp = requests.post(
             "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2",
             headers={"Authorization": f"Bearer {HF_API_KEY}"},
+            json={"inputs": prompt},
+            timeout=120
         )
         resp.raise_for_status()
         output_file = "hf_image.png"
         with open(output_file, "wb") as f:
             f.write(resp.content)
+        logging.info(f"Hugging Face image saved to {output_file}")
         return output_file
     except Exception as e:
+        logging.error(f"[HF Image] Failed: {e}")
         return None
+
+
+# === Master Image Generator ===
+def generate_image_with_fallback(prompt: str) -> Optional[str]:
+    """
+    Try Gemini → OpenAI → Hugging Face in order.
+    """
+    img_url = run_gemini_image(prompt)
+    if img_url:
+        return img_url
+
+    img_url = run_openai_image(prompt)
+    if img_url:
+        return img_url
+
+    return run_hf_image(prompt)
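Taken together, the commit adds logging and request timeouts, wraps each provider call in a shared retry_request decorator (three attempts, two-second delay, then None), fills in the DeepSeek and Gemini request bodies, and introduces generate_image_with_fallback, which tries Gemini, then OpenAI, then Hugging Face. As a rough usage sketch (not part of the commit), the functions could be wired together as below; it assumes genesis is importable as a package, the API keys are exported in the environment, and the prompt text is purely illustrative. Because retry_request swallows exceptions and returns None after the last attempt, even the str-typed helpers can hand back None, so callers should check the results.

# Usage sketch (illustrative, not part of the commit): wire the providers together.
# Assumes `genesis` is an importable package and the API keys are set in the environment.
from genesis.providers import (
    run_deepseek_summary,
    run_gemini_polish,
    generate_image_with_fallback,
)

if __name__ == "__main__":
    prompt = "Summarize the key findings of this abstract in three sentences."  # hypothetical prompt

    # retry_request returns None once its three attempts are exhausted,
    # so every result below must be checked before use.
    summary = run_deepseek_summary(prompt)
    if not summary:
        raise SystemExit("DeepSeek summarization failed after retries.")

    polished = run_gemini_polish(summary) or summary

    # Tries Gemini, then OpenAI (URL), then Hugging Face (local file path); may be None.
    image_ref = generate_image_with_fallback(f"Illustration for: {polished[:200]}")

    print(polished)
    print(image_ref or "No image could be generated.")

The `or summary` fallback mirrors run_gemini_polish's own behavior of returning the original text when no candidates come back, so a failed polish never loses the summary.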