mgbam committed on
Commit 97ec060 · verified · 1 Parent(s): fa94666

Update genesis/providers.py

Files changed (1)
  1. genesis/providers.py +64 -148
genesis/providers.py CHANGED
@@ -1,163 +1,79 @@
 
 import os
 import requests
-import logging
-import time
-from typing import Optional

-# Setup logging
-logging.basicConfig(level=logging.INFO, format="[%(levelname)s] %(message)s")
-
-# Load API keys from environment
-DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
-GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
-HF_API_KEY = os.getenv("HF_API_KEY")
-
-# Retry decorator
-def retry_request(func):
-    def wrapper(*args, **kwargs):
-        retries = 3
-        delay = 2
-        for attempt in range(1, retries + 1):
-            try:
-                return func(*args, **kwargs)
-            except Exception as e:
-                logging.warning(f"{func.__name__} failed (attempt {attempt}): {e}")
-                if attempt < retries:
-                    time.sleep(delay)
-        return None
-    return wrapper
-
-
-# === DeepSeek Summarization ===
-@retry_request
-def run_deepseek_summary(prompt: str) -> str:
-    if not DEEPSEEK_API_KEY:
-        logging.error("Missing DeepSeek API key.")
-        return "[DeepSeek] API key missing."
-
-    resp = requests.post(
-        "https://api.deepseek.com/v1/chat/completions",
-        headers={
-            "Authorization": f"Bearer {DEEPSEEK_API_KEY}",
-            "Content-Type": "application/json"
-        },
-        json={
-            "model": "deepseek-chat",
-            "messages": [{"role": "user", "content": prompt}],
-            "temperature": 0.3
-        },
-        timeout=60
-    )
-    resp.raise_for_status()
-    return resp.json()["choices"][0]["message"]["content"].strip()
-
-
-# === Gemini Polishing ===
-@retry_request
-def run_gemini_polish(text: str) -> str:
-    if not GEMINI_API_KEY:
-        logging.warning("No Gemini API key found, skipping polish.")
-        return text
-
-    resp = requests.post(
-        f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key={GEMINI_API_KEY}",
-        headers={"Content-Type": "application/json"},
-        json={
-            "contents": [{"parts": [{"text": f"Polish and refine this scientific text:\n\n{text}"}]}]
-        },
-        timeout=60
-    )
-    resp.raise_for_status()
-    candidates = resp.json().get("candidates", [])
-    if candidates and "content" in candidates[0]:
-        return candidates[0]["content"]["parts"][0]["text"].strip()
-    return text


-# === Gemini Image Generation ===
-@retry_request
-def run_gemini_image(prompt: str) -> Optional[str]:
-    if not GEMINI_API_KEY:
-        logging.warning("No Gemini API key found, skipping image generation.")
-        return None
-
     try:
-        resp = requests.post(
-            f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro-vision:generateImage?key={GEMINI_API_KEY}",
-            headers={"Content-Type": "application/json"},
-            json={"prompt": prompt},
-            timeout=60
-        )
-        resp.raise_for_status()
-        data = resp.json()
-        if "imageUrl" in data:
-            return data["imageUrl"]
     except Exception as e:
-        logging.error(f"[Gemini Image] Failed: {e}")
-        return None
-
-
-# === OpenAI Image Generation ===
-@retry_request
-def run_openai_image(prompt: str) -> Optional[str]:
-    if not OPENAI_API_KEY:
-        logging.warning("No OpenAI API key found, skipping OpenAI image.")
         return None
-
     try:
-        resp = requests.post(
-            "https://api.openai.com/v1/images/generations",
-            headers={
-                "Authorization": f"Bearer {OPENAI_API_KEY}",
-                "Content-Type": "application/json"
-            },
-            json={"model": "gpt-image-1", "prompt": prompt, "size": "1024x1024"},
-            timeout=60
-        )
-        resp.raise_for_status()
-        return resp.json()["data"][0]["url"]
     except Exception as e:
-        logging.error(f"[OpenAI Image] Failed: {e}")
-        return None
-

-# === Hugging Face Fallback Image ===
-@retry_request
-def run_hf_image(prompt: str) -> Optional[str]:
-    if not HF_API_KEY:
-        logging.warning("No Hugging Face API key found, skipping HF image.")
-        return None
-
     try:
-        resp = requests.post(
-            "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2",
-            headers={"Authorization": f"Bearer {HF_API_KEY}"},
-            json={"inputs": prompt},
-            timeout=120
-        )
-        resp.raise_for_status()
-        output_file = "hf_image.png"
-        with open(output_file, "wb") as f:
-            f.write(resp.content)
-        logging.info(f"Hugging Face image saved to {output_file}")
-        return output_file
     except Exception as e:
-        logging.error(f"[HF Image] Failed: {e}")
         return None
-
-
-# === Master Image Generator ===
-def generate_image_with_fallback(prompt: str) -> Optional[str]:
-    """
-    Try Gemini → OpenAI → Hugging Face in order.
-    """
-    img_url = run_gemini_image(prompt)
-    if img_url:
-        return img_url
-
-    img_url = run_openai_image(prompt)
-    if img_url:
-        return img_url
-
-    return run_hf_image(prompt)
+# genesis/providers.py
 import os
 import requests
+from genesis.api_clients.pubmed_api import search_pubmed
+from genesis.api_clients.chembl_api import search_chembl
+from genesis.api_clients.bioportal_api import search_bioportal
+from genesis.api_clients.umls_api import search_umls
+from genesis.api_clients.ncbi_api import search_ncbi

 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
+GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
+DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
+CLAUDE_API_KEY = os.getenv("CLAUDE_API_KEY")
+HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")

+# ---------- LITERATURE ----------
+def run_pubmed_literature(query: str, max_results: int = 10):
+    """Fetch literature from PubMed."""
+    return search_pubmed(query, max_results=max_results)

+# ---------- DEEPSEEK ----------
+def run_deepseek_summary(prompt: str):
+    """Summarize content using DeepSeek API."""
     try:
+        url = "https://api.deepseek.com/v1/chat/completions"
+        headers = {"Authorization": f"Bearer {DEEPSEEK_API_KEY}"}
+        data = {"model": "deepseek-chat", "messages": [{"role": "user", "content": prompt}]}
+        r = requests.post(url, headers=headers, json=data)
+        r.raise_for_status()
+        return r.json()["choices"][0]["message"]["content"]
     except Exception as e:
+        print(f"[DeepSeek] Failed: {e}")
         return None
+
+# ---------- GEMINI ----------
+def run_gemini_polish(text: str):
+    """Polish text using Gemini."""
     try:
+        url = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key={GEMINI_API_KEY}"
+        headers = {"Content-Type": "application/json"}
+        payload = {"contents": [{"parts": [{"text": text}]}]}
+        r = requests.post(url, headers=headers, json=payload)
+        r.raise_for_status()
+        return r.json()["candidates"][0]["content"]["parts"][0]["text"]
     except Exception as e:
+        print(f"[Gemini] Failed: {e}")
+        return text

+# ---------- GEMINI IMAGE (Fallback to OpenAI, then HuggingFace) ----------
+def run_image_generation(prompt: str):
+    """Generate image using Gemini > OpenAI > Hugging Face fallback."""
+    # Try Gemini first
     try:
+        url = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro-vision:generateImage?key={GEMINI_API_KEY}"
+        r = requests.post(url, json={"prompt": prompt})
+        r.raise_for_status()
+        return r.json().get("image_url")
+    except:
+        pass
+
+    # Try OpenAI
+    try:
+        import openai
+        openai.api_key = OPENAI_API_KEY
+        result = openai.Image.create(model="gpt-image-1", prompt=prompt, size="1024x1024")
+        return result["data"][0]["url"]
+    except:
+        pass
+
+    # Try Hugging Face
+    try:
+        hf_url = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2"
+        headers = {"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"}
+        r = requests.post(hf_url, headers=headers, json={"inputs": prompt})
+        r.raise_for_status()
+        return r.json()
     except Exception as e:
+        print(f"[Image Generation] Failed: {e}")
         return None
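
The new module drops the `logging` and `time` imports, so the retry/backoff behaviour of the removed `retry_request` decorator is gone and failures are only surfaced through `print`. If transient-failure handling is still wanted, a minimal sketch (not part of the commit; the decorator name and parameters below are illustrative) could reintroduce it around the new provider functions:

import logging
import time
from functools import wraps

logging.basicConfig(level=logging.INFO, format="[%(levelname)s] %(message)s")


def retry_request(retries: int = 3, delay: float = 2.0):
    """Retry a provider call a few times before giving up, as the removed decorator did."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(1, retries + 1):
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    logging.warning("%s failed (attempt %d): %s", func.__name__, attempt, e)
                    if attempt < retries:
                        time.sleep(delay)
            return None
        return wrapper
    return decorator


# Hypothetical usage: rebind a module-level helper with retries at import time.
# from genesis import providers
# providers.run_deepseek_summary = retry_request()(providers.run_deepseek_summary)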
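In the new `run_image_generation`, the Gemini and OpenAI branches swallow errors with bare `except:` clauses, and the Hugging Face branch returns `r.json()` even though the removed `run_hf_image` treated the stable-diffusion-2 response as raw image bytes and wrote them to a file. Below is a rough sketch of the OpenAI and Hugging Face legs of such a fallback, under those same assumptions: it reuses the endpoints already present in the diff, keeps named exceptions, and accepts either a URL or a base64 payload from the OpenAI images endpoint. The Gemini leg is omitted here because its `generateImage` endpoint is carried over from the commit as-is.

import base64
import os

import requests

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")


def generate_image_with_fallback(prompt: str):
    """Try the OpenAI images endpoint, then Hugging Face; return a URL or a local file path."""
    # OpenAI leg: same endpoint and payload shape as the removed run_openai_image.
    try:
        resp = requests.post(
            "https://api.openai.com/v1/images/generations",
            headers={"Authorization": f"Bearer {OPENAI_API_KEY}"},
            json={"model": "gpt-image-1", "prompt": prompt, "size": "1024x1024"},
            timeout=60,
        )
        resp.raise_for_status()
        item = resp.json()["data"][0]
        if "url" in item:  # some models return a hosted URL
            return item["url"]
        if "b64_json" in item:  # others return base64-encoded image bytes
            with open("openai_image.png", "wb") as f:
                f.write(base64.b64decode(item["b64_json"]))
            return "openai_image.png"
    except Exception as e:
        print(f"[OpenAI Image] Failed: {e}")

    # Hugging Face leg: the removed run_hf_image treated the response as raw PNG
    # bytes (assumption carried over here), so write resp.content to disk.
    try:
        resp = requests.post(
            "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2",
            headers={"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"},
            json={"inputs": prompt},
            timeout=120,
        )
        resp.raise_for_status()
        with open("hf_image.png", "wb") as f:
            f.write(resp.content)
        return "hf_image.png"
    except Exception as e:
        print(f"[HF Image] Failed: {e}")
    return None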
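For the new literature path, a hypothetical driver script might chain `run_pubmed_literature` into `run_deepseek_summary`; the record format returned by `search_pubmed` is defined in `genesis.api_clients.pubmed_api` and is not shown in this commit, so the sketch simply stringifies whatever comes back.

from genesis.providers import run_deepseek_summary, run_pubmed_literature


def summarize_topic(topic: str):
    # Fetch a few PubMed records and feed them to the DeepSeek summarizer.
    papers = run_pubmed_literature(topic, max_results=5) or []
    corpus = "\n\n".join(str(p) for p in papers)
    prompt = f"Summarize the key findings on '{topic}':\n\n{corpus}"
    return run_deepseek_summary(prompt)


if __name__ == "__main__":
    print(summarize_topic("CRISPR off-target effects"))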