mgbam committed on
Commit 9932b48 · verified · 1 Parent(s): e69a3f3

Update genesis/providers.py

Files changed (1)
  1. genesis/providers.py +88 -40
genesis/providers.py CHANGED
@@ -1,65 +1,113 @@
- # genesis/providers.py
- """
- API Providers for GENESIS-AI
- Handles calls to DeepSeek, Gemini, OpenAI, and PubMed.
- """
-
  import os
  import requests
+ from typing import Optional

- # ENV
+ # Load API keys from environment
  DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
  GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
  OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
- NCBI_API_KEY = os.getenv("NCBI_API_KEY")
- NCBI_EMAIL = os.getenv("NCBI_EMAIL")
+ HF_API_KEY = os.getenv("HF_API_KEY")

- def run_deepseek_summary(prompt):
-     """Summarize using DeepSeek API."""
+ # === DeepSeek Summarization ===
+ def run_deepseek_summary(prompt: str) -> str:
+     if not DEEPSEEK_API_KEY:
+         return "[DeepSeek] API key missing."
      try:
-         r = requests.post(
+         resp = requests.post(
              "https://api.deepseek.com/v1/chat/completions",
-             headers={"Authorization": f"Bearer {DEEPSEEK_API_KEY}"},
-             json={"model": "deepseek-chat", "messages": [{"role": "user", "content": prompt}]}
+             headers={
+                 "Authorization": f"Bearer {DEEPSEEK_API_KEY}",
+                 "Content-Type": "application/json"
+             },
+             json={
+                 "model": "deepseek-chat",
+                 "messages": [{"role": "user", "content": prompt}],
+                 "temperature": 0.3
+             }
          )
-         r.raise_for_status()
-         return r.json()["choices"][0]["message"]["content"]
+         resp.raise_for_status()
+         return resp.json()["choices"][0]["message"]["content"].strip()
      except Exception as e:
-         return f"[DeepSeek Error] {e}"
+         return f"[DeepSeek] Failed: {str(e)}"
+

- def run_gemini_polish(text):
-     """Polish text with Gemini."""
+ # === Gemini Polishing ===
+ def run_gemini_polish(text: str) -> str:
+     if not GEMINI_API_KEY:
+         return text
      try:
-         r = requests.post(
+         resp = requests.post(
              f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key={GEMINI_API_KEY}",
-             json={"contents": [{"parts": [{"text": text}]}]}
+             headers={"Content-Type": "application/json"},
+             json={
+                 "contents": [{"parts": [{"text": f"Polish and refine the following scientific text:\n\n{text}"}]}]
+             }
+         )
+         resp.raise_for_status()
+         candidates = resp.json().get("candidates", [])
+         if candidates and "content" in candidates[0]:
+             return candidates[0]["content"]["parts"][0]["text"].strip()
+         return text
+     except Exception as e:
+         return f"[Gemini] Failed: {str(e)}"
+
+
+ # === Gemini Image Generation ===
+ def run_gemini_image(prompt: str) -> Optional[str]:
+     if not GEMINI_API_KEY:
+         return None
+     try:
+         resp = requests.post(
+             f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro-vision:generateImage?key={GEMINI_API_KEY}",
+             headers={"Content-Type": "application/json"},
+             json={"prompt": prompt}
          )
-         r.raise_for_status()
-         return r.json()["candidates"][0]["content"]["parts"][0]["text"]
+         resp.raise_for_status()
+         data = resp.json()
+         if "imageUrl" in data:
+             return data["imageUrl"]
+         return None
      except Exception as e:
-         return f"[Gemini Error] {e}"
+         print(f"[Gemini Image] Failed: {e}")
+         return None
+

- def run_openai_image(prompt):
-     """Generate image with OpenAI DALL·E or fallback."""
+ # === OpenAI Image Generation ===
+ def run_openai_image(prompt: str) -> Optional[str]:
+     if not OPENAI_API_KEY:
+         return None
      try:
-         r = requests.post(
+         resp = requests.post(
              "https://api.openai.com/v1/images/generations",
-             headers={"Authorization": f"Bearer {OPENAI_API_KEY}"},
+             headers={
+                 "Authorization": f"Bearer {OPENAI_API_KEY}",
+                 "Content-Type": "application/json"
+             },
              json={"model": "gpt-image-1", "prompt": prompt, "size": "1024x1024"}
          )
-         r.raise_for_status()
-         return r.json()["data"][0]["url"]
+         resp.raise_for_status()
+         return resp.json()["data"][0]["url"]
      except Exception as e:
-         return f"[OpenAI Image Error] {e}"
+         print(f"[OpenAI Image] Failed: {e}")
+         return None

- def pubmed_fallback_search(query, retmax=5):
-     """Simple PubMed search for citations."""
+
+ # === Hugging Face Fallback Image Generation ===
+ def run_hf_image(prompt: str) -> Optional[str]:
+     if not HF_API_KEY:
+         return None
      try:
-         url = f"https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi"
-         params = {"db": "pubmed", "term": query, "retmode": "json", "retmax": retmax, "api_key": NCBI_API_KEY}
-         r = requests.get(url, params=params)
-         r.raise_for_status()
-         ids = r.json()["esearchresult"]["idlist"]
-         return [{"pmid": pid, "url": f"https://pubmed.ncbi.nlm.nih.gov/{pid}/"} for pid in ids]
+         resp = requests.post(
+             "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2",
+             headers={"Authorization": f"Bearer {HF_API_KEY}"},
+             json={"inputs": prompt}
+         )
+         resp.raise_for_status()
+         # HF returns binary image; you need to save or serve it yourself
+         output_file = "hf_image.png"
+         with open(output_file, "wb") as f:
+             f.write(resp.content)
+         return output_file
      except Exception as e:
-         return [{"error": str(e)}]
+         print(f"[HF Image] Failed: {e}")
+         return None
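For context, a minimal sketch of how a caller might drive the updated module. It is not part of the commit: it assumes the repository root is on PYTHONPATH so the file imports as genesis.providers, that the relevant API keys are exported in the environment, and that image backends are tried in the order OpenAI, then the Hugging Face fallback, then Gemini; the generate_section helper and that ordering are illustrative assumptions, not code from the repository.

# usage_sketch.py -- illustrative caller, not part of this commit
from genesis.providers import (
    run_deepseek_summary,
    run_gemini_polish,
    run_openai_image,
    run_hf_image,
    run_gemini_image,
)


def generate_section(topic: str) -> dict:
    """Hypothetical helper: draft a summary, polish it, then try image backends in order."""
    # Text pipeline: DeepSeek drafts the summary, Gemini polishes it.
    # Both functions return an error string (or the original text) instead of raising.
    draft = run_deepseek_summary(f"Summarize the current evidence on {topic}.")
    polished = run_gemini_polish(draft)

    # Image pipeline: each backend returns None when its key is missing or the
    # request fails, so the `or` chain falls through to the next provider.
    image_prompt = f"Scientific illustration of {topic}"
    image = (
        run_openai_image(image_prompt)
        or run_hf_image(image_prompt)
        or run_gemini_image(image_prompt)
    )

    return {"text": polished, "image": image}


if __name__ == "__main__":
    result = generate_section("CRISPR-based gene therapy")
    print(result["text"])
    print("Image:", result["image"] or "unavailable")

One asymmetry worth noting when wiring this up: run_openai_image and run_gemini_image return a URL, while run_hf_image writes the bytes to disk and returns the local path "hf_image.png", so callers have to handle both forms.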