# genesis/providers.py
import os

import requests

from genesis.api_clients.pubmed_api import search_pubmed
from genesis.api_clients.chembl_api import search_chembl
from genesis.api_clients.bioportal_api import search_bioportal
from genesis.api_clients.umls_api import search_umls
from genesis.api_clients.ncbi_api import search_ncbi

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
CLAUDE_API_KEY = os.getenv("CLAUDE_API_KEY")
HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")
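
# Illustrative addition (not in the original module): warn at import time when a
# provider key is missing, so downstream failures and fallbacks are easier to debug.
# Only the key names above come from this file; the check itself is an assumption.
for _name, _key in [
    ("OPENAI_API_KEY", OPENAI_API_KEY),
    ("GEMINI_API_KEY", GEMINI_API_KEY),
    ("DEEPSEEK_API_KEY", DEEPSEEK_API_KEY),
    ("CLAUDE_API_KEY", CLAUDE_API_KEY),
    ("HUGGINGFACE_API_KEY", HUGGINGFACE_API_KEY),
]:
    if not _key:
        print(f"[providers] Warning: {_name} is not set; calls that need it will fail.")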


# ---------- LITERATURE ----------
def run_pubmed_literature(query: str, max_results: int = 10):
    """Fetch literature from PubMed."""
    return search_pubmed(query, max_results=max_results)
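
# Example usage (illustrative; assumes the PubMed client returns a list of records):
#   articles = run_pubmed_literature("CRISPR off-target effects", max_results=5)
#   for article in articles:
#       print(article)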


# ---------- DEEPSEEK ----------
def run_deepseek_summary(prompt: str):
    """Summarize content using the DeepSeek chat completions API."""
    try:
        url = "https://api.deepseek.com/v1/chat/completions"
        headers = {"Authorization": f"Bearer {DEEPSEEK_API_KEY}"}
        data = {"model": "deepseek-chat", "messages": [{"role": "user", "content": prompt}]}
        r = requests.post(url, headers=headers, json=data, timeout=60)
        r.raise_for_status()
        return r.json()["choices"][0]["message"]["content"]
    except Exception as e:
        print(f"[DeepSeek] Failed: {e}")
        return None
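
# Example usage (illustrative; assumes DEEPSEEK_API_KEY is set in the environment):
#   summary = run_deepseek_summary("Summarize the role of BRCA1 in DNA repair.")
#   if summary:
#       print(summary)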


# ---------- GEMINI ----------
def run_gemini_polish(text: str):
    """Polish text using Gemini; return the original text unchanged on failure."""
    try:
        url = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key={GEMINI_API_KEY}"
        headers = {"Content-Type": "application/json"}
        payload = {"contents": [{"parts": [{"text": text}]}]}
        r = requests.post(url, headers=headers, json=payload, timeout=60)
        r.raise_for_status()
        return r.json()["candidates"][0]["content"]["parts"][0]["text"]
    except Exception as e:
        print(f"[Gemini] Failed: {e}")
        return text
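
# Example usage (illustrative; assumes GEMINI_API_KEY is set). Because failures fall
# back to returning the input, the result is always safe to use downstream:
#   polished = run_gemini_polish("teh results was significant")
#   print(polished)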


# ---------- GEMINI IMAGE (fallback to OpenAI, then Hugging Face) ----------
def run_image_generation(prompt: str):
    """Generate an image using Gemini, then OpenAI, then Hugging Face as fallbacks."""
    # Try Gemini first
    try:
        url = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-pro-vision:generateImage?key={GEMINI_API_KEY}"
        r = requests.post(url, json={"prompt": prompt}, timeout=120)
        r.raise_for_status()
        return r.json().get("image_url")
    except Exception as e:
        print(f"[Image Generation] Gemini failed: {e}")

    # Try OpenAI (uses the legacy pre-1.0 openai package interface)
    try:
        import openai
        openai.api_key = OPENAI_API_KEY
        result = openai.Image.create(model="gpt-image-1", prompt=prompt, size="1024x1024")
        return result["data"][0]["url"]
    except Exception as e:
        print(f"[Image Generation] OpenAI failed: {e}")

    # Try the Hugging Face inference API, which returns raw image bytes on success
    try:
        hf_url = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2"
        headers = {"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"}
        r = requests.post(hf_url, headers=headers, json={"inputs": prompt}, timeout=120)
        r.raise_for_status()
        return r.content
    except Exception as e:
        print(f"[Image Generation] Hugging Face failed: {e}")
        return None
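

# Minimal smoke test (illustrative sketch, not part of the original module). It runs
# the literature, summary, polish, and image providers in sequence; the query strings
# and output file name are placeholders.
if __name__ == "__main__":
    papers = run_pubmed_literature("tumor suppressor p53", max_results=3)
    print(f"PubMed returned {len(papers) if papers else 0} records")

    summary = run_deepseek_summary("Summarize: p53 is a tumor suppressor protein.")
    print("Summary:", summary)

    polished = run_gemini_polish(summary or "p53 is a tumor suppressor protein.")
    print("Polished:", polished)

    image = run_image_generation("Schematic diagram of the p53 signaling pathway")
    if isinstance(image, (bytes, bytearray)):
        # The Hugging Face fallback returns raw image bytes; Gemini/OpenAI return URLs.
        with open("p53_pathway.png", "wb") as f:
            f.write(image)
        print("Saved image to p53_pathway.png")
    else:
        print("Image result:", image)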