File size: 2,153 Bytes
3925910
d0cb899
 
 
 
 
 
 
 
 
 
 
3925910
d0cb899
872e232
 
d0cb899
3925910
d0cb899
 
 
 
 
3925910
d0cb899
 
3925910
d0cb899
872e232
d0cb899
 
3925910
d0cb899
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
"""
disgenet.py  ·  Disease-Gene associations helper
Docs: https://www.disgenet.com/downloads  (REST v1)

Change-log
──────────
β€’ 2025-06-25  – .org β†’ .COM redirect (301) broke calls.
               We now default to https://www.disgenet.com/api
               and still follow redirects if they add a CDN later.
• Graceful retry + in-process LRU result cache (no time-based expiry).
β€’ Empty list on any error so orchestrator never crashes.
"""

from __future__ import annotations
import os, asyncio, httpx
from functools import lru_cache
from typing import List, Dict

# ── Configuration, read once at import time ─────────────────────────
_TOKEN  = os.getenv("DISGENET_KEY")            # optional Bearer token; unset → anonymous calls
_BASE   = "https://www.disgenet.com/api"       # ← new canonical host (.org now 301s to .com)
_HDRS   = {"Accept": "application/json"}
if _TOKEN:
    # Only attach the Authorization header when a key is actually configured.
    _HDRS["Authorization"] = f"Bearer {_TOKEN}"

_TIMEOUT = 12                                  # seconds, per HTTP request
_RETRIES = 2                                   # total attempts (1 initial + 1 retry)

# ────────────────────────────────────────────────────────────────────
# NOTE: functools.lru_cache must NOT wrap an ``async def`` — it would
# cache the coroutine *object*, which can be awaited only once; every
# subsequent cache hit raises "cannot reuse already awaited coroutine".
# We therefore memoise the awaited *result* in a bounded module dict.
_CACHE: Dict[tuple, List[Dict]] = {}
_CACHE_MAX = 512                       # mirrors the old lru_cache(maxsize=512)


async def disease_to_genes(disease_name: str,
                           limit: int = 10) -> List[Dict]:
    """
    Return the top-*limit* gene associations for *disease_name*.

    Successful responses are memoised per ``(disease_name, limit)``;
    failures are NOT cached so a transient outage can recover.
    Returns an empty list on any failure or when nothing is found,
    so the orchestrator never has to handle exceptions.
    """
    key = (disease_name, limit)
    if key in _CACHE:
        return _CACHE[key]

    url = f"{_BASE}/gda/disease/{disease_name.lower()}"
    params = {"source": "ALL", "format": "json"}

    async def _one_call() -> List[Dict]:
        async with httpx.AsyncClient(timeout=_TIMEOUT,
                                     headers=_HDRS,
                                     follow_redirects=True) as cli:
            r = await cli.get(url, params=params)
            if r.status_code == 404:   # unknown disease → no associations
                return []
            r.raise_for_status()
            return r.json()[:limit]

    for attempt in range(_RETRIES):
        try:
            result = await _one_call()
            if len(_CACHE) >= _CACHE_MAX:
                _CACHE.clear()         # crude bound; prevents unbounded growth
            _CACHE[key] = result
            return result
        except (httpx.HTTPStatusError, httpx.ReadTimeout):
            if attempt + 1 < _RETRIES:
                await asyncio.sleep(0.7)   # brief pause only *between* attempts
        except Exception:
            break                      # unexpected error → give up immediately
    return []                          # graceful fallback (deliberately uncached)