mgbam committed on
Commit
30c8028
·
verified ·
1 Parent(s): ae4946d

Update app/sentiment.py

Browse files
Files changed (1) hide show
  1. app/sentiment.py +67 -50
app/sentiment.py CHANGED
@@ -1,53 +1,70 @@
"""
Safe, lazy-loading sentiment pipeline for HF Spaces with proper cache redirection.
"""
import os
import hashlib
import logging
from functools import lru_cache

# ─── Ensure all HF caching uses a writable directory ─────────────────────────
# HF_HOME covers huggingface_hub; TRANSFORMERS_CACHE is still read by older
# transformers releases (deprecated in newer ones, where HF_HOME suffices).
# Use setdefault so an operator-provided cache location is respected instead
# of being silently clobbered at import time.
os.environ.setdefault("HF_HOME", "/tmp/huggingface")
os.environ.setdefault("TRANSFORMERS_CACHE", "/tmp/huggingface")
os.makedirs("/tmp/huggingface", exist_ok=True)

# Imported AFTER the env vars are set so transformers picks up the cache dir.
from transformers import pipeline
17
class SentimentCache:
    """In-memory store of the most recent sentiment-analysis result.

    The HF pipeline is created lazily on first use (keeping module import
    cheap) and repeated texts are served from an LRU memo, so `compute`
    only hits the model for texts it has not seen recently.
    """

    latest_id: int = 0        # monotonically increasing result counter
    latest_result: dict = {}  # most recent {"text", "label", "score"} payload
    _pipeline = None          # lazily-created transformers pipeline

    @classmethod
    def _get_pipeline(cls):
        """Return the shared pipeline, loading the model on first call."""
        if cls._pipeline is not None:
            return cls._pipeline
        logging.info("πŸ”„ Loading sentiment model…")
        cls._pipeline = pipeline(
            "sentiment-analysis",
            model="distilbert-base-uncased-finetuned-sst-2-english"
        )
        return cls._pipeline

    @classmethod
    def _hash(cls, text: str) -> str:
        """Return the SHA-256 hex digest of *text*."""
        return hashlib.sha256(text.encode()).hexdigest()

    @classmethod
    @lru_cache(maxsize=128)
    def _analyze(cls, text: str):
        """Run inference on *text*; memoized across repeated inputs."""
        return cls._get_pipeline()(text)[0]

    @classmethod
    def compute(cls, text: str):
        """Trigger inference and update latest result."""
        res = cls._analyze(text)
        cls.latest_id += 1
        cls.latest_result = {
            "text": text,
            "label": res.get("label"),
            "score": round(res.get("score", 0.0), 4)
        }
        logging.info("βœ… Sentiment computed: %s", cls.latest_result)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""
Background price cache with multi-API fallback and rate-limit handling
"""
import httpx
import logging

# Primary and secondary APIs for crypto prices
COINGECKO_URL = (
    "https://api.coingecko.com/api/v3/simple/price"
    "?ids=bitcoin,ethereum,dogecoin&vs_currencies=usd"
)
COINCAP_URL = (
    "https://api.coincap.io/v2/assets?ids=bitcoin,ethereum,dogecoin"
)

# Shared price cache
# "--" is the placeholder shown until the first successful fetch; values
# are replaced in place by fetch_prices().
CURRENT_PRICES = {"bitcoin": "--", "ethereum": "--", "dogecoin": "--"}

# How often to retry each API before falling back (seconds)
RETRY_DELAY = 5
def fetch_prices() -> None:
    """
    Fetch current crypto prices, trying CoinGecko first and falling back
    to CoinCap on rate limits (HTTP 429) or any other error.

    Updates the module-level CURRENT_PRICES dict in place; if every API
    fails, the previous values are kept and an error is logged.
    """
    import time  # local import keeps the module's top-level deps unchanged

    apis = [
        ("CoinGecko", COINGECKO_URL),
        ("CoinCap", COINCAP_URL),
    ]

    for attempt, (name, url) in enumerate(apis, start=1):
        try:
            resp = httpx.get(url, timeout=10)
            # Raise before parsing so both providers share error handling.
            resp.raise_for_status()
            data = resp.json()
            if name == "CoinGecko":
                # CoinGecko shape: {"bitcoin": {"usd": ...}, ...}
                prices = {
                    coin: data[coin]["usd"]
                    for coin in ("bitcoin", "ethereum", "dogecoin")
                }
            else:
                # CoinCap shape: {"data": [{"id": ..., "priceUsd": "..."}, ...]}
                prices = {
                    item["id"]: float(item["priceUsd"])
                    for item in data.get("data", [])
                }

            CURRENT_PRICES.update(prices)
            logging.info("βœ… [%s] prices updated: %s", name, prices)
            return

        except httpx.HTTPStatusError as e:
            status = e.response.status_code
            if status == 429:
                logging.warning("⚠️ [%s] rate limit (429). Retrying fallback.", name)
            else:
                logging.warning("⚠️ [%s] HTTP error %s: %s", name, status, e)
        except Exception as e:
            logging.warning("⚠️ [%s] fetch error: %s", name, e)

        # Back off before trying the next API — but not after the last
        # one, where sleeping would only delay the failure report.
        if attempt < len(apis):
            time.sleep(RETRY_DELAY)

    logging.error("❌ All price APIs failed. Keeping previous prices: %s", CURRENT_PRICES)