mgbam committed on
Commit ea97c29 · verified · 1 Parent(s): a8e3a46

Update app/sentiment.py

Files changed (1)
  1. app/sentiment.py +20 -17
app/sentiment.py CHANGED
@@ -1,47 +1,50 @@
 """
-Sentiment analysis module using Hugging Face Transformers with cache redirection for HF Spaces.
+Safe lazy-loading sentiment pipeline that works in Hugging Face Spaces (no /.cache error).
 """
 
 import os
-from transformers import pipeline
-from functools import lru_cache
 import hashlib
 import logging
+from functools import lru_cache
 
-# 🔧 Redirect HF cache to writable /tmp directory (important for Hugging Face Spaces)
+# Redirect the HF model cache to a writable directory
 os.environ["TRANSFORMERS_CACHE"] = "/tmp/huggingface"
 os.makedirs("/tmp/huggingface", exist_ok=True)
 
-# 🧠 Load sentiment model
-_sentiment = pipeline(
-    "sentiment-analysis",
-    model="distilbert-base-uncased-finetuned-sst-2-english"
-)
+from transformers import pipeline
 
 class SentimentCache:
-    """Handles in-memory caching and streaming of sentiment results."""
     latest_id: int = 0
     latest_result: dict = {}
+    _pipeline = None  # Lazy init
+
+    @classmethod
+    def _get_pipeline(cls):
+        if cls._pipeline is None:
+            logging.info("🔄 Loading sentiment model…")
+            cls._pipeline = pipeline(
+                "sentiment-analysis",
+                model="distilbert-base-uncased-finetuned-sst-2-english"
+            )
+        return cls._pipeline
 
     @classmethod
     def _hash(cls, text: str) -> str:
-        """Hash input text to use as a cache key."""
        return hashlib.sha256(text.encode()).hexdigest()
 
     @classmethod
     @lru_cache(maxsize=128)
     def _analyze(cls, text: str):
-        """Run inference on text, cached for performance."""
-        return _sentiment(text)[0]
+        pipe = cls._get_pipeline()
+        return pipe(text)[0]
 
     @classmethod
     def compute(cls, text: str):
-        """Trigger inference and update latest result."""
-        result = cls._analyze(text)
+        res = cls._analyze(text)
         cls.latest_id += 1
         cls.latest_result = {
             "text": text,
-            "label": result["label"],
-            "score": round(result["score"], 4)
+            "label": res["label"],
+            "score": round(res["score"], 4)
         }
         logging.info("✅ Sentiment computed: %s", cls.latest_result)
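For reference, a minimal usage sketch of the updated module (not part of this commit; it assumes the file is importable as app.sentiment and that the transformers dependency and a model download are available in the Space):

# Hypothetical caller, for illustration only.
# The first compute() call triggers the lazy model load in _get_pipeline();
# repeated calls with identical text are served from the lru_cache.
from app.sentiment import SentimentCache

SentimentCache.compute("The deployment finally works!")
print(SentimentCache.latest_id)      # incremented once per compute() call
print(SentimentCache.latest_result)  # e.g. {"text": ..., "label": ..., "score": ...}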