LahiruD95 committed on
Commit 4e247c2 · 1 Parent(s): 859566c

Changed requirement text

Files changed (1):
  app/routes.py  +2 -8
app/routes.py CHANGED

@@ -8,7 +8,7 @@ from huggingface_hub import InferenceApi
 from PIL import Image
 
 from app.config import Config
-from app.models import audio_model, sentiment_pipeline, emotion_pipeline
+from app.models import audio_model, sentiment_pipeline, emotion_pipeline, client
 from app.services import extract_tasks
 from app.utils import generate_tags, error_response
 
@@ -19,12 +19,6 @@ model_dir = os.getenv('EASYOCR_MODEL_STORAGE', None)
 
 # ── OCR via HF Inference API ─────────────────────────────────────────────────
 # We're using Microsoft's TrOCR for printed text:
-HF_API_TOKEN = Config.FIREWORKS_API_KEY
-ocr_api = InferenceApi(
-    repo_id="microsoft/trocr-base-printed",
-    token=HF_API_TOKEN,
-    inference_type="text-generation"  # TroCR is a seq2seq model
-)
 
 EMOTION_SCORE_THRESHOLD = 0.15   # Adjust based on your testing
 MIN_SENTIMENT_CONFIDENCE = 0.4   # Below this becomes "neutral"
@@ -104,7 +98,7 @@ def analyze_image():
 
     try:
         # 1) Ask the vision-LLM to describe / extract text
-        completion = hf.chat.completions.create(
+        completion = client.chat.completions.create(
            model="google/gemma-3-27b-it",
            messages=[{
                "role": "user",