asmashayea committed
Commit ca5600b · 1 Parent(s): 678fe06
Files changed (1)
  1. inference.py +7 -4
inference.py CHANGED
@@ -80,9 +80,6 @@ def infer_araberta(text):
     clean_tokens = [t for t in tokens if t not in tokenizer.all_special_tokens]
     clean_labels = [l for t, l in zip(tokens, predicted_labels) if t not in tokenizer.all_special_tokens]
 
-    # Horizontal output
-    pairs = [f"{token}: {label}" for token, label in zip(clean_tokens, clean_labels)]
-    horizontal_output = " | ".join(pairs)
 
     # Group by aspect span
     aspects = []
@@ -115,13 +112,19 @@ def infer_araberta(text):
         "sentiment": current_sentiment
     })
 
+    token_predictions = [
+        {"token": token, "label": label}
+        for token, label in zip(clean_tokens, clean_labels)
+    ]
+
     return {
-        "token_predictions": horizontal_output,
+        "token_predictions": token_predictions,
        "aspects": aspects
     }
 
 
 
+
 def load_model(model_key):
     if model_key in cached_models:
         return cached_models[model_key]
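
For context on the change above: infer_araberta now returns token predictions as a list of dicts instead of a " | "-joined string, so callers no longer have to parse "token: label" pairs out of text. A minimal sketch of consuming the new shape follows; the sample tokens, labels, and the "aspect" key are illustrative assumptions, only "token_predictions" and "sentiment" are confirmed by the diff.

# Illustrative result in the shape now returned by infer_araberta.
# Sample tokens/labels and the "aspect" key are made up for demonstration.
result = {
    "token_predictions": [
        {"token": "الخدمة", "label": "B-ASPECT"},
        {"token": "ممتازة", "label": "O"},
    ],
    "aspects": [
        {"aspect": "الخدمة", "sentiment": "positive"},
    ],
}

# Each prediction is already structured, so no string parsing is needed.
for pred in result["token_predictions"]:
    print(f'{pred["token"]} -> {pred["label"]}')

for aspect in result["aspects"]:
    print(aspect["sentiment"])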