Nakhwa committed on
Commit
9081c4c
·
verified ·
1 Parent(s): a8cca58

Update test.py

Browse files
Files changed (1) hide show
  1. test.py +1 -4
test.py CHANGED
@@ -30,15 +30,12 @@ def predict_hoax(title, content):
30
  label = 'HOAX' if pred == 1 else 'NON-HOAX'
31
  return label
32
 
33
- device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
34
- model.to(device)
35
-
36
  @st.cache_resource
37
  # LIME prediction function
38
  def predict_proba_for_lime(texts):
39
  results = []
40
  for text in texts:
41
- inputs = tokenizer(text, return_tensors='pt', padding=True, truncation=True, max_length=256).to(device)
42
  with torch.no_grad():
43
  outputs = model(**inputs)
44
  probs = softmax(outputs.logits, dim=1).detach().cpu().numpy()
 
30
  label = 'HOAX' if pred == 1 else 'NON-HOAX'
31
  return label
32
 
 
 
 
33
  @st.cache_resource
34
  # LIME prediction function
35
  def predict_proba_for_lime(texts):
36
  results = []
37
  for text in texts:
38
+ inputs = tokenizer(text, return_tensors='pt', padding=True, truncation=True, max_length=256)
39
  with torch.no_grad():
40
  outputs = model(**inputs)
41
  probs = softmax(outputs.logits, dim=1).detach().cpu().numpy()