Rerandaka committed
Commit 1c8f4b1 · verified
1 Parent(s): 76eebae

update app.py 3

Files changed (1)
  1. app.py +10 -5
app.py CHANGED
@@ -1,4 +1,9 @@
-d = "Rerandaka/Cild_safety_bigbird"
+import gradio as gr
+import torch
+from transformers import AutoTokenizer, AutoModelForSequenceClassification  # ✅ required
+
+# Load model
+model_id = "Rerandaka/Cild_safety_bigbird"
 tokenizer = AutoTokenizer.from_pretrained(model_id, use_fast=False)
 model = AutoModelForSequenceClassification.from_pretrained(model_id)
 
@@ -10,13 +15,13 @@ def classify(text):
     predicted_class = torch.argmax(logits, dim=1).item()
     return str(predicted_class)
 
-# ✅ Create API-ready interface
+# API-ready Gradio Interface
 demo = gr.Interface(
     fn=classify,
     inputs=gr.Textbox(label="Enter text"),
     outputs=gr.Textbox(label="Prediction")
 )
 
-# ✅ Enable queue and API support
-demo.queue()  # <-- Enables /queue/join
-demo.launch(show_api=True)  # <-- Enables gradio_client + /predict
+# ✅ Enable API and queue
+demo.queue()
+demo.launch(show_api=True)
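
Note: with demo.queue() and demo.launch(show_api=True), the Space exposes the Interface under Gradio's default /predict endpoint, which gradio_client can call remotely. A minimal sketch of such a call, assuming a hypothetical Space id (the actual Space name is not shown in this commit):

from gradio_client import Client

# Placeholder Space id for illustration only; substitute the real Space hosting this app.py
client = Client("Rerandaka/child-safety-demo")

# gr.Interface registers the wrapped classify function under the default endpoint "/predict"
result = client.predict("some text to classify", api_name="/predict")
print(result)  # predicted class index returned as a string, e.g. "0"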