Fix tokenizer issue
Browse files
app.py
CHANGED
@@ -1,13 +1,14 @@
|
|
1 |
import streamlit as st
|
2 |
from newspaper import Article
|
3 |
-
from transformers import pipeline
|
|
|
4 |
|
5 |
# Load model from Hugging Face
|
6 |
@st.cache_resource
|
7 |
def load_summarizer():
|
8 |
model_name = "cahya/t5-base-indonesian-summarization-cased"
|
9 |
tokenizer = T5Tokenizer.from_pretrained(model_name)
|
10 |
-
model =
|
11 |
|
12 |
return pipeline("summarization", model=model, tokenizer=tokenizer)
|
13 |
|
|
|
1 |
import streamlit as st
|
2 |
from newspaper import Article
|
3 |
+
from transformers import pipeline
|
4 |
+
from transformers import T5Tokenizer, T5Model, T5ForConditionalGeneration
|
5 |
|
6 |
# Build the Hugging Face summarization pipeline (cached by Streamlit).
@st.cache_resource
def load_summarizer():
    """Create and cache a summarization pipeline for Indonesian text.

    Downloads the T5 model and tokenizer from the Hugging Face hub on
    first use; `st.cache_resource` keeps the loaded pipeline alive across
    Streamlit reruns so the model is only loaded once per session.

    Returns:
        A `transformers` summarization pipeline ready to summarize text.
    """
    model_name = "cahya/t5-base-indonesian-summarization-cased"
    # Model and tokenizer loads are independent; order does not matter.
    model = T5ForConditionalGeneration.from_pretrained(model_name)
    tokenizer = T5Tokenizer.from_pretrained(model_name)
    return pipeline("summarization", model=model, tokenizer=tokenizer)
14 |
|