Commit · 5da516e
Parent(s): 5940d90
upd
src/streamlit_app.py CHANGED (+3 -3)

@@ -1,5 +1,5 @@
 import streamlit as st
-from transformers import AutoTokenizer,
+from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 import os
 
 # Disable Streamlit usage stats to avoid permission issues
@@ -16,8 +16,8 @@ def load_model():
     os.makedirs(cache_dir, exist_ok=True)
 
     tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-0.5B-Instruct", cache_dir=cache_dir)
-    model =
-    return pipeline("
+    model = AutoModelForCausalLM.from_pretrained("tianzhechu/BookQA-7B-Instruct", cache_dir=cache_dir)
+    return pipeline("text-generation", model=model, tokenizer=tokenizer)
 
 st.set_page_config(page_title="LLM Demo", layout="centered")
 st.title("π FLAN-T5 Small - HuggingFace Demo")
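For context, below is a minimal sketch of how the whole updated src/streamlit_app.py could fit together after this commit. Only the lines inside the two hunks come from the diff; everything else is an assumption: the @st.cache_resource decorator, the cache_dir value, the usage-stats environment variable, and the prompt/generate UI are hypothetical and may differ from the real file. The sketch keeps the commit's pairing of the Qwen/Qwen2.5-0.5B-Instruct tokenizer with the tianzhechu/BookQA-7B-Instruct model exactly as shown in the diff.

import os

# The diff's comment says usage stats are disabled to avoid permission issues;
# the exact mechanism is outside the shown hunks. Setting this env var before
# importing streamlit is one common approach (assumption).
os.environ["STREAMLIT_BROWSER_GATHER_USAGE_STATS"] = "false"

import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline


@st.cache_resource  # assumption: cache the loaded pipeline across Streamlit reruns
def load_model():
    cache_dir = "/tmp/hf_cache"  # hypothetical path; the real value is outside the diff hunks
    os.makedirs(cache_dir, exist_ok=True)

    # Tokenizer and model repos are taken verbatim from the commit
    tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-0.5B-Instruct", cache_dir=cache_dir)
    model = AutoModelForCausalLM.from_pretrained("tianzhechu/BookQA-7B-Instruct", cache_dir=cache_dir)
    return pipeline("text-generation", model=model, tokenizer=tokenizer)


st.set_page_config(page_title="LLM Demo", layout="centered")
st.title("FLAN-T5 Small - HuggingFace Demo")

generator = load_model()

# Hypothetical UI: the commit does not show how the pipeline is invoked
prompt = st.text_area("Prompt", "Explain what a Hugging Face pipeline does.")
if st.button("Generate"):
    # A text-generation pipeline returns a list of dicts with a "generated_text" key
    result = generator(prompt, max_new_tokens=128)[0]["generated_text"]
    st.write(result)

Caching the pipeline with st.cache_resource matters here because Streamlit re-executes the script on every interaction; without it, the model would be reloaded from disk on each button press.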