import streamlit as st
from transformers import pipeline

# Optional: load a Hugging Face API token from st.secrets
# (only needed for gated or private models; TinyLlama_v1.1 is public)
# hf_api_token = st.secrets["HUGGINGFACE_API_TOKEN"]
model_name = "TinyLlama/TinyLlama_v1.1"
# Create a text generation pipeline
# generator = pipeline("text-generation", model=model_name, token=hf_api_token)
generator = pipeline("text-generation", model=model_name)
# Streamlit UI
st.title("TinyLlama_v1.1")
# st.write(hf_api_token)  # debug only; avoid displaying secrets in the UI
# Input prompt
prompt = st.text_input("Enter your prompt:", value="Explain the significance of the theory of relativity.")
# Generate text on button click
if st.button("Generate Text"):
    # Generate text using the pipeline
    output = generator(prompt, max_length=100, num_return_sequences=1)
    # Display the generated text
    st.write(output[0]['generated_text'])
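
# Optional variant (a sketch, not part of the original app): the same pipeline call
# with explicit sampling settings. max_new_tokens, do_sample, temperature and top_p
# are standard generate() kwargs accepted by the text-generation pipeline; the values
# below are illustrative assumptions, not tuned defaults.
#
# if st.button("Generate Text (sampled)"):
#     output = generator(
#         prompt,
#         max_new_tokens=100,      # cap on newly generated tokens (excludes the prompt)
#         do_sample=True,          # sample instead of greedy decoding
#         temperature=0.7,         # assumed value; lower = more deterministic
#         top_p=0.9,               # nucleus sampling cutoff (assumed value)
#         num_return_sequences=1,
#     )
#     st.write(output[0]['generated_text'])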