import streamlit as st
from transformers import pipeline

# Load a Hugging Face chat model (cached so it is only loaded once per session)
@st.cache_resource
def load_chatbot():
    return pipeline(
        "text-generation",
        model="mistralai/Mistral-7B-Instruct-v0.1",
        max_new_tokens=200,
    )

chatbot = load_chatbot()

# Streamlit UI
st.title("💬 Chat with Mistral (Open Source ChatGPT)")
st.markdown("Ask me anything!")

# Keep the conversation across Streamlit reruns in session state
if "history" not in st.session_state:
    st.session_state.history = []

user_input = st.text_input("Your message", "")

if user_input:
    st.session_state.history.append({"role": "user", "content": user_input})

    # Construct the prompt from the full conversation so far
    full_prompt = "\n".join(
        [f"{m['role'].capitalize()}: {m['content']}" for m in st.session_state.history]
    ) + "\nAssistant:"

    response = chatbot(full_prompt)[0]["generated_text"]

    # The pipeline returns the prompt plus the completion; keep only the new assistant reply
    reply = response[len(full_prompt):].strip()
    st.session_state.history.append({"role": "assistant", "content": reply})

# Display chat history
for message in st.session_state.history:
    st.markdown(f"**{message['role'].capitalize()}:** {message['content']}")
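
# --- Illustration only (not part of the app above) ---
# A minimal standalone sketch of the plain "Role: text" prompt the app builds
# from st.session_state.history, using a short hypothetical conversation.
if __name__ == "__main__" and False:  # set to True to run the sketch by itself
    history = [
        {"role": "user", "content": "Hello"},
        {"role": "assistant", "content": "Hi! How can I help?"},
        {"role": "user", "content": "Tell me a joke"},
    ]
    full_prompt = "\n".join(
        [f"{m['role'].capitalize()}: {m['content']}" for m in history]
    ) + "\nAssistant:"
    print(full_prompt)
    # Expected output:
    # User: Hello
    # Assistant: Hi! How can I help?
    # User: Tell me a joke
    # Assistant: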