# Streamlit chat demo (Hugging Face Spaces) — Mistral-7B-Instruct via transformers.
import streamlit as st
from transformers import pipeline


# Cache the pipeline for the process lifetime: without @st.cache_resource,
# Streamlit would rebuild (and potentially re-download) the 7B model on
# every rerun, i.e. on every widget interaction.
@st.cache_resource
def load_chatbot():
    """Return a Hugging Face text-generation pipeline for Mistral-7B-Instruct.

    The pipeline is capped at 200 newly generated tokens per call.
    """
    return pipeline(
        "text-generation",
        model="mistralai/Mistral-7B-Instruct-v0.1",
        max_new_tokens=200,
    )
chatbot = load_chatbot()

# --- Streamlit UI ---
st.title("💬 Chat with Mistral (Open Source ChatGPT)")
st.markdown("Ask me anything!")

# Conversation history persists across Streamlit reruns via session_state.
if "history" not in st.session_state:
    st.session_state.history = []

user_input = st.text_input("Your message", "")

# Guard against duplicate processing: st.text_input keeps its value across
# reruns, so without this check the same message would be re-appended and
# re-answered on every interaction with any widget.
if user_input and st.session_state.get("last_input") != user_input:
    st.session_state.last_input = user_input
    st.session_state.history.append({"role": "user", "content": user_input})

    # Build a plain-text transcript prompt ("User: ...\nAssistant: ...")
    # ending with "Assistant:" to cue the model's reply.
    full_prompt = "\n".join(
        f"{m['role'].capitalize()}: {m['content']}" for m in st.session_state.history
    ) + "\nAssistant:"

    # The pipeline returns prompt + continuation; slice off the prompt so
    # only the newly generated assistant text is kept.
    response = chatbot(full_prompt)[0]["generated_text"]
    reply = response[len(full_prompt):].strip()
    st.session_state.history.append({"role": "assistant", "content": reply})

# Render the full conversation on every rerun.
for message in st.session_state.history:
    st.markdown(f"**{message['role'].capitalize()}:** {message['content']}")