# Hugging Face Spaces status text (was stray non-code residue): Spaces: Running
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory
import os

# Route OpenAI-compatible API calls through OpenRouter.
os.environ["OPENAI_API_KEY"] = st.secrets["OPENAI_API_KEY"]
os.environ['OPENAI_API_BASE'] = 'https://openrouter.ai/api/v1'

# LLM: free DeepSeek chat model served via OpenRouter.
llm = ChatOpenAI(
    temperature=0.7,
    model_name="deepseek/deepseek-chat-v3-0324:free",  # OpenRouter model path
)

# Conversation memory must live in session_state so it survives
# Streamlit's top-to-bottom rerun on every user interaction.
if "memory" not in st.session_state:
    st.session_state.memory = ConversationBufferMemory()

conversation = ConversationChain(
    llm=llm,
    memory=st.session_state.memory,
    verbose=False,
)

# --- Streamlit UI ---
st.set_page_config(page_title="LLM Chatbot", page_icon="🤖")  # icon was mojibake "π€"
st.title("Langchain Chatbot by Muhammad Izhan")

user_input = st.text_input("You:", key="input")
if user_input:
    response = conversation.predict(input=user_input)
    # NOTE: ConversationChain.predict() already saves both the user message
    # and the AI reply into memory (save_context); the previous explicit
    # add_user_message/add_ai_message calls duplicated every turn in the
    # history and in the prompt context, so they were removed.
    st.write(f"**Bot:** {response}")

# Optional transcript view, rebuilt from the chain's own memory.
if st.checkbox("Show Chat History"):
    for message in st.session_state.memory.chat_memory.messages:
        role = "You" if message.type == "human" else "Bot"
        st.markdown(f"**{role}:** {message.content}")