# File size: 4,593 Bytes
# b6475e6 75d536a b6475e6 75d536a 7d423f2 75d536a b6475e6 75d536a
# (line-number gutter from source extraction removed)
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.schema import AIMessage, HumanMessage
# (Optional) If you're using Anthropic
# from langchain.chat_models import ChatAnthropic
# Placeholder functions for other LLMs (DeepSeek, Gemini, Ollama, etc.)
# Implement or import your own logic here.
def get_deepseek_llm(api_key: str):
    """
    TODO: Implement your DeepSeek integration.

    Parameters
    ----------
    api_key : str
        DeepSeek API key.

    Raises
    ------
    NotImplementedError
        Always, until the integration is written.
    """
    # Fail loudly instead of silently returning None, so a mis-wired
    # caller cannot mistake this stub for a working client.
    raise NotImplementedError("DeepSeek integration is not implemented yet.")
def get_gemini_llm(api_key: str):
    """
    TODO: Implement your Gemini integration.

    Parameters
    ----------
    api_key : str
        Gemini API key.

    Raises
    ------
    NotImplementedError
        Always, until the integration is written.
    """
    # Fail loudly instead of silently returning None, so a mis-wired
    # caller cannot mistake this stub for a working client.
    raise NotImplementedError("Gemini integration is not implemented yet.")
def get_ollama_llm():
    """
    TODO: Implement your local Ollama integration.
    Possibly specify a port, endpoint, etc.

    Raises
    ------
    NotImplementedError
        Always, until the integration is written.
    """
    # Fail loudly instead of silently returning None, so a mis-wired
    # caller cannot mistake this stub for a working client.
    raise NotImplementedError("Ollama integration is not implemented yet.")
def get_claude_llm(api_key: str):
    """
    TODO: Implement Anthropic's Claude integration.

    Example (requires: pip install anthropic):
        from langchain.chat_models import ChatAnthropic
        return ChatAnthropic(anthropic_api_key=api_key)

    Parameters
    ----------
    api_key : str
        Anthropic API key.

    Raises
    ------
    NotImplementedError
        Always, until the integration is written.
    """
    # Fail loudly instead of silently returning None, so a mis-wired
    # caller cannot mistake this stub for a working client.
    raise NotImplementedError("Claude integration is not implemented yet.")
def load_llm(selected_model: str, api_key: str):
    """
    Return the LLM object for the user's selection.

    Parameters
    ----------
    selected_model : str
        One of "OpenAI", "Claude", "Gemini", "DeepSeek", "Ollama (local)".
    api_key : str
        Provider API key (ignored by providers that do not need one).

    Returns
    -------
    The configured chat model, or None when the selected provider is
    unimplemented or unknown (a Streamlit warning is shown for stubs).
    """
    if selected_model == "OpenAI":
        # Defaults to GPT-3.5; pass model_name="gpt-4" if you have access.
        return ChatOpenAI(temperature=0.7, openai_api_key=api_key)

    # Providers whose integrations are still TODO stubs: show the matching
    # warning and hand back None so the caller can degrade gracefully.
    pending = {
        "Claude": "Claude is not implemented. Implement the get_claude_llm function.",
        "Gemini": "Gemini is not implemented. Implement the get_gemini_llm function.",
        "DeepSeek": "DeepSeek is not implemented. Implement the get_deepseek_llm function.",
        "Ollama (local)": "Ollama is not implemented. Implement the get_ollama_llm function.",
    }
    message = pending.get(selected_model)
    if message is not None:
        st.warning(message)

    # Unknown selections also fall through to None.
    return None
def initialize_session_state():
    """
    Ensure the conversation-history list exists in Streamlit session state.
    """
    # setdefault is a no-op when "messages" is already present, so an
    # in-progress conversation is never clobbered on rerun.
    st.session_state.setdefault("messages", [])
def main():
    """
    Streamlit entry point: renders the sidebar configuration, the chat
    transcript, and a send handler that forwards the whole conversation
    to the selected LLM.
    """
    st.title("Multi-LLM Chat App")

    # --- Sidebar: provider choice, credentials, chat reset ---
    st.sidebar.header("Configuration")
    selected_model = st.sidebar.selectbox(
        "Select an LLM",
        ["OpenAI", "Claude", "Gemini", "DeepSeek", "Ollama (local)"]
    )
    api_key = st.sidebar.text_input("API Key (if needed)", type="password")
    st.sidebar.write("---")
    if st.sidebar.button("Clear Chat"):
        st.session_state["messages"] = []

    # Make sure the history container exists before anything reads it.
    initialize_session_state()

    # Resolve the chosen provider (may be None for unimplemented stubs).
    llm = load_llm(selected_model, api_key)

    # --- Render the transcript accumulated so far ---
    for message in st.session_state["messages"]:
        speaker = "You" if message["role"] == "user" else "LLM"
        st.markdown(f"**{speaker}:** {message['content']}")

    # --- Input box and send handler ---
    user_input = st.text_input("Type your message here...", "")

    if st.button("Send"):
        if not user_input.strip():
            st.warning("Please enter a message before sending.")
        else:
            # Record the user's turn first so it is part of the prompt.
            st.session_state["messages"].append(
                {"role": "user", "content": user_input}
            )
            if llm is None:
                st.error("LLM is not configured or implemented for this choice.")
            else:
                # Convert the stored dicts into LangChain message objects.
                lc_messages = [
                    HumanMessage(content=m["content"])
                    if m["role"] == "user"
                    else AIMessage(content=m["content"])
                    for m in st.session_state["messages"]
                ]
                # Call the LLM with the full conversation.
                response = llm(lc_messages)
                st.session_state["messages"].append(
                    {"role": "assistant", "content": response.content}
                )
# End
# Run the app only when this file is executed directly (not on import).
if __name__ == "__main__":
    main()
# (trailing extraction artifact removed)