Sebbe33 commited on
Commit
b6475e6
·
verified ·
1 Parent(s): ca6c3ce

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +141 -0
app.py ADDED
@@ -0,0 +1,141 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.schema import AIMessage, HumanMessage

# (Optional) If you're using Anthropic
# from langchain.chat_models import ChatAnthropic

# Placeholder functions for other LLMs (DeepSeek, Gemini, Ollama, etc.)
# Implement or import your own logic here.
def get_deepseek_llm(api_key: str):
    """Placeholder for a DeepSeek chat-model client.

    TODO: Implement your DeepSeek integration and return the client.

    Args:
        api_key: DeepSeek API key (unused until implemented).

    Returns:
        None until a real client is wired up; callers treat None
        as "this model is unavailable".
    """
    return None
def get_gemini_llm(api_key: str):
    """Placeholder for a Google Gemini chat-model client.

    TODO: Implement your Gemini integration and return the client.

    Args:
        api_key: Gemini API key (unused until implemented).

    Returns:
        None until a real client is wired up; callers treat None
        as "this model is unavailable".
    """
    return None
def get_ollama_llm():
    """Placeholder for a local Ollama client.

    TODO: Implement your local Ollama integration; you will likely
    need to specify a port, endpoint, model name, etc.

    Returns:
        None until a real client is wired up; callers treat None
        as "this model is unavailable".
    """
    return None
def get_claude_llm(api_key: str):
    """Placeholder for Anthropic's Claude chat model.

    Example wiring (requires ``pip install anthropic``)::

        from langchain.chat_models import ChatAnthropic
        return ChatAnthropic(anthropic_api_key=api_key)

    Args:
        api_key: Anthropic API key (unused until implemented).

    Returns:
        None until the integration above is enabled.
    """
    return None
def load_llm(selected_model: str, api_key: str):
    """Return the chat-model object matching the user's sidebar selection.

    Only the "OpenAI" option is currently wired up. Each other known
    choice shows a Streamlit warning and yields None; an unrecognized
    choice silently yields None.

    Args:
        selected_model: Label chosen in the sidebar selectbox.
        api_key: User-supplied API key (used by the OpenAI path only).

    Returns:
        A LangChain chat model, or None when the selection is not
        implemented or not recognized.
    """
    if selected_model == "OpenAI":
        # Defaults to GPT-3.5; pass model_name="gpt-4" if you have access.
        return ChatOpenAI(temperature=0.7, openai_api_key=api_key)

    # Recognized-but-unimplemented models: warn so the user knows why
    # the chat will refuse to send, then fall through to None.
    not_implemented = {
        "Claude": "Claude is not implemented. Implement the get_claude_llm function.",
        "Gemini": "Gemini is not implemented. Implement the get_gemini_llm function.",
        "DeepSeek": "DeepSeek is not implemented. Implement the get_deepseek_llm function.",
        "Ollama (local)": "Ollama is not implemented. Implement the get_ollama_llm function.",
    }
    message = not_implemented.get(selected_model)
    if message is not None:
        st.warning(message)
    return None
def initialize_session_state():
    """Ensure the conversation history list exists in session state.

    Creates ``st.session_state["messages"]`` as an empty list the first
    time; any existing history is left untouched.
    """
    # setdefault only assigns when the key is missing, matching the
    # original "if not present, create" check.
    st.session_state.setdefault("messages", [])
def main():
    """Render the chat UI and route user messages to the selected LLM.

    Runs top-to-bottom on every Streamlit rerun: read sidebar config,
    (re)load the LLM, render the stored conversation, then handle a
    new submission.
    """
    st.title("Multi-LLM Chat App")

    # Sidebar for model selection and API key
    st.sidebar.header("Configuration")
    selected_model = st.sidebar.selectbox(
        "Select an LLM",
        ["OpenAI", "Claude", "Gemini", "DeepSeek", "Ollama (local)"]
    )
    api_key = st.sidebar.text_input("API Key (if needed)", type="password")

    st.sidebar.write("---")
    # Direct assignment (not append), so this is safe even before
    # initialize_session_state() runs below.
    if st.sidebar.button("Clear Chat"):
        st.session_state["messages"] = []

    # Initialize conversation in session state
    initialize_session_state()

    # Load the chosen LLM (None for unimplemented/unknown choices)
    llm = load_llm(selected_model, api_key)

    # Display existing conversation
    # NOTE(review): this loop runs before the Send handler below, so
    # messages appended this run appear only on the next rerun —
    # standard Streamlit rerun behavior; confirm this is intended.
    for msg in st.session_state["messages"]:
        if msg["role"] == "user":
            st.markdown(f"**You:** {msg['content']}")
        else:
            st.markdown(f"**LLM:** {msg['content']}")

    # User input
    user_input = st.text_input("Type your message here...", "")

    # On submit
    if st.button("Send"):
        if user_input.strip() == "":
            st.warning("Please enter a message before sending.")
        else:
            # Add user message to conversation history
            st.session_state["messages"].append({"role": "user", "content": user_input})

            if llm is None:
                st.error("LLM is not configured or implemented for this choice.")
            else:
                # Prepare messages in a LangChain format: the full stored
                # history is replayed so the model sees the conversation.
                lc_messages = []
                for msg in st.session_state["messages"]:
                    if msg["role"] == "user":
                        lc_messages.append(HumanMessage(content=msg["content"]))
                    else:
                        lc_messages.append(AIMessage(content=msg["content"]))

                # Call the LLM (chat model invoked directly on the
                # message list; returns a message object)
                response = llm(lc_messages)
                # Add LLM response to conversation
                st.session_state["messages"].append({"role": "assistant", "content": response.content})

# End

if __name__ == "__main__":
    main()