Create app.py
app.py
ADDED
@@ -0,0 +1,98 @@
import streamlit as st
import openai
import google.generativeai as genai

def get_openai_response(api_key, model, prompt):
    client = openai.OpenAI(api_key=api_key)
    response = client.chat.completions.create(
        model=model,
        messages=[{"role": "user", "content": prompt}]
    )
    return response.choices[0].message.content

def get_gemini_response(api_key, model, prompt):
    genai.configure(api_key=api_key)
    # Avoid shadowing the `model` argument with the client object
    model_obj = genai.GenerativeModel(model)
    response = model_obj.generate_content(prompt)
    return response.text

def get_deepseek_response(api_key, model, prompt):
    # DeepSeek exposes an OpenAI-compatible endpoint, so the OpenAI
    # client is reused here with a custom base_url
    client = openai.OpenAI(
        api_key=api_key,
        base_url="https://api.deepseek.com/v1",
    )
    response = client.chat.completions.create(
        model=model,
        messages=[{"role": "user", "content": prompt}]
    )
    return response.choices[0].message.content
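
# All three helpers share the signature (api_key, model, prompt) -> str,
# so the dispatch code below can treat every provider the same way.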

# Sidebar for API configurations
with st.sidebar:
    st.header("API Configuration")

    # OpenAI
    openai_api_key = st.text_input("OpenAI API Key", type="password")
    openai_model = st.text_input("OpenAI Model", value="o3-mini")

    # Gemini ("gemini-2.0" is not a valid model id; use a concrete variant)
    gemini_api_key = st.text_input("Gemini API Key", type="password")
    gemini_model = st.text_input("Gemini Model", value="gemini-2.0-flash")

    # DeepSeek (the DeepSeek API names the R1 model "deepseek-reasoner")
    deepseek_api_key = st.text_input("DeepSeek API Key", type="password")
    deepseek_model = st.text_input("DeepSeek Model", value="deepseek-reasoner")

# Main app interface
st.title("Multi-LLM Prompt Chain")
prompt = st.text_area("Enter your prompt:", height=150)
submit_button = st.button("Submit")

if submit_button:
    if not prompt.strip():
        st.error("Please enter a prompt.")
    else:
        responses = {}

        # OpenAI Response
        if openai_api_key:
            try:
                responses["OpenAI"] = get_openai_response(openai_api_key, openai_model, prompt)
            except Exception as e:
                responses["OpenAI"] = f"Error: {str(e)}"
        else:
            responses["OpenAI"] = "API Key not provided"

        # Gemini Response
        if gemini_api_key:
            try:
                responses["Gemini"] = get_gemini_response(gemini_api_key, gemini_model, prompt)
            except Exception as e:
                responses["Gemini"] = f"Error: {str(e)}"
        else:
            responses["Gemini"] = "API Key not provided"

        # DeepSeek Response
        if deepseek_api_key:
            try:
                responses["DeepSeek"] = get_deepseek_response(deepseek_api_key, deepseek_model, prompt)
            except Exception as e:
                responses["DeepSeek"] = f"Error: {str(e)}"
        else:
            responses["DeepSeek"] = "API Key not provided"

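        # `responses` always ends up with all three keys (a completion,
        # an error string, or a placeholder), so the columns below can
        # index it unconditionally.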
        # Display responses
        st.subheader("API Responses")
        col1, col2, col3 = st.columns(3)

        with col1:
            st.markdown("**OpenAI**")
            st.write(responses["OpenAI"])

        with col2:
            st.markdown("**Gemini**")
            st.write(responses["Gemini"])

        with col3:
            st.markdown("**DeepSeek**")
            st.write(responses["DeepSeek"])
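
To run the app locally, the three imports map to the streamlit, openai, and google-generativeai packages on PyPI, so a minimal setup sketch (package names assumed current) is:

pip install streamlit openai google-generativeai
streamlit run app.py

Each API key is entered in the sidebar at runtime, so no secrets need to be baked into the Space itself.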