Athspi committed on
Commit 1f52162 · verified · 1 Parent(s): 871c66d

Create app.py

Files changed (1)
  1. app.py +75 -0
app.py ADDED
@@ -0,0 +1,75 @@
+ import os
+ import gradio as gr
+ from huggingface_hub import InferenceClient
+ from openai import OpenAI
+ from dotenv import load_dotenv
+
+ # Load API keys from .env file
+ load_dotenv()
+ HF_API_KEY = os.getenv("HF_API_KEY")
+ OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY")
+
+ # Initialize Hugging Face Gemma Client
+ hf_client = InferenceClient(
+     provider="hf-inference",
+     api_key=HF_API_KEY
+ )
+
+ # Initialize OpenRouter DeepSeek Client
+ openrouter_client = OpenAI(
+     base_url="https://openrouter.ai/api/v1",
+     api_key=OPENROUTER_API_KEY
+ )
+
+ # Function to query Gemma
+ def query_gemma(user_input):
+     messages = [{"role": "user", "content": user_input}]
+     completion = hf_client.chat.completions.create(
+         model="google/gemma-2-27b-it",
+         messages=messages,
+         max_tokens=500
+     )
+     return completion.choices[0].message["content"]
+
+ # Function to query DeepSeek
+ def query_deepseek(user_input):
+     completion = openrouter_client.chat.completions.create(
+         model="deepseek/deepseek-r1:free",
+         messages=[{"role": "user", "content": user_input}]
+     )
+     return completion.choices[0].message.content
+
+ # Function to refine response using DeepSeek
+ def refine_response(user_input):
+     # Get responses from both models
+     gemma_response = query_gemma(user_input)
+     deepseek_response = query_deepseek(user_input)
+
+     # Send both responses to DeepSeek for refinement
+     improvement_prompt = f"""
+     Here are two AI-generated responses:
+
+     Response 1 (Gemma): {gemma_response}
+     Response 2 (DeepSeek): {deepseek_response}
+
+     Please combine the best elements, improve clarity, and provide a final refined answer.
+     """
+
+     refined_completion = openrouter_client.chat.completions.create(
+         model="deepseek/deepseek-r1:free",
+         messages=[{"role": "user", "content": improvement_prompt}]
+     )
+
+     return refined_completion.choices[0].message.content
+
+ # Create Gradio interface
+ iface = gr.Interface(
+     fn=refine_response,
+     inputs=gr.Textbox(lines=2, placeholder="Ask me anything..."),
+     outputs="text",
+     title="AI Response Enhancer",
+     description="Get responses from both Gemma and DeepSeek, then receive an improved final answer."
+ )
+
+ # Launch app
+ iface.launch()