Update app.py
app.py CHANGED
@@ -25,6 +25,9 @@ import requests
 import random
 import datetime
 from groq import Groq
+import os
+from mistralai import Mistral
+from dotenv import load_dotenv
 
 # Automatically get the current year
 current_year = datetime.datetime.now().year
@@ -53,6 +56,10 @@ GROQ_API_KEY = os.getenv("GROQ_API_KEY")
 # Initialize Groq client
 groq_client = Groq(api_key=GROQ_API_KEY)
 
+# Initialize Mistral client
+MISTRAL_API_KEY = os.getenv("MISTRAL_API_KEY")
+mistral_client = Mistral(api_key=MISTRAL_API_KEY)
+
 # Initialize the similarity model
 similarity_model = SentenceTransformer('all-MiniLM-L6-v2')
 
@@ -387,7 +394,18 @@ Instructions:
             stream=False
         )
         return response.choices[0].message.content.strip()
-
+    elif model == "mistral":
+        response = mistral_client.chat.complete(
+            model="Mistral-Nemo-Instruct-2407",
+            messages=messages,
+            max_tokens=5500,
+            temperature=temperature,
+            top_p=0.9,
+            presence_penalty=1.2,
+            stream=False
+        )
+        return response.choices[0].message.content.strip()
+    else:  # huggingface
         response = client.chat_completion(
             messages=messages,
             max_tokens=10000,
@@ -620,7 +638,7 @@ iface = gr.ChatInterface(
         gr.Slider(0, 2, value=2, step=1, label="Safe Search Level"),
         gr.Radio(["GET", "POST"], value="POST", label="HTTP Method"),
         gr.Slider(0, 1, value=0.2, step=0.1, label="LLM Temperature"),
-        gr.Dropdown(["huggingface", "groq"], value="huggingface", label="LLM Model"),
+        gr.Dropdown(["huggingface", "groq", "mistral"], value="huggingface", label="LLM Model"),
         gr.Checkbox(label="Use PyPDF2 for PDF scraping", value=True),
     ],
     additional_inputs_accordion=gr.Accordion("⚙️ Advanced Parameters", open=True),
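For reference, the new branch routes requests through the mistralai v1 client rather than Groq or the Hugging Face client. Below is a minimal standalone sketch of the same call, assuming MISTRAL_API_KEY is available in the environment or a local .env file; the model name and sampling parameters are copied from the diff, while the prompt, the load_dotenv() call, and the fixed temperature are illustrative stand-ins for what app.py passes through.

```python
# Standalone sketch of the Mistral path added in this commit.
# Assumptions: MISTRAL_API_KEY is set in the environment or a .env file;
# the prompt and the fixed temperature are illustrative, not taken from app.py.
import os

from dotenv import load_dotenv
from mistralai import Mistral

load_dotenv()  # pick up MISTRAL_API_KEY from a local .env file, if one exists

mistral_client = Mistral(api_key=os.getenv("MISTRAL_API_KEY"))

messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "In one sentence, explain what a web-search chatbot does."},
]

# Same call shape and parameters as the new elif branch in app.py.
response = mistral_client.chat.complete(
    model="Mistral-Nemo-Instruct-2407",
    messages=messages,
    max_tokens=5500,
    temperature=0.2,
    top_p=0.9,
    presence_penalty=1.2,
    stream=False,
)
print(response.choices[0].message.content.strip())
```

In the app itself, this path is reached by selecting "mistral" in the new LLM Model dropdown.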