AyurVedaMate / app.py
import os
from groq import Groq
import gradio as gr
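# Create the Groq client. The API key is read from the 'api_key_gorq'
# environment variable (set as a secret in the Space settings).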
client = Groq(
    api_key=os.getenv('api_key_gorq')
)
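# Each helper below sends the user's query to one Groq-hosted model,
# prefixed with an Ayurveda-focused system prompt.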
def response_from_llam3(query):
    """Send the query to Llama 3 70B on Groq and return the model's reply."""
    messages = [
        {
            "role": "system",
            "content": "You are a helpful assistant with plenty of knowledge of Ayurveda. If the message is 'Hi' or any other greeting, reply 'Namaste, how can I assist you?'"
        },
        {
            "role": "user",
            "content": "What is the answer to {}".format(query)
        }
    ]
    response = client.chat.completions.create(
        messages=messages,
        model="llama3-70b-8192"
    )
    return response.choices[0].message.content
def response_from_mistral(query):
    """Send the query to Mixtral 8x7B on Groq and return the model's reply."""
    messages = [
        {
            "role": "system",
            "content": "You are a helpful assistant with plenty of knowledge of Ayurveda. If the message is 'Hi' or any other greeting, reply 'Namaste, how can I assist you?'"
        },
        {
            "role": "user",
            "content": "What is the answer to {}".format(query)
        }
    ]
    response = client.chat.completions.create(
        messages=messages,
        model="mixtral-8x7b-32768"
    )
    return response.choices[0].message.content
# iface = gr.Interface(
# fn=response_from_llam3,
# inputs="text",
# outputs="text",
# examples=[
# ['What is importance of fasting according to Ayurveda?'],
# ['What are the medicinal values of Tusli?'],
# ['What are the three different doshas?'],
# ['What is the ideal diet according to ayurveda?']
# ],
# cache_examples=False,
# )
# iface.launch()
def chat_with_models(text):
    """Query both models with the same prompt and return both responses."""
    llama_response = response_from_llam3(text)
    mistral_response = response_from_mistral(text)
    return llama_response, mistral_response
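# Gradio Blocks UI: a single prompt box feeds both models, and each model's
# answer is shown in its own output box for comparison.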
with gr.Blocks() as demo:
    gr.Markdown("<h1>πŸš€ Mixtral 8x7B vs Llama 3 70B πŸ¦™</h1>")
    gr.Markdown("<h3> πŸ•ΉοΈ Compare the performance and responses of two powerful models, Mixtral 8x7B and Llama 3 70B Instruct. Type your questions or prompts below and see how each model responds to the same input πŸ‘Ύ </h3>")
    with gr.Row():
        input_text = gr.Textbox(label="Enter your prompt here:", placeholder="Type something...", lines=2)
        submit_button = gr.Button("Submit")
    output_llama = gr.Textbox(label="Llama 3 70B πŸ‘Ύ", placeholder="", lines=10, interactive=False)
    output_mistral = gr.Textbox(label="Mixtral 8x7B 🌠", placeholder="", lines=10, interactive=False)
    gr.Examples(
        examples=[
            ['What is the importance of fasting according to Ayurveda?'],
            ['What are the medicinal values of Tulsi?'],
            ['What are the three different doshas?'],
            ['What is the ideal diet according to Ayurveda?']
        ],
        inputs=input_text,
    )
    submit_button.click(fn=chat_with_models, inputs=input_text, outputs=[output_llama, output_mistral])
if __name__ == "__main__":
    demo.launch()