import gradio as gr
from huggingface_hub import InferenceClient
from sentence_transformers import SentenceTransformer
import torch

# Load the knowledge base and split it into one chunk per non-empty line
with open("recipesplease.txt", "r", encoding="utf-8") as file:
    knowledge = file.read()
cleaned_chunks = [chunk.strip() for chunk in knowledge.strip().split("\n") if chunk.strip()]

# Embed every chunk once at startup; normalizing the embeddings makes the dot
# product in get_top_chunks a cosine similarity
model = SentenceTransformer("all-MiniLM-L6-v2")
chunk_embeddings = model.encode(cleaned_chunks, convert_to_tensor=True, normalize_embeddings=True)

def get_top_chunks(query, k=5):
    """Return the k knowledge chunks most similar to the query."""
    query_embedding = model.encode(query, convert_to_tensor=True, normalize_embeddings=True)
    similarities = torch.matmul(chunk_embeddings, query_embedding)
    top_indices = torch.topk(similarities, k=min(k, len(cleaned_chunks))).indices.tolist()
    return [cleaned_chunks[i] for i in top_indices]
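
# Example (not part of the original app): the retriever can be sanity-checked
# from a Python shell with get_top_chunks("vegan pasta"), which returns the
# five most similar lines from recipesplease.txt.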

# Hosted inference client for the chat model
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")

def respond(message, history, cuisine, dietary_restrictions, allergies):
    """Stream a recipe suggestion grounded in the retrieved recipe chunks."""
    response = ""
    top_chunks = get_top_chunks(message)
    context = "\n".join(top_chunks)
    messages = [
        {
            "role": "system",
            "content": (
                f"You are a friendly recipe chatbot named BiteBot that responds to the user "
                f"with a recipe from the following context: {context}. Find a recipe that is "
                f"{cuisine} cuisine. The user has these dietary restrictions: {dietary_restrictions}, "
                f"and is allergic to: {allergies}. Return the recipe title and ask whether this is "
                f"the recipe they want. If they say yes, return the full recipe; if they say no, "
                f"suggest another recipe."
            )
        }
    ]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})

    # Stream tokens from the model and yield the partial response as it grows
    stream = client.chat_completion(
        messages,
        max_tokens=300,
        temperature=1.2,
        stream=True,
    )
    for chunk_message in stream:
        token = chunk_message.choices[0].delta.content
        if token is not None:
            response += token
            yield response

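
# Custom warm-toned Monochrome theme; the color values below were generated with
# the Gradio theme builder and are kept as-is.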
theme = gr.themes.Monochrome(
    primary_hue="orange",
    secondary_hue="zinc",
    neutral_hue=gr.themes.Color(c100="rgba(255, 227.4411088400613, 206.9078947368421, 1)", c200="rgba(255, 229.53334184977007, 218.0921052631579, 1)", c300="rgba(255, 234.91658150229947, 213.6184210526316, 1)", c400="rgba(189.603125, 154.41663986650488, 133.88641721491229, 1)", c50="#f3d1bbff", c500="rgba(170.2125, 139.18781968574348, 118.70082236842106, 1)", c600="rgba(193.32187499999998, 129.35648241888094, 111.07528782894737, 1)", c700="rgba(184.13125000000002, 141.9707339039346, 106.60230263157897, 1)", c800="rgba(156.06796875, 104.12209005333418, 69.81988075657894, 1)", c900="rgba(156.39999999999998, 117.22008175779253, 80.2578947368421, 1)", c950="rgba(158.43203125, 125.1788770279765, 97.28282620614036, 1)"),
    text_size="sm",
    spacing_size="md",
    radius_size="sm",
).set(
    body_background_fill='*primary_50',
    body_background_fill_dark='*primary_50'
)


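# UI: the three inputs below are passed to respond() via additional_inputs, so
# each chat turn is filtered by cuisine, dietary restrictions, and allergies.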
with gr.Blocks(theme=theme) as chatbot:
    gr.Markdown("## 🥗🍴 The BiteBot")
    cuisine = gr.Textbox(label="Cuisine")
    dietary_restrictions = gr.Dropdown(
        ["Gluten-Free", "Dairy-Free", "Vegan", "Vegetarian", "Keto", "Kosher",
         "No Soy", "No Seafood", "No Pork", "No Beef"],
        label="Dietary restrictions",
    )
    allergies = gr.Textbox(label="Allergies")
    gr.ChatInterface(
        fn=respond,
        type="messages",
        additional_inputs=[cuisine, dietary_restrictions, allergies],
    )

chatbot.launch()