Spaces:
Running
Running
File size: 3,641 Bytes
6e2bcd6 340c7df bf3b67e 340c7df 6e2bcd6 340c7df 6e2bcd6 340c7df 6e2bcd6 340c7df 6e2bcd6 340c7df 6e2bcd6 340c7df 6e2bcd6 340c7df 6e2bcd6 340c7df 6e2bcd6 340c7df 6e2bcd6 340c7df 6e2bcd6 340c7df 6e2bcd6 340c7df |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 |
import random
import re

import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
# --- Page configuration and header -------------------------------------
# Intro text shown above the chat; rendered as Markdown.
_INTRO_MD = """
### Hey there! This is OrderBot, an AI-driven assistant powered by the DeepSeek-7B Chat model.
I am designed for seamless natural language interaction. Leveraging advanced machine learning,
I process and respond to human input with precision and efficiency.
"""

st.set_page_config(page_title="OrderBot - AI Chatbot", page_icon="π")
st.title("π OrderBot - AI Chatbot")
st.markdown(_INTRO_MD)
# --- Model loading (cached once per server session) --------------------
@st.cache_resource()
def load_model():
    """Load the DeepSeek-7B chat tokenizer and model.

    Returns:
        A ``(tokenizer, model)`` pair. The model is loaded in fp16 with
        ``device_map="auto"``, spilling weights to ``offload_weights/``
        when they do not fit in device memory.
    """
    checkpoint = "deepseek-ai/deepseek-llm-7b-chat"
    tok = AutoTokenizer.from_pretrained(checkpoint)
    net = AutoModelForCausalLM.from_pretrained(
        checkpoint,
        torch_dtype=torch.float16,
        device_map="auto",
        offload_folder="offload_weights",
    )
    return tok, net


tokenizer, model = load_model()
# --- Restaurant menu ---------------------------------------------------
# Category name -> list of item names offered in that category.
menu = dict(
    meals=[
        "Grilled Chicken with Rice",
        "Beef Steak",
        "Salmon with Lemon Butter Sauce",
        "Vegetable Stir-Fry",
    ],
    fast_foods=[
        "Cheeseburger",
        "Pepperoni Pizza",
        "Fried Chicken",
        "Hot Dog",
        "Tacos",
        "French Fries",
    ],
    drinks=["Coke", "Pepsi", "Lemonade", "Orange Juice", "Iced Coffee", "Milkshake"],
    sweets=["Chocolate Cake", "Ice Cream", "Apple Pie", "Cheesecake", "Brownies", "Donuts"],
)

# Pre-render each category as a comma-separated string for the prompt.
_meals = ", ".join(menu["meals"])
_fast_foods = ", ".join(menu["fast_foods"])
_drinks = ", ".join(menu["drinks"])
_sweets = ", ".join(menu["sweets"])

# System prompt prepended to every LLM call so the model stays in its
# restaurant-assistant role and only offers items from the menu above.
system_prompt = f"""
You are OrderBot, a virtual restaurant assistant.
You help customers order food from the following menu:
π½οΈ **Meals**: {_meals}
π **Fast Foods**: {_fast_foods}
π₯€ **Drinks**: {_drinks}
π° **Sweets**: {_sweets}
Rules:
1οΈβ£ Always confirm the customer's order.
2οΈβ£ Ask if they need anything else.
3οΈβ£ Respond in a friendly and professional manner.
"""
def process_order(user_input):
    """Route a customer message to a canned reply or to the LLM.

    Small-talk (greetings, farewells, thanks) is answered from fixed
    response pools; everything else is forwarded to the DeepSeek model
    together with the menu-aware ``system_prompt``.

    Args:
        user_input: Raw text typed by the customer.

    Returns:
        The bot's reply as a plain string.
    """
    responses = {
        "greetings": ["Hello! How can I assist you today?", "Hey there! What would you like to order? π"],
        "farewell": ["Goodbye! Have a great day! π", "See you next time!"],
        "thanks": ["You're welcome! π", "Happy to help!"],
        "default": ["I'm not sure how to respond to that. Can I take your order?", "Tell me more!"]
    }
    text = user_input.lower()
    # Match whole words, not substrings: the previous `"hi" in text`
    # check fired falsely on words like "this" or "something", and
    # "bye" matched inside "maybe".
    words = set(re.findall(r"[a-z]+", text))
    if words & {"hello", "hi", "hey"}:
        return random.choice(responses["greetings"])
    # "see you" is a two-word phrase, so it stays a substring check.
    if words & {"bye", "goodbye"} or "see you" in text:
        return random.choice(responses["farewell"])
    if "thanks" in words or "thank you" in text:
        return random.choice(responses["thanks"])
    # Fall through to the LLM for anything that is not small talk.
    prompt = f"{system_prompt}\nUser: {user_input}\nOrderBot:"
    # Send inputs to wherever the model actually lives: with
    # device_map="auto" the model may sit on CPU or be sharded, so the
    # old hard-coded .to("cuda") crashed on CPU-only hosts.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    output = model.generate(**inputs, max_new_tokens=150)
    decoded = tokenizer.decode(output[0], skip_special_tokens=True)
    # Keep only the text the model produced after the final role tag.
    return decoded.split("OrderBot:")[-1].strip()
# --- Chat interface ----------------------------------------------------
st.subheader("π¬ Chat with OrderBot")

# Conversation history survives Streamlit reruns via session_state.
if "messages" not in st.session_state:
    st.session_state["messages"] = []

# Replay the stored conversation before handling any new input.
for entry in st.session_state["messages"]:
    st.chat_message(entry["role"]).write(entry["content"])

user_input = st.text_input("You:", placeholder="Type your message here...")
if user_input:
    reply = process_order(user_input)
    # Record both sides of the exchange, then render them immediately.
    st.session_state["messages"] += [
        {"role": "user", "content": user_input},
        {"role": "assistant", "content": reply},
    ]
    st.chat_message("user").write(user_input)
    st.chat_message("assistant").write(reply)
|