# OrderBot — Streamlit chat app (Hugging Face Space)
# --- Dependencies ---
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
import random

# --- Page setup ---
# Browser-tab title/icon, on-page header, and a short intro blurb.
st.set_page_config(page_title="OrderBot - AI Chatbot", page_icon="π")
st.title("π OrderBot - AI Chatbot")
st.markdown(
    """
### Hey there! This is OrderBot, an AI-driven assistant powered by the DeepSeek-7B Chat model.
I am designed for seamless natural language interaction. Leveraging advanced machine learning,
I process and respond to human input with precision and efficiency.
"""
)
# --- Model loading ---
@st.cache_resource
def load_model():
    """Load the DeepSeek-7B chat tokenizer and model once per server process.

    ``@st.cache_resource`` keeps the multi-gigabyte model in memory across
    Streamlit reruns; without it the weights were re-downloaded/re-loaded on
    every single user interaction.

    Returns:
        (tokenizer, model) tuple ready for generation.
    """
    model_name = "deepseek-ai/deepseek-llm-7b-chat"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.float16,          # halve memory vs. fp32
        device_map="auto",                  # place/shard across available devices
        offload_folder="offload_weights",   # spill layers to disk if RAM is short
    )
    return tokenizer, model

tokenizer, model = load_model()
# --- Menu offered to customers, grouped by category ---
_meals = ["Grilled Chicken with Rice", "Beef Steak", "Salmon with Lemon Butter Sauce", "Vegetable Stir-Fry"]
_fast_foods = ["Cheeseburger", "Pepperoni Pizza", "Fried Chicken", "Hot Dog", "Tacos", "French Fries"]
_drinks = ["Coke", "Pepsi", "Lemonade", "Orange Juice", "Iced Coffee", "Milkshake"]
_sweets = ["Chocolate Cake", "Ice Cream", "Apple Pie", "Cheesecake", "Brownies", "Donuts"]

# Keys are referenced by name when the system prompt is built below.
menu = {
    "meals": _meals,
    "fast_foods": _fast_foods,
    "drinks": _drinks,
    "sweets": _sweets,
}
# --- System prompt ---
# Prepended to every LLM turn: advertises the current menu and pins the
# assistant's persona and ordering rules. Built once at import time from
# the module-level `menu` dict.
system_prompt = f"""
You are OrderBot, a virtual restaurant assistant.
You help customers order food from the following menu:
π½οΈ **Meals**: {', '.join(menu['meals'])}
π **Fast Foods**: {', '.join(menu['fast_foods'])}
π₯€ **Drinks**: {', '.join(menu['drinks'])}
π° **Sweets**: {', '.join(menu['sweets'])}
Rules:
1οΈβ£ Always confirm the customer's order.
2οΈβ£ Ask if they need anything else.
3οΈβ£ Respond in a friendly and professional manner.
"""
def process_order(user_input):
    """Route one customer message to either a canned reply or the LLM.

    Greetings, farewells and thanks are answered from small canned-response
    pools (cheap — no model call); anything else is sent to the chat model
    with the menu-bearing system prompt prepended.

    Args:
        user_input: Raw text typed by the customer.

    Returns:
        The bot's reply as a plain string.
    """
    responses = {
        "greetings": ["Hello! How can I assist you today?", "Hey there! What would you like to order? π"],
        "farewell": ["Goodbye! Have a great day! π", "See you next time!"],
        "thanks": ["You're welcome! π", "Happy to help!"],
        # NOTE(review): "default" is currently unused — non-keyword input
        # always falls through to the LLM branch below.
        "default": ["I'm not sure how to respond to that. Can I take your order?", "Tell me more!"],
    }

    text = user_input.lower()  # lowercase once instead of per keyword check
    # Whole-word matching: the old substring test made "hi" fire inside
    # words like "this" or "sushi". Multi-word phrases are still matched
    # as substrings of the full text.
    words = {w.strip(".,!?") for w in text.split()}

    if words & {"hello", "hi", "hey"}:
        return random.choice(responses["greetings"])
    if words & {"bye", "goodbye"} or "see you" in text:
        return random.choice(responses["farewell"])
    if "thanks" in words or "thank you" in text:
        return random.choice(responses["thanks"])

    # Not small talk — ask the LLM, feeding it the menu/rules prompt.
    prompt = f"{system_prompt}\nUser: {user_input}\nOrderBot:"
    # Use the model's own device rather than hard-coded "cuda": with
    # device_map="auto" the weights may live on CPU or be sharded, and
    # .to("cuda") crashed outright on CPU-only hosts.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():  # inference only; skip autograd bookkeeping
        output = model.generate(**inputs, max_new_tokens=150)
    decoded = tokenizer.decode(output[0], skip_special_tokens=True)
    # The decoded text echoes the prompt; keep only the bot's final turn.
    return decoded.split("OrderBot:")[-1].strip()
# --- Chat interface ---
st.subheader("π¬ Chat with OrderBot")

# Persist the conversation across Streamlit reruns.
if "messages" not in st.session_state:
    st.session_state["messages"] = []

# Replay the conversation so far.
for msg in st.session_state["messages"]:
    st.chat_message(msg["role"]).write(msg["content"])

# st.chat_input clears itself after submit. The previous st.text_input kept
# its value across reruns, so the same message was re-processed and appended
# to the history again on every widget interaction.
user_input = st.chat_input("Type your message here...")
if user_input:
    st.chat_message("user").write(user_input)
    response = process_order(user_input)
    st.chat_message("assistant").write(response)
    st.session_state["messages"].append({"role": "user", "content": user_input})
    st.session_state["messages"].append({"role": "assistant", "content": response})