Update app.py
app.py CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 import os
 import pandas as pd
-from transformers import pipeline
+from PIL import Image
 
 # ----- Config -----
 BASE_DIR = "data"
@@ -55,7 +55,26 @@ def display_quantity_info(quantity, data_dict):
         else:
             return f"❌ Sorry, {quantity} is currently not in stock."
     except Exception as e:
-        return f"
+        return f"❌ Error: {e}"
+
+# Smart Suggestions Function
+def suggest_items(query):
+    query = query.lower()
+    if "gift" in query and "500" in query:
+        return (
+            "🎁 Gift Suggestions under ₹500:\n"
+            "1. Bath & Body Gift Set - ₹499\n"
+            "2. Mini Perfume Pack - ₹349\n"
+            "3. Skin Care Hamper - ₹399\n"
+            "4. Chocolates Gift Box - ₹299"
+        )
+    if "shampoo" in query and "dry" in query:
+        return (
+            "🧴 Shampoos for Dry Hair:\n"
+            "1. Dove 500 ml - ₹325\n"
+            "2. Clinic Plus 500 ml - ₹680"
+        )
+    return "🤷 Sorry, no smart suggestions found. Try asking: 'Gift items under 500' or 'Shampoo for dry hair'"
 
 def reset_all():
     return (
@@ -65,61 +84,20 @@ def reset_all():
         {} # data_state (gr.State) reset with raw dict
     )
 
-# ----- Chatbot setup -----
-# Use a text2text-generation pipeline with a chat-capable model
-chat_model_name = "facebook/blenderbot-400M-distill" # You can replace this with any chat-capable model
-chat_generator = pipeline("text2text-generation", model=chat_model_name)
-
-# We'll maintain chat history as a list of strings (user and bot messages)
-chat_history = []
-
-def chat_with_bot(user_message, history):
-    global chat_history
-    if history is None:
-        history = []
-        chat_history = []
-
-    # Append user message to history
-    chat_history.append(f"User: {user_message}")
-
-    # Create prompt by joining conversation history with a Bot prompt
-    prompt = "\n".join(chat_history) + "\nBot:"
-
-    # Generate response (adjust max_length and other params as needed)
-    response = chat_generator(prompt, max_length=100, do_sample=True)[0]['generated_text']
-
-    # Extract bot reply by removing prompt prefix if present
-    bot_reply = response[len(prompt):].strip()
-    if not bot_reply:
-        bot_reply = response.strip() # fallback if slicing fails
-
-    # Append bot reply to history
-    chat_history.append(f"Bot: {bot_reply}")
-
-    # Update Gradio chat history (list of (user, bot) tuples)
-    history.append((user_message, bot_reply))
-
-    return history, history
-
-def clear_chat():
-    global chat_history
-    chat_history = []
-    return [], []
-
 # ----- UI -----
 with gr.Blocks(title="RetailGenie") as demo:
-    gr.Markdown("#
+    gr.Markdown("# 🧞‍♂️ RetailGenie – In-Store Smart Assistant")
 
     with gr.Tabs():
         with gr.TabItem("🧭 Navigator"):
             with gr.Row():
                 country = gr.Dropdown(label="🌍 Country", choices=get_subfolders(BASE_DIR), interactive=True)
-                state = gr.Dropdown(label="
-                city = gr.Dropdown(label="
+                state = gr.Dropdown(label="📍 State", choices=[], interactive=True)
+                city = gr.Dropdown(label="🏙️ City", choices=[], interactive=True)
                 store = gr.Dropdown(label="🏪 Store", choices=[], interactive=True)
-                category = gr.Dropdown(label="
+                category = gr.Dropdown(label="📂 Category", choices=[], interactive=True)
                 product = gr.Dropdown(label="📦 Product", choices=[], interactive=True)
-                brand = gr.Dropdown(label="
+                brand = gr.Dropdown(label="🏷️ Brand", choices=[], interactive=True)
                 quantity = gr.Dropdown(label="🔢 Quantity", visible=False, interactive=True)
 
             result = gr.Textbox(label="📄 Product Info", lines=5)
@@ -173,11 +151,14 @@ with gr.Blocks(title="RetailGenie") as demo:
 
         with gr.TabItem("🛒 Smart Suggestions"):
            gr.Markdown("### 🤖 Ask RetailGenie for Recommendations")
-
-
-
-
-
-
+            suggestion_input = gr.Textbox(label="Ask something like:", placeholder="Gift items under 500", lines=1)
+            suggest_btn = gr.Button("💡 Get Suggestions")
+            suggestions_output = gr.Textbox(label="📋 Suggestions", lines=10)
+
+            suggest_btn.click(
+                suggest_items,
+                inputs=suggestion_input,
+                outputs=suggestions_output
+            )
 
-demo.launch()
+demo.launch()
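A quick way to exercise the new suggest_items() helper outside the Gradio UI is a small smoke test. The sketch below is not part of this commit: the file name and sample queries are made up, it assumes it sits next to app.py, and it assumes the module-level demo.launch() call has been commented out or wrapped in an if __name__ == "__main__": guard first, since importing app would otherwise start the web server.

# smoke_test.py - hypothetical helper, not part of this commit.
# Assumes app.py is importable without side effects, i.e. demo.launch()
# is guarded (for example under `if __name__ == "__main__":`) so that
# importing app does not block on the Gradio server.
from app import suggest_items

queries = [
    "Gift items under 500",    # should hit the gift branch
    "Shampoo for dry hair",    # should hit the shampoo branch
    "Do you stock laptops?",   # should fall through to the default reply
]

for q in queries:
    print(f"> {q}")
    print(suggest_items(q))
    print()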