import gradio as gr
import google.generativeai as genai
import os

# — Load and configure Gemini API key from HF Secrets
gemini_api_key = os.getenv("GEMINI_API_KEY")
if not gemini_api_key:
    raise ValueError("GEMINI_API_KEY not set in environment")
genai.configure(api_key=gemini_api_key)  # sets auth for all calls

# — Path to your uploaded business.txt
business_file = os.path.join(os.path.dirname(__file__), "business.txt")
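# business.txt should hold plain-text facts about the business (e.g. services,
# hours, contact details); the system prompt below limits answers to this text.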

def chat_with_business(message, history):
    # 1️⃣ Read business info
    with open(business_file, "r", encoding="utf-8") as f:
        business_info = f.read().strip()

    # 2️⃣ Build system prompt
    system_prompt = (
        "You are a helpful customer-care assistant. "
        "Use only the information below to answer questions. "
        "If the answer is not present, reply 'Yeh information abhi available nahi hai.'\n\n"
        f"{business_info}\n\n"
    )

    # 3️⃣ Call Gemini 2.5 Flash
    model = genai.GenerativeModel(model_name="gemini-2.5-flash-preview-04-17")
    response = model.generate_content(system_prompt + "User: " + message)  # note: only the latest message is sent; prior history isn't included

    # 4️⃣ Return text
    return response.text

# — Build Gradio UI with Blocks and messages format
with gr.Blocks(theme="soft") as demo:
    gr.Markdown("## 🌿 My Business Bot")
    gr.Markdown("*Ask anything about your business in Hindi-English*")
    chatbot = gr.Chatbot(elem_id="chatbox", height=400, type="messages")  # use messages format
    user_input = gr.Textbox(placeholder="Type your question here...", show_label=False)

    def handle(msg, hist):
        reply = chat_with_business(msg, hist)
        # Append OpenAI-style dicts, not tuples
        hist = hist + [
            {"role": "user",      "content": msg},
            {"role": "assistant", "content": reply}
        ]
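        # Return the updated history (rendered by the Chatbot) and clear the textbox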
        return hist, ""

    user_input.submit(handle, [user_input, chatbot], [chatbot, user_input])

if __name__ == "__main__":
    demo.launch()
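
# To run locally (assuming the gradio and google-generativeai packages are installed):
#   GEMINI_API_KEY=<your key> python app.py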