import gradio as gr
from google import generativeai as genai
import os

# Load and configure the Gemini API key from HF Spaces Secrets
gemini_api_key = os.getenv("GEMINI_API_KEY")
if not gemini_api_key:
    raise ValueError("GEMINI_API_KEY not set in environment")
genai.configure(api_key=gemini_api_key)  # sets auth for all subsequent calls

# Path to your uploaded business.txt
business_file = os.path.join(os.path.dirname(__file__), "business.txt")


def chat_with_business(message, history):
    # 1️⃣ Read business info
    with open(business_file, "r", encoding="utf-8") as f:
        business_info = f.read().strip()

    # 2️⃣ Build system prompt
    system_prompt = (
        "You are a helpful customer-care assistant. "
        "Use only the information below to answer questions. "
        # Hindi-English fallback: "This information is not available yet."
        "If the answer is not present, reply 'Yeh information abhi available nahi hai.'\n\n"
        f"{business_info}\n\n"
    )

    # 3️⃣ Call Gemini 2.5 Flash
    model = genai.GenerativeModel(model_name="gemini-2.5-flash-preview-04-17")
    response = model.generate_content(system_prompt + "User: " + message)

    # 4️⃣ Return the generated text
    return response.text


# Build the Gradio UI with Blocks and the messages format
with gr.Blocks(theme="soft") as demo:
    gr.Markdown("## 🌿 My Business Bot")
    gr.Markdown("*Ask anything about your business in Hindi-English*")

    chatbot = gr.Chatbot(elem_id="chatbox", height=400, type="messages")  # use messages format
    user_input = gr.Textbox(placeholder="Type your question here...", show_label=False)

    def handle(msg, hist):
        reply = chat_with_business(msg, hist)
        # Append OpenAI-style role/content dicts, not tuples
        hist = hist + [
            {"role": "user", "content": msg},
            {"role": "assistant", "content": reply},
        ]
        return hist, ""

    user_input.submit(handle, [user_input, chatbot], [chatbot, user_input])

if __name__ == "__main__":
    demo.launch()
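
# Note: `chat_with_business` currently ignores its `history` argument, so the model
# only ever sees the latest message. Below is a minimal, optional sketch (not wired
# into the UI) of how prior turns could be folded into the prompt text sent to Gemini.
# It assumes `history` holds the same OpenAI-style role/content dicts that `handle`
# appends above; the helper name `build_prompt_with_history` is illustrative only.
def build_prompt_with_history(system_prompt, history, message):
    lines = [system_prompt]
    for turn in history:
        speaker = "User" if turn["role"] == "user" else "Assistant"
        lines.append(f"{speaker}: {turn['content']}")
    lines.append("User: " + message)
    return "\n".join(lines)

# Possible usage inside chat_with_business:
#   response = model.generate_content(build_prompt_with_history(system_prompt, history, message))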