import gradio as gr

from search import search_google
from scraper import scrape_url
from rag import VectorStore
from llm import generate_answer

vs = VectorStore()


def ask_agent(question):
    # Step 1: search the web for candidate sources
    urls = search_google(question, num_results=3)

    # Step 2: scrape the pages
    texts = [scrape_url(url) for url in urls]

    # Step 3: embed the scraped text and add it to the vector store
    vs.add_texts(texts)

    # Step 4: retrieve the chunks most relevant to the question
    relevant = vs.retrieve(question, top_k=2)
    context = "\n\n".join(relevant)

    # Step 5: generate an answer grounded in the retrieved context
    answer = generate_answer(context, question)

    sources = "\n".join(f"- {url}" for url in urls)
    return f"### 🧠 Answer:\n{answer}\n\n### 🔗 Sources:\n{sources}"


with gr.Blocks() as demo:
    gr.Markdown("# 🔍 AI Web RAG Agent\nAsk me anything; I'll search, scrape, and answer!")
    with gr.Row():
        inp = gr.Textbox(label="Your question")
        out = gr.Markdown()
    btn = gr.Button("Ask")
    btn.click(fn=ask_agent, inputs=inp, outputs=out)

demo.launch()
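

# --- rag.py: minimal reference sketch of the VectorStore interface assumed above ---
# The real rag module is not shown here; this sketch assumes sentence-transformers
# for embeddings and plain NumPy cosine similarity for retrieval. It only illustrates
# the add_texts()/retrieve() contract used by ask_agent(), not the actual implementation.
#
# import numpy as np
# from sentence_transformers import SentenceTransformer
#
# class VectorStore:
#     def __init__(self, model_name="all-MiniLM-L6-v2"):
#         self.model = SentenceTransformer(model_name)
#         self.texts = []          # raw text chunks
#         self.embeddings = None   # (n, dim) matrix of chunk embeddings
#
#     def add_texts(self, texts):
#         # Embed new chunks and append them to the in-memory store.
#         new = self.model.encode(texts)
#         self.texts.extend(texts)
#         self.embeddings = new if self.embeddings is None else np.vstack([self.embeddings, new])
#
#     def retrieve(self, query, top_k=2):
#         # Rank stored chunks by cosine similarity to the query embedding.
#         if self.embeddings is None:
#             return []
#         q = self.model.encode([query])[0]
#         sims = self.embeddings @ q / (
#             np.linalg.norm(self.embeddings, axis=1) * np.linalg.norm(q) + 1e-10
#         )
#         top = np.argsort(sims)[::-1][:top_k]
#         return [self.texts[i] for i in top]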