"""Gradio app for the Personalized News Agent.

Exposes a two-tab UI: an "Info" landing page (description + workflow
diagram) and an "Agent Chat" tab backed by ``BasicLammaAgent``.
"""

import gradio as gr

from basic_llama_agent import BasicLammaAgent

# Single shared description used by BOTH the chat interface and the info
# tab, so the two never drift apart (previously duplicated verbatim).
AGENT_DESCRIPTION = (
    "A conversational agent that helps you discover and analyze news on topics of your interest. "
    "You can:\n"
    "- Get the latest news articles for your query\n"
    "- Ask for implications of a news article\n"
    "- Request background events leading up to a news story\n"
    "- Explore summarized social media reactions (positive/negative) to news events\n\n"
    "The agent uses tools for news retrieval, implication generation, event chronology, and social sentiment analysis."
)

# One agent instance shared across all chat requests.
agent_instance = BasicLammaAgent()


# NOTE(review): camelCase name kept for backward compatibility with any
# external importers; new code should prefer snake_case.
async def llmResponse(message, *args):
    """Forward the user's message to the agent and await its reply.

    Args:
        message: The latest user message from the chat box.
        *args: Extra positional arguments supplied by ``gr.ChatInterface``
            (e.g. the chat history); accepted but intentionally ignored —
            the agent only consumes the current message.

    Returns:
        The agent's response for display in the chat.
    """
    return await agent_instance(message)


# Chat tab: messages-style chat UI wired to the agent callback.
agent_chat = gr.ChatInterface(
    llmResponse,
    title="Personalized News Agent",
    description=AGENT_DESCRIPTION,
    type="messages",
)

# Info tab: static landing page with the title, description, and the
# agent workflow diagram. (A previously-present empty gr.HTML("""  """)
# rendered nothing and was removed.)
info_tab = gr.Blocks()
with info_tab:
    gr.Markdown("# Personalized News Agent")
    gr.Markdown(AGENT_DESCRIPTION)
    gr.Image(value="agent_diagram.png", label="Agent Workflow Diagram")

# Top-level app: Info first, then the chat.
demo = gr.TabbedInterface([info_tab, agent_chat], ["Info", "Agent Chat"])

if __name__ == "__main__":
    demo.launch()