# NOTE(review): the three lines below were Hugging Face Spaces page-header residue
# ("Spaces:" / "Sleeping" / "Sleeping") captured by a web scrape — not source code.
# Kept here as a comment so the file can parse.
# --- Imports ---
# (scrape artifacts " | |" removed; grouped stdlib / third-party per PEP 8)

# Standard library
import inspect
import json
import os
import threading
import time
from typing import Dict, List, Optional, Tuple

# Third-party
import gradio as gr
import pandas as pd
import requests
from huggingface_hub import InferenceClient
from smolagents import DuckDuckGoSearchTool
# --- Constants ---
# Scoring endpoint for the HF Agents course (unit 4 evaluation service).
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

# --- Global Cache for Answers ---
# Maps a question's task id to its generated answer.
cached_answers = {}
# Questions fetched from the scoring API, cached for reuse across callbacks.
cached_questions = []
# Mutable status record polled by the UI while answers are being generated.
processing_status = {"is_processing": False, "progress": 0, "total": 0}
# --- Intelligent Agent with Conditional Search ---
class IntelligentAgent: | |
def __init__(self, debug: bool = False, model_name: str = "meta-llama/Llama-3.1-8B-Instruct"): | |
self.search = DuckDuckGoSearchTool() | |
self.client = InferenceClient(model=model_name) | |
self.debug = debug | |
if self.debug: | |
print(f"IntelligentAgent initialized with model: {model_name}") | |
def _should_search(self, question: str) -> bool: | |
""" | |
Use LLM to determine if search is needed for the question. | |
Returns True if search is recommended, False otherwise. | |
""" | |
decision_prompt = f"""You are an AI assistant that decides whether a web search is needed to answer questions accurately. | |