Spaces:
Sleeping
Sleeping
File size: 1,243 Bytes
10e9b7d eccf8e4 7d65c66 620f572 3c4371f c275bbd 3164d5a 7067f57 3164d5a e80aab9 3db6293 e80aab9 3164d5a 8b49454 61401c1 c275bbd 61401c1 8b49454 61401c1 f003351 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 |
import os
import gradio as gr
import requests
import inspect
import time
import pandas as pd
from smolagents import DuckDuckGoSearchTool
import threading
from typing import Dict, List, Optional, Tuple
import json
from huggingface_hub import InferenceClient
# --- Constants ---
# Base URL of the GAIA (HF Agents course, unit 4) scoring service used to
# fetch questions and submit answers.
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
# --- Global Cache for Answers ---
# Maps question/task id -> computed answer; filled by the background worker,
# read by the UI when submitting.  Module-level shared state.
cached_answers: Dict = {}
# Raw question records fetched from the scoring API, kept for display/resubmit.
cached_questions: List = []
# Progress tracker for the (threaded) answer-generation run; "is_processing"
# gates concurrent runs, "progress"/"total" drive a progress display.
processing_status: Dict = {"is_processing": False, "progress": 0, "total": 0}
# --- Intelligent Agent with Conditional Search ---
class IntelligentAgent:
def __init__(self, debug: bool = False, model_name: str = "meta-llama/Llama-3.1-8B-Instruct"):
    """Initialize the agent's tools.

    Args:
        debug: When True, print diagnostic messages during operation.
        model_name: Hugging Face model id used for the inference client.
    """
    # Web-search tool and LLM client used to answer questions.
    self.search = DuckDuckGoSearchTool()
    self.client = InferenceClient(model=model_name)
    self.debug = debug
    if debug:
        print(f"IntelligentAgent initialized with model: {model_name}")
def _should_search(self, question: str) -> bool:
"""
Use LLM to determine if search is needed for the question.
Returns True if search is recommended, False otherwise.
"""
decision_prompt = f"""You are an AI assistant that decides whether a web search is needed to answer questions accurately.
|