import os
import gradio as gr
import requests
import pandas as pd
import google.generativeai as genai
from smolagents import CodeAgent, DuckDuckGoSearchTool
# Define the system prompt
SYSTEM_PROMPT = """You are a general AI assistant. I will ask you a question.
Report your thoughts, and finish your answer with just the answer — no prefixes like "FINAL ANSWER:".
Your answer should be a number OR as few words as possible OR a comma-separated list of numbers and/or strings.
If you're asked for a number, don’t use commas or units like $ or %, unless specified.
If you're asked for a string, don’t use articles or abbreviations (e.g. for cities), and write digits in plain text unless told otherwise."""
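# Base URL of the course scoring service (serves /questions and accepts /submit)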
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
# Gemini model wrapper (lightweight, no smolagents.model.base)
class GeminiFlashModel:
    def __init__(self, model_name="gemini-1.5-flash", api_key=None):
        self.model_name = model_name
        self.api_key = api_key or os.getenv("GEMINI_API_KEY")
        if not self.api_key:
            raise ValueError("GEMINI_API_KEY is not set.")
        genai.configure(api_key=self.api_key)
        self.model = genai.GenerativeModel(model_name)
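    # Flatten the chat messages into one text prompt and ask Gemini for a
    # completion; generation errors are returned as a string rather than raised.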
    def generate(self, messages, stop_sequences=None, **kwargs):
        # Insert the system prompt if it is missing
        if isinstance(messages, list):
            if not any(m["role"] == "system" for m in messages):
                messages = [{"role": "system", "content": SYSTEM_PROMPT}] + messages
        else:
            raise TypeError("Expected 'messages' to be a list of dicts.")
        prompt = "\n".join(f"{m['role'].capitalize()}: {m['content']}" for m in messages)
        try:
            response = self.model.generate_content(prompt)
            return response.text.strip()
        except Exception as e:
            return f"GENERATION ERROR: {e}"
# Agent wrapping smolagents' CodeAgent with the Gemini model and DuckDuckGo search
class MyAgent:
    def __init__(self):
        self.model = GeminiFlashModel(model_name="gemini-1.5-flash")
        self.agent = CodeAgent(tools=[DuckDuckGoSearchTool()], model=self.model)

    def __call__(self, question: str) -> str:
        return self.agent.run(question)
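
# Fetch the evaluation questions, run the agent on each one, submit the answers
# to the scoring endpoint, and return a status string plus a results table.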
def run_and_submit_all(profile: gr.OAuthProfile | None):
    space_id = os.getenv("SPACE_ID")
    if profile:
        username = profile.username
        print(f"User logged in: {username}")
    else:
        print("User not logged in.")
        return "Please login to Hugging Face.", None

    questions_url = f"{DEFAULT_API_URL}/questions"
    submit_url = f"{DEFAULT_API_URL}/submit"

    try:
        agent = MyAgent()
    except Exception as e:
        return f"Error initializing agent: {e}", None

    try:
        response = requests.get(questions_url, timeout=15)
        response.raise_for_status()
        questions_data = response.json()
    except Exception as e:
        return f"Error fetching questions: {e}", None

    results_log = []
    answers_payload = []
    for item in questions_data:
        task_id = item.get("task_id")
        question_text = item.get("question")
        if not task_id or question_text is None:
            continue
        try:
            submitted_answer = agent(question_text)
            answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
            results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
        except Exception as e:
            results_log.append({
                "Task ID": task_id,
                "Question": question_text,
                "Submitted Answer": f"AGENT ERROR: {e}"
            })

    if not answers_payload:
        return "Agent did not return any answers.", pd.DataFrame(results_log)
    submission_data = {
        "username": profile.username.strip(),
        "agent_code": f"https://huggingface.co/spaces/{space_id}/tree/main",
        "answers": answers_payload
    }
    try:
        response = requests.post(submit_url, json=submission_data, timeout=60)
        response.raise_for_status()
        result_data = response.json()
        final_status = (
            f"Submission Successful!\n"
            f"User: {result_data.get('username')}\n"
            f"Score: {result_data.get('score', 'N/A')}% "
            f"({result_data.get('correct_count', '?')}/{result_data.get('total_attempted', '?')} correct)\n"
            f"Message: {result_data.get('message', 'No message received.')}"
        )
        return final_status, pd.DataFrame(results_log)
    except Exception as e:
        return f"Submission failed: {e}", pd.DataFrame(results_log)
# Gradio UI
with gr.Blocks() as demo:
gr.Markdown("# Basic Agent Evaluation Runner")
gr.Markdown("""
**Instructions:**
1. Clone this space and configure your Gemini API key.
2. Log in to Hugging Face.
3. Run your agent on evaluation tasks and submit answers.
""")
gr.LoginButton()
run_button = gr.Button("Run Evaluation & Submit All Answers")
status_output = gr.Textbox(label="Submission Result", lines=5, interactive=False)
results_table = gr.DataFrame(label="Results", wrap=True)
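
    # Gradio supplies the gr.OAuthProfile argument automatically based on the
    # handler's type hint, so no explicit inputs are wired to the click event.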
    run_button.click(fn=run_and_submit_all, outputs=[status_output, results_table])
if __name__ == "__main__":
    print("🔧 App starting...")
    demo.launch(debug=True, share=False)