import os
import gradio as gr
import requests
import pandas as pd
from openai import OpenAI
from smolagents import CodeAgent, DuckDuckGoSearchTool
# System prompt used by the agent
SYSTEM_PROMPT = """You are a general AI assistant. I will ask you a question.
Report your thoughts, and finish your answer with just the answer — no prefixes like "FINAL ANSWER:".
Your answer should be a number OR as few words as possible OR a comma-separated list of numbers and/or strings.
If you're asked for a number, don’t use commas or units like $ or %, unless specified.
If you're asked for a string, don’t use articles or abbreviations (e.g. for cities), and write digits in plain text unless told otherwise."""
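
# Scoring service for the course's final assessment: questions are fetched from
# "{DEFAULT_API_URL}/questions" and answers are posted to "{DEFAULT_API_URL}/submit".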
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

class GeminiFlashModel:
    """Thin chat-completion wrapper around Gemini's OpenAI-compatible endpoint."""

    def __init__(self, model_id="gemini-1.5-flash"):
        self.client = OpenAI(
            api_key=os.getenv("GEMINI_API_KEY"),
            base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
        )
        self.model_id = model_id
        self.system_prompt = SYSTEM_PROMPT

    def generate(self, messages, **kwargs):
        # Accept (and ignore) extra keyword arguments the calling agent may pass,
        # e.g. stop_sequences.
        # Ensure the system prompt is present.
        if not any(m.get("role") == "system" for m in messages):
            messages = [{"role": "system", "content": self.system_prompt}] + messages
        response = self.client.chat.completions.create(
            model=self.model_id,
            messages=messages,
        )
        # Return the generated content string directly.
        return response.choices[0].message.content
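
# Untested alternative: recent smolagents releases bundle an OpenAI-compatible
# model wrapper, so the custom class above could likely be swapped for something
# like the sketch below if it misbehaves with your smolagents version:
#
#     from smolagents import OpenAIServerModel
#     model = OpenAIServerModel(
#         model_id="gemini-1.5-flash",
#         api_base="https://generativelanguage.googleapis.com/v1beta/openai/",
#         api_key=os.getenv("GEMINI_API_KEY"),
#     )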

class MyAgent:
    """CodeAgent with DuckDuckGo web search, backed by the Gemini model above."""

    def __init__(self):
        self.model = GeminiFlashModel()
        self.agent = CodeAgent(tools=[DuckDuckGoSearchTool()], model=self.model)

    def __call__(self, question: str) -> str:
        return self.agent.run(question)
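
# Quick local smoke test (hypothetical question, not part of the graded set):
#
#     agent = MyAgent()
#     print(agent("In what year did the first moon landing take place?"))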

def run_and_submit_all(profile: gr.OAuthProfile | None):
    space_id = os.getenv("SPACE_ID")

    if profile:
        username = profile.username
        print(f"User logged in: {username}")
    else:
        print("User not logged in.")
        return "Please login to Hugging Face.", None

    questions_url = f"{DEFAULT_API_URL}/questions"
    submit_url = f"{DEFAULT_API_URL}/submit"

    # Instantiate the agent.
    try:
        agent = MyAgent()
    except Exception as e:
        return f"Error initializing agent: {e}", None

    # Fetch the evaluation questions.
    try:
        response = requests.get(questions_url, timeout=15)
        response.raise_for_status()
        questions_data = response.json()
    except Exception as e:
        return f"Error fetching questions: {e}", None

    # Run the agent on every question, logging each answer (or error).
    results_log = []
    answers_payload = []
    for item in questions_data:
        task_id = item.get("task_id")
        question_text = item.get("question")
        if not task_id or question_text is None:
            continue
        try:
            submitted_answer = agent(question_text)
            answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
            results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
        except Exception as e:
            results_log.append({
                "Task ID": task_id,
                "Question": question_text,
                "Submitted Answer": f"AGENT ERROR: {e}"
            })

    if not answers_payload:
        return "Agent did not return any answers.", pd.DataFrame(results_log)

    # Submit all answers in a single request.
    submission_data = {
        "username": profile.username.strip(),
        "agent_code": f"https://huggingface.co/spaces/{space_id}/tree/main",
        "answers": answers_payload
    }
    try:
        response = requests.post(submit_url, json=submission_data, timeout=60)
        response.raise_for_status()
        result_data = response.json()
        final_status = (
            f"Submission Successful!\n"
            f"User: {result_data.get('username')}\n"
            f"Score: {result_data.get('score', 'N/A')}% "
            f"({result_data.get('correct_count', '?')}/{result_data.get('total_attempted', '?')} correct)\n"
            f"Message: {result_data.get('message', 'No message received.')}"
        )
        return final_status, pd.DataFrame(results_log)
    except Exception as e:
        return f"Submission failed: {e}", pd.DataFrame(results_log)

with gr.Blocks() as demo:
    gr.Markdown("# Basic Agent Evaluation Runner")
    gr.Markdown("""
    **Instructions:**
    1. Clone this space and configure your Gemini API key.
    2. Log in to Hugging Face.
    3. Run your agent on evaluation tasks and submit answers.
    """)

    gr.LoginButton()
    run_button = gr.Button("Run Evaluation & Submit All Answers")
    status_output = gr.Textbox(label="Submission Result", lines=5, interactive=False)
    results_table = gr.DataFrame(label="Results", wrap=True)

    run_button.click(fn=run_and_submit_all, outputs=[status_output, results_table])

if __name__ == "__main__":
    print("🔧 App starting...")
    demo.launch(debug=True, share=False)