# app.py  –  async + progress, no cache logic
# ------------------------------------------------
import os, asyncio, concurrent.futures, functools
import gradio as gr, requests, pandas as pd
from langchain_core.messages import HumanMessage
from agent import agent_executor     # your LangGraph agent


DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
MAX_PAR_TASKS   = 5                  # how many questions run in parallel


# ------------------------------------------------------------------
# Synchronous wrapper around the agent
# ------------------------------------------------------------------
def run_agent_sync(task_id: str, question: str) -> str:
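    # agent_executor.invoke blocks until the agent finishes, so this plain
    # synchronous function is what each thread-pool worker runs per question.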
    llm_input = {
        "messages": [HumanMessage(content=question)],
        "task_id":  task_id,
    }
    try:
        result = agent_executor.invoke(llm_input)
        return result["messages"][-1].content.strip()
    except Exception as e:
        return f"AGENT ERROR: {e}"


async def run_agent_async(executor, task_id: str, question: str) -> str:
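    # Hand the blocking wrapper off to the shared ThreadPoolExecutor so several
    # questions can be in flight at the same time.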
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        executor, functools.partial(run_agent_sync, task_id, question)
    )


# ------------------------------------------------------------------
# Main callback (async)  –  fetches the questions, processes them in parallel
# ------------------------------------------------------------------
async def run_and_submit_all(profile: gr.OAuthProfile | None, progress=gr.Progress()):
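    # Gradio passes the OAuth profile for parameters annotated with gr.OAuthProfile
    # (None if not logged in) and injects a live tracker for progress=gr.Progress().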
    if not profile:
        return "Please login with your HF account.", None
    username = profile.username

    # 1) Fetch the GAIA questions
    q_url = f"{DEFAULT_API_URL}/questions"
    try:
        resp = requests.get(q_url, timeout=15)
        resp.raise_for_status()
        q_data = resp.json()
    except Exception as e:
        return f"Error fetching questions: {e}", None

    progress(0, desc=f"Fetched {len(q_data)} questions – processing …")

    # 2) Run the questions in parallel
    # asyncio.as_completed yields results in completion order, not in the order
    # of q_data, so each task returns its question item alongside the answer.
    answers, log_rows = [], []
    with concurrent.futures.ThreadPoolExecutor(max_workers=MAX_PAR_TASKS) as ex:

        async def run_one(item: dict) -> tuple[dict, str]:
            answer = await run_agent_async(ex, item["task_id"], item["question"])
            return item, answer

        tasks = [run_one(itm) for itm in q_data]
        for i, coro in enumerate(asyncio.as_completed(tasks), 1):
            item, answer = await coro

            answers.append({"task_id": item["task_id"], "submitted_answer": answer})
            log_rows.append({"Task ID": item["task_id"], "Question": item["question"], "Answer": answer})

            progress(i / len(q_data), desc=f"{i}/{len(q_data)} done")

    # 3) Submit the answers
    submit_url = f"{DEFAULT_API_URL}/submit"
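    # SPACE_ID is set automatically by Hugging Face Spaces; agent_code links the
    # submission back to this Space's source tree.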
    payload = {
        "username": username,
        "agent_code": f"https://huggingface.co/spaces/{os.getenv('SPACE_ID')}/tree/main",
        "answers": answers,
    }
    try:
        resp = requests.post(submit_url, json=payload, timeout=60)
        resp.raise_for_status()
        res = resp.json()
        status = (
            f"Submission OK – Score: {res.get('score','?')} % "
            f"({res.get('correct_count','?')}/{res.get('total_attempted','?')})"
        )
    except Exception as e:
        status = f"Submission failed: {e}"

    return status, pd.DataFrame(log_rows)


# ------------------------------------------------------------------
# Gradio UI
# ------------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("# Fast GAIA Agent Runner  (Async + Progress)")
    gr.LoginButton()
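    # Hugging Face OAuth login; run_and_submit_all needs the resulting profile.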
    run_btn = gr.Button("Run & Submit")

    out_status = gr.Textbox(label="Status / Score", lines=3, interactive=False)
    out_table  = gr.DataFrame(label="Answers", wrap=True)

    run_btn.click(run_and_submit_all, outputs=[out_status, out_table])
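    # `profile` and `progress` are not listed as inputs: Gradio fills them in
    # automatically from the function signature.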

if __name__ == "__main__":
    demo.launch(debug=True, share=False)