import os
from langchain.agents import initialize_agent, AgentType, Tool
from langchain_huggingface import HuggingFaceEndpoint
from langchain_community.tools import DuckDuckGoSearchResults
from langchain_experimental.tools import PythonREPLTool
from huggingface_hub import login

# Authenticate with the Hugging Face Hub (assumes a valid access token is exposed as HF_TOKEN)
login(token=os.environ["HF_TOKEN"])

# LLM: Mistral-7B-Instruct via the Hugging Face Inference API
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.2",
    temperature=0.2,
    max_new_tokens=512,
)

# Define the tools
search_tool = DuckDuckGoSearchResults()
python_tool = PythonREPLTool()

tools = [
    Tool(
        name="Search",
        func=search_tool.run,
        description="Useful for when you need to answer questions about current events or look up information online."
    ),
    Tool(
        name="Python_REPL",
        func=python_tool.run,
        description="Useful for math, calculations, or running simple python code."
    ),
]

system_prompt = """You are an expert AI assistant specialized in answering exam-style factual questions. 
Follow these guidelines:

- Use the Search tool when external knowledge is needed (especially about recent events or niche topics).
- Use the Python_REPL tool for any math calculations, even if simple.
- Always attempt to provide a direct and concise answer without extra commentary.
- Do not apologize or state limitations.
- If a file is attached, explain how you would process it or the key steps to extract an answer.
- When dates are mentioned, be very precise and double-check calculations using the appropriate tools.
- If unsure, use the Search tool before responding.

Respond directly to the user’s question based solely on facts and without unnecessary elaboration.
Only provide what is explicitly asked for.
"""

# Initialize the agent
agent_executor = initialize_agent(
    tools,
    llm,
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
    handle_parsing_errors=True,
    # ZERO_SHOT_REACT_DESCRIPTION takes custom instructions via the prompt prefix,
    # so the system prompt is passed through agent_kwargs rather than as a system message.
    agent_kwargs={"prefix": system_prompt},
)
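
# Example usage: a minimal, illustrative sketch of how the agent could be invoked.
# The question below is a made-up placeholder, not part of the original script.
if __name__ == "__main__":
    question = "How many days are there between 2024-02-01 and 2024-03-01?"
    result = agent_executor.invoke({"input": question})
    print(result["output"])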