Create basic_agent

basic_agent  ADDED  +25 -0

@@ -0,0 +1,25 @@
+from llama_index.core.agent.workflow import AgentWorkflow
+from llama_index.core.workflow import Context
+from llama_index.core.tools import FunctionTool
+from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
+from llama_index.tools.duckduckgo import DuckDuckGoSearchToolSpec
+
+class BasicAgent:
+    def __init__(self):
+        llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")
+
+        # Initialize tools
+        tool_spec = DuckDuckGoSearchToolSpec()
+        search_tool = FunctionTool.from_defaults(tool_spec.duckduckgo_full_search)
+
+        # Create the agent with all the tools
+        self.agent = AgentWorkflow.from_tools_or_functions(
+            [search_tool],
+            llm=llm
+        )
+
+        # self.ctx = Context(self.agent)
+
+    async def __call__(self, question: str) -> str:
+        response = await self.agent.run(user_msg=question)  # ctx=self.ctx)
+        return response.response.content
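
Note that __call__ is a coroutine (it awaits agent.run), so callers have to await it or drive it with asyncio.run. A minimal usage sketch, assuming a Hugging Face token is available in the environment (e.g. HF_TOKEN) so HuggingFaceInferenceAPI can authenticate; the question string is only a placeholder:

import asyncio

agent = BasicAgent()
# Drive the async __call__ from synchronous code; swap in a real question.
answer = asyncio.run(agent("What is the capital of France?"))
print(answer)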