Martin Bär committed
Commit c97999e · Parent: ae6abab

Implement basic_agent with llama-index

Files changed (5)
  1. .gitignore +2 -0
  2. app.py +8 -0
  3. basic_agent.py +30 -13
  4. requirements.txt +5 -1
  5. web_tools.py +0 -0
.gitignore ADDED
@@ -0,0 +1,2 @@
+.venv
+tool_tests.ipynb
app.py CHANGED
@@ -4,12 +4,20 @@ import requests
 import inspect
 import pandas as pd
 
+from llama_index.embeddings.huggingface import HuggingFaceEmbedding
+from llama_index.core import Settings
+
 from basic_agent import BasicAgent
 
 # (Keep Constants as is)
 # --- Constants ---
 DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 
+Settings.llm = None  # disable LLM for index retrieval
+Settings.chunk_size = 512  # smaller chunk size for retrieval
+
+Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
+
 def run_and_submit_all( profile: gr.OAuthProfile | None):
     """
     Fetches all questions, runs the BasicAgent on them, submits all answers,
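
The `Settings` assignments above only take effect once an index is actually built downstream. A minimal sketch of the retrieval side that would pick them up (the document text and query below are placeholders, not part of this commit):

    # Sketch: the global Settings singleton supplies the embed model and chunk
    # size to any index constructed afterwards; no LLM is needed for retrieval.
    from llama_index.core import Document, Settings, VectorStoreIndex
    from llama_index.embeddings.huggingface import HuggingFaceEmbedding

    Settings.llm = None  # retrieval only, no answer synthesis
    Settings.chunk_size = 512
    Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")

    # Placeholder document standing in for real tool output.
    index = VectorStoreIndex.from_documents([Document(text="Example page content ...")])
    retriever = index.as_retriever(similarity_top_k=2)
    for node in retriever.retrieve("example query"):
        print(node.get_content())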
basic_agent.py CHANGED
@@ -3,23 +3,40 @@ from llama_index.core.workflow import Context
 from llama_index.core.tools import FunctionTool
 from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
 from llama_index.tools.duckduckgo import DuckDuckGoSearchToolSpec
+from llama_index.tools.wikipedia import WikipediaToolSpec
+from llama_index.tools.tool_spec.load_and_search.base import LoadAndSearchToolSpec
+from llama_index.readers.web import SimpleWebPageReader
+from llama_index.core.tools.ondemand_loader_tool import OnDemandLoaderTool
 
 class BasicAgent:
     def __init__(self):
-        llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")
+        llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")
 
-        # Initialize tools
-        tool_spec = DuckDuckGoSearchToolSpec()
-        search_tool = FunctionTool.from_defaults(tool_spec.duckduckgo_full_search)
-
-        # Create Alfred with all the tools
-        self.agent = AgentWorkflow.from_tools_or_functions(
-            [search_tool],
-            llm=llm
-        )
+        # Initialize tools
+        tool_spec = DuckDuckGoSearchToolSpec()
+        search_tool = FunctionTool.from_defaults(tool_spec.duckduckgo_full_search)
 
-        # self.ctx = Context(self.agent)
+        wiki_spec = WikipediaToolSpec()
+        wiki_search_tool = wiki_spec.to_tool_list()[1]
+
+        # Convert into a LoadAndSearchToolSpec because the wikipedia search tool returns
+        # entire Wikipedia pages and this can pollute the context window of the LLM
+        wiki_search_tool_las = LoadAndSearchToolSpec.from_defaults(wiki_search_tool).to_tool_list()
+
+        webpage_tool = OnDemandLoaderTool.from_defaults(
+            SimpleWebPageReader(html_to_text=True),
+            name="Webpage search tool",
+            description="A tool for loading the content of a webpage and querying it for information",
+        )
+
+        # Create Alfred with all the tools
+        self.agent = AgentWorkflow.from_tools_or_functions(
+            [search_tool, *wiki_search_tool_las, webpage_tool],
+            llm=llm
+        )
+
+        # self.ctx = Context(self.agent)
 
     async def __call__(self, question: str) -> str:
-        response = await self.agent.run(user_msg=question)  # ctx=self.ctx)
-        return response.response.content
+        response = await self.agent.run(user_msg=question)  # ctx=self.ctx)
+        return response.response.content
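
Because `__call__` is a coroutine, the agent has to be awaited. A quick way to exercise it outside the Gradio app might look like the sketch below (assuming Hugging Face credentials, e.g. `HF_TOKEN`, are available for the Inference API; the question string is just an example):

    # Sketch: run BasicAgent standalone; assumes HF credentials are configured
    # so HuggingFaceInferenceAPI can reach the hosted Qwen model.
    import asyncio

    from basic_agent import BasicAgent

    async def main() -> None:
        agent = BasicAgent()
        answer = await agent("Who wrote the novel 'The Name of the Rose'?")  # example question
        print(answer)

    if __name__ == "__main__":
        asyncio.run(main())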
requirements.txt CHANGED
@@ -2,4 +2,8 @@ gradio
 requests
 llama-index
 llama-index-llms-huggingface-api
-llama_index-tools-duckduckgo
+llama_index-tools-duckduckgo
+llama_index-tools-wikipedia
+llama-index-indices-managed-bge-m3
+llama-index-embeddings-huggingface
+llama-index-readers-web
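
A quick smoke test for the new dependencies, mapping the added packages to the import paths used elsewhere in this commit (run after `pip install -r requirements.txt`; it only checks that the imports resolve):

    # Sketch: verify the tool, reader, and embedding packages installed correctly.
    from llama_index.tools.duckduckgo import DuckDuckGoSearchToolSpec
    from llama_index.tools.wikipedia import WikipediaToolSpec
    from llama_index.readers.web import SimpleWebPageReader
    from llama_index.embeddings.huggingface import HuggingFaceEmbedding

    print("llama-index tool, reader, and embedding packages import cleanly")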
web_tools.py ADDED
File without changes