Spaces:
Sleeping
Sleeping
Use Wikipedia in both en and vi
Browse files- chatbot/core.py +19 -8
chatbot/core.py
CHANGED
@@ -1,20 +1,30 @@
|
|
|
|
|
|
1 |
from chatbot.llm import gemini_llm
|
2 |
-
from chatbot.
|
3 |
-
from chatbot.
|
4 |
-
from chatbot.prompts import chat_prompt
|
5 |
from langchain.chains import ConversationalRetrievalChain
|
6 |
|
7 |
-
|
|
|
|
|
|
|
|
|
8 |
|
9 |
-
|
|
|
|
|
|
|
|
|
|
|
10 |
|
11 |
qa_chain = ConversationalRetrievalChain.from_llm(
|
12 |
llm=gemini_llm,
|
13 |
-
retriever=
|
14 |
memory=memory,
|
15 |
-
return_source_documents=
|
16 |
combine_docs_chain_kwargs={"prompt": chat_prompt},
|
17 |
-
output_key="result"
|
18 |
)
|
19 |
|
20 |
def get_chat_response(user_input: str) -> str:
|
@@ -24,3 +34,4 @@ def get_chat_response(user_input: str) -> str:
|
|
24 |
memory.save_context({"input": user_input}, {"output": response["result"]})
|
25 |
|
26 |
return response["result"]
|
|
|
|
1 |
+
from langchain.retrievers import WikipediaRetriever
|
2 |
+
import wikipedia
|
3 |
from chatbot.llm import gemini_llm
|
4 |
+
from chatbot.memory import memory
|
5 |
+
from chatbot.prompts import chat_prompt
|
|
|
6 |
from langchain.chains import ConversationalRetrievalChain
|
7 |
|
8 |
+
def search_wikipedia(query: str, language: str = "vi"):
    """Fetch Wikipedia documents for *query* in the given language.

    Args:
        query: Free-text search string.
        language: Wikipedia language edition code, "vi" (default) or "en".

    Returns:
        A list of langchain ``Document`` objects relevant to *query*.
    """
    # Keep the module-level setting for any other code that uses the
    # `wikipedia` package directly.
    wikipedia.set_lang(language)
    # BUG FIX: `wikipedia.set_lang` only mutates the `wikipedia` package's
    # global state; langchain's WikipediaRetriever talks to the API itself
    # and ignores that setting. The language must be passed to the
    # retriever explicitly via its `lang` parameter — otherwise every
    # search hit the default (English) wiki regardless of `language`.
    retriever = WikipediaRetriever(lang=language)
    return retriever.get_relevant_documents(query)
|
13 |
|
14 |
+
def get_retriever(user_input: str):
    """Route *user_input* to the English or Vietnamese Wikipedia search.

    Heuristic: input that is entirely ASCII is treated as English; any
    non-ASCII character (e.g. Vietnamese diacritics such as "ă", "ề")
    selects Vietnamese.

    Args:
        user_input: The raw user query.

    Returns:
        The documents returned by ``search_wikipedia`` for the chosen
        language.
    """
    # BUG FIX: the original used `any(char.isascii() for char in user_input)`,
    # which is true for virtually every string (spaces and unaccented
    # letters are ASCII), so Vietnamese queries were almost always routed
    # to English. Requiring the WHOLE string to be ASCII matches the
    # stated intent; empty input falls through to the "vi" default.
    if user_input and user_input.isascii():
        return search_wikipedia(user_input, language="en")
    return search_wikipedia(user_input, language="vi")
|
20 |
|
21 |
# Conversational QA chain: Gemini LLM + Wikipedia retrieval + shared memory.
# NOTE(review): `ConversationalRetrievalChain.from_llm` expects a
# `BaseRetriever` instance for `retriever=`, but a plain function
# (`get_retriever`) is passed here — it is never called by the chain and
# will likely fail validation or retrieval at runtime. Confirm against the
# installed langchain version; a `WikipediaRetriever` instance (or a small
# BaseRetriever subclass doing the language routing) is probably needed.
qa_chain = ConversationalRetrievalChain.from_llm(
    llm=gemini_llm,
    retriever=get_retriever,  # Dynamic Wikipedia search
    memory=memory,
    # Sources are dropped; only the answer text is surfaced to callers.
    return_source_documents=False,
    combine_docs_chain_kwargs={"prompt": chat_prompt},
    # Callers (get_chat_response) read the answer from response["result"].
    output_key="result"
)
|
29 |
|
30 |
def get_chat_response(user_input: str) -> str:
|
|
|
34 |
memory.save_context({"input": user_input}, {"output": response["result"]})
|
35 |
|
36 |
return response["result"]
|
37 |
+
|