Spaces:
Sleeping
Sleeping
File size: 3,963 Bytes
6a383c4 f09971c 3c5f44b b1c1560 bbd820c 3c5f44b 6a383c4 bbd820c b3960d9 bbd820c b3960d9 bbd820c b3960d9 bbd820c b3960d9 bbd820c b3960d9 bbd820c 6a383c4 bbd820c f09971c a0c0e21 b1c1560 6a383c4 b1c1560 6a383c4 b1c1560 6a383c4 bbd820c a0c0e21 b1c1560 a0c0e21 6a383c4 3c5f44b 6a383c4 3c5f44b 5b97f9e 3c5f44b 5b97f9e 3c5f44b bbd820c 3c5f44b 6a383c4 3c5f44b 6a383c4 bbd820c 3c5f44b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 |
from chatbot.llm import gemini_llm # Import Gemini LLM
from chatbot.memory import memory
from chatbot.prompts import chat_prompt
from langchain.retrievers import WikipediaRetriever
from langchain.chains import ConversationalRetrievalChain
from pydantic import Field
from typing import List, Callable
from langchain.schema import BaseRetriever, Document
from langchain.schema import HumanMessage, AIMessage
def translate_to_english(text: str) -> str:
    """Extract an English Wikipedia search keyword from *text* using Gemini.

    The last few chat messages are appended as context so the model can
    resolve pronouns and references (e.g. "you" → Kumiko) before picking
    a keyword.

    Args:
        text: The user's query, possibly in a non-English language.

    Returns:
        A single keyword string suitable for a Wikipedia lookup.
    """
    recent_messages = memory.chat_memory.messages[-3:]  # take the 3 most recent messages
    recent_context = "\n".join(msg.content for msg in recent_messages)
    prompt = f"""
You are an assistant for Wikipedia searches.
The query may be in any language.
Extract and return only the most relevant keyword (e.g. a person's name, city, or key term) in English/international form.
Consider the recent conversation context to disambiguate references.
For query about what happened to you, interpret 'you' as Kumiko, based on your knowledge of the Hibike! Euphonium plot, return one of the following keywords
- Liz and the Blue Bird (Regarding information about Nozomi and Mizore)
- Sound! Euphonium: The Movie – Welcome to the Kitauji High School Concert Band (Information about Kumiko and Reina)
- Sound! Euphonium: The Movie – May the Melody Reach You! (Information about Asuka and Mamiko)
- Sound! Euphonium: The Movie – Our Promise: A Brand New Day (Information about Kumiko's second year)
- List of Sound! Euphonium episodes (other informations)
Recent Context:
{recent_context}
Query:
{text}
Return only the keyword—no explanations.
"""
    response = gemini_llm.invoke(prompt)  # Invoke Gemini for keyword extraction
    # Chat models return a message object with a `.content` attribute, while
    # plain LLMs return a str. Normalize to a clean string either way so the
    # declared `-> str` contract holds and stray whitespace/newlines from the
    # model output don't pollute the Wikipedia query.
    keyword = getattr(response, "content", response)
    return keyword.strip()
class WikipediaTranslationRetriever(BaseRetriever):
    """Retriever that normalizes queries to an English keyword before
    delegating to a Wikipedia retriever."""

    # The underlying Wikipedia retriever that performs the actual lookup.
    retriever: WikipediaRetriever = Field(..., description="The underlying Wikipedia retriever")
    # Callable that maps an arbitrary-language query to an English keyword.
    translator: Callable[[str], str] = Field(..., description="Function to translate queries to English")

    def get_relevant_documents(self, query: str) -> List[Document]:
        """Translate *query* to English, then fetch matching Wikipedia docs."""
        translated_query = self.translator(query)
        print(f"🔄 Translated Query: {translated_query}")
        return self.retriever.get_relevant_documents(translated_query)

    async def aget_relevant_documents(self, query: str) -> List[Document]:
        """Async entry point required by BaseRetriever.

        Previously this raised NotImplementedError, which broke any async
        use of the chain. The Wikipedia lookup is blocking anyway, so simply
        delegate to the synchronous implementation.
        """
        return self.get_relevant_documents(query)
def custom_get_chat_history(chat_history):
    """Normalize chat history into a single string.

    Accepts either an already-summarized history (a plain string, returned
    unchanged) or a list of message objects, whose ``content`` fields are
    joined with newlines.

    Raises:
        ValueError: If *chat_history* is neither a string nor a list.
    """
    if isinstance(chat_history, str):
        # Already a summary string — pass it through untouched.
        return chat_history
    if isinstance(chat_history, list):
        # List of message objects — flatten their contents into one string.
        lines = [message.content for message in chat_history]
        return "\n".join(lines)
    raise ValueError("Unsupported chat history format.")
# Retriever instance used by the QA chain: translates each query to an
# English keyword before the Wikipedia lookup.
retriever = WikipediaTranslationRetriever(
    retriever=WikipediaRetriever(),
    translator=translate_to_english,
)

# ✅ Conversational retrieval chain over Wikipedia, backed by Gemini.
# `get_chat_history` is supplied at construction time (from_llm forwards
# extra kwargs to the chain constructor) instead of being monkey-patched
# onto the pydantic model afterwards, so the chain is fully configured in
# one step.
qa_chain = ConversationalRetrievalChain.from_llm(
    llm=gemini_llm,
    retriever=retriever,
    memory=memory,
    return_source_documents=False,
    combine_docs_chain_kwargs={"prompt": chat_prompt},
    output_key="result",
    get_chat_history=custom_get_chat_history,
)
def get_chat_response(user_input: str) -> str:
    """Process user input and return a chat response using Wikipedia retrieval.

    Args:
        user_input: The raw user query (any language).

    Returns:
        The chain's answer text (stored under the chain's ``output_key``,
        i.e. ``"result"``).
    """
    # Pass the question under its explicit input key rather than as a bare
    # string, so the chain does not have to infer its single input key from
    # the memory configuration.
    response = qa_chain({"question": user_input})
    # Conversation context is saved by the chain itself via `memory=`,
    # so no manual message appends are needed here.
    return response["result"]
|