update(chatbot): add rate limiting to history of 20

Commit 8b59f5d · 1 parent: 0137b9f
ray committed

chatbot.py: +3 -16
chatbot.py CHANGED

@@ -117,6 +117,9 @@ class Chatbot:
     def stream_chat(self, message, history):
         self.logger.info(history)
         self.logger.info(self.convert_to_chat_messages(history))
+        if len(history) > 20:
+            yield "Thank you for using AweSumCare. I'm sorry I can't answer your question now, but I'm still learning. Please try to ask me something else."
+            return
         response = self.chat_engine.stream_chat(
             message, chat_history=self.convert_to_chat_messages(history)
         )

@@ -126,15 +129,6 @@
             partial_message += token
             yield partial_message

-            # urls = [source.node.metadata.get(
-            #     "file_name") for source in response.source_nodes if source.score >= 0.78 and source.node.metadata.get("file_name")]
-            # if urls:
-            #     urls = list(set(urls))
-            #     url_section = "\n \n\n---\n\n參考: \n" + \
-            #         "\n".join(f"- {url}" for url in urls)
-            #     partial_message += url_section
-            #     yield partial_message
-
     def convert_to_chat_messages(self, history: List[List[str]]) -> List[ChatMessage]:
         chat_messages = [ChatMessage(
             role=MessageRole.SYSTEM, content=self.SYSTEM_PROMPT)]

@@ -173,13 +167,6 @@
         for part in stream:
             partial_message += part.choices[0].delta.content or ""
             yield partial_message
-            # yield part.choices[0].delta.content or ""
-        # partial_message = ""
-        # for chunk in response:
-        #     if len(chunk["choices"][0]["delta"]) != 0:
-        #         partial_message = partial_message + \
-        #             chunk["choices"][0]["delta"]["content"]
-        #         yield partial_message

     # For 'With Prompt Wrapper' - Add system prompt, no Pinecone
     def predict_with_prompt_wrapper(self, message, history):
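The third hunk also drops commented-out remnants of the pre-1.0 openai client, which exposed stream chunks as dicts (chunk["choices"][0]["delta"]["content"]); the loop that survives uses the 1.x attribute style. A hedged sketch of that loop with the openai>=1.0 client (the model name is a placeholder, not taken from the repo):

from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

def stream_completion(message: str):
    stream = client.chat.completions.create(
        model="gpt-3.5-turbo",  # placeholder model name
        messages=[{"role": "user", "content": message}],
        stream=True,
    )
    partial_message = ""
    for part in stream:
        # delta.content is None on some chunks (e.g. the final one), hence `or ""`.
        partial_message += part.choices[0].delta.content or ""
        yield partial_message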
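Only the signature and system-prompt seed of convert_to_chat_messages appear in the diff, but its shape is conventional: prepend a SYSTEM message, then map each [user, assistant] pair to USER and ASSISTANT messages. The sketch below is an assumption built on dataclass stand-ins, since the import path of llama-index's real ChatMessage and MessageRole varies across versions:

from dataclasses import dataclass
from enum import Enum
from typing import List

class MessageRole(str, Enum):  # stand-in for llama-index's MessageRole
    SYSTEM = "system"
    USER = "user"
    ASSISTANT = "assistant"

@dataclass
class ChatMessage:  # stand-in for llama-index's ChatMessage
    role: MessageRole
    content: str

SYSTEM_PROMPT = "You are a helpful assistant."  # placeholder prompt

def convert_to_chat_messages(history: List[List[str]]) -> List[ChatMessage]:
    chat_messages = [ChatMessage(role=MessageRole.SYSTEM, content=SYSTEM_PROMPT)]
    # Assumes complete pairs; Gradio passes the history from before the current turn.
    for user_message, assistant_reply in history:
        chat_messages.append(ChatMessage(role=MessageRole.USER, content=user_message))
        chat_messages.append(ChatMessage(role=MessageRole.ASSISTANT, content=assistant_reply))
    return chat_messages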