Fix serialization error and update chatbot type to "messages"
app.py CHANGED
@@ -97,7 +97,7 @@ async def check_safety(message: str, metadata: dict) -> dict:
             "app": "slmdr",
             "app_environment": "stable",
             "chat_model_id": model_args["model"],
-            "mistral_results": mistral_results,
+            "mistral_results": json.loads(json.dumps(mistral_results, default=str)),
         } | metadata,
         "detection_config": {
             "safety": True,
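Note on the serialization fix: mistral_results can contain values the JSON encoder rejects (the commit does not say which; datetimes or client response objects are typical culprits). Round-tripping through json.dumps(..., default=str) coerces any such value to a string and returns plain dicts and lists. A minimal sketch of the trick, with an illustrative payload rather than a real moderation result:

import json
from datetime import datetime

# Illustrative payload only; in app.py, mistral_results comes from the moderation call.
raw = {"checked_at": datetime(2025, 1, 1, 12, 0), "category": "safe"}

# json.dumps(default=str) stringifies anything the encoder cannot handle;
# json.loads turns the result back into plain dicts/lists that are safe to
# embed in another JSON payload.
safe = json.loads(json.dumps(raw, default=str))
print(safe)  # {'checked_at': '2025-01-01 12:00:00', 'category': 'safe'}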
@@ -122,15 +122,7 @@ async def bot_response(message, chat_history, system_prompt, selected_model):
         client.base_url = CHATBOT_MODELS[selected_model]["base_url"]
         model_args["model"] = CHATBOT_MODELS[selected_model]["model_path"]
 
-        messages = [{"role": "system", "content": system_prompt}]
-
-        for user_msg, assistant_msg in chat_history[:-1]:
-            messages.extend([
-                {"role": "user", "content": user_msg},
-                {"role": "assistant", "content": assistant_msg}
-            ])
-
-        messages.append({"role": "user", "content": message})
+        messages = [{"role": "system", "content": system_prompt}] + chat_history + [{"role": "user", "content": message}]
 
         stream = await client.chat.completions.create(
             **model_args,
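Because the Gradio history now already stores OpenAI-style role/content dicts, the old tuple-unpacking loop is unnecessary and prompt assembly collapses to a single list concatenation. A small sketch with made-up history values:

# Toy values; in the app, chat_history arrives from the Chatbot component
# already in this role/content format.
system_prompt = "You are a helpful assistant."
chat_history = [
    {"role": "user", "content": "Hola"},
    {"role": "assistant", "content": "Hello! How can I help?"},
]
message = "Tell me about Salamandra."

messages = [{"role": "system", "content": system_prompt}] + chat_history + [{"role": "user", "content": message}]
assert [m["role"] for m in messages] == ["system", "user", "assistant", "user"]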
@@ -138,35 +130,23 @@ async def bot_response(message, chat_history, system_prompt, selected_model):
         )
 
         full_response = ""
-
-
-        new_history = chat_history.copy()
+        new_history = chat_history + [{"role": "user", "content": message}, {"role": "assistant", "content": ""}]
 
         async for chunk in stream:
             if chunk.choices[0].delta.content is not None:
                 content_delta = chunk.choices[0].delta.content
                 full_response += content_delta
-
-                new_history[-1][1] = full_response
+                new_history[-1]["content"] = full_response
                 yield new_history, ""
 
-        messages.append(
-            {
-                "role": "assistant",
-                "content": full_response
-            }
-        )
-        metadata = {
-            "messages": messages
-        }
+        metadata = {"messages": messages + [{"role": "assistant", "content": full_response}]}
         safety_results = await check_safety(full_response, metadata)
 
         yield new_history, safety_results
 
     except Exception as e:
         error_message = f"Error occurred: {str(e)}"
-        new_history = chat_history
-        new_history[-1][1] = error_message
+        new_history = chat_history + [{"role": "user", "content": message}, {"role": "assistant", "content": error_message}]
         yield new_history, ""
 
 
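The streaming loop now mutates the last dict's "content" key (previously new_history[-1][1] on a [user, bot] pair) and yields a snapshot per chunk so the UI re-renders incrementally. A toy reproduction of that pattern, with fake_stream standing in for the OpenAI streaming client:

import asyncio

# fake_stream replaces the OpenAI client purely for demonstration; each item
# plays the role of chunk.choices[0].delta.content.
async def fake_stream():
    for piece in ["Sala", "mandra ", "says hi."]:
        yield piece

async def demo():
    new_history = [{"role": "user", "content": "Hi"},
                   {"role": "assistant", "content": ""}]
    full_response = ""
    async for content_delta in fake_stream():
        full_response += content_delta
        # Mutate the last message and hand out a snapshot, as the loop above does.
        new_history[-1]["content"] = full_response
        print(new_history[-1])

asyncio.run(demo())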
@@ -194,7 +174,7 @@ with gr.Blocks(title="🦎 Salamandra & Oranguten") as demo:
 
 
         with gr.Column(scale=3):
-            chatbot = gr.Chatbot(height=450)
+            chatbot = gr.Chatbot(height=450, type="messages")
             msg = gr.Textbox(placeholder="Type your message here...", label="Your message")
 
             with gr.Row():
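type="messages" switches gr.Chatbot from the legacy (user, bot) tuple format to lists of role/content dicts, which is what makes the simplified history handling above possible (assuming Gradio 4.x or later, where this parameter exists). A minimal sketch:

import gradio as gr

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(
        type="messages",  # expect role/content dicts, not (user, bot) tuples
        value=[
            {"role": "user", "content": "Hi"},
            {"role": "assistant", "content": "Hello!"},
        ],
    )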
@@ -210,7 +190,7 @@ with gr.Blocks(title="🦎 Salamandra & Oranguten") as demo:
             if not message:
                 return "", chat_history
             # Add user message to chat history immediately with an empty assistant message
-            return "", chat_history + [[message, ""]]
+            return "", chat_history + [{"role": "user", "content": message}]
 
         def load_example_prompt(example_name):
             prompt = EXAMPLE_PROMPTS.get(example_name, EXAMPLE_PROMPTS["Default"])
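For reference, the submit handler's contract after this change, reconstructed as a standalone sketch (the function name is assumed; only its body appears in the diff): clear the textbox and append just the user turn, leaving the assistant placeholder to bot_response.

# Name add_user_message is assumed; only the body appears in the diff.
def add_user_message(message, chat_history):
    if not message:
        return "", chat_history
    # Clear the textbox and append the user turn; bot_response appends the
    # assistant placeholder when streaming starts.
    return "", chat_history + [{"role": "user", "content": message}]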
@@ -243,7 +223,6 @@ with gr.Blocks(title="🦎 Salamandra & Oranguten") as demo:
             model_selector,
             current_model
         )
-
         new_chat.click(
             lambda: ([], EXAMPLE_PROMPTS["Default"], EXAMPLE_PROMPTS["Default"], "Default", "Salamandra", ""),
             None,