Spaces:
Runtime error
Update app.py
app.py CHANGED
@@ -87,10 +87,10 @@ def chat_model(message, history, temperature=0.7, top_p=0.9):
 
     # Format chat history for the model
     formatted_history = []
-    for …
-        formatted_history.append({"role": "user", "content": …
-        if …
-            formatted_history.append({"role": "assistant", "content": …
+    for h in history:
+        formatted_history.append({"role": "user", "content": h["content"] if h["role"] == "user" else ""})
+        if h["role"] == "assistant":
+            formatted_history.append({"role": "assistant", "content": h["content"]})
 
     # Add the current message
     formatted_history.append({"role": "user", "content": message})
@@ -217,31 +217,37 @@ css = """
 with gr.Blocks(theme=theme, title="Stanford Soft Raccoon Chat", css=css) as demo:
     with gr.Row():
         with gr.Column(scale=3):
-            # …
-            chatbot = gr. …
+            # Create the chatbot component directly
+            chatbot = gr.Chatbot(
+                label="Soft Raccoon Chat",
+                avatar_images=(None, "🌲"), # Stanford tree emoji
+                height=600,
+                placeholder="<strong>Soft Raccoon AI Assistant</strong><br>Ask me anything!"
+            )
+
+            # Create sliders for temperature and top_p
+            with gr.Accordion("Generation Parameters", open=False):
+                temperature = gr.Slider(
+                    minimum=0.1,
+                    maximum=2.0,
+                    value=0.7,
+                    step=0.1,
+                    label="Temperature"
+                )
+                top_p = gr.Slider(
+                    minimum=0.1,
+                    maximum=1.0,
+                    value=0.9,
+                    step=0.05,
+                    label="Top-P"
+                )
+
+            # Create the ChatInterface
+            chat_interface = gr.ChatInterface(
                 fn=chat_model,
-                chatbot= …
-                    …
-                    …
-                    height=600,
-                    placeholder="<strong>Soft Raccoon AI Assistant</strong><br>Ask me anything!"
-                ),
-                additional_inputs=[
-                    gr.Slider(
-                        minimum=0.1,
-                        maximum=2.0,
-                        value=0.7,
-                        step=0.1,
-                        label="Temperature"
-                    ),
-                    gr.Slider(
-                        minimum=0.1,
-                        maximum=1.0,
-                        value=0.9,
-                        step=0.05,
-                        label="Top-P"
-                    )
-                ],
+                chatbot=chatbot,
+                additional_inputs=[temperature, top_p],
+                type="messages", # This is important for compatibility
                 title="Stanford Soft Raccoon Chat",
                 description="AI assistant powered by the Soft Raccoon language model",
                 examples=[
@@ -250,9 +256,6 @@ with gr.Blocks(theme=theme, title="Stanford Soft Raccoon Chat", css=css) as demo
                     "What's your favorite book?"
                 ],
                 cache_examples=True,
-                retry_btn="Regenerate",
-                undo_btn="Undo",
-                clear_btn="Clear",
             )
 
         with gr.Column(scale=1):
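
A note on the first hunk: with Gradio's messages format, chat_model receives history as a list of {"role": ..., "content": ...} dicts instead of (user, assistant) tuples. The snippet below is a minimal standalone sketch of that formatting step, not the committed code: it passes each entry through by role, so assistant turns do not produce empty user entries, and the sample history is invented for illustration.

# Sketch: build the model-ready message list from a messages-format Gradio
# history plus the new user message. Sample values are illustrative only.
def format_history(history, message):
    formatted_history = []
    for h in history:
        # Each entry already carries its role and content, so pass it through as-is.
        formatted_history.append({"role": h["role"], "content": h["content"]})
    # Append the current user message at the end.
    formatted_history.append({"role": "user", "content": message})
    return formatted_history

sample_history = [
    {"role": "user", "content": "Hi there!"},
    {"role": "assistant", "content": "Hello! How can I help?"},
]
print(format_history(sample_history, "Tell me about raccoons."))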
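On the second and third hunks: the chatbot is now created as a standalone gr.Chatbot and handed to gr.ChatInterface via chatbot=, the two sliders are wired in through additional_inputs, type="messages" is set explicitly, and the retry_btn, undo_btn, and clear_btn keyword arguments are dropped (recent Gradio releases removed them from ChatInterface, a plausible cause of the Space's runtime error). Below is a simplified, self-contained sketch of the same wiring that skips the app's Blocks/Row/Column layout and lets ChatInterface place the sliders in its own accordion; echo_fn stands in for chat_model, and all labels and values are illustrative.

# Minimal sketch of the customized-chatbot pattern; echo_fn stands in for the real chat_model.
import gradio as gr

def echo_fn(message, history, temperature=0.7, top_p=0.9):
    # Toy handler: echo the message together with the slider values.
    return f"(temperature={temperature}, top_p={top_p}) You said: {message}"

# Components created outside a Blocks context are rendered by ChatInterface itself.
chatbot = gr.Chatbot(type="messages", label="Demo Chat", height=400)
temperature = gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature")
top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.9, step=0.05, label="Top-P")

demo = gr.ChatInterface(
    fn=echo_fn,
    chatbot=chatbot,
    additional_inputs=[temperature, top_p],
    additional_inputs_accordion="Generation Parameters",
    type="messages",
)

if __name__ == "__main__":
    demo.launch()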