Commit acfa594 · 1 Parent(s): 798686c · anonymous

app.py CHANGED
|
@@ -165,31 +165,33 @@ Here is the story:
 
 def send_multiple_selected_story(title, models, system_prompt):
     global model_history
+    global chatbot_aswser_list
     resp_list = []
-
+    print(models)
+    #iterate over words
+    shuffle_models = randomize_key_order(all_models)
+    for index, model in enumerate(shuffle_models):
         if model in models:
-
             #respuesta consulta,
-            resp, context,
-
+            resp, context, _ = send_selected_story(title, model, system_prompt)
+            chatbot_aswser_list[alphabet[index]] = {'response': resp, 'model': model}
             try:
                 print(resp)
-                resp_list.append(gr.Chatbot(value=[resp], type='messages'))
+                resp_list.append(gr.Chatbot(value=[resp], visible=True, type='messages'))
             except gr.exceptions.Error:
                 print(f"error for en modelo {model}")
         else:
             try:
-                resp_list.append(gr.Chatbot(
+                resp_list.append(gr.Chatbot(type='messages', visible=False))
             except gr.exceptions.Error:
                 print(f"error, else en modelo {model}")
 
     try:
-
+        resp_list.insert(0, gr.Chatbot(value=context, type='messages'))
     except gr.exceptions.Error:
         print(f"error en main output\n {context}")
-
-    return
-
+    #return main_output, resp_list[0], resp_list[1], resp_list[2], resp_list[3], models, story,
+    return resp_list
 
 #inputs=[user_input, chatbot_main_output, model_checkbox, chat_radio, assistant_user_input, chatbot_resp[0], chatbot_resp[1], chatbot_resp[2], chatbot_resp[3]],# interaction_count],
 
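The new loop relies on a randomize_key_order helper and an alphabet sequence that are defined elsewhere in app.py and are not part of this commit. A minimal sketch of what such helpers could look like, assuming they only need to shuffle the model names and supply letter labels for the answer windows (this is illustrative, not the Space's actual implementation):

import random

# Illustrative stand-ins; the real helpers in app.py are not shown in this diff.
alphabet = [chr(ord('A') + i) for i in range(26)]

def randomize_key_order(models: dict) -> list:
    """Return the model names in random order so an answer cannot be
    matched to a model by its position on screen."""
    keys = list(models.keys())
    random.shuffle(keys)
    return keys

# Example: {'model-x': ..., 'model-y': ...} might come back as ['model-y', 'model-x'].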
@@ -203,31 +205,44 @@ def remove_metadata(json_array):
     return json_aux
 
 
-
+# dont know the correct model beacuse it shuffles each time
+#selected model it's only the index in radio input
+def multiple_interact(query, history, models, selected_model, assistant_user_input): #, interaction_count)
     print(f'chat_checkbox: {selected_model}')
     resp_list = []
     print(model_history)
+    #quito history ahora es una variable global
 
     if selected_model == "user_input":
         history.append({"role": "assistant", "content": assistant_user_input})
         history.append({"role": "user", "content": query})
 
     else:
-        chats = [chat1, chat2, chat3, chat4]
+        #chats = [chat1, chat2, chat3, chat4]
+        #chatbot_aswser_list
         #get the previous answer of the selected model
-        for model in models:
-            if
-                selected_model_history =
+        for index, model in enumerate(models):
+            if alphabet[index] == selected_model:
+                selected_model_history = chatbot_aswser_list[selected_model]['response']
         print(f"selected_model_history: {selected_model_history}")
-        history.append(selected_model_history
+        history.append(selected_model_history)
         history.append({"role": "user","content": query.strip()})
+
+        #save to csv
+        selected_model_history = {} #reset history
+
+
     aux_history = remove_metadata(history)
     #print(aux_history)
 
-
+
+    #shuffle all models then iterate over them
+    shuffle_models = randomize_key_order(all_models)
+    for index, model in enumerate(shuffle_models):
        if model in models:
            response = interact_groq(aux_history, model).strip()
            resp_list.append(gr.Chatbot(value=[{"role": "assistant", "content": response}], type='messages'))
+           chatbot_aswser_list[alphabet[index]] = {'response': response, 'model': model}
        else:
            resp_list.append(gr.Chatbot(value=None, type='messages', visible=False))
 
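In the rewritten multiple_interact, the radio button carries an anonymised letter rather than a model name, and the previous answer is looked up in chatbot_aswser_list by that letter. A self-contained sketch of that lookup, with made-up entries standing in for real model answers:

# Hypothetical contents of chatbot_aswser_list after one round of answers.
chatbot_aswser_list = {
    "A": {"response": {"role": "assistant", "content": "ending one"}, "model": "model-x"},
    "B": {"response": {"role": "assistant", "content": "ending two"}, "model": "model-y"},
}

def previous_answer(selected_letter):
    # The caller only knows the letter; the underlying model name stays hidden.
    entry = chatbot_aswser_list.get(selected_letter)
    return entry["response"] if entry else None

print(previous_answer("B"))   # {'role': 'assistant', 'content': 'ending two'}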
@@ -235,7 +250,7 @@ def multiple_interact(query, history, models, selected_model, assistant_user_inp
     model_history.append(selected_model)
     print(model_history)
 
-    return
+    return resp_list
 
 
 
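The only change here is that the handler now returns resp_list instead of nothing, which matters because a Gradio event handler is expected to return one value per component listed in its outputs. A minimal sketch of that contract, using throwaway component names rather than the ones in app.py:

import gradio as gr

def fill_boxes(text):
    # Two output components are wired below, so two values come back,
    # in the same order as the outputs list.
    return [text.upper(), text.lower()]

with gr.Blocks() as contract_sketch:
    source = gr.Textbox(label="Source")
    upper_box = gr.Textbox(label="Upper")
    lower_box = gr.Textbox(label="Lower")
    source.change(fn=fill_boxes, inputs=source, outputs=[upper_box, lower_box])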
@@ -250,9 +265,36 @@ def change_textbox(checkbox):
     else:
         return gr.Textbox(value="", visible=False)
 
+def change_checkbox(checkbox):
+    print(f'checkbox: {checkbox}')
+
+    #luego cuando sean variables
+    global active_models
+    active_models = checkbox
+    quant_models = len(checkbox)
+    words = [alphabet[i] for i in range(quant_models)]
+    checkbox = gr.Radio(label="Select Model to respond...", choices=words+["user_input"])
+    #checkbox = gr.Radio(label="Select Model to respond...", choices=checkbox+["user_input"])
+    return checkbox
+
+def change_story(story_title, ret="gradio"):
+    for story in stories:
+        if story["title"] == story_title:
+            if ret== "gradio":
+                return gr.Textbox(label="Selected Story", lines=10, interactive=False, value=story["story"])
+            else: #"string"
+                return story["story"]
+    return gr.Textbox(label="Error", lines=10, interactive=False, value="Story title does not match.")
 
-
+
+
+
+
+chatbot_list = []
 model_list = list(all_models.keys())
+active_models = []
+#chatbot_answer_list['model'] = "respuesta aqui"
+chatbot_aswser_list = {}
 # Create the chat interface using Gradio Blocks
 with gr.Blocks() as demo:
     with gr.Tabs():
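change_checkbox rebuilds the radio choices from however many models are ticked, so the radio shows anonymous letters (A, B, C, ...) plus "user_input" instead of real model names. A small standalone sketch of that idea, assuming a plain list of ticked models (the choice names are placeholders):

import gradio as gr
from string import ascii_uppercase

def letters_for(ticked):
    # One letter per ticked model, plus the manual-answer option.
    choices = list(ascii_uppercase[:len(ticked)]) + ["user_input"]
    # Returning a component instance from the handler updates the wired output.
    return gr.Radio(label="Select Model to respond...", choices=choices)

with gr.Blocks() as radio_sketch:
    ticked_models = gr.CheckboxGroup(choices=["m1", "m2", "m3"], label="Models")
    responder = gr.Radio(label="Select Model to respond...")
    ticked_models.input(fn=letters_for, inputs=ticked_models, outputs=responder)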
@@ -290,29 +332,30 @@ with gr.Blocks() as demo:
         with gr.TabItem("Multiple Evaluation"):
             with gr.Group():
                 #model_dropdown = gr.Dropdown(choices=list(all_models.keys()), label="Select Model", value=default_model_name)
-                model_checkbox = gr.CheckboxGroup(choices=list(all_models.keys()), label="Select Model", value=
+                model_checkbox = gr.CheckboxGroup(choices=list(all_models.keys()), label="Select Model", value=None) #value=[default_model_name])
                 user_dropdown = gr.Dropdown(choices=user_names, label="Select User Name")
-                initial_story = stories[0]["title"] if stories else None
                 story_dropdown = gr.Dropdown(choices=[story["title"] for story in stories], label="Select Story", value=initial_story)
                 system_prompt_dropdown = gr.Dropdown(choices=system_prompts, label="Select System Prompt", value=system_prompts[0])
                 send_multiple_story_button = gr.Button("Send Story")
 
             gr.Markdown("## Chat")
             with gr.Group():
-                selected_story_textbox = gr.Textbox(label="Selected Story", lines=10, interactive=False)
+                selected_story_textbox = gr.Textbox(label="Selected Story", lines=10, interactive=False, value=change_story(initial_story, "string"))
                 #aqui armar una ventana x cada modelo seleccionado
-
+                chatbot_list.append(gr.Chatbot(label="Chat History", type='messages'))
                 with gr.Row():
-
-
-
-
+                    for i, model in enumerate(model_list):
+                        label = f"Model {alphabet[i % len(alphabet)]}"
+                        aux = gr.Chatbot(label=label, visible=False, type='messages')
+                        chatbot_list.append(aux)
 
-                for model in
-
-
+                #for model in model_list:
+                #    aux = gr.Chatbot(label=f"Model {model}", visible=False, type='messages')
+                #    chatbot_list.append(aux)
+
                 user_input = gr.Textbox(placeholder="Type your message here...", label="User Input")
-                chat_radio = gr.Radio(choices=list(model_list)+["user_input"], label="Sent something to continue...", value=[model_list[0]])
+                #chat_radio = gr.Radio(choices=list(model_list)+["user_input"], label="Sent something to continue...", value=[model_list[0]])
+                chat_radio = gr.Radio(label="Select Model to respond...")
                 #elegir respuesta primero, luego enviar mensaje
                 assistant_user_input = gr.Textbox(interactive=True, show_copy_button=True, visible=False)
                 send_multiple_message_button = gr.Button("Send")
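Because Gradio Blocks cannot create components after the interface is built, the layout above pre-creates one hidden gr.Chatbot per entry in model_list and the handlers later toggle visibility. A compact sketch of that fixed-pool pattern under the same assumption (the cap and choice names below are illustrative):

import gradio as gr

MAX_WINDOWS = 4  # illustrative cap; app.py sizes the pool from model_list

def show_selected(ticked):
    # One gr.update per pre-created window: visible for the first len(ticked).
    return [gr.update(visible=(i < len(ticked))) for i in range(MAX_WINDOWS)]

with gr.Blocks() as pool_sketch:
    picks = gr.CheckboxGroup(choices=["m1", "m2", "m3", "m4"], label="Models")
    windows = [gr.Chatbot(visible=False, type="messages") for _ in range(MAX_WINDOWS)]
    picks.input(fn=show_selected, inputs=picks, outputs=windows)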
@@ -335,13 +378,16 @@ with gr.Blocks() as demo:
     #save_button.click(fn=save_comment_score, inputs=[chatbot_output, score_input, comment_input, story_dropdown, user_dropdown, system_prompt_dropdown], outputs=[data_table, comment_input])
 
     chat_radio.change(fn=change_textbox, inputs=chat_radio, outputs=assistant_user_input)
+    #al elegir modelo cambia el chat radio, setea los modelos elegidos
+    model_checkbox.input(fn=change_checkbox, inputs=model_checkbox, outputs=chat_radio)
+    story_dropdown.input(fn=change_story, inputs=[story_dropdown], outputs=selected_story_textbox)
 
     send_multiple_story_button.click(
         fn=send_multiple_selected_story,
         inputs=[story_dropdown, model_checkbox, system_prompt_dropdown],
-        outputs=
+        outputs=chatbot_list,
     )
-
+
     #Tengo que cambiar para que los modelos responan solo las respuestas y no todo el historial
     #preciso las historias previas de cada una
     #el modelo que se haya elegido
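The new wiring uses .input for the checkbox group and the story dropdown while the existing radio keeps .change; in Gradio, .change also fires when a component's value is set from code, whereas .input fires only on direct user interaction, which avoids feedback loops when one callback rewrites another component. A tiny sketch of the distinction (component names are illustrative):

import gradio as gr

def mirror(value):
    return f"user picked: {value}"

with gr.Blocks() as events_sketch:
    choice = gr.Dropdown(choices=["a", "b", "c"], label="Choice")
    log = gr.Textbox(label="Log")
    # .input fires only when the user edits the dropdown themselves;
    # .change (not used here) would also fire on programmatic updates.
    choice.input(fn=mirror, inputs=choice, outputs=log)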
@@ -349,16 +395,16 @@ with gr.Blocks() as demo:
     #luego retorno:
     #en
 
-    send_multiple_message_button.click(
-
-
-
-
+    #send_multiple_message_button.click(
+    #    fn=multiple_interact,
+    #    inputs=[user_input, chatbot_main_output, model_checkbox, chat_radio, assistant_user_input],# interaction_count],
+    #    outputs=[chatbot_list],
+    #    )
 
     #quiza tenga que guardar una variable con los valores de los checkbox
-    save_button_multievaluation.click(
-
-
-
+    #save_button_multievaluation.click(
+    #    fn=save_comment_score,
+    #    inputs=[chatbot_main_output, score_input, comment_input, story_dropdown, user_dropdown, system_prompt_dropdown, model_checkbox],
+    #    outputs=[data_table, comment_input])
 
 demo.launch()
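With the send and save wiring left commented out, demo.launch() is the only live call at the end of this hunk. For reference, launch() accepts the usual Gradio options; the flags below are standard parameters with illustrative values, not settings taken from this Space:

# Standard gr.Blocks.launch() parameters (illustrative values only):
demo.launch(
    server_name="0.0.0.0",  # listen on all interfaces, as hosted Spaces typically do
    server_port=7860,       # Gradio's default port
    show_error=True,        # surface handler exceptions in the UI
)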