Update app.py
app.py CHANGED

@@ -277,11 +277,11 @@ with demo:
             value=eval_dataframe_test, datatype=TYPES, interactive=False,
             column_widths=["20%"]
         )
-    with gr.Tab("Results: Validation"):
-        leaderboard_table_val = gr.components.Dataframe(
-            value=eval_dataframe_val, datatype=TYPES, interactive=False,
-            column_widths=["20%"]
-        )
+    #with gr.Tab("Results: Validation"):
+    #    leaderboard_table_val = gr.components.Dataframe(
+    #        value=eval_dataframe_val, datatype=TYPES, interactive=False,
+    #        column_widths=["20%"]
+    #    )
 
     refresh_button = gr.Button("Refresh")
     refresh_button.click(
@@ -297,7 +297,7 @@ with demo:
     gr.Markdown(SUBMISSION_TEXT, elem_classes="markdown-text")
     with gr.Row():
         with gr.Column():
-            level_of_test = gr.Radio(["
+            level_of_test = gr.Radio(["test"], value="test", label="Split")
             model_name_textbox = gr.Textbox(label="Agent name")
             model_family_textbox = gr.Textbox(label="Model family")
             system_prompt_textbox = gr.Textbox(label="System prompt example")
@@ -310,7 +310,7 @@ with demo:
 
     with gr.Row():
         gr.LoginButton()
-        submit_button = gr.Button("Submit Eval")
+        submit_button = gr.Button("Submit Eval On Test")
     submission_result = gr.Markdown()
     submit_button.click(
         add_new_eval,
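After this change the validation results tab is commented out and submissions are restricted to the test split: the split radio offers only "test" and the submit button is relabeled "Submit Eval On Test". Below is a minimal, runnable sketch of the submission block as it reads after the commit. The component names and labels come from the diff; the stub add_new_eval and the inputs/outputs wiring are assumptions for illustration (the real app passes the full set of textboxes plus login state), and gr.LoginButton() is left out so the sketch runs outside a Space.

import gradio as gr

# Hypothetical stand-in for the real add_new_eval; its true signature is not shown in this diff.
def add_new_eval(split, agent_name):
    return f"Received submission for agent '{agent_name}' on the '{split}' split."

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            # Only the "test" split is selectable after this commit.
            level_of_test = gr.Radio(["test"], value="test", label="Split")
            model_name_textbox = gr.Textbox(label="Agent name")
    with gr.Row():
        # gr.LoginButton() sits here in the real Space.
        submit_button = gr.Button("Submit Eval On Test")
    submission_result = gr.Markdown()
    submit_button.click(
        add_new_eval,
        inputs=[level_of_test, model_name_textbox],  # assumed subset of the real inputs
        outputs=submission_result,
    )

if __name__ == "__main__":
    demo.launch()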