cgeorgiaw (HF Staff) committed
Commit 1ff7bc3
Parent(s): d6d49d1

still trying to make the leaderboard

Files changed (1):
  1. app.py (+7 -8)
app.py CHANGED
@@ -119,17 +119,16 @@ def get_user(profile: gr.OAuthProfile | None) -> str:
     return profile.name
 
 def get_leaderboard(problem_type: str):
-    ds = load_dataset(results_repo, split='train')
-
-    # filtered = ds.filter(lambda x: x["problem_type"] == problem_type and x["evaluated"])
-    # if len(filtered) == 0:
-    #     return pd.DataFrame(columns=["submission_time", "problem_type", "feasibility", "score"])
+    try:
+        ds = load_dataset(results_repo, split='train')
+        df = pd.DataFrame(ds)
+    except Exception:
+        df = pd.DataFrame(columns=["submission_time", "feasibility", "score", "objective", "result_filename", "submission_filename", "minimize_objective", "boundary_json", "evaluated", "user"])
 
-    df = pd.DataFrame(ds)
+
     score_field = "score" if "score" in df.columns else "objective"  # fallback
 
     df = df.sort_values(by=score_field, ascending=True)
-    # leaderboard = df[["submission_time", "problem_type", score_field]].reset_index(drop=True)
     return df
 
 def gradio_interface() -> gr.Blocks:
@@ -142,7 +141,7 @@ def gradio_interface() -> gr.Blocks:
 
         Leaderboard(
             value=leaderboard_df,
-            select_columns=["submission_time", "feasibility", "score", "objective"],
+            select_columns=["submission_time", "feasibility", "score", "objective", "user"],
             search_columns=["submission_time", "score"],
             hide_columns=["result_filename", "submission_filename", "minimize_objective", "boundary_json", "evaluated"],
             # filter_columns=["T", "Precision", "Model Size"],
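Net effect of the first hunk: get_leaderboard now degrades gracefully instead of raising when the results dataset is missing or unreachable. A minimal sketch of the resulting function, assuming results_repo names a Hub dataset with the fallback columns below (the repo id here is a placeholder, not the app's real value):

import pandas as pd
from datasets import load_dataset

results_repo = "org/leaderboard-results"  # placeholder; the app defines this elsewhere

# Schema the empty fallback frame mirrors, per this commit.
FALLBACK_COLUMNS = [
    "submission_time", "feasibility", "score", "objective",
    "result_filename", "submission_filename", "minimize_objective",
    "boundary_json", "evaluated", "user",
]

def get_leaderboard(problem_type: str) -> pd.DataFrame:
    # problem_type is accepted but not yet used; the filtering code was removed above.
    try:
        ds = load_dataset(results_repo, split="train")
        df = ds.to_pandas()  # same result as pd.DataFrame(ds), but explicit
    except Exception:
        # An empty frame with the expected schema keeps sort_values and the UI alive.
        df = pd.DataFrame(columns=FALLBACK_COLUMNS)
    score_field = "score" if "score" in df.columns else "objective"  # fallback
    return df.sort_values(by=score_field, ascending=True)

Because the fallback schema is always present, sort_values never hits a missing column, and the leaderboard simply renders an empty table when loading fails.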
 
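The second hunk only adds the new user column to the column selector. For context, a hedged wiring sketch around the Leaderboard call, assuming the component comes from the gradio_leaderboard package (its import sits outside this diff):

import gradio as gr
from gradio_leaderboard import Leaderboard

leaderboard_df = get_leaderboard("toy_problem")  # hypothetical problem type

with gr.Blocks() as demo:
    Leaderboard(
        value=leaderboard_df,
        select_columns=["submission_time", "feasibility", "score", "objective", "user"],
        search_columns=["submission_time", "score"],
        hide_columns=["result_filename", "submission_filename",
                      "minimize_objective", "boundary_json", "evaluated"],
    )

demo.launch()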