siro1 (HF Staff) committed
Commit 75f9ceb · 1 Parent(s): 6493bef

Feat: stuff works

Files changed (4)
  1. .gitignore +1 -0
  2. app.py +228 -35
  3. src/result.py +3 -0
  4. src/retrieve_data.py +12 -14
.gitignore CHANGED
@@ -5,6 +5,7 @@ build/
 dist/
 wheels/
 *.egg-info
+*.json

 # Virtual environments
 .venv
app.py CHANGED
@@ -7,6 +7,7 @@ from src.retrieve_data import (
     get_gpus_for_leaderboard,
     get_leaderboard_names,
     get_leaderboard_submissions,
+    get_submission_count,
 )

 from src.envs import CACHE_TIMEOUT, BACKGROUND_REFRESH_INTERVAL
@@ -14,7 +15,6 @@ from src.envs import CACHE_TIMEOUT, BACKGROUND_REFRESH_INTERVAL
 # key: func_name:args:kwargs, value: (timestamp, data)
 cache = {}

-
 active_selections = {
     "leaderboard": None,
     "gpu": None,
@@ -26,9 +26,13 @@ asyncio.set_event_loop(loop)
 background_refresh_running = True


-def cached_fetch(func: Callable, *args, force_refresh=False, **kwargs):
+def cached_fetch(
+    func: Callable, *args, force_refresh=False, limit=None, offset=0, **kwargs
+):
     """Fetch data with caching to avoid redundant API calls"""
-    cache_key = f"{func.__name__}:{str(args)}:{str(kwargs)}"
+    cache_key = (
+        f"{func.__name__}:{str(args)}:{str(kwargs)}:limit={limit}:offset={offset}"
+    )

     current_time = time.time()

@@ -37,7 +41,13 @@ def cached_fetch(func: Callable, *args, force_refresh=False, **kwargs):
         if current_time - timestamp < CACHE_TIMEOUT:
             return data

-    result = loop.run_until_complete(func(*args, **kwargs))
+    if func.__name__ == "get_leaderboard_submissions":
+        result = loop.run_until_complete(
+            func(*args, limit=limit, offset=offset, **kwargs)
+        )
+    else:
+        result = loop.run_until_complete(func(*args, **kwargs))
+
     cache[cache_key] = (current_time, result)
     return result

@@ -80,7 +90,7 @@ background_thread.start()
 def create_table_for_lb(lb_data):
     headers = [
         "Rank",
-        "Discord User ID",
+        "User Name",
         "Submission Name",
         "Runtime (ms)",
         "Submission Date",
@@ -99,10 +109,10 @@ def create_table_for_lb(lb_data):
         rows.append(
             [
                 rank_display,
-                result.user_id,
+                result.user_name,
                 result.submission_name,
                 f"{float(result.submission_score):.4f}",
-                result.submission_time,
+                result.submission_time.strftime("%Y-%m-%d %H:%M:%S"),
             ]
         )

@@ -113,7 +123,7 @@ def create_table_for_lb(lb_data):
             "int",
             "str",
             "str",
-            "datetime",
+            "timestamp",
         ],
         value=rows,
         interactive=False,
@@ -122,34 +132,71 @@ def create_table_for_lb(lb_data):
     return df


-def on_lb_change(lb_name):
-    gpu_choices = cached_fetch(get_gpus_for_leaderboard, lb_name)
-
-    active_selections["leaderboard"] = lb_name
-    if gpu_choices:
-        active_selections["gpu"] = gpu_choices[0]
-
-    return (
-        gr.update(choices=gpu_choices, value=gpu_choices[0] if gpu_choices else None),
-        update_table(lb_name, gpu_choices[0] if gpu_choices else None),
-    )
-
-
-def update_table(lb_name, gpu_name):
-    if not gpu_name:
-        return None
-
-    active_selections["gpu"] = gpu_name
-
-    data = cached_fetch(get_leaderboard_submissions, lb_name, gpu_name)
-    return create_table_for_lb(data)
+def build_ui():
+    # Define the function first before using it
+    def create_table_for_lb_with_global_rank(lb_data, offset):
+        """Create table with global ranks instead of page-specific ranks"""
+        headers = [
+            "Rank",
+            "User Name",
+            "Submission ID",
+            "Submission Name",
+            "Runtime (ms)",
+            "Submission Date",
+        ]
+
+        rows = []
+        for i, result in enumerate(lb_data.results, 1):
+            # Calculate global rank by adding offset
+            global_rank = i + offset
+
+            # Only show medals for the top 3 overall and only on the first page
+            if offset == 0 and global_rank <= 3:  # first page and top 3
+                if global_rank == 1:
+                    rank_display = "🥇 1"
+                elif global_rank == 2:
+                    rank_display = "🥈 2"
+                elif global_rank == 3:
+                    rank_display = "🥉 3"
+            else:
+                rank_display = str(global_rank)
+
+            rows.append(
+                [
+                    rank_display,
+                    result.user_name,
+                    str(result.submission_id),  # Add submission ID as a new column
+                    result.submission_name,
+                    f"{float(result.submission_score):.4f}",
+                    result.submission_time.strftime("%Y-%m-%d %H:%M:%S"),
+                ]
+            )
+
+        # Apply different class based on whether it's the first page or not
+        elem_classes = "" if offset == 0 else "non-first-page-table"
+
+        df = gr.Dataframe(
+            headers=headers,
+            datatype=[
+                "str",
+                "str",
+                "str",  # Submission ID
+                "str",
+                "str",
+                "timestamp",
+            ],
+            value=rows,
+            interactive=False,
+            elem_classes=elem_classes,
+        )

+        return df

-def build_ui():
     with gr.Blocks(
         title="ML Leaderboards",
         theme=gr.themes.Soft(),
         css="""
+        /* Apply medal colors to all tables by default */
         .gradio-container table tr:nth-child(1) {
             background-color: rgba(255, 215, 0, 0.2) !important; /* Gold */
         }
@@ -159,7 +206,31 @@ def build_ui():
         .gradio-container table tr:nth-child(3) {
             background-color: rgba(205, 127, 50, 0.2) !important; /* Bronze */
         }
-        """,
+
+        /* Remove medal colors for non-first pages */
+        .non-first-page-table tr:nth-child(1),
+        .non-first-page-table tr:nth-child(2),
+        .non-first-page-table tr:nth-child(3) {
+            background-color: inherit !important;
+        }
+
+        .pagination-controls {
+            display: flex;
+            justify-content: space-between;
+            align-items: center;
+            margin-top: 10px;
+            width: 100%;
+        }
+
+        .pagination-info {
+            text-align: center;
+            flex-grow: 1;
+        }
+
+        .pagination-button {
+            min-width: 100px;
+        }
+        """,
     ) as app:
         gr.Markdown("# 🍿 KernelBot Leaderboard 🍿")

@@ -168,7 +239,19 @@ def build_ui():
         gpu_names = cached_fetch(get_gpus_for_leaderboard, selected_lb)
         selected_gpu = gpu_names[0]

-        data = cached_fetch(get_leaderboard_submissions, selected_lb, selected_gpu)
+        # Set default pagination values
+        items_per_page = 10
+        current_page = 1
+
+        data = cached_fetch(
+            get_leaderboard_submissions,
+            selected_lb,
+            selected_gpu,
+            limit=items_per_page,
+            offset=0,
+        )
+        total_count = cached_fetch(get_submission_count, selected_lb, selected_gpu)
+        total_pages = (total_count + items_per_page - 1) // items_per_page

         with gr.Row():
             with gr.Column(scale=1):
@@ -186,16 +269,126 @@ def build_ui():
             )

         with gr.Row():
-            results_table = create_table_for_lb(data)
+            # Initial table is first page
+            results_table = create_table_for_lb_with_global_rank(data, 0)
+
+        with gr.Row(elem_classes="pagination-controls"):
+            with gr.Column(scale=1, min_width=100, elem_classes="pagination-button"):
+                prev_btn = gr.Button("← Previous", interactive=(current_page > 1))
+
+            with gr.Column(scale=2, elem_classes="pagination-info"):
+                page_info = gr.Markdown(f"Page {current_page} of {total_pages}")
+
+            with gr.Column(scale=1, min_width=100, elem_classes="pagination-button"):
+                next_btn = gr.Button("Next →", interactive=(current_page < total_pages))
+
+        def on_lb_change(lb_name):
+            gpu_choices = cached_fetch(get_gpus_for_leaderboard, lb_name)
+
+            active_selections["leaderboard"] = lb_name
+            if gpu_choices:
+                active_selections["gpu"] = gpu_choices[0]
+
+            # Reset to page 1 when changing leaderboard
+            data = cached_fetch(
+                get_leaderboard_submissions,
+                lb_name,
+                gpu_choices[0] if gpu_choices else None,
+                limit=items_per_page,
+                offset=0,
+            )
+
+            # Get total count for pagination
+            total_count = cached_fetch(
+                get_submission_count, lb_name, gpu_choices[0] if gpu_choices else None
+            )
+            total_pages = (total_count + items_per_page - 1) // items_per_page
+
+            return (
+                gr.update(
+                    choices=gpu_choices, value=gpu_choices[0] if gpu_choices else None
+                ),
+                create_table_for_lb_with_global_rank(data, 0),
+                gr.update(value=f"Page 1 of {total_pages}"),
+                gr.update(interactive=False),  # prev button disabled on page 1
+                gr.update(
+                    interactive=(total_pages > 1)
+                ),  # next button enabled if more than 1 page
+            )
+
+        def update_table(lb_name, gpu_name, page=1):
+            if not gpu_name:
+                return None, gr.update(), gr.update(), gr.update()
+
+            active_selections["gpu"] = gpu_name
+            offset = (page - 1) * items_per_page
+
+            data = cached_fetch(
+                get_leaderboard_submissions,
+                lb_name,
+                gpu_name,
+                limit=items_per_page,
+                offset=offset,
+            )
+
+            # Get total count for pagination
+            total_count = cached_fetch(get_submission_count, lb_name, gpu_name)
+            total_pages = (total_count + items_per_page - 1) // items_per_page
+
+            # Create table with global ranks
+            table = create_table_for_lb_with_global_rank(data, offset)
+
+            return (
+                table,
+                gr.update(value=f"Page {page} of {total_pages}"),
+                gr.update(interactive=(page > 1)),
+                gr.update(interactive=(page < total_pages)),
+            )
+
+        def next_page():
+            nonlocal current_page
+            lb_name = active_selections["leaderboard"]
+            gpu_name = active_selections["gpu"]
+
+            # Get total count to check if we can go to next page
+            total_count = cached_fetch(get_submission_count, lb_name, gpu_name)
+            total_pages = (total_count + items_per_page - 1) // items_per_page
+
+            if current_page < total_pages:
+                current_page += 1
+                return update_table(lb_name, gpu_name, current_page)
+            return update_table(lb_name, gpu_name, current_page)
+
+        def prev_page():
+            nonlocal current_page
+            if current_page > 1:
+                current_page -= 1
+            lb_name = active_selections["leaderboard"]
+            gpu_name = active_selections["gpu"]
+            return update_table(lb_name, gpu_name, current_page)

         lb_dropdown.change(
             fn=on_lb_change,
             inputs=[lb_dropdown],
-            outputs=[gpu_dropdown, results_table],
+            outputs=[gpu_dropdown, results_table, page_info, prev_btn, next_btn],
         )

         gpu_dropdown.change(
-            fn=update_table, inputs=[lb_dropdown, gpu_dropdown], outputs=results_table
+            fn=lambda lb, gpu: update_table(lb, gpu, 1),  # Reset to page 1
+            inputs=[lb_dropdown, gpu_dropdown],
+            outputs=[results_table, page_info, prev_btn, next_btn],
+        )
+
+        next_btn.click(
+            fn=next_page,
+            inputs=[],
+            outputs=[results_table, page_info, prev_btn, next_btn],
+        )
+
+        prev_btn.click(
+            fn=prev_page,
+            inputs=[],
+            outputs=[results_table, page_info, prev_btn, next_btn],
         )

     return app
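
Side note on the pagination introduced above: a minimal sketch (not part of the commit) of how a page number maps to the limit/offset arguments passed to cached_fetch; the counts here are invented for illustration.

# Illustration only: offset/limit arithmetic as used by build_ui above.
items_per_page = 10
total_count = 42  # e.g. a value get_submission_count might return

# Ceiling division: 42 submissions at 10 per page -> 5 pages
total_pages = (total_count + items_per_page - 1) // items_per_page
assert total_pages == 5

# Page 3 shows global ranks 21-30, so the API is queried with offset=20, limit=10
page = 3
offset = (page - 1) * items_per_page
assert offset == 20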
src/result.py CHANGED
@@ -10,6 +10,7 @@ class Result:
     submission_score: float
     submission_id: str
     user_id: str
+    user_name: str
     rank: int

     @classmethod
@@ -21,6 +22,7 @@ class Result:
             submission_id=data["submission_id"],
             user_id=data["user_id"],
             rank=data["rank"],
+            user_name=data["user_name"],
         )

     @classmethod
@@ -36,6 +38,7 @@ class Result:
             "submission_score": self.submission_score,
             "submission_id": self.submission_id,
             "user_id": self.user_id,
+            "user_name": self.user_name,
             "rank": self.rank,
         }
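
Aside (not part of the commit): the from_dict/to_dict round-trip the new field has to survive, sketched with a reduced stand-in; MiniResult and its two fields are hypothetical, not the real Result class.

from dataclasses import dataclass


@dataclass
class MiniResult:
    # Stand-in for Result, reduced to the fields touched by this commit
    user_id: str
    user_name: str

    @classmethod
    def from_dict(cls, data: dict) -> "MiniResult":
        return cls(user_id=data["user_id"], user_name=data["user_name"])

    def to_dict(self) -> dict:
        return {"user_id": self.user_id, "user_name": self.user_name}


row = MiniResult.from_dict({"user_id": "123", "user_name": "siro1"})
assert row.to_dict()["user_name"] == "siro1"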
 
src/retrieve_data.py CHANGED
@@ -1,5 +1,3 @@
-from collections import defaultdict
-
 from httpx import AsyncClient

 from src.envs import API_URL, TIMEOUT
@@ -20,9 +18,14 @@ async def get_gpus_for_leaderboard(lb_name: str) -> list[str]:
         return response.json()


-async def get_leaderboard_submissions(lb_name: str, gpu: str) -> LbData:
+async def get_leaderboard_submissions(
+    lb_name: str, gpu: str, limit: int = None, offset: int = 0
+) -> LbData:
     async with AsyncClient(timeout=TIMEOUT) as client:
-        response = await client.get(f"{API_URL}/submissions/{lb_name}/{gpu}")
+        params = {"limit": limit, "offset": offset}
+        response = await client.get(
+            f"{API_URL}/submissions/{lb_name}/{gpu}", params=params
+        )
         response.raise_for_status()
         return LbData(
             gpu=gpu,
@@ -31,13 +34,8 @@ async def get_leaderboard_submissions(lb_name: str, gpu: str) -> LbData:
         )


-async def populate_lb_data():
-    leaderboards: dict[str, dict[str, LbData]] = defaultdict(dict)
-    lb_names = await get_leaderboard_names()
-    for lb_name in lb_names:
-        gpus = await get_gpus_for_leaderboard(lb_name)
-        for gpu in gpus:
-            lb_data = await get_leaderboard_submissions(lb_name, gpu)
-            leaderboards[lb_name][gpu] = lb_data
-
-    return leaderboards
+async def get_submission_count(lb_name: str, gpu: str) -> int:
+    async with AsyncClient(timeout=TIMEOUT) as client:
+        response = await client.get(f"{API_URL}/submission_count/{lb_name}/{gpu}")
+        response.raise_for_status()
+        return response.json()["count"]
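
Aside (not part of the commit): a rough usage sketch of the two coroutines above, fetching one page of results; the leaderboard and GPU names are invented.

import asyncio

from src.retrieve_data import get_leaderboard_submissions, get_submission_count


async def main():
    lb_name, gpu = "some-leaderboard", "some-gpu"  # hypothetical values
    # Total number of submissions, used by the UI to compute the page count
    total = await get_submission_count(lb_name, gpu)
    # Second page of 10 entries, i.e. rows 11-20 of the leaderboard
    page2 = await get_leaderboard_submissions(lb_name, gpu, limit=10, offset=10)
    print(total, len(page2.results))


asyncio.run(main())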