Abhishek Thakur committed on
Commit
2f00a93
·
1 Parent(s): d812604
Dockerfile CHANGED
@@ -32,6 +32,4 @@ RUN conda create -p /app/env -y python=3.8
32
  SHELL ["conda", "run","--no-capture-output", "-p","/app/env", "/bin/bash", "-c"]
33
 
34
  COPY --chown=1000:1000 . /app/
35
- RUN python setup.py install
36
-
37
- CMD competitions run
 
32
  SHELL ["conda", "run","--no-capture-output", "-p","/app/env", "/bin/bash", "-c"]
33
 
34
  COPY --chown=1000:1000 . /app/
35
+ RUN python setup.py install
 
 
competitions/leaderboard.py CHANGED
@@ -8,7 +8,6 @@ from datetime import datetime
8
  import pandas as pd
9
  from loguru import logger
10
 
11
- # from huggingface_hub import snapshot_download
12
  from .download import snapshot_download
13
 
14
 
@@ -38,14 +37,14 @@ class Leaderboard:
38
  start_time = time.time()
39
  submissions_folder = snapshot_download(
40
  repo_id=self.competition_id,
41
- allow_patterns="*.json",
42
  use_auth_token=self.autotrain_token,
43
  repo_type="dataset",
44
  )
45
  logger.info(f"Downloaded submissions in {time.time() - start_time} seconds")
46
  start_time = time.time()
47
  submissions = []
48
- for submission in glob.glob(os.path.join(submissions_folder, "*.json")):
49
  with open(submission, "r") as f:
50
  submission_info = json.load(f)
51
  # only select submissions that are done
@@ -78,14 +77,14 @@ class Leaderboard:
78
  start_time = time.time()
79
  submissions_folder = snapshot_download(
80
  repo_id=self.competition_id,
81
- allow_patterns="*.json",
82
  use_auth_token=self.autotrain_token,
83
  repo_type="dataset",
84
  )
85
  logger.info(f"Downloaded submissions in {time.time() - start_time} seconds")
86
  start_time = time.time()
87
  submissions = []
88
- for submission in glob.glob(os.path.join(submissions_folder, "*.json")):
89
  with open(submission, "r") as f:
90
  submission_info = json.load(f)
91
  submission_info["submissions"] = [
 
8
  import pandas as pd
9
  from loguru import logger
10
 
 
11
  from .download import snapshot_download
12
 
13
 
 
37
  start_time = time.time()
38
  submissions_folder = snapshot_download(
39
  repo_id=self.competition_id,
40
+ allow_patterns="submission_info/*.json",
41
  use_auth_token=self.autotrain_token,
42
  repo_type="dataset",
43
  )
44
  logger.info(f"Downloaded submissions in {time.time() - start_time} seconds")
45
  start_time = time.time()
46
  submissions = []
47
+ for submission in glob.glob(os.path.join(submissions_folder, "submission_info", "*.json")):
48
  with open(submission, "r") as f:
49
  submission_info = json.load(f)
50
  # only select submissions that are done
 
77
  start_time = time.time()
78
  submissions_folder = snapshot_download(
79
  repo_id=self.competition_id,
80
+ allow_patterns="submission_info/*.json",
81
  use_auth_token=self.autotrain_token,
82
  repo_type="dataset",
83
  )
84
  logger.info(f"Downloaded submissions in {time.time() - start_time} seconds")
85
  start_time = time.time()
86
  submissions = []
87
+ for submission in glob.glob(os.path.join(submissions_folder, "submission_info", "*.json")):
88
  with open(submission, "r") as f:
89
  submission_info = json.load(f)
90
  submission_info["submissions"] = [
competitions/submissions.py CHANGED
@@ -57,7 +57,7 @@ class Submissions:
57
 
58
  api.upload_file(
59
  path_or_fileobj=user_submission_info_json_buffer,
60
- path_in_repo=f"{user_info['id']}.json",
61
  repo_id=self.competition_id,
62
  repo_type="dataset",
63
  token=self.autotrain_token,
@@ -68,7 +68,7 @@ class Submissions:
68
  try:
69
  user_fname = hf_hub_download(
70
  repo_id=self.competition_id,
71
- filename=f"{user_id}.json",
72
  use_auth_token=self.autotrain_token,
73
  repo_type="dataset",
74
  )
@@ -76,7 +76,7 @@ class Submissions:
76
  self._add_new_user(user_info)
77
  user_fname = hf_hub_download(
78
  repo_id=self.competition_id,
79
- filename=f"{user_id}.json",
80
  use_auth_token=self.autotrain_token,
81
  repo_type="dataset",
82
  )
@@ -103,7 +103,7 @@ class Submissions:
103
  def _increment_submissions(self, user_id, submission_id, submission_comment):
104
  user_fname = hf_hub_download(
105
  repo_id=self.competition_id,
106
- filename=f"{user_id}.json",
107
  use_auth_token=self.autotrain_token,
108
  repo_type="dataset",
109
  )
@@ -138,7 +138,7 @@ class Submissions:
138
  api = HfApi()
139
  api.upload_file(
140
  path_or_fileobj=user_submission_info_json_buffer,
141
- path_in_repo=f"{user_id}.json",
142
  repo_id=self.competition_id,
143
  repo_type="dataset",
144
  token=self.autotrain_token,
@@ -148,7 +148,7 @@ class Submissions:
148
  def _download_user_subs(self, user_id):
149
  user_fname = hf_hub_download(
150
  repo_id=self.competition_id,
151
- filename=f"{user_id}.json",
152
  use_auth_token=self.autotrain_token,
153
  repo_type="dataset",
154
  )
@@ -166,7 +166,7 @@ class Submissions:
166
 
167
  user_fname = hf_hub_download(
168
  repo_id=self.competition_id,
169
- filename=f"{user_id}.json",
170
  use_auth_token=self.autotrain_token,
171
  repo_type="dataset",
172
  )
@@ -186,7 +186,7 @@ class Submissions:
186
  api = HfApi()
187
  api.upload_file(
188
  path_or_fileobj=user_submission_info_json_buffer,
189
- path_in_repo=f"{user_id}.json",
190
  repo_id=self.competition_id,
191
  repo_type="dataset",
192
  token=self.autotrain_token,
 
57
 
58
  api.upload_file(
59
  path_or_fileobj=user_submission_info_json_buffer,
60
+ path_in_repo=f"submission_info/{user_info['id']}.json",
61
  repo_id=self.competition_id,
62
  repo_type="dataset",
63
  token=self.autotrain_token,
 
68
  try:
69
  user_fname = hf_hub_download(
70
  repo_id=self.competition_id,
71
+ filename=f"submission_info/{user_id}.json",
72
  use_auth_token=self.autotrain_token,
73
  repo_type="dataset",
74
  )
 
76
  self._add_new_user(user_info)
77
  user_fname = hf_hub_download(
78
  repo_id=self.competition_id,
79
+ filename=f"submission_info/{user_id}.json",
80
  use_auth_token=self.autotrain_token,
81
  repo_type="dataset",
82
  )
 
103
  def _increment_submissions(self, user_id, submission_id, submission_comment):
104
  user_fname = hf_hub_download(
105
  repo_id=self.competition_id,
106
+ filename=f"submission_info/{user_id}.json",
107
  use_auth_token=self.autotrain_token,
108
  repo_type="dataset",
109
  )
 
138
  api = HfApi()
139
  api.upload_file(
140
  path_or_fileobj=user_submission_info_json_buffer,
141
+ path_in_repo=f"submission_info/{user_id}.json",
142
  repo_id=self.competition_id,
143
  repo_type="dataset",
144
  token=self.autotrain_token,
 
148
  def _download_user_subs(self, user_id):
149
  user_fname = hf_hub_download(
150
  repo_id=self.competition_id,
151
+ filename=f"submission_info/{user_id}.json",
152
  use_auth_token=self.autotrain_token,
153
  repo_type="dataset",
154
  )
 
166
 
167
  user_fname = hf_hub_download(
168
  repo_id=self.competition_id,
169
+ filename=f"submission_info/{user_id}.json",
170
  use_auth_token=self.autotrain_token,
171
  repo_type="dataset",
172
  )
 
186
  api = HfApi()
187
  api.upload_file(
188
  path_or_fileobj=user_submission_info_json_buffer,
189
+ path_in_repo=f"submission_info/{user_id}.json",
190
  repo_id=self.competition_id,
191
  repo_type="dataset",
192
  token=self.autotrain_token,