HuggingFace0920 committed on
Commit 48628b5 · verified
1 Parent(s): 4cf155f

Update sync_data.sh

Files changed (1)
  1. sync_data.sh +181 -139
sync_data.sh CHANGED
@@ -1,206 +1,248 @@
  #!/bin/sh

- # Check the required environment variables
  if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
-     echo "Error: the HF_TOKEN or DATASET_ID environment variable is not set" >&2
-     exit 1
  fi

  # Activate the virtual environment
- if [ -f "$APP_HOME/venv/bin/activate" ]; then
-     . "$APP_HOME/venv/bin/activate"
- else
-     echo "Error: virtual environment activation script not found" >&2
-     exit 1
- fi
-
- # Set the default sync interval
- SYNC_INTERVAL=${SYNC_INTERVAL:-7200} # default: 2 hours
- STORAGE_PATH="$APP_HOME/data"
- SQUASH_FLAG_FILE="/tmp/last_squash_time"

  # Generate the sync script
  cat > hf_sync.py << 'EOL'
- # HuggingFace sync script
  from huggingface_hub import HfApi
  import sys
  import os
  import tarfile
  import tempfile

- # Cap the number of backup files; delete the oldest ones once the limit is exceeded
- def manage_backups(api, repo_id, max_files=50):
-     files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
-     backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
-     backup_files.sort()
-     if len(backup_files) >= max_files:
-         files_to_delete = backup_files[:(len(backup_files) - max_files + 1)]
-         for file_to_delete in files_to_delete:
-             try:
-                 api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type="dataset")
-                 print(f'Deleted old backup: {file_to_delete}')
-             except Exception as e:
-                 print(f'Error deleting {file_to_delete}: {str(e)}')
-
- # Upload a backup file to HuggingFace
- def upload_backup(file_path, file_name, token, repo_id):
      api = HfApi(token=token)
      try:
          api.upload_file(
              path_or_fileobj=file_path,
              path_in_repo=file_name,
              repo_id=repo_id,
              repo_type="dataset"
          )
-         print(f"Successfully uploaded {file_name}")
-         manage_backups(api, repo_id)
      except Exception as e:
          print(f"Error uploading file: {str(e)}")
          return False
-     return True

- # Download the latest backup
- def download_latest_backup(token, repo_id, extract_path):
-     try:
-         api = HfApi(token=token)
-         files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
-         backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
-         if not backup_files:
-             print("No backup files found")
-             return False
-         latest_backup = sorted(backup_files)[-1]
-         with tempfile.TemporaryDirectory() as temp_dir:
-             filepath = api.hf_hub_download(
-                 repo_id=repo_id,
-                 filename=latest_backup,
-                 repo_type="dataset",
-                 local_dir=temp_dir
-             )
-             if filepath and os.path.exists(filepath):
-                 with tarfile.open(filepath, 'r:gz') as tar:
-                     tar.extractall(extract_path)
-                 print(f"Successfully restored backup: {latest_backup}")
-                 return True
-     except Exception as e:
-         print(f"Error downloading backup: {str(e)}")
      return False

- # Squash the commit history
  def super_squash_history(token, repo_id):
      try:
          api = HfApi(token=token)
          api.super_squash_history(repo_id=repo_id, repo_type="dataset")
          print("History squash finished.")
      except Exception as e:
          print(f"Error squashing history: {str(e)}")
-         return False
-     return True

- # Main entry point
  if __name__ == "__main__":
-     if len(sys.argv) < 4:
-         print("Error: not enough arguments")
-         sys.exit(1)
-
      action = sys.argv[1]
      token = sys.argv[2]
      repo_id = sys.argv[3]

      if action == "upload":
-         if len(sys.argv) < 6:
-             print("Error: the upload action requires a file path and a file name")
-             sys.exit(1)
          file_path = sys.argv[4]
          file_name = sys.argv[5]
-         if not upload_backup(file_path, file_name, token, repo_id):
-             sys.exit(1)
      elif action == "download":
          extract_path = sys.argv[4] if len(sys.argv) > 4 else '.'
-         if not download_latest_backup(token, repo_id, extract_path):
-             sys.exit(1)
      elif action == "super_squash":
-         if not super_squash_history(token, repo_id):
-             sys.exit(1)
-     else:
-         print(f"Error: unknown action '{action}'")
-         sys.exit(1)
-
-     sys.exit(0)
  EOL

  # On first start, download the latest backup from HuggingFace
- echo "Downloading the latest backup from HuggingFace..."
- if ! python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "${APP_HOME}"; then
-     echo "Warning: backup download failed, starting with an empty data directory" >&2
  fi

- # Start the OpenList service
- echo "Starting the OpenList service..."
- $APP_HOME/openlist server &
- OPENLIST_PID=$!
-
  # Sync function
  sync_data() {
      while true; do
-         echo "Sync run started at $(date)"

-         # Make sure the data directory exists
-         if [ -d "${STORAGE_PATH}" ]; then
-             # Create a backup
-             timestamp=$(date +%Y%m%d_%H%M%S)
-             backup_file="backup_${timestamp}.tar.gz"
-             temp_backup="/tmp/${backup_file}"
-
-             # Compress the directory
-             echo "Creating data backup..."
-             if tar -czf "${temp_backup}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"; then
-                 # Upload to HuggingFace
-                 echo "Uploading backup to HuggingFace..."
-                 if python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "${temp_backup}" "${backup_file}"; then
-                     echo "Backup uploaded successfully"
-                 else
-                     echo "Error: backup upload failed" >&2
-                 fi
-
-                 # Squash the commit history
-                 NOW=$(date +%s)
-                 SEVEN_DAYS=$((7*24*60*60))
-                 if [ ! -f "$SQUASH_FLAG_FILE" ]; then
-                     echo $NOW > "$SQUASH_FLAG_FILE"
-                     echo "Squashing commit history for the first time..."
-                     python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
-                 else
-                     LAST=$(cat "$SQUASH_FLAG_FILE")
-                     DIFF=$((NOW - LAST))
-                     if [ $DIFF -ge $SEVEN_DAYS ]; then
-                         echo $NOW > "$SQUASH_FLAG_FILE"
-                         echo "More than 7 days since the last squash, squashing commit history..."
-                         python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
-                     else
-                         echo "Less than 7 days since the last squash, skipping the history squash this round."
-                     fi
-                 fi
-             else
-                 echo "Error: failed to create backup" >&2
-             fi
-
-             # Clean up the temporary file
              rm -f "${temp_backup}"
          else
-             echo "Warning: storage directory ${STORAGE_PATH} does not exist, waiting..." >&2
          fi

-         # Check whether the OpenList service is still running
-         if ! kill -0 $OPENLIST_PID 2>/dev/null; then
-             echo "Error: the OpenList service has stopped, exiting the sync process" >&2
-             exit 1
          fi

-         # Sync interval
-         echo "Next sync in ${SYNC_INTERVAL} seconds..."
-         sleep $SYNC_INTERVAL
      done
  }

  # Start the sync process
- sync_data

  #!/bin/sh

+ # Set default values
+ : ${APP_HOME:=/OpenList}
+ : ${STORAGE_PATH:=$APP_HOME/data}
+ : ${SYNC_INTERVAL:=7200}
+ : ${MAX_BACKUPS:=50}
+
+ # Check the required environment variables
  if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
+     echo "Warning: HF_TOKEN or DATASET_ID not set, backups are disabled"
+     if [ "$REQUIRE_BACKUP" = "true" ]; then
+         echo "Error: backups are required but the necessary environment variables are missing"
+         exit 1
+     fi
+     # Run the service directly, without the backup feature
+     exec ./openlist server
  fi

  # Activate the virtual environment
+ . ${APP_HOME}/venv/bin/activate

  # Generate the sync script
  cat > hf_sync.py << 'EOL'
  from huggingface_hub import HfApi
  import sys
  import os
  import tarfile
  import tempfile
+ import time
+ from datetime import datetime
+
+ def manage_backups(api, repo_id, max_files):
+     try:
+         files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
+         backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
+         backup_files.sort()
+
+         if len(backup_files) >= max_files:
+             files_to_delete = backup_files[:len(backup_files) - max_files + 1]
+             for file_to_delete in files_to_delete:
+                 try:
+                     api.delete_file(
+                         path_in_repo=file_to_delete,
+                         repo_id=repo_id,
+                         repo_type="dataset"
+                     )
+                     print(f'Deleted old backup: {file_to_delete}')
+                 except Exception as e:
+                     print(f'Error deleting {file_to_delete}: {str(e)}')
+     except Exception as e:
+         print(f'Error managing backups: {str(e)}')

+ def upload_backup(file_path, file_name, token, repo_id, max_files):
      api = HfApi(token=token)
      try:
+         print(f"Uploading backup {file_name}...")
+         start_time = time.time()
+
          api.upload_file(
              path_or_fileobj=file_path,
              path_in_repo=file_name,
              repo_id=repo_id,
              repo_type="dataset"
          )
+
+         upload_time = time.time() - start_time
+         print(f"Successfully uploaded {file_name} (took {upload_time:.2f}s)")
+         manage_backups(api, repo_id, max_files)
+         return True
      except Exception as e:
          print(f"Error uploading file: {str(e)}")
          return False

+ def download_latest_backup(token, repo_id, extract_path, max_retries=3):
+     for attempt in range(max_retries):
+         try:
+             api = HfApi(token=token)
+             files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
+             backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
+
+             if not backup_files:
+                 print("No backup files found")
+                 return False
+
+             latest_backup = sorted(backup_files)[-1]
+             print(f"Downloading the latest backup: {latest_backup}")
+
+             with tempfile.TemporaryDirectory() as temp_dir:
+                 filepath = api.hf_hub_download(
+                     repo_id=repo_id,
+                     filename=latest_backup,
+                     repo_type="dataset",
+                     local_dir=temp_dir
+                 )
+
+                 if filepath and os.path.exists(filepath):
+                     print("Extracting the backup archive...")
+                     with tarfile.open(filepath, 'r:gz') as tar:
+                         tar.extractall(extract_path)
+                     print(f"Successfully restored backup: {latest_backup}")
+                     return True
+         except Exception as e:
+             print(f"Error downloading backup (attempt {attempt + 1}/{max_retries}): {str(e)}")
+             if attempt < max_retries - 1:
+                 wait_time = (attempt + 1) * 10
+                 print(f"Retrying in {wait_time} seconds...")
+                 time.sleep(wait_time)
+
      return False

  def super_squash_history(token, repo_id):
      try:
          api = HfApi(token=token)
+         print("Squashing the commit history...")
          api.super_squash_history(repo_id=repo_id, repo_type="dataset")
          print("History squash finished.")
      except Exception as e:
          print(f"Error squashing history: {str(e)}")

  if __name__ == "__main__":
      action = sys.argv[1]
      token = sys.argv[2]
      repo_id = sys.argv[3]

      if action == "upload":
          file_path = sys.argv[4]
          file_name = sys.argv[5]
+         max_files = int(sys.argv[6]) if len(sys.argv) > 6 else 50
+         upload_backup(file_path, file_name, token, repo_id, max_files)
      elif action == "download":
          extract_path = sys.argv[4] if len(sys.argv) > 4 else '.'
+         download_latest_backup(token, repo_id, extract_path)
      elif action == "super_squash":
+         super_squash_history(token, repo_id)
  EOL

  # On first start, download the latest backup from HuggingFace
+ if [ "$SKIP_INITIAL_DOWNLOAD" != "true" ]; then
+     echo "Trying to download the latest backup from HuggingFace..."
+     if ! python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "${STORAGE_PATH}"; then
+         echo "Warning: initial backup download failed, continuing with an empty data directory"
+         mkdir -p "${STORAGE_PATH}"
+     fi
  fi

  # Sync function
  sync_data() {
      while true; do
+         echo "[$(date '+%Y-%m-%d %H:%M:%S')] Starting sync run"

+         # Check the data directory
+         if [ ! -d "${STORAGE_PATH}" ]; then
+             echo "Error: storage directory ${STORAGE_PATH} does not exist"
+             sleep 60
+             continue
+         fi
+
+         # Create a backup
+         timestamp=$(date +%Y%m%d_%H%M%S)
+         backup_file="backup_${timestamp}.tar.gz"
+         temp_backup="/tmp/${backup_file}"
+
+         echo "Creating backup: ${backup_file}"
+         start_time=$(date +%s)
+
+         # Compress the directory
+         if ! tar -czf "${temp_backup}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"; then
+             echo "Error: failed to create the backup archive"
              rm -f "${temp_backup}"
+             sleep ${SYNC_INTERVAL}
+             continue
+         fi
+
+         # Verify the backup file exists
+         if [ ! -f "${temp_backup}" ]; then
+             echo "Error: backup file was not created"
+             sleep ${SYNC_INTERVAL}
+             continue
+         fi
+
+         # Upload the backup
+         echo "Uploading backup to HuggingFace..."
+         if python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "${temp_backup}" "${backup_file}" "${MAX_BACKUPS}"; then
+             echo "Backup uploaded successfully"
          else
+             echo "Warning: backup upload failed"
          fi

+         # Clean up the temporary file
+         rm -f "${temp_backup}"
+
+         # Weekly history squash
+         SQUASH_FLAG_FILE="/tmp/last_squash_time"
+         NOW=$(date +%s)
+         SEVEN_DAYS=$((7*24*60*60))
+
+         if [ ! -f "$SQUASH_FLAG_FILE" ]; then
+             echo $NOW > "$SQUASH_FLAG_FILE"
+             echo "Squashing commit history for the first time..."
+             python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
+         else
+             LAST=$(cat "$SQUASH_FLAG_FILE")
+             DIFF=$((NOW - LAST))
+
+             if [ $DIFF -ge $SEVEN_DAYS ]; then
+                 echo $NOW > "$SQUASH_FLAG_FILE"
+                 echo "More than 7 days since the last squash, squashing commit history..."
+                 python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
+             else
+                 remaining_days=$(( (SEVEN_DAYS - DIFF) / 86400 ))
+                 echo "About ${remaining_days} day(s) until the next history squash"
+             fi
          fi

+         # Work out when the next sync should run
+         end_time=$(date +%s)
+         duration=$((end_time - start_time))
+         next_sync=$((SYNC_INTERVAL - duration))
+
+         if [ $next_sync -gt 0 ]; then
+             echo "Sync finished in ${duration} seconds, next sync in ${next_sync} seconds ($(date -d "@$(($(date +%s) + next_sync))" '+%Y-%m-%d %H:%M:%S'))"
+             sleep $next_sync
+         else
+             echo "Sync finished in ${duration} seconds (longer than the sync interval), starting the next sync immediately"
+         fi
      done
  }

+ # Start the OpenList service
+ ./openlist server &
+ SERVER_PID=$!
+
  # Start the sync process
+ if [ "$DISABLE_SYNC" != "true" ]; then
+     sync_data &
+     SYNC_PID=$!
+ fi
+
+ # Wait for the server process to exit
+ wait $SERVER_PID
+
+ # Clean up
+ if [ -n "$SYNC_PID" ]; then
+     kill $SYNC_PID
+ fi
+
+ exit 0
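
For reference, a minimal launch sketch for the updated script. This is not part of the commit: the token, dataset name, and values below are placeholders, and it simply assumes the script is run from $APP_HOME (for example as a container entrypoint); only the variable names are the ones the new sync_data.sh actually reads.

    # Placeholder values for illustration only
    export HF_TOKEN="hf_xxx"                  # HuggingFace write token
    export DATASET_ID="username/openlist-backup"  # dataset repo holding backup_*.tar.gz files
    export APP_HOME="/OpenList"               # optional, defaults to /OpenList
    export SYNC_INTERVAL=3600                 # optional, defaults to 7200 seconds
    export MAX_BACKUPS=30                     # optional, defaults to 50 retained backups
    # Optional switches read by the script:
    #   REQUIRE_BACKUP=true         exit instead of running without backups
    #   SKIP_INITIAL_DOWNLOAD=true  skip the restore on first start
    #   DISABLE_SYNC=true           run the server without the periodic backup loop
    cd "$APP_HOME" && sh sync_data.sh

With no HF_TOKEN/DATASET_ID set (and REQUIRE_BACKUP unset), the new script now falls back to `exec ./openlist server` instead of exiting, which is the main behavioral change on startup.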