HuggingFace0920 committed
Commit 41348e6 · verified · Parent: 819a890

Update sync_data.sh

Files changed (1)
  1. sync_data.sh +90 -40
sync_data.sh CHANGED
@@ -1,13 +1,23 @@
 #!/bin/sh
 
-# Check environment variables
+# Check required environment variables
 if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
-    echo "HF_TOKEN or DATASET_ID not detected; backup is unavailable"
+    echo "Error: HF_TOKEN or DATASET_ID environment variable is not set" >&2
     exit 1
 fi
 
 # Activate the virtual environment
-. ${APP_HOME}/venv/bin/activate
+if [ -f "$APP_HOME/venv/bin/activate" ]; then
+    . "$APP_HOME/venv/bin/activate"
+else
+    echo "Error: virtual environment activation script not found" >&2
+    exit 1
+fi
+
+# Set the default sync interval
+SYNC_INTERVAL=${SYNC_INTERVAL:-7200}  # default: 2 hours
+STORAGE_PATH="$APP_HOME/data"
+SQUASH_FLAG_FILE="/tmp/last_squash_time"
 
 # Generate the sync script
 cat > hf_sync.py << 'EOL'
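Note that the heredoc delimiter is quoted ('EOL'), so the shell performs no expansion inside the generated file. The diff shows only hunks of that generated hf_sync.py; its preamble (roughly lines 1-45) is elided. A minimal sketch of what the preamble plausibly contains, inferred purely from the names the visible hunks use (sys, tempfile, tarfile, an api object, manage_backups); this is a reconstruction, not the committed code, and get_api is a hypothetical helper name:

    # Hypothetical reconstruction of the elided hf_sync.py preamble.
    import sys
    import tarfile
    import tempfile

    from huggingface_hub import HfApi

    def get_api(token):
        # The visible hunks call methods on an `api` object and pass it to
        # manage_backups(), so each action presumably builds an
        # authenticated client like this.
        return HfApi(token=token)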
@@ -46,7 +56,8 @@ def upload_backup(file_path, file_name, token, repo_id):
         manage_backups(api, repo_id)
     except Exception as e:
         print(f"Error uploading file: {str(e)}")
-
+        return False
+    return True
 
 # Download the latest backup
 def download_latest_backup(token, repo_id, extract_path):
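The body of upload_backup and the manage_backups retention helper it calls fall outside the hunk. A sketch of how they plausibly look, using the real huggingface_hub calls HfApi.upload_file, list_repo_files, and delete_file; the keep=10 retention count is an assumption, not taken from the commit:

    from huggingface_hub import HfApi

    def upload_backup(file_path, file_name, token, repo_id):
        # Sketch: push one archive into the dataset repo, then prune old ones.
        api = HfApi(token=token)
        try:
            api.upload_file(
                path_or_fileobj=file_path,
                path_in_repo=file_name,
                repo_id=repo_id,
                repo_type="dataset",
            )
            manage_backups(api, repo_id)
        except Exception as e:
            print(f"Error uploading file: {str(e)}")
            return False
        return True

    def manage_backups(api, repo_id, keep=10):
        # Hypothetical retention policy: keep only the newest `keep` backups.
        files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
        backups = sorted(f for f in files
                         if f.startswith('backup_') and f.endswith('.tar.gz'))
        for old in backups[:-keep]:
            api.delete_file(path_in_repo=old, repo_id=repo_id,
                            repo_type="dataset")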
@@ -56,7 +67,7 @@ def download_latest_backup(token, repo_id, extract_path):
         backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
         if not backup_files:
             print("No backup files found")
-            return
+            return False
         latest_backup = sorted(backup_files)[-1]
         with tempfile.TemporaryDirectory() as temp_dir:
             filepath = api.hf_hub_download(
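The hunk cuts off inside the hf_hub_download call, so its remaining arguments are not shown. A plausible completion under that assumption, wrapped in a hypothetical helper (fetch_latest_backup is not a name from the commit); the keyword arguments are the real hf_hub_download parameters:

    def fetch_latest_backup(api, repo_id, latest_backup, temp_dir):
        # Plausible completion of the truncated call above: download the
        # chosen backup archive into the temporary directory and return
        # its local path.
        return api.hf_hub_download(
            repo_id=repo_id,
            filename=latest_backup,
            repo_type="dataset",
            local_dir=temp_dir,
        )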
@@ -69,8 +80,10 @@ def download_latest_backup(token, repo_id, extract_path):
             with tarfile.open(filepath, 'r:gz') as tar:
                 tar.extractall(extract_path)
             print(f"Backup restored successfully: {latest_backup}")
+            return True
     except Exception as e:
         print(f"Error downloading backup: {str(e)}")
+        return False
 
 # Squash commit history
 def super_squash_history(token, repo_id):
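One caution on the restore path, not raised in this commit: tar.extractall trusts archive member names. Python 3.12 (and the security backports to older releases) added extraction filters that reject absolute paths, ".." traversal, and special files. A hardening sketch that opts in when the filter is available:

    import tarfile

    def safe_extract(archive_path, extract_path):
        # Hardening sketch (not part of this commit): the "data" filter
        # blocks path-traversal members; hasattr() is the documented way
        # to detect the feature on older interpreters.
        with tarfile.open(archive_path, 'r:gz') as tar:
            if hasattr(tarfile, 'data_filter'):
                tar.extractall(extract_path, filter='data')
            else:
                tar.extractall(extract_path)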
@@ -80,77 +93,114 @@ def super_squash_history(token, repo_id):
         print("History squash complete.")
     except Exception as e:
         print(f"Error squashing history: {str(e)}")
+        return False
+    return True
 
 # Main entry point
 if __name__ == "__main__":
+    if len(sys.argv) < 4:
+        print("Error: insufficient arguments")
+        sys.exit(1)
+
     action = sys.argv[1]
     token = sys.argv[2]
     repo_id = sys.argv[3]
+
     if action == "upload":
+        if len(sys.argv) < 6:
+            print("Error: the upload action requires file path and file name arguments")
+            sys.exit(1)
         file_path = sys.argv[4]
         file_name = sys.argv[5]
-        upload_backup(file_path, file_name, token, repo_id)
+        if not upload_backup(file_path, file_name, token, repo_id):
+            sys.exit(1)
     elif action == "download":
         extract_path = sys.argv[4] if len(sys.argv) > 4 else '.'
-        download_latest_backup(token, repo_id, extract_path)
+        if not download_latest_backup(token, repo_id, extract_path):
+            sys.exit(1)
     elif action == "super_squash":
-        super_squash_history(token, repo_id)
+        if not super_squash_history(token, repo_id):
+            sys.exit(1)
+    else:
+        print(f"Error: unknown action '{action}'")
+        sys.exit(1)
+
+    sys.exit(0)
 EOL
-# On first start, download the latest backup from HuggingFace (extract into the app directory)
+
+# On first start, download the latest backup from HuggingFace
 echo "Downloading the latest backup from HuggingFace..."
-python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "/OpenList"
+if ! python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "${APP_HOME}"; then
+    echo "Warning: backup download failed; starting with an empty data directory" >&2
+fi
+
+# Start the OpenList service
+echo "Starting the OpenList service..."
+$APP_HOME/openlist server &
+OPENLIST_PID=$!
 
 # Sync function
 sync_data() {
     while true; do
         echo "Sync process started at $(date)"
 
-        # Make sure the data directory exists (adjust to your actual path)
-        STORAGE_PATH="/OpenList/data"
+        # Make sure the data directory exists
        if [ -d "${STORAGE_PATH}" ]; then
            # Create a backup
            timestamp=$(date +%Y%m%d_%H%M%S)
            backup_file="backup_${timestamp}.tar.gz"
+            temp_backup="/tmp/${backup_file}"
 
-            # Compress the directory (use -C to avoid including the parent path)
-            tar -czf "/tmp/${backup_file}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"
-
-            # Upload to HuggingFace
-            echo "Uploading backup to HuggingFace..."
-            python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"
-
-            # Squash commit history
-            SQUASH_FLAG_FILE="/tmp/last_squash_time"
-            NOW=$(date +%s)
-            SEVEN_DAYS=$((7*24*60*60))
-            if [ ! -f "$SQUASH_FLAG_FILE" ]; then
-                echo $NOW > "$SQUASH_FLAG_FILE"
-                echo "Squashing commit history for the first time..."
-                python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
-            else
-                LAST=$(cat "$SQUASH_FLAG_FILE")
-                DIFF=$((NOW - LAST))
-                if [ $DIFF -ge $SEVEN_DAYS ]; then
-                    echo $NOW > "$SQUASH_FLAG_FILE"
-                    echo "More than 7 days since the last squash; squashing commit history..."
-                    python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
+            # Compress the directory
+            echo "Creating data backup..."
+            if tar -czf "${temp_backup}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"; then
+                # Upload to HuggingFace
+                echo "Uploading backup to HuggingFace..."
+                if python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "${temp_backup}" "${backup_file}"; then
+                    echo "Backup uploaded successfully"
+                else
+                    echo "Error: backup upload failed" >&2
+                fi
+
+                # Squash commit history
+                NOW=$(date +%s)
+                SEVEN_DAYS=$((7*24*60*60))
+                if [ ! -f "$SQUASH_FLAG_FILE" ]; then
+                    echo $NOW > "$SQUASH_FLAG_FILE"
+                    echo "Squashing commit history for the first time..."
+                    python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
+                else
+                    LAST=$(cat "$SQUASH_FLAG_FILE")
+                    DIFF=$((NOW - LAST))
+                    if [ $DIFF -ge $SEVEN_DAYS ]; then
+                        echo $NOW > "$SQUASH_FLAG_FILE"
+                        echo "More than 7 days since the last squash; squashing commit history..."
+                        python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
+                    else
+                        echo "Less than 7 days since the last squash; skipping this time."
+                    fi
+                fi
             else
-                echo "Less than 7 days since the last squash; skipping this time."
+                echo "Error: failed to create backup" >&2
             fi
-            fi
 
             # Clean up temporary files
-            rm -f "/tmp/${backup_file}"
+            rm -f "${temp_backup}"
         else
-            echo "Storage directory ${STORAGE_PATH} does not exist; waiting..."
+            echo "Warning: storage directory ${STORAGE_PATH} does not exist; waiting..." >&2
+        fi
+
+        # Check whether the OpenList service is still running
+        if ! kill -0 $OPENLIST_PID 2>/dev/null; then
+            echo "Error: the OpenList service has stopped; exiting the sync process" >&2
+            exit 1
         fi
 
         # Sync interval
-        SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
         echo "Next sync in ${SYNC_INTERVAL} seconds..."
         sleep $SYNC_INTERVAL
     done
 }
 
 # Start the sync process
-sync_data &
+sync_data
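The body of super_squash_history is also elided; only its error handler appears in the hunks. huggingface_hub (0.19 and later) ships an HfApi method of the same name that collapses a repo's history into a single commit, which is presumably what the function wraps. A sketch under that assumption:

    from huggingface_hub import HfApi

    def super_squash_history(token, repo_id):
        # Sketch: collapse the dataset repo's commit history into one
        # commit so the periodic backup uploads stop accumulating
        # revisions on the Hub.
        api = HfApi(token=token)
        try:
            api.super_squash_history(repo_id=repo_id, repo_type="dataset")
            print("History squash complete.")
        except Exception as e:
            print(f"Error squashing history: {str(e)}")
            return False
        return True

Squashing is destructive: earlier revisions become unrecoverable afterwards, which is acceptable for a rolling backup mirror but worth keeping in mind before reusing this pattern elsewhere.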
 