Update sync_data.sh
sync_data.sh CHANGED: +80 -172
@@ -1,200 +1,128 @@
  #!/bin/sh

- #
- : ${APP_HOME:=/OpenList}
- : ${STORAGE_PATH:=$APP_HOME/data}
- : ${SYNC_INTERVAL:=7200}
- : ${MAX_BACKUPS:=50}
-
- # Check required environment variables
  if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
-     echo "
-
-     echo "Error: backup is a required feature, but the required environment variables are missing"
-     exit 1
- fi
- # Run the service directly when the backup feature is unavailable
- exec ./openlist server
  fi

  # Activate the virtual environment
-

  # Generate the sync script
  cat > hf_sync.py << 'EOL'
  from huggingface_hub import HfApi
  import sys
  import os
  import tarfile
  import tempfile
- import time
- from datetime import datetime
-
- def manage_backups(api, repo_id, max_files):
-     try:
-         files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
-         backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
-         backup_files.sort()
-
-         if len(backup_files) >= max_files:
-             files_to_delete = backup_files[:len(backup_files) - max_files + 1]
-             for file_to_delete in files_to_delete:
-                 try:
-                     api.delete_file(
-                         path_in_repo=file_to_delete,
-                         repo_id=repo_id,
-                         repo_type="dataset"
-                     )
-                     print(f'Deleted old backup: {file_to_delete}')
-                 except Exception as e:
-                     print(f'Error deleting {file_to_delete}: {str(e)}')
-     except Exception as e:
-         print(f'Error managing backups: {str(e)}')

- def upload_backup(file_path, file_name, token, repo_id, max_files):
      api = HfApi(token=token)
      try:
-         print(f"Uploading backup {file_name}...")
-         start_time = time.time()
-
          api.upload_file(
              path_or_fileobj=file_path,
              path_in_repo=file_name,
              repo_id=repo_id,
              repo_type="dataset"
          )
-
-         upload_time = time.time() - start_time
-         print(f"Successfully uploaded {file_name} (took {upload_time:.2f}s)")
-         manage_backups(api, repo_id, max_files)
-         return True
      except Exception as e:
          print(f"Error uploading file: {str(e)}")
-         return False

- def download_latest_backup(token, repo_id, extract_path, max_retries=3):
-     for attempt in range(max_retries):
-         try:
-             api = HfApi(token=token)
-             files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
-             backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
-
-             if not backup_files:
-                 print("No backup files found")
-                 return False
-
-             latest_backup = sorted(backup_files)[-1]
-             print(f"Downloading latest backup: {latest_backup}")
-
-             with tempfile.TemporaryDirectory() as temp_dir:
-                 filepath = api.hf_hub_download(
-                     repo_id=repo_id,
-                     filename=latest_backup,
-                     repo_type="dataset",
-                     local_dir=temp_dir
-                 )
-
-                 if filepath and os.path.exists(filepath):
-                     print("Extracting the backup file...")
-                     with tarfile.open(filepath, 'r:gz') as tar:
-                         tar.extractall(extract_path)
-                     print(f"Backup restored successfully: {latest_backup}")
-                     return True
-         except Exception as e:
-             print(f"Error downloading backup (attempt {attempt + 1}/{max_retries}): {str(e)}")
-             if attempt < max_retries - 1:
-                 wait_time = (attempt + 1) * 10
-                 print(f"Retrying in {wait_time}s...")
-                 time.sleep(wait_time)
-
-     return False

  def super_squash_history(token, repo_id):
      try:
          api = HfApi(token=token)
-         print("Squashing commit history...")
          api.super_squash_history(repo_id=repo_id, repo_type="dataset")
          print("History squash complete.")
      except Exception as e:
          print(f"Error squashing history: {str(e)}")

  if __name__ == "__main__":
      action = sys.argv[1]
      token = sys.argv[2]
      repo_id = sys.argv[3]
-
      if action == "upload":
          file_path = sys.argv[4]
          file_name = sys.argv[5]
-
-         upload_backup(file_path, file_name, token, repo_id, max_files)
      elif action == "download":
          extract_path = sys.argv[4] if len(sys.argv) > 4 else '.'
          download_latest_backup(token, repo_id, extract_path)
      elif action == "super_squash":
          super_squash_history(token, repo_id)
  EOL
-
-
-
- echo "Attempting to download the latest backup from HuggingFace..."
- if ! python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "${STORAGE_PATH}"; then
-     echo "Warning: initial backup download failed; continuing with an empty data directory"
-     mkdir -p "${STORAGE_PATH}"
- fi
- fi

  # Sync function
  sync_data() {
      while true; do
-         echo "
-
-         # Check the data directory
-         if [ ! -d "${STORAGE_PATH}" ]; then
-             echo "Error: storage directory ${STORAGE_PATH} does not exist"
-             sleep 60
-             continue
-         fi
-
-         # Create a backup
-         timestamp=$(date +%Y%m%d_%H%M%S)
-         backup_file="backup_${timestamp}.tar.gz"
-         temp_backup="/tmp/${backup_file}"
-
-         echo "Creating backup: ${backup_file}"
-         start_time=$(date +%s)

-         #
-
-
-
-
-
-
-
-
-
-
-
-
-
-         # Upload the backup
-         echo "Uploading backup to HuggingFace..."
-         if python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "${temp_backup}" "${backup_file}" "${MAX_BACKUPS}"; then
-             echo "Backup uploaded successfully"
-         else
-             echo "Warning: backup upload failed"
-         fi
-
-         # Clean up the temporary file
-         rm -f "${temp_backup}"
-
-         # Squash history weekly
          SQUASH_FLAG_FILE="/tmp/last_squash_time"
          NOW=$(date +%s)
          SEVEN_DAYS=$((7*24*60*60))
-
          if [ ! -f "$SQUASH_FLAG_FILE" ]; then
              echo $NOW > "$SQUASH_FLAG_FILE"
              echo "First-time history squash..."
@@ -202,47 +130,27 @@ sync_data() {
          else
              LAST=$(cat "$SQUASH_FLAG_FILE")
              DIFF=$((NOW - LAST))
-
              if [ $DIFF -ge $SEVEN_DAYS ]; then
                  echo $NOW > "$SQUASH_FLAG_FILE"
                  echo "More than 7 days since the last squash; squashing commit history..."
                  python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
              else
-
-                 echo "About ${remaining_days} days until the next history squash"
              fi
          fi
-
-
-
-         duration=$((end_time - start_time))
-         next_sync=$((SYNC_INTERVAL - duration))
-
-         if [ $next_sync -gt 0 ]; then
-             echo "Sync finished in ${duration}s; next sync in ${next_sync}s ($(date -d "@$(($(date +%s) + next_sync))" '+%Y-%m-%d %H:%M:%S'))"
-             sleep $next_sync
          else
-             echo "
          fi
      done
  }

- # Start the OpenList service
- ./openlist server &
- SERVER_PID=$!
-
  # Start the sync process
-
- sync_data &
- SYNC_PID=$!
- fi
-
- # Wait for the server process to exit
- wait $SERVER_PID
-
- # Cleanup
- if [ -n "$SYNC_PID" ]; then
-     kill $SYNC_PID
- fi
-
- exit 0
  #!/bin/sh

+ # Check environment variables
  if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
+     echo "HF_TOKEN or DATASET_ID not detected; the backup feature is unavailable"
+     exit 1
  fi

  # Activate the virtual environment
+ . $HOME/venv/bin/activate

  # Generate the sync script
  cat > hf_sync.py << 'EOL'
+ # HuggingFace sync script
  from huggingface_hub import HfApi
  import sys
  import os
  import tarfile
  import tempfile

+ # Cap the number of backup files; the oldest are deleted once the limit is exceeded
+ def manage_backups(api, repo_id, max_files=50):
+     files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
+     backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
+     backup_files.sort()
+     if len(backup_files) >= max_files:
+         files_to_delete = backup_files[:(len(backup_files) - max_files + 1)]
+         for file_to_delete in files_to_delete:
+             try:
+                 api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type="dataset")
+                 print(f'Deleted old backup: {file_to_delete}')
+             except Exception as e:
+                 print(f'Error deleting {file_to_delete}: {str(e)}')
+
+ # Upload a backup file to HuggingFace
+ def upload_backup(file_path, file_name, token, repo_id):
      api = HfApi(token=token)
      try:
          api.upload_file(
              path_or_fileobj=file_path,
              path_in_repo=file_name,
              repo_id=repo_id,
              repo_type="dataset"
          )
+         print(f"Successfully uploaded {file_name}")
+         manage_backups(api, repo_id)
      except Exception as e:
          print(f"Error uploading file: {str(e)}")


+ # Download the latest backup
+ def download_latest_backup(token, repo_id, extract_path):
+     try:
+         api = HfApi(token=token)
+         files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
+         backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
+         if not backup_files:
+             print("No backup files found")
+             return
+         latest_backup = sorted(backup_files)[-1]
+         with tempfile.TemporaryDirectory() as temp_dir:
+             filepath = api.hf_hub_download(
+                 repo_id=repo_id,
+                 filename=latest_backup,
+                 repo_type="dataset",
+                 local_dir=temp_dir
+             )
+             if filepath and os.path.exists(filepath):
+                 with tarfile.open(filepath, 'r:gz') as tar:
+                     tar.extractall(extract_path)
+                 print(f"Backup restored successfully: {latest_backup}")
+     except Exception as e:
+         print(f"Error downloading backup: {str(e)}")
+
+ # Squash the commit history
  def super_squash_history(token, repo_id):
      try:
          api = HfApi(token=token)
          api.super_squash_history(repo_id=repo_id, repo_type="dataset")
          print("History squash complete.")
      except Exception as e:
          print(f"Error squashing history: {str(e)}")

+ # Entry point
  if __name__ == "__main__":
      action = sys.argv[1]
      token = sys.argv[2]
      repo_id = sys.argv[3]
      if action == "upload":
          file_path = sys.argv[4]
          file_name = sys.argv[5]
+         upload_backup(file_path, file_name, token, repo_id)
      elif action == "download":
          extract_path = sys.argv[4] if len(sys.argv) > 4 else '.'
          download_latest_backup(token, repo_id, extract_path)
      elif action == "super_squash":
          super_squash_history(token, repo_id)
  EOL
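
For reference, the generated helper is driven by positional arguments; a usage sketch (the file names are illustrative, not part of this commit):

    python hf_sync.py upload "$HF_TOKEN" "$DATASET_ID" /tmp/backup_X.tar.gz backup_X.tar.gz
    python hf_sync.py download "$HF_TOKEN" "$DATASET_ID" "$HOME"
    python hf_sync.py super_squash "$HF_TOKEN" "$DATASET_ID"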
+ # On first start, restore the latest backup from HuggingFace
+ # (extracted under $HOME so the archive's app/ prefix lands back at $HOME/app)
+ echo "Downloading the latest backup from HuggingFace..."
+ python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "$HOME"

  # Sync function
  sync_data() {
      while true; do
+         echo "Sync run started at $(date)"

+         # Make sure the data directory exists (adjust the path to your setup)
+         STORAGE_PATH="$HOME/app"
+         if [ -d "${STORAGE_PATH}" ]; then
+             # Create a backup
+             timestamp=$(date +%Y%m%d_%H%M%S)
+             backup_file="backup_${timestamp}.tar.gz"
+
+             # Compress the directory (-C keeps the parent path out of the archive)
+             tar -czf "/tmp/${backup_file}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"
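
Note on the layout: because -C switches into the parent directory before archiving, entries are stored under a relative app/ prefix rather than as absolute paths, which is why the restore step above extracts under $HOME. A quick way to inspect an archive (output illustrative):

    tar -tzf "/tmp/${backup_file}" | head    # entries such as app/ and app/data/...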
+
+             # Upload to HuggingFace
+             echo "Uploading backup to HuggingFace..."
+             python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"
+
+             # Squash the commit history
              SQUASH_FLAG_FILE="/tmp/last_squash_time"
              NOW=$(date +%s)
              SEVEN_DAYS=$((7*24*60*60))
              if [ ! -f "$SQUASH_FLAG_FILE" ]; then
                  echo $NOW > "$SQUASH_FLAG_FILE"
                  echo "First-time history squash..."
                  python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
              else
                  LAST=$(cat "$SQUASH_FLAG_FILE")
                  DIFF=$((NOW - LAST))
                  if [ $DIFF -ge $SEVEN_DAYS ]; then
                      echo $NOW > "$SQUASH_FLAG_FILE"
                      echo "More than 7 days since the last squash; squashing commit history..."
                      python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
                  else
+                     echo "Less than 7 days since the last squash; skipping this round."
                  fi
              fi
+
+             # Clean up the temporary file
+             rm -f "/tmp/${backup_file}"
          else
+             echo "Storage directory ${STORAGE_PATH} does not exist; waiting..."
          fi
+
+         # Sync interval
+         SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
+         echo "Next sync in ${SYNC_INTERVAL} seconds..."
+         sleep $SYNC_INTERVAL
      done
  }

  # Start the sync process
+ sync_data &
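
A minimal launch sketch, assuming the script is saved as sync_data.sh; the token and dataset values are placeholders, not part of this commit:

    export HF_TOKEN="hf_xxxxxxxxxxxx"             # HuggingFace access token (placeholder)
    export DATASET_ID="username/openlist-backup"  # dataset repo that stores the backups (placeholder)
    export SYNC_INTERVAL=3600                     # optional; the script defaults to 7200 seconds
    sh sync_data.sh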