HuggingFace0920 committed
Commit cabc5bb · verified · 1 Parent(s): 29e7707

Create sync_data.sh

Files changed (1)
  1. sync_data.sh +153 -0
sync_data.sh ADDED
@@ -0,0 +1,153 @@
+ #!/bin/sh
+
+ # Check required environment variables
+ if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
+     echo "HF_TOKEN or DATASET_ID not set; backup is disabled"
+     exit 1
+ fi
+
+ # Generate the Python sync helper
+ cat > hf_sync.py << 'EOL'
+ # HuggingFace sync helper
+ from huggingface_hub import HfApi
+ import sys
+ import os
+ import tarfile
+ import tempfile
+
+ # Cap the number of backups; delete the oldest ones once the limit is exceeded
+ def manage_backups(api, repo_id, max_files=50):
+     files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
+     backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
+     backup_files.sort()
+     if len(backup_files) >= max_files:
+         files_to_delete = backup_files[:(len(backup_files) - max_files + 1)]
+         for file_to_delete in files_to_delete:
+             try:
+                 api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type="dataset")
+                 print(f'Deleted old backup: {file_to_delete}')
+             except Exception as e:
+                 print(f'Error deleting {file_to_delete}: {str(e)}')
+
+ # Upload a backup archive to the HuggingFace dataset repo
+ def upload_backup(file_path, file_name, token, repo_id):
+     api = HfApi(token=token)
+     try:
+         api.upload_file(
+             path_or_fileobj=file_path,
+             path_in_repo=file_name,
+             repo_id=repo_id,
+             repo_type="dataset"
+         )
+         print(f"Successfully uploaded {file_name}")
+         manage_backups(api, repo_id)
+     except Exception as e:
+         print(f"Error uploading file: {str(e)}")
+
+ # Download and extract the most recent backup
+ def download_latest_backup(token, repo_id, extract_path):
+     try:
+         api = HfApi(token=token)
+         files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
+         backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
+         if not backup_files:
+             print("No backup files found")
+             return
+         latest_backup = sorted(backup_files)[-1]
+         with tempfile.TemporaryDirectory() as temp_dir:
+             filepath = api.hf_hub_download(
+                 repo_id=repo_id,
+                 filename=latest_backup,
+                 repo_type="dataset",
+                 local_dir=temp_dir
+             )
+             if filepath and os.path.exists(filepath):
+                 with tarfile.open(filepath, 'r:gz') as tar:
+                     tar.extractall(extract_path)
+                 print(f"Successfully restored backup: {latest_backup}")
+     except Exception as e:
+         print(f"Error downloading backup: {str(e)}")
+
+ # Squash the dataset repo's commit history into a single commit
+ def super_squash_history(token, repo_id):
+     try:
+         api = HfApi(token=token)
+         api.super_squash_history(repo_id=repo_id, repo_type="dataset")
+         print("History squash completed.")
+     except Exception as e:
+         print(f"Error squashing history: {str(e)}")
+
+ # Entry point: dispatch on the requested action
+ if __name__ == "__main__":
+     action = sys.argv[1]
+     token = sys.argv[2]
+     repo_id = sys.argv[3]
+     if action == "upload":
+         file_path = sys.argv[4]
+         file_name = sys.argv[5]
+         upload_backup(file_path, file_name, token, repo_id)
+     elif action == "download":
+         extract_path = sys.argv[4] if len(sys.argv) > 4 else '.'
+         download_latest_backup(token, repo_id, extract_path)
+     elif action == "super_squash":
+         super_squash_history(token, repo_id)
+ EOL
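+
+ # Usage of the generated helper (arguments are positional; this mirrors the
+ # dispatch in the __main__ block above):
+ #   python hf_sync.py upload       <token> <repo_id> <file_path> <file_name>
+ #   python hf_sync.py download     <token> <repo_id> [extract_path]
+ #   python hf_sync.py super_squash <token> <repo_id>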
+ # On first start, restore the latest backup from HuggingFace (extracted into the app directory)
+ echo "Downloading the latest backup from HuggingFace..."
+ python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "/OpenList"
+
+ # Periodic sync loop
+ sync_data() {
+     while true; do
+         echo "Sync run started at $(date)"
+
+         # Make sure the data directory exists (adjust the path to your deployment)
+         STORAGE_PATH="/OpenList/data"
+         if [ -d "${STORAGE_PATH}" ]; then
+             # Create a timestamped backup archive
+             timestamp=$(date +%Y%m%d_%H%M%S)
+             backup_file="backup_${timestamp}.tar.gz"
+
+             # Compress the directory (-C keeps the parent path out of the archive)
+             tar -czf "/tmp/${backup_file}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"
+
+             # Upload to HuggingFace
+             echo "Uploading backup to HuggingFace..."
+             python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"
+
+             # Squash the commit history at most once every 7 days
+             SQUASH_FLAG_FILE="/tmp/last_squash_time"
+             NOW=$(date +%s)
+             SEVEN_DAYS=$((7*24*60*60))
+             if [ ! -f "$SQUASH_FLAG_FILE" ]; then
+                 echo $NOW > "$SQUASH_FLAG_FILE"
+                 echo "First history squash..."
+                 python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
+             else
+                 LAST=$(cat "$SQUASH_FLAG_FILE")
+                 DIFF=$((NOW - LAST))
+                 if [ $DIFF -ge $SEVEN_DAYS ]; then
+                     echo $NOW > "$SQUASH_FLAG_FILE"
+                     echo "More than 7 days since the last squash; squashing history..."
+                     python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
+                 else
+                     echo "Less than 7 days since the last squash; skipping."
+                 fi
+             fi
+
+             # Remove the temporary archive
+             rm -f "/tmp/${backup_file}"
+         else
+             echo "Storage directory ${STORAGE_PATH} does not exist; waiting..."
+         fi
+
+         # Sleep until the next sync (default: 7200 seconds)
+         SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
+         echo "Next sync in ${SYNC_INTERVAL} seconds..."
+         sleep $SYNC_INTERVAL
+     done
+ }
+
+ # Start the sync loop in the background
+ sync_data &
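
For context, a minimal sketch of how this script might be launched. The token and dataset values below are placeholders, and a container entrypoint that keeps the process alive is assumed; only the /OpenList paths and the SYNC_INTERVAL default come from the script itself:

    export HF_TOKEN=hf_xxxxxxxx                 # HuggingFace token with write access (placeholder)
    export DATASET_ID=username/openlist-backup  # dataset repo used as backup storage (placeholder)
    export SYNC_INTERVAL=3600                   # optional; the script defaults to 7200 seconds
    sh sync_data.sh

On start it restores the newest backup_*.tar.gz from the dataset into /OpenList, then backgrounds the sync_data loop, which archives /OpenList/data and re-uploads it every SYNC_INTERVAL seconds.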