lzwqx committed on
Commit 883dc50 · verified · 1 Parent(s): 43acb38

Delete sync_data.sh

Files changed (1)
  1. sync_data.sh +0 -151
sync_data.sh DELETED
@@ -1,151 +0,0 @@
- #!/bin/sh
- set -e
-
- # Required environment variables
- if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
-     echo "Please set the HF_TOKEN and DATASET_ID environment variables"
-     exit 1
- fi
-
- # Activate the Python virtual environment (adjust the path as needed)
- . "$HOME/venv/bin/activate"
-
- STORAGE_PATH="$HOME/app/data"
- FLAG_FILE="$HOME/.hf_backup_first_done"
-
- # Generate the hf_sync.py helper script
- cat > hf_sync.py << 'EOL'
- from huggingface_hub import HfApi
- import sys, os, tarfile, tempfile
-
- def manage_backups(api, repo_id, max_files=50):
-     files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
-     backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
-     backup_files.sort()
-     if len(backup_files) >= max_files:
-         for file_to_delete in backup_files[:(len(backup_files) - max_files + 1)]:
-             try:
-                 api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type="dataset")
-                 print(f'Deleted old backup: {file_to_delete}')
-             except Exception as e:
-                 print(f'Error deleting {file_to_delete}: {str(e)}')
-
- def upload_backup(file_path, file_name, token, repo_id):
-     api = HfApi(token=token)
-     try:
-         api.upload_file(
-             path_or_fileobj=file_path,
-             path_in_repo=file_name,
-             repo_id=repo_id,
-             repo_type="dataset"
-         )
-         print(f"Successfully uploaded {file_name}")
-         manage_backups(api, repo_id)
-     except Exception as e:
-         print(f"Error uploading file: {str(e)}")
-
- def download_latest_backup(token, repo_id, extract_path):
-     try:
-         api = HfApi(token=token)
-         files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
-         backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
-         if not backup_files:
-             print("No backup files found")
-             return
-         latest_backup = sorted(backup_files)[-1]
-         with tempfile.TemporaryDirectory() as temp_dir:
-             filepath = api.hf_hub_download(
-                 repo_id=repo_id,
-                 filename=latest_backup,
-                 repo_type="dataset",
-                 local_dir=temp_dir
-             )
-             if filepath and os.path.exists(filepath):
-                 with tarfile.open(filepath, 'r:gz') as tar:
-                     tar.extractall(extract_path)
-                 print(f"Successfully restored backup: {latest_backup}")
-     except Exception as e:
-         print(f"Error downloading backup: {str(e)}")
-
- def super_squash_history(token, repo_id):
-     try:
-         api = HfApi(token=token)
-         api.super_squash_history(repo_id=repo_id, repo_type="dataset")
-         print("History squash complete.")
-     except Exception as e:
-         print(f"Error squashing history: {str(e)}")
-
- if __name__ == "__main__":
-     action = sys.argv[1]
-     token = sys.argv[2]
-     repo_id = sys.argv[3]
-     if action == "upload":
-         file_path = sys.argv[4]
-         file_name = sys.argv[5]
-         upload_backup(file_path, file_name, token, repo_id)
-     elif action == "download":
-         extract_path = sys.argv[4] if len(sys.argv) > 4 else '.'
-         download_latest_backup(token, repo_id, extract_path)
-     elif action == "super_squash":
-         super_squash_history(token, repo_id)
- EOL
-
- upload_backup() {
-     if [ -d "${STORAGE_PATH}" ]; then
-         echo "Uploading backup data..."
-         timestamp=$(date +%Y%m%d_%H%M%S)
-         backup_file="backup_${timestamp}.tar.gz"
-         tar -czf "/tmp/${backup_file}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"
-         python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"
-         rm -f "/tmp/${backup_file}"
-     else
-         echo "Data directory does not exist; cannot upload backup"
-     fi
- }
-
- download_restore() {
-     echo "Downloading the latest backup from HuggingFace and restoring..."
-     python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "$HOME/app"
- }
-
- if [ ! -f "$FLAG_FILE" ]; then
-     echo "First run; starting backup upload..."
-     upload_backup
-     touch "$FLAG_FILE"
-     echo "First backup complete; exiting."
-     exit 0
- fi
-
- sync_data() {
-     while true; do
-         echo "Sync process started at $(date)"
-         upload_backup
-         download_restore
-
-         SQUASH_FLAG_FILE="/tmp/last_squash_time"
-         NOW=$(date +%s)
-         SEVEN_DAYS=$((7*24*60*60))
-
-         if [ ! -f "$SQUASH_FLAG_FILE" ]; then
-             echo $NOW > "$SQUASH_FLAG_FILE"
-             echo "First history squash..."
-             python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
-         else
-             LAST=$(cat "$SQUASH_FLAG_FILE")
-             DIFF=$((NOW - LAST))
-             if [ $DIFF -ge $SEVEN_DAYS ]; then
-                 echo $NOW > "$SQUASH_FLAG_FILE"
-                 echo "More than 7 days elapsed; squashing commit history..."
-                 python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
-             else
-                 echo "Less than 7 days elapsed; skipping history squash."
-             fi
-         fi
-
-         SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
-         echo "Next sync in ${SYNC_INTERVAL} seconds..."
-         sleep $SYNC_INTERVAL
-     done
- }
-
- sync_data &
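
For context, the deleted script was self-contained: it required only the two environment variables checked at its top, plus an optional sync interval. A minimal invocation sketch (the token and repo id below are placeholders, not values from this commit):

    # Hypothetical example of how sync_data.sh was run before this commit.
    # HF_TOKEN and DATASET_ID are the two variables the script requires;
    # SYNC_INTERVAL is optional and defaults to 7200 seconds (2 hours).
    export HF_TOKEN="hf_xxx"              # placeholder token, not a real credential
    export DATASET_ID="user/backup-repo"  # placeholder dataset repo id
    export SYNC_INTERVAL=3600             # optional: sync hourly instead
    sh sync_data.sh

On the first run the script only uploads an initial backup and exits; on subsequent runs it launches the background sync loop.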