kingtest committed
Commit 70783a9 · verified · 1 Parent(s): 9a8d372

Update sync_data.sh

Files changed (1)
  1. sync_data.sh +47 -72
sync_data.sh CHANGED
@@ -2,8 +2,8 @@
 
 # Check environment variables
 if [[ -z "$HF_TOKEN" ]] || [[ -z "$DATASET_ID" ]]; then
-    echo "缺少 HF_TOKEN DATASET_ID 环境变量,无法启用备份功能"
-    exit 0
+    echo "Starting without backup functionality - missing HF_TOKEN or DATASET_ID"
+    exit 0
 fi
 
 # Activate the virtual environment
@@ -11,29 +11,28 @@ source /opt/venv/bin/activate
 
 # Upload backup
 upload_backup() {
-    file_path="$1"
-    file_name="$2"
-    token="$HF_TOKEN"
-    repo_id="$DATASET_ID"
+    file_path="$1"
+    file_name="$2"
+    token="$HF_TOKEN"
+    repo_id="$DATASET_ID"
 
-    python3 -c "
+    python3 -c "
 from huggingface_hub import HfApi
 import sys
 import os
-
-def manage_backups(api, repo_id, max_files=50):
+def manage_backups(api, repo_id, max_files=30):
     files = api.list_repo_files(repo_id=repo_id, repo_type='dataset')
     backup_files = [f for f in files if f.startswith('qinglong_backup_') and f.endswith('.tar.gz')]
     backup_files.sort()
+
     if len(backup_files) >= max_files:
        files_to_delete = backup_files[:(len(backup_files) - max_files + 1)]
        for file_to_delete in files_to_delete:
            try:
                api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type='dataset')
-               print(f'已删除旧备份: {file_to_delete}')
+               print(f'Deleted old backup: {file_to_delete}')
            except Exception as e:
-               print(f'删除 {file_to_delete} 时出错: {str(e)}')
-
+               print(f'Error deleting {file_to_delete}: {str(e)}')
 api = HfApi(token='$token')
 try:
     api.upload_file(
@@ -42,34 +41,36 @@ try:
         repo_id='$repo_id',
         repo_type='dataset'
     )
-    print(f'成功上传 $file_name')
+    print(f'Successfully uploaded $file_name')
+
     manage_backups(api, '$repo_id')
 except Exception as e:
-    print(f'上传文件时出错: {str(e)}')
+    print(f'Error uploading file: {str(e)}')
 "
 }
 
 # Download latest backup
 download_latest_backup() {
-    token="$HF_TOKEN"
-    repo_id="$DATASET_ID"
+    token="$HF_TOKEN"
+    repo_id="$DATASET_ID"
 
-    python3 -c "
+    python3 -c "
 from huggingface_hub import HfApi
 import sys
 import os
 import tarfile
 import tempfile
-
 api = HfApi(token='$token')
 try:
     files = api.list_repo_files(repo_id='$repo_id', repo_type='dataset')
     backup_files = [f for f in files if f.startswith('qinglong_backup_') and f.endswith('.tar.gz')]
+
     if not backup_files:
-        print('未找到备份文件')
+        print('No backup files found')
        sys.exit()
-
+
     latest_backup = sorted(backup_files)[-1]
+
     with tempfile.TemporaryDirectory() as temp_dir:
         filepath = api.hf_hub_download(
             repo_id='$repo_id',
@@ -81,67 +82,41 @@ try:
         if filepath and os.path.exists(filepath):
             with tarfile.open(filepath, 'r:gz') as tar:
                 tar.extractall('/ql/data')
-                print(f'成功从 {latest_backup} 恢复备份')
+                print(f'Successfully restored backup from {latest_backup}')
+
 except Exception as e:
-    print(f'下载备份时出错: {str(e)}')
+    print(f'Error downloading backup: {str(e)}')
 "
 }
 
-# Clean up local backup files
-cleanup_local_backups() {
-    local_backup_dir="/tmp"
-    max_local_backups=5
-
-    echo "正在清理本地备份文件..."
-
-    # List local backup files sorted by time
-    backup_files=$(find ${local_backup_dir} -name "qinglong_backup_*.tar.gz" -type f | sort)
-
-    # Count how many files need deleting
-    file_count=$(echo "$backup_files" | wc -l)
-    files_to_delete=$((file_count - max_local_backups))
-
-    # Delete files if necessary
-    if [ $files_to_delete -gt 0 ]; then
-        echo "发现 $file_count 个备份文件,正在删除 $files_to_delete 个旧文件"
-        echo "$backup_files" | head -n $files_to_delete | xargs rm -f
-        echo "本地清理完成"
-    else
-        echo "无需删除本地备份文件"
-    fi
-}
-
 # Download the latest backup on first start
-echo "正在从 HuggingFace 下载最新备份..."
+echo "Downloading latest backup from HuggingFace..."
 download_latest_backup
 
 # Sync function
 sync_data() {
-    while true; do
-        echo "开始同步进程,当前时间 $(date)"
-        if [ -d /ql/data ]; then
-            timestamp=$(date +%Y%m%d_%H%M%S)
-            backup_file="qinglong_backup_${timestamp}.tar.gz"
-
-            # Compress the data directory
-            tar -czf "/tmp/${backup_file}" -C /ql/data .
-            echo "正在上传备份到 HuggingFace..."
-            upload_backup "/tmp/${backup_file}" "${backup_file}"
-
-            # Clean up local backup files
-            cleanup_local_backups
-
-            # Remove the current backup file
-            rm -f "/tmp/${backup_file}"
-            echo "备份完成并清理了临时文件"
-        else
-            echo "数据目录尚不存在,等待下次同步..."
-        fi
-
-        SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
-        echo "下次同步将在 ${SYNC_INTERVAL} 秒后进行..."
-        sleep $SYNC_INTERVAL
-    done
+    while true; do
+        echo "Starting sync process at $(date)"
+
+        if [ -d /ql/data ]; then
+            timestamp=$(date +%Y%m%d_%H%M%S)
+            backup_file="qinglong_backup_${timestamp}.tar.gz"
+
+            # Compress the data directory
+            tar -czf "/tmp/${backup_file}" -C /ql/data .
+
+            echo "Uploading backup to HuggingFace..."
+            upload_backup "/tmp/${backup_file}" "${backup_file}"
+
+            rm -f "/tmp/${backup_file}"
+        else
+            echo "Data directory does not exist yet, waiting for next sync..."
+        fi
+
+        SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
+        echo "Next sync in ${SYNC_INTERVAL} seconds..."
+        sleep $SYNC_INTERVAL
+    done
 }
 
 # Start the sync process
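A note on the retention rule this commit tightens from max_files=50 to 30: because every backup name embeds a %Y%m%d_%H%M%S timestamp, plain lexicographic sorting is also chronological, and manage_backups() deletes the oldest files so that, counting the upload about to happen, at most max_files remain. A minimal standalone sketch of that logic (the sample filenames are hypothetical; no Hub calls involved):

# Standalone sketch of the pruning rule used by manage_backups() above;
# the sample names below are hypothetical.
def files_to_prune(backup_files, max_files=30):
    # qinglong_backup_%Y%m%d_%H%M%S.tar.gz sorts chronologically,
    # so the lexicographically smallest names are the oldest backups.
    ordered = sorted(backup_files)
    if len(ordered) >= max_files:
        # Keep max_files - 1 so the upcoming upload lands at exactly max_files.
        return ordered[:len(ordered) - max_files + 1]
    return []

names = [f'qinglong_backup_2024010{d}_120000.tar.gz' for d in range(1, 6)]
print(files_to_prune(names, max_files=3))  # the three oldest of the five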