lzwqx committed on
Commit 43acb38 · verified · 1 Parent(s): cd3da94

Update sync_data.sh

Files changed (1)
  1. sync_data.sh +44 -49
sync_data.sh CHANGED
@@ -1,38 +1,35 @@
 #!/bin/sh
+set -e
 
-# Check environment variables
+# Required environment variables
 if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
-    echo "HF_TOKEN and DATASET_ID not detected; backup is disabled"
+    echo "Please set the HF_TOKEN and DATASET_ID environment variables"
     exit 1
 fi
 
-# Activate the virtual environment
-. $HOME/venv/bin/activate
+# Activate the Python virtual environment (adjust the path as needed)
+. "$HOME/venv/bin/activate"
 
-# Generate the sync script
+STORAGE_PATH="$HOME/app/data"
+FLAG_FILE="$HOME/.hf_backup_first_done"
+
+# Generate the hf_sync.py helper script
 cat > hf_sync.py << 'EOL'
-# HuggingFace sync script
 from huggingface_hub import HfApi
-import sys
-import os
-import tarfile
-import tempfile
+import sys, os, tarfile, tempfile
 
-# Cap the number of backup files; delete the oldest once the limit is exceeded
 def manage_backups(api, repo_id, max_files=50):
     files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
     backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
     backup_files.sort()
     if len(backup_files) >= max_files:
-        files_to_delete = backup_files[:(len(backup_files) - max_files + 1)]
-        for file_to_delete in files_to_delete:
+        for file_to_delete in backup_files[:(len(backup_files) - max_files + 1)]:
             try:
                 api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type="dataset")
                 print(f'Deleted old backup: {file_to_delete}')
             except Exception as e:
                 print(f'Error deleting {file_to_delete}: {str(e)}')
 
-# Upload a backup file to HuggingFace
 def upload_backup(file_path, file_name, token, repo_id):
     api = HfApi(token=token)
     try:
@@ -47,8 +44,6 @@ def upload_backup(file_path, file_name, token, repo_id):
     except Exception as e:
         print(f"Error uploading file: {str(e)}")
 
-
-# Download the latest backup
 def download_latest_backup(token, repo_id, extract_path):
     try:
         api = HfApi(token=token)
@@ -72,7 +67,6 @@ def download_latest_backup(token, repo_id, extract_path):
     except Exception as e:
         print(f"Error downloading backup: {str(e)}")
 
-# Squash commit history
 def super_squash_history(token, repo_id):
     try:
         api = HfApi(token=token)
@@ -81,7 +75,6 @@ def super_squash_history(token, repo_id):
     except Exception as e:
         print(f"Error squashing history: {str(e)}")
 
-# Main entry point
 if __name__ == "__main__":
     action = sys.argv[1]
     token = sys.argv[2]
@@ -96,33 +89,43 @@ if __name__ == "__main__":
     elif action == "super_squash":
         super_squash_history(token, repo_id)
 EOL
-# On first start, download the latest backup from HuggingFace (extract into the app directory)
-echo "Downloading the latest backup from HuggingFace..."
-python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "$HOME/app"
 
-# Sync loop
+upload_backup() {
+    if [ -d "${STORAGE_PATH}" ]; then
+        echo "Uploading backup data..."
+        timestamp=$(date +%Y%m%d_%H%M%S)
+        backup_file="backup_${timestamp}.tar.gz"
+        tar -czf "/tmp/${backup_file}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"
+        python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"
+        rm -f "/tmp/${backup_file}"
+    else
+        echo "Data directory does not exist; cannot upload backup"
+    fi
+}
+
+download_restore() {
+    echo "Downloading the latest backup from HuggingFace and restoring..."
+    python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "$HOME/app"
+}
+
+if [ ! -f "$FLAG_FILE" ]; then
+    echo "First run; uploading initial backup..."
+    upload_backup
+    touch "$FLAG_FILE"
+    echo "Initial backup complete; exiting."
+    exit 0
+fi
+
 sync_data() {
     while true; do
         echo "Sync process started at $(date)"
-
-        # Make sure the data directory exists (adjust the path as needed)
-        STORAGE_PATH="$HOME/app/data"
-        if [ -d "${STORAGE_PATH}" ]; then
-            # Create a backup
-            timestamp=$(date +%Y%m%d_%H%M%S)
-            backup_file="backup_${timestamp}.tar.gz"
-
-            # Compress the directory (-C keeps the parent path out of the archive)
-            tar -czf "/tmp/${backup_file}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"
-
-            # Upload to HuggingFace
-            echo "Uploading backup to HuggingFace..."
-            python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"
-
-            # Squash commit history
+        upload_backup
+        download_restore
+
         SQUASH_FLAG_FILE="/tmp/last_squash_time"
         NOW=$(date +%s)
         SEVEN_DAYS=$((7*24*60*60))
+
         if [ ! -f "$SQUASH_FLAG_FILE" ]; then
             echo $NOW > "$SQUASH_FLAG_FILE"
             echo "First history squash..."
@@ -132,25 +135,17 @@ sync_data() {
             DIFF=$((NOW - LAST))
             if [ $DIFF -ge $SEVEN_DAYS ]; then
                 echo $NOW > "$SQUASH_FLAG_FILE"
-                echo "More than 7 days since the last squash; squashing commit history..."
+                echo "More than 7 days; squashing commit history..."
                 python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
             else
-                echo "Less than 7 days since the last squash; skipping this round."
+                echo "Less than 7 days; skipping history squash."
             fi
         fi
 
-        # Clean up the temporary file
-        rm -f "/tmp/${backup_file}"
-        else
-            echo "Storage directory ${STORAGE_PATH} does not exist; waiting..."
-        fi
-
-        # Sync interval
         SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
         echo "Next sync in ${SYNC_INTERVAL} seconds..."
         sleep $SYNC_INTERVAL
     done
 }
 
-# Start the sync process
-sync_data &
+sync_data &
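
For reference, a minimal launch sketch of the new version. The token and dataset names below are placeholders, not values from this commit; the script itself only requires HF_TOKEN and DATASET_ID, and SYNC_INTERVAL is optional with a 7200-second default:

    export HF_TOKEN="hf_xxxxxxxx"        # placeholder: a token with write access to the dataset
    export DATASET_ID="user/my-backups"  # placeholder: dataset repo that stores backup_*.tar.gz
    export SYNC_INTERVAL=3600            # optional; the script defaults to 7200 seconds
    sh sync_data.sh                      # first run uploads one backup and exits;
                                         # subsequent runs start the background sync loop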
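The generated hf_sync.py helper can also be driven by hand, passing an action, the token, the dataset repo id, and any file arguments, mirroring the calls the script itself makes. A sketch, assuming hf_sync.py has already been generated in the current directory and the example file name is illustrative:

    # One-off restore of the latest backup into $HOME/app:
    python hf_sync.py download "$HF_TOKEN" "$DATASET_ID" "$HOME/app"

    # One-off backup of $HOME/app/data (name matches the backup_*.tar.gz
    # pattern so manage_backups will rotate it with the rest):
    tar -czf /tmp/backup_20240101_000000.tar.gz -C "$HOME/app" data
    python hf_sync.py upload "$HF_TOKEN" "$DATASET_ID" /tmp/backup_20240101_000000.tar.gz backup_20240101_000000.tar.gz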