#!/bin/sh |
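
# Periodically back up $HOME/app/data to a HuggingFace dataset repo as
# timestamped tar.gz archives, restore the latest backup on startup, and
# squash the repo's commit history weekly.
# Requires HF_TOKEN and DATASET_ID in the environment.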

if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
    echo "HF_TOKEN or DATASET_ID not set; backup is unavailable"
    exit 1
fi
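
# Activate the virtualenv that provides huggingface_hub.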
. "$HOME/venv/bin/activate"
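
# Write the Python helper that talks to the HuggingFace Hub.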
cat > hf_sync.py << 'EOL' |
import os
import sys
import tarfile
import tempfile

from huggingface_hub import HfApi


def manage_backups(api, repo_id, max_files=50): |
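    """Prune the oldest backup_*.tar.gz files so the repo keeps under max_files of them."""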
    files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
    backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
    backup_files.sort()
    if len(backup_files) >= max_files:
        files_to_delete = backup_files[:(len(backup_files) - max_files + 1)]
        for file_to_delete in files_to_delete:
            try:
                api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type="dataset")
                print(f'Deleted old backup: {file_to_delete}')
            except Exception as e:
                print(f'Error deleting {file_to_delete}: {str(e)}')


def upload_backup(file_path, file_name, token, repo_id):
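    """Upload one archive to the dataset repo, then prune old backups."""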
    api = HfApi(token=token)
    try:
        api.upload_file(
            path_or_fileobj=file_path,
            path_in_repo=file_name,
            repo_id=repo_id,
            repo_type="dataset"
        )
        print(f"Uploaded {file_name}")
        manage_backups(api, repo_id)
    except Exception as e:
        print(f"Error uploading file: {str(e)}")


def download_latest_backup(token, repo_id, extract_path):
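    """Fetch the newest backup archive and unpack it into extract_path."""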
    try:
        api = HfApi(token=token)
        files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
        backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
        if not backup_files:
            print("No backup files found")
            return
        latest_backup = sorted(backup_files)[-1]
        with tempfile.TemporaryDirectory() as temp_dir:
            filepath = api.hf_hub_download(
                repo_id=repo_id,
                filename=latest_backup,
                repo_type="dataset",
                local_dir=temp_dir
            )
            if filepath and os.path.exists(filepath):
                with tarfile.open(filepath, 'r:gz') as tar:
                    tar.extractall(extract_path)
                print(f"Restored backup: {latest_backup}")
    except Exception as e:
        print(f"Error downloading backup: {str(e)}")


def super_squash_history(token, repo_id):
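    """Squash the dataset repo's commit history into a single commit."""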
    try:
        api = HfApi(token=token)
        api.super_squash_history(repo_id=repo_id, repo_type="dataset")
        print("History squash complete.")
    except Exception as e:
        print(f"Error squashing history: {str(e)}")


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("usage: hf_sync.py {upload|download|super_squash} TOKEN REPO_ID [args...]")
        sys.exit(1)
    action = sys.argv[1]
    token = sys.argv[2]
    repo_id = sys.argv[3]
    if action == "upload":
        file_path = sys.argv[4]
        file_name = sys.argv[5]
        upload_backup(file_path, file_name, token, repo_id)
    elif action == "download":
        extract_path = sys.argv[4] if len(sys.argv) > 4 else '.'
        download_latest_backup(token, repo_id, extract_path)
    elif action == "super_squash":
        super_squash_history(token, repo_id)
EOL
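
# Restore the most recent backup (if any) before starting the sync loop.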
echo "Downloading the latest backup from HuggingFace..."
python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "$HOME/app"
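
# Background loop: archive the data directory, upload it to the dataset repo,
# and squash the repo's commit history at most once every seven days.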
sync_data() {
    while true; do
        echo "Sync process started at $(date)"

        STORAGE_PATH="$HOME/app/data"
        if [ -d "${STORAGE_PATH}" ]; then
            timestamp=$(date +%Y%m%d_%H%M%S)
            backup_file="backup_${timestamp}.tar.gz"
            tar -czf "/tmp/${backup_file}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"

            echo "Uploading backup to HuggingFace..."
            python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"
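
            # /tmp/last_squash_time stores the epoch time of the last history
            # squash; only squash again once seven days have passed.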
            SQUASH_FLAG_FILE="/tmp/last_squash_time"
            NOW=$(date +%s)
            SEVEN_DAYS=$((7*24*60*60))
            if [ ! -f "$SQUASH_FLAG_FILE" ]; then
                echo "$NOW" > "$SQUASH_FLAG_FILE"
                echo "Squashing commit history for the first time..."
                python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
            else
                LAST=$(cat "$SQUASH_FLAG_FILE")
                DIFF=$((NOW - LAST))
                if [ "$DIFF" -ge "$SEVEN_DAYS" ]; then
                    echo "$NOW" > "$SQUASH_FLAG_FILE"
                    echo "More than 7 days since the last squash; squashing commit history..."
                    python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
                else
                    echo "Less than 7 days since the last squash; skipping this time."
                fi
            fi

            rm -f "/tmp/${backup_file}"
        else
            echo "Storage directory ${STORAGE_PATH} does not exist; waiting..."
        fi

        SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
        echo "Next sync in ${SYNC_INTERVAL} seconds..."
        sleep "$SYNC_INTERVAL"
    done
}
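
# Run the sync loop in the background so the rest of startup can continue.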
sync_data &