#!/bin/bash

# Check required environment variables
if [[ -z "$HF_TOKEN" ]] || [[ -z "$DATASET_ID" ]]; then
    echo "Starting without backup functionality - missing HF_TOKEN or DATASET_ID"
    exec /start.sh
    exit 0
fi

# Wait until the Cloudflare Tunnel (local service on port 7860) is ready
check_tunnel() {
    while ! curl -s http://localhost:7860 >/dev/null; do
        echo "Waiting for Cloudflare Tunnel to be ready..."
        sleep 5
    done
}

# Activate the Python virtual environment
source /opt/venv/bin/activate

# Upload a backup (and prune old backups afterwards)
upload_backup() {
    file_path="$1"
    file_name="$2"
    token="$HF_TOKEN"
    repo_id="$DATASET_ID"

    python3 -c "
from huggingface_hub import HfApi
import sys
import os

def manage_backups(api, repo_id, max_files=50):
    # Keep at most max_files backups; delete the oldest ones first
    files = api.list_repo_files(repo_id=repo_id, repo_type='dataset')
    backup_files = [f for f in files if f.startswith('vaultwarden_backup_') and f.endswith('.tar.gz')]
    backup_files.sort()

    if len(backup_files) >= max_files:
        delete_count = len(backup_files) - max_files + 1
        files_to_delete = backup_files[:delete_count]
        for file_to_delete in files_to_delete:
            try:
                api.delete_file(
                    path_in_repo=file_to_delete,
                    repo_id=repo_id,
                    repo_type='dataset'
                )
                print(f'Deleted old backup: {file_to_delete}')
            except Exception as e:
                print(f'Error deleting {file_to_delete}: {str(e)}')

api = HfApi(token='$token')

try:
    # Upload the new backup
    api.upload_file(
        path_or_fileobj='$file_path',
        path_in_repo='$file_name',
        repo_id='$repo_id',
        repo_type='dataset'
    )
    print('Successfully uploaded $file_name')

    # Prune old backups
    manage_backups(api, '$repo_id')
except Exception as e:
    print(f'Error uploading file: {str(e)}')
"
}

# Download and restore the latest backup
download_latest_backup() {
    token="$HF_TOKEN"
    repo_id="$DATASET_ID"

    python3 -c "
from huggingface_hub import HfApi
import sys
import os
import tarfile
import tempfile
import shutil

api = HfApi(token='$token')

try:
    files = api.list_repo_files(repo_id='$repo_id', repo_type='dataset')
    backup_files = [f for f in files if f.startswith('vaultwarden_backup_') and f.endswith('.tar.gz')]

    if not backup_files:
        print('No backup files found')
        sys.exit()

    latest_backup = sorted(backup_files)[-1]

    with tempfile.TemporaryDirectory() as temp_dir:
        filepath = api.hf_hub_download(
            repo_id='$repo_id',
            filename=latest_backup,
            repo_type='dataset',
            local_dir=temp_dir
        )

        if filepath and os.path.exists(filepath):
            # Make sure the data directory exists
            os.makedirs('/data', exist_ok=True)

            # Clear out old data so stale files do not interfere with the restore
            if os.listdir('/data'):
                for f in os.listdir('/data'):
                    item_path = os.path.join('/data', f)
                    if os.path.isfile(item_path):
                        os.remove(item_path)
                    elif os.path.isdir(item_path):
                        shutil.rmtree(item_path)

            # Extract the backup archive into /data
            with tarfile.open(filepath, 'r:gz') as tar:
                tar.extractall('/data')
            print(f'Successfully restored backup from {latest_backup}')
except Exception as e:
    print(f'Error downloading backup: {str(e)}')
    sys.exit(1)
"
}

# Restore the latest backup on first startup
echo "Downloading latest backup from HuggingFace..."
download_latest_backup

# Periodic backup loop
sync_data() {
    while true; do
        echo "Starting sync process at $(date)"

        if [ -d /data ]; then
            timestamp=$(date +%Y%m%d_%H%M%S)
            backup_file="vaultwarden_backup_${timestamp}.tar.gz"

            # Compress the data directory
            tar -czf "/tmp/${backup_file}" -C /data .

            echo "Uploading backup to HuggingFace..."
            upload_backup "/tmp/${backup_file}" "${backup_file}"

            rm -f "/tmp/${backup_file}"
        else
            echo "Data directory does not exist yet, waiting for next sync..."
        fi

        SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
        echo "Next sync in ${SYNC_INTERVAL} seconds..."
        sleep "${SYNC_INTERVAL}"
    done
}

# Start the Cloudflare Tunnel readiness check in the background
check_tunnel &

# Start the backup sync loop in the background
sync_data &

# Start the Vaultwarden main process
exec /start.sh
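
# ---------------------------------------------------------------------------
# Illustrative configuration sketch (not part of the script's own logic): the
# loop above only reads the environment variables HF_TOKEN, DATASET_ID and the
# optional SYNC_INTERVAL, e.g. set as Space secrets or passed via docker run.
# The values below are placeholders, not real credentials or repo names.
#
#   export HF_TOKEN="hf_xxxxxxxxxxxxxxxx"                  # HuggingFace token with write access
#   export DATASET_ID="your-username/vaultwarden-backup"   # private dataset repo used for backups
#   export SYNC_INTERVAL=3600                              # backup period in seconds (defaults to 7200)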