#!/usr/bin/env python3
"""
Helper script to prepare models for deployment
"""
import os
import zipfile
import shutil
from pathlib import Path

def setup_bert_model():
    """Extract and set up the fine-tuned BERT model."""
    zip_path = "fine_tuned_bert_sentiment.zip"
    extract_path = "./fine_tuned_bert_sentiment"

    if not os.path.exists(zip_path):
        print(f"❌ {zip_path} not found. Please upload your fine-tuned BERT model.")
        return False

    print(f"📦 Extracting {zip_path}...")

    # Create extraction directory
    os.makedirs(extract_path, exist_ok=True)

    # Extract zip file
    with zipfile.ZipFile(zip_path, 'r') as zip_ref:
        zip_ref.extractall(extract_path)

    # Verify required files exist
    required_files = [
        "config.json",
        "pytorch_model.bin",
        "tokenizer_config.json",
        "vocab.txt"
    ]
    missing_files = []
    for file in required_files:
        if not os.path.exists(os.path.join(extract_path, file)):
            missing_files.append(file)

    if missing_files:
        print(f"⚠️ Missing required files: {missing_files}")
        return False

    print("✅ BERT model setup complete!")
    return True
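
# Optional sanity check (a minimal sketch, not part of the original flow): assuming the
# extracted directory is a standard Hugging Face checkpoint, loading it once confirms that
# the weights and tokenizer are actually usable before deployment. The helper name
# `verify_bert_model` is illustrative and is not called from __main__.
def verify_bert_model(extract_path="./fine_tuned_bert_sentiment"):
    """Try loading the extracted checkpoint to confirm it is usable."""
    from transformers import AutoTokenizer, AutoModelForSequenceClassification
    try:
        # Both calls read from the local directory rather than the Hugging Face Hub.
        AutoTokenizer.from_pretrained(extract_path)
        AutoModelForSequenceClassification.from_pretrained(extract_path)
        return True
    except Exception as e:
        print(f"⚠️ Could not load extracted BERT model: {e}")
        return False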

def download_fallback_models():
    """Download fallback models if needed."""
    from transformers import AutoTokenizer, AutoModel

    print("📥 Downloading fallback models...")

    # Download SigLIP model
    try:
        AutoTokenizer.from_pretrained("google/siglip-large-patch16-384")
        AutoModel.from_pretrained("google/siglip-large-patch16-384")
        print("✅ SigLIP-Large downloaded")
    except Exception as e:
        print(f"⚠️ SigLIP-Large download failed: {e}")
        print("📥 Downloading SigLIP-Base as fallback...")
        AutoTokenizer.from_pretrained("google/siglip-base-patch16-224")
        AutoModel.from_pretrained("google/siglip-base-patch16-224")

    # Download sentiment model
    AutoTokenizer.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment-latest")
    AutoModel.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment-latest")
    print("✅ Sentiment model downloaded")
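
# Note (assumption about the surrounding app, not stated in this script): because
# from_pretrained() populates the local Hugging Face cache, a later call such as
#     pipeline("sentiment-analysis", model="cardiffnlp/twitter-roberta-base-sentiment-latest")
# should load the sentiment model from cache instead of re-downloading it at serving time.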

if __name__ == "__main__":
    print("🚀 Setting up Enhanced Ensemble Model...")

    # Setup BERT model
    bert_success = setup_bert_model()

    # Download other models
    download_fallback_models()

    if bert_success:
        print("🎉 All models ready for deployment!")
    else:
        print("⚠️ Deployment ready with fallback models. Upload your BERT model for best performance.")