Chanlefe committed
Commit aa1262d · verified · 1 Parent(s): b369404

Create model_setup.py

Files changed (1)
  1. model_setup.py +83 -0
model_setup.py ADDED
@@ -0,0 +1,83 @@
+ #!/usr/bin/env python3
+ """
+ Helper script to prepare models for deployment
+ """
+ import os
+ import zipfile
+ import shutil
+ from pathlib import Path
+
+ def setup_bert_model():
+     """Extract and set up the fine-tuned BERT model"""
+
+     zip_path = "fine_tuned_bert_sentiment.zip"
+     extract_path = "./fine_tuned_bert_sentiment"
+
+     if not os.path.exists(zip_path):
+         print(f"❌ {zip_path} not found. Please upload your fine-tuned BERT model.")
+         return False
+
+     print(f"📦 Extracting {zip_path}...")
+
+     # Create extraction directory
+     os.makedirs(extract_path, exist_ok=True)
+
+     # Extract zip file
+     with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+         zip_ref.extractall(extract_path)
+
+     # Verify required files exist
+     required_files = [
+         "config.json",
+         "pytorch_model.bin",
+         "tokenizer_config.json",
+         "vocab.txt"
+     ]
+
+     missing_files = []
+     for file in required_files:
+         if not os.path.exists(os.path.join(extract_path, file)):
+             missing_files.append(file)
+
+     if missing_files:
+         print(f"⚠️ Missing required files: {missing_files}")
+         return False
+
+     print("✅ BERT model setup complete!")
+     return True
+
+ def download_fallback_models():
+     """Download fallback models if needed"""
+     from transformers import AutoTokenizer, AutoModel
+
+     print("📥 Downloading fallback models...")
+
+     # Download SigLIP model
+     try:
+         AutoTokenizer.from_pretrained("google/siglip-large-patch16-384")
+         AutoModel.from_pretrained("google/siglip-large-patch16-384")
+         print("✅ SigLIP-Large downloaded")
+     except Exception as e:
+         print(f"⚠️ SigLIP-Large download failed: {e}")
+         print("📥 Downloading SigLIP-Base as fallback...")
+         AutoTokenizer.from_pretrained("google/siglip-base-patch16-224")
+         AutoModel.from_pretrained("google/siglip-base-patch16-224")
+
+     # Download sentiment model
+     AutoTokenizer.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment-latest")
+     AutoModel.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment-latest")
+     print("✅ Sentiment model downloaded")
+
+ if __name__ == "__main__":
+     print("🚀 Setting up Enhanced Ensemble Model...")
+
+     # Setup BERT model
+     bert_success = setup_bert_model()
+
+     # Download other models
+     download_fallback_models()
+
+     if bert_success:
+         print("🎉 All models ready for deployment!")
+     else:
+         print("⚠️ Deployment ready with fallback models. Upload your BERT model for best performance.")
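
For reference, a minimal usage sketch (not part of this commit) of how the extracted checkpoint could be loaded after running the script, assuming ./fine_tuned_bert_sentiment is a standard Hugging Face sequence-classification checkpoint saved with save_pretrained():

# Hypothetical inference sketch, not part of model_setup.py.
# Assumes the directory created by setup_bert_model() contains a
# fine-tuned BERT sequence-classification model and its tokenizer.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("./fine_tuned_bert_sentiment")
model = AutoModelForSequenceClassification.from_pretrained("./fine_tuned_bert_sentiment")
model.eval()

inputs = tokenizer("Great product, would buy again!", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(logits.argmax(dim=-1).item())  # predicted sentiment class index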