nonamelife committed on
Commit 21c551e · verified · 1 Parent(s): 7f4e000

Update app.py

Files changed (1)
  1. app.py +139 -93
app.py CHANGED
@@ -1,93 +1,139 @@
- import os
- import numpy as np
- import tensorflow as tf
- from flask import Flask, request, render_template, jsonify
- from tensorflow.keras.utils import load_img, img_to_array
- from werkzeug.utils import secure_filename
- from datetime import datetime
-
- app = Flask(__name__)
-
- # Load the trained model
- MODEL_PATH = r"model.keras" # Update to correct path
- model = tf.keras.models.load_model(MODEL_PATH)
-
- # Configurations
- UPLOAD_FOLDER = os.path.join('static', 'uploads')
- ALLOWED_EXTENSIONS = {'jpg', 'jpeg', 'png'}
- app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
- os.makedirs(UPLOAD_FOLDER, exist_ok=True)
-
- def allowed_file(filename):
-     return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
-
- def preprocess_image(image_path):
-     img = load_img(image_path, target_size=(224, 224))
-     img_array = img_to_array(img) / 255.0
-     return np.expand_dims(img_array, axis=0)
-
- @app.route('/')
- def index():
-     return render_template('home.html')
-
- @app.route('/tool')
- def tool():
-     return render_template('tool.html')
-
- @app.route('/about')
- def about():
-     return render_template('about.html')
-
- @app.route('/contact')
- def contact():
-     return render_template('contact.html')
-
- @app.route('/predict', methods=['POST'])
- def predict():
-     if 'file' not in request.files:
-         return jsonify({'error': 'No files uploaded'}), 400
-
-     files = request.files.getlist('file')
-     if not files or all(f.filename == '' for f in files):
-         return jsonify({'error': 'No files selected'}), 400
-
-     results = []
-     for file in files:
-         if file and allowed_file(file.filename):
-             filename = secure_filename(file.filename)
-             timestamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
-             unique_filename = f"{timestamp}_{filename}"
-             file_path = os.path.join(app.config['UPLOAD_FOLDER'], unique_filename)
-             file.save(file_path)
-
-             try:
-                 img_array = preprocess_image(file_path)
-                 prediction = model.predict(img_array)[0][0]
-                 label = "Dirty" if prediction > 0.5 else "Clean"
-                 confidence = prediction if label == "Dirty" else 1 - prediction
-
-                 results.append({
-                     'label': label,
-                     'confidence': f"{confidence:.2%}",
-                     'image_url': f"/static/uploads/{unique_filename}"
-                 })
-             except Exception as e:
-                 results.append({
-                     'label': 'Error',
-                     'confidence': 'N/A',
-                     'image_url': None,
-                     'error': str(e)
-                 })
-         else:
-             results.append({
-                 'label': 'Error',
-                 'confidence': 'N/A',
-                 'image_url': None,
-                 'error': f"Invalid file type: {file.filename}"
-             })
-
-     # Render a results page and pass results into it
-     return render_template('results.html', results=results)
-
- if __name__ == '__main__':
-     app.run(debug=True)
+ import os
+ import numpy as np
+ import tensorflow as tf
+ from flask import Flask, request, render_template, jsonify
+ from tensorflow.keras.utils import load_img, img_to_array
+ from werkzeug.utils import secure_filename
+ from datetime import datetime
+ from huggingface_hub import hf_hub_download # New import!
+ import time
+
+ app = Flask(__name__)
+
+ # --- Model Loading Configuration ---
+ MODEL_FILE_NAME = "model.keras"
+ # REPLACE THIS WITH YOUR HUGGING FACE MODEL REPO ID
+ # Format: "your-username/your-model-repo-name"
+ HF_MODEL_REPO_ID = "YOUR_USERNAME/garbage-detection-model" # Example!
+
+ # Check if model exists, if not, try to download it from Hugging Face Hub
+ if not os.path.exists(MODEL_FILE_NAME):
+     print(f"'{MODEL_FILE_NAME}' not found locally. Attempting to download from Hugging Face Hub...")
+     try:
+         # Download the model from Hugging Face Hub
+         # The downloaded file will be in a cache directory by default,
+         # so we'll move it to the current directory for easier loading.
+         model_path = hf_hub_download(repo_id=HF_MODEL_REPO_ID, filename=MODEL_FILE_NAME)
+         # Move the downloaded file to the root directory for app.py to find it easily
+         os.rename(model_path, MODEL_FILE_NAME)
+         print(f"'{MODEL_FILE_NAME}' downloaded successfully from Hugging Face Hub.")
+     except Exception as e:
+         print(f"FATAL: Could not download model from Hugging Face Hub: {e}")
+         # If download fails, the model will remain None, and prediction attempts will fail.
+         model = None
+
+ # Load the trained model
+ model = None # Initialize model to None
+ try:
+     if os.path.exists(MODEL_FILE_NAME):
+         model = tf.keras.models.load_model(MODEL_FILE_NAME)
+         print(f"Model loaded successfully from {MODEL_FILE_NAME}")
+     else:
+         print(f"Model file '{MODEL_FILE_NAME}' still not found after download attempt.")
+ except Exception as e:
+     print(f"Error loading model from {MODEL_FILE_NAME}: {e}")
+     model = None # Ensure model is None if loading fails
+
+ # Configurations
+ UPLOAD_FOLDER = os.path.join('static', 'uploads')
+ ALLOWED_EXTENSIONS = {'jpg', 'jpeg', 'png'}
+ app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
+ os.makedirs(UPLOAD_FOLDER, exist_ok=True) # Ensure uploads directory exists
+
+ def allowed_file(filename):
+     return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
+
+ def preprocess_image(image_path):
+     img = load_img(image_path, target_size=(224, 224))
+     img_array = img_to_array(img) / 255.0
+     return np.expand_dims(img_array, axis=0)
+
+ @app.route('/')
+ def index():
+     return render_template('home.html')
+
+ @app.route('/tool')
+ def tool():
+     return render_template('tool.html')
+
+ @app.route('/about')
+ def about():
+     return render_template('about.html')
+
+ @app.route('/contact')
+ def contact():
+     return render_template('contact.html')
+
+ @app.route('/predict', methods=['POST'])
+ def predict():
+     if model is None:
+         return jsonify({'error': 'Model not loaded. Please check server logs.'}), 500
+
+     if 'file' not in request.files:
+         return jsonify({'error': 'No files uploaded'}), 400
+
+     files = request.files.getlist('file')
+     if not files or all(f.filename == '' for f in files):
+         return jsonify({'error': 'No files selected'}), 400
+
+     results = []
+     for file in files:
+         file_path = None
+         if file and allowed_file(file.filename):
+             filename = secure_filename(file.filename)
+             timestamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
+             unique_filename = f"{timestamp}_{filename}"
+             file_path = os.path.join(app.config['UPLOAD_FOLDER'], unique_filename)
+             file.save(file_path)
+
+             try:
+                 img_array = preprocess_image(file_path)
+                 prediction = model.predict(img_array)[0][0]
+                 label = "Dirty" if prediction > 0.5 else "Clean"
+                 confidence = prediction if label == "Dirty" else 1 - prediction
+
+                 results.append({
+                     'label': label,
+                     'confidence': f"{confidence:.2%}",
+                     'image_url': f"/static/uploads/{unique_filename}"
+                 })
+             except Exception as e:
+                 results.append({
+                     'label': 'Error',
+                     'confidence': 'N/A',
+                     'image_url': None,
+                     'error': str(e)
+                 })
+             finally:
+                 # Clean up the uploaded file after processing
+                 if file_path and os.path.exists(file_path):
+                     try:
+                         os.remove(file_path)
+                         print(f"Deleted uploaded file: {file_path}")
+                     except Exception as e:
+                         print(f"Error deleting file {file_path}: {e}")
+         else:
+             results.append({
+                 'label': 'Error',
+                 'confidence': 'N/A',
+                 'image_url': None,
+                 'error': f"Invalid file type: {file.filename}"
+             })
+
+     return render_template('results.html', results=results)
+
+ if __name__ == '__main__':
+     # Hugging Face Spaces sets the PORT environment variable
+     # Default to 7860 as it's common for HF Spaces apps
+     port = int(os.environ.get('PORT', 7860))
+     app.run(host='0.0.0.0', port=port, debug=True)
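
A minimal client-side sketch (not part of the commit) of how the updated /predict route could be exercised, assuming the app is running locally on the default port 7860 and that a file named sample.jpg exists in the client's working directory; the multipart field name 'file' matches request.files.getlist('file') above, and the response body is the rendered results.html template.

import requests  # assumed client-side dependency; app.py itself does not use it

# Send one image to the /predict route; more ("file", ...) tuples could be
# appended to the list, since the route iterates over every uploaded file.
with open("sample.jpg", "rb") as f:
    response = requests.post(
        "http://localhost:7860/predict",
        files=[("file", ("sample.jpg", f, "image/jpeg"))],
    )

print(response.status_code)  # 200 on success; 400/500 on the error paths above
print(response.text[:300])   # start of the rendered results.html (label, confidence)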