Update app.py

app.py CHANGED
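
The first hunk starts at line 5 of app.py, so the file's opening lines never appear in this diff. Judging from the symbols used below (os, np, tf, and the Flask import quoted in the hunk header), those unchanged lines presumably look like the following sketch; it is an inference from usage, not captured content:

# Presumed app.py header (lines 1-4, not shown in the diff) -- an assumption
import os                    # paths, os.rename, os.environ
import numpy as np           # np.expand_dims in preprocess_image
import tensorflow as tf      # tf.keras.models.load_model
from flask import Flask, request, render_template, jsonify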
@@ -5,80 +5,100 @@ from flask import Flask, request, render_template, jsonify
 from tensorflow.keras.utils import load_img, img_to_array
 from werkzeug.utils import secure_filename
 from datetime import datetime
-from huggingface_hub import hf_hub_download #
-import time
+from huggingface_hub import hf_hub_download  # Crucial for downloading model from HF Hub
+import time  # Used for potential retry logic, though not explicitly in hf_hub_download here
 
 app = Flask(__name__)
 
 # --- Model Loading Configuration ---
 MODEL_FILE_NAME = "model.keras"
-#
-#
-
-
-
-
-
-
-    # Download the model from Hugging Face Hub
-    # The downloaded file will be in a cache directory by default,
-    # so we'll move it to the current directory for easier loading.
-    model_path = hf_hub_download(repo_id=HF_MODEL_REPO_ID, filename=MODEL_FILE_NAME)
-    # Move the downloaded file to the root directory for app.py to find it easily
-    os.rename(model_path, MODEL_FILE_NAME)
-    print(f"'{MODEL_FILE_NAME}' downloaded successfully from Hugging Face Hub.")
-except Exception as e:
-    print(f"FATAL: Could not download model from Hugging Face Hub: {e}")
-    # If download fails, the model will remain None, and prediction attempts will fail.
-    model = None
-
-# Load the trained model
-model = None # Initialize model to None
+# IMPORTANT: Replace "YOUR_USERNAME/garbage-detection-model" with the actual REPO ID of YOUR MODEL on Hugging Face Hub.
+# This is the repository where your 'model.keras' file is stored.
+# Example: "your_huggingface_username/your_model_repo_name"
+MODEL_REPO_ID = "nonamelife/garbage-detection-model"  # <--- MAKE SURE THIS IS YOUR CORRECT MODEL REPO ID!
+
+model = None  # Initialize model as None
+
+# --- Model Loading Logic ---
 try:
-
-
-
+    print(f"Attempting to download '{MODEL_FILE_NAME}' from Hugging Face Hub ({MODEL_REPO_ID})...")
+
+    # hf_hub_download returns the FULL PATH to the downloaded file in the cache.
+    # We specify local_dir within /app to ensure write permissions and consistency.
+    # local_dir_use_symlinks=False is important for Docker environments to avoid symlink issues.
+    downloaded_model_path = hf_hub_download(
+        repo_id=MODEL_REPO_ID,
+        filename=MODEL_FILE_NAME,
+        local_dir="/app/.cache/huggingface/models",  # This directory will be created if it doesn't exist
+        local_dir_use_symlinks=False
+    )
+
+    print(f"'{MODEL_FILE_NAME}' downloaded successfully to: {downloaded_model_path}")
+
+    # Now, load the model directly from the downloaded path.
+    # This check is a safeguard, as hf_hub_download should ensure existence.
+    if os.path.exists(downloaded_model_path):
+        model = tf.keras.models.load_model(downloaded_model_path)
+        print("Model loaded successfully!")
     else:
-
+        # This message indicates a very unusual state if download was reported successful.
+        print(f"ERROR: Download reported success, but file not found at expected path: {downloaded_model_path}")
+
 except Exception as e:
-
-
+    # Catch any exceptions during download or loading and log them.
+    print(f"FATAL: Could not download or load model from Hugging Face Hub: {e}")
+    model = None  # Ensure model remains None if there's an error
+
+# --- End Model Loading Logic ---
+
 
-# Configurations
+# Configurations for Flask app
 UPLOAD_FOLDER = os.path.join('static', 'uploads')
 ALLOWED_EXTENSIONS = {'jpg', 'jpeg', 'png'}
 app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
-
+# Ensure the uploads directory exists within the container
+os.makedirs(UPLOAD_FOLDER, exist_ok=True)
 
 def allowed_file(filename):
-
+    """Checks if the uploaded file has an allowed extension."""
+    return '.' in filename and \
+           filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
 
 def preprocess_image(image_path):
-
-
-
+    """Loads and preprocesses an image for model prediction."""
+    img = load_img(image_path, target_size=(224, 224))  # Load image, resize to 224x224
+    img_array = img_to_array(img) / 255.0  # Convert to array and normalize pixel values
+    return np.expand_dims(img_array, axis=0)  # Add batch dimension for model input
 
+# --- Flask Routes ---
 @app.route('/')
 def index():
+    """Renders the home page."""
     return render_template('home.html')
 
 @app.route('/tool')
 def tool():
+    """Renders the image upload tool page."""
     return render_template('tool.html')
 
 @app.route('/about')
 def about():
+    """Renders the about page."""
     return render_template('about.html')
 
 @app.route('/contact')
 def contact():
+    """Renders the contact page."""
     return render_template('contact.html')
 
 @app.route('/predict', methods=['POST'])
 def predict():
+    """Handles image uploads and returns predictions."""
+    # Check if the model was loaded successfully at startup
     if model is None:
         return jsonify({'error': 'Model not loaded. Please check server logs.'}), 500
 
+    # Check if a file was part of the request
     if 'file' not in request.files:
         return jsonify({'error': 'No files uploaded'}), 400
 
@@ -90,24 +110,28 @@ def predict():
     for file in files:
         file_path = None
         if file and allowed_file(file.filename):
+            # Secure filename and create a unique name to prevent collisions
             filename = secure_filename(file.filename)
             timestamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
             unique_filename = f"{timestamp}_{filename}"
             file_path = os.path.join(app.config['UPLOAD_FOLDER'], unique_filename)
-            file.save(file_path)
+            file.save(file_path)  # Save the uploaded file temporarily
 
             try:
-                img_array = preprocess_image(file_path)
-                prediction = model.predict(img_array)[0][0]
+                img_array = preprocess_image(file_path)  # Preprocess the image
+                prediction = model.predict(img_array)[0][0]  # Get prediction from the model
+
+                # Determine label and confidence based on sigmoid output
                 label = "Dirty" if prediction > 0.5 else "Clean"
                 confidence = prediction if label == "Dirty" else 1 - prediction
 
                 results.append({
                     'label': label,
-                    'confidence': f"{confidence:.2%}",
-                    'image_url': f"/static/uploads/{unique_filename}"
+                    'confidence': f"{confidence:.2%}",  # Format confidence as percentage
+                    'image_url': f"/static/uploads/{unique_filename}"  # URL for displaying the image
                 })
             except Exception as e:
+                # Catch any errors during prediction or processing
                 results.append({
                     'label': 'Error',
                     'confidence': 'N/A',
@@ -115,7 +139,7 @@ def predict():
                     'error': str(e)
                 })
             finally:
-                # Clean up the uploaded file
+                # Clean up: delete the temporary uploaded file
                 if file_path and os.path.exists(file_path):
                     try:
                         os.remove(file_path)
@@ -123,6 +147,7 @@ def predict():
                     except Exception as e:
                         print(f"Error deleting file {file_path}: {e}")
         else:
+            # Handle invalid file types
            results.append({
                'label': 'Error',
                'confidence': 'N/A',
@@ -130,10 +155,13 @@ def predict():
                'error': f"Invalid file type: {file.filename}"
            })
 
+    # Render the results page with predictions
     return render_template('results.html', results=results)
 
+# --- Main execution block ---
 if __name__ == '__main__':
-    # Hugging Face Spaces sets the PORT environment variable
-    #
+    # Hugging Face Spaces sets the PORT environment variable for the app to listen on.
+    # We default to 7860 as it's common for HF Spaces apps.
+    # Debug mode should be OFF for production deployments (like Hugging Face Spaces) for security.
     port = int(os.environ.get('PORT', 7860))
-    app.run(host='0.0.0.0', port=port, debug=
+    app.run(host='0.0.0.0', port=port, debug=False)
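
Once the Space is up, the /predict route accepts one or more images via multipart POST and renders results.html. A minimal client sketch, assuming the app listens on localhost:7860 and reads uploads from a field named 'file' (the field name comes from the 'file' not in request.files check; reading multiple uploads via getlist falls between the captured hunks, so the multi-file behavior is an assumption):

# Hypothetical client, not part of app.py
import requests

# Post two images under the same 'file' field, matching the for-loop over files.
with open("clean1.jpg", "rb") as a, open("dirty1.jpg", "rb") as b:
    resp = requests.post(
        "http://localhost:7860/predict",
        files=[("file", ("clean1.jpg", a, "image/jpeg")),
               ("file", ("dirty1.jpg", b, "image/jpeg"))],
    )
print(resp.status_code)  # 200 renders the results page; 400/500 return JSON errors
print(resp.text[:500])   # HTML results or a JSON error body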