Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,93 +1,139 @@
|
|
1 |
-
import os
|
2 |
-
import numpy as np
|
3 |
-
import tensorflow as tf
|
4 |
-
from flask import Flask, request, render_template, jsonify
|
5 |
-
from tensorflow.keras.utils import load_img, img_to_array
|
6 |
-
from werkzeug.utils import secure_filename
|
7 |
-
from datetime import datetime
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
#
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
-
|
52 |
-
|
53 |
-
|
54 |
-
|
55 |
-
|
56 |
-
|
57 |
-
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
|
68 |
-
|
69 |
-
|
70 |
-
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
-
|
91 |
-
|
92 |
-
if
|
93 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os
import time
from datetime import datetime

import numpy as np
import tensorflow as tf
from flask import Flask, request, render_template, jsonify
from huggingface_hub import hf_hub_download  # Used to fetch the model at startup.
from tensorflow.keras.utils import load_img, img_to_array
from werkzeug.utils import secure_filename

app = Flask(__name__)
import shutil

# --- Model Loading Configuration ---
MODEL_FILE_NAME = "model.keras"
# REPLACE THIS WITH YOUR HUGGING FACE MODEL REPO ID
# Format: "your-username/your-model-repo-name"
HF_MODEL_REPO_ID = "YOUR_USERNAME/garbage-detection-model"  # Example!

# Check if the model exists locally; if not, download it from the Hugging Face Hub.
if not os.path.exists(MODEL_FILE_NAME):
    print(f"'{MODEL_FILE_NAME}' not found locally. Attempting to download from Hugging Face Hub...")
    try:
        # hf_hub_download places the file inside the shared HF cache directory.
        # Copy it into the working directory instead of os.rename-ing it:
        # renaming removes the file from the cache (forcing a re-download next
        # time) and raises OSError when the cache sits on a different filesystem.
        cached_path = hf_hub_download(repo_id=HF_MODEL_REPO_ID, filename=MODEL_FILE_NAME)
        shutil.copy(cached_path, MODEL_FILE_NAME)
        print(f"'{MODEL_FILE_NAME}' downloaded successfully from Hugging Face Hub.")
    except Exception as e:
        # Download failure is not fatal here; `model` stays None below and
        # the /predict route reports the problem to the client.
        print(f"FATAL: Could not download model from Hugging Face Hub: {e}")

# Load the trained model. `model` remains None on any failure so request
# handlers can respond with a clear error instead of crashing at import time.
model = None
try:
    if os.path.exists(MODEL_FILE_NAME):
        model = tf.keras.models.load_model(MODEL_FILE_NAME)
        print(f"Model loaded successfully from {MODEL_FILE_NAME}")
    else:
        print(f"Model file '{MODEL_FILE_NAME}' still not found after download attempt.")
except Exception as e:
    print(f"Error loading model from {MODEL_FILE_NAME}: {e}")
    model = None  # Ensure model is None if loading fails
# Upload configuration: the directory that temporarily holds images while
# they are classified, and the set of accepted file extensions.
UPLOAD_FOLDER = os.path.join('static', 'uploads')
ALLOWED_EXTENSIONS = {'jpg', 'jpeg', 'png'}
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
os.makedirs(UPLOAD_FOLDER, exist_ok=True)  # Ensure uploads directory exists
def allowed_file(filename):
    """Return True when *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
def preprocess_image(image_path):
    """Load the image at *image_path* and prepare it for the model.

    Resizes to 224x224, scales pixel values into [0, 1], and prepends a
    batch axis so the result can be passed straight to model.predict.
    """
    image = load_img(image_path, target_size=(224, 224))
    pixels = img_to_array(image) / 255.0
    batched = np.expand_dims(pixels, axis=0)
    return batched
@app.route('/')
def index():
    """Serve the landing page."""
    return render_template('home.html')
@app.route('/tool')
def tool():
    """Serve the classification tool page."""
    return render_template('tool.html')
@app.route('/about')
def about():
    """Serve the about page."""
    return render_template('about.html')
@app.route('/contact')
def contact():
    """Serve the contact page."""
    return render_template('contact.html')
@app.route('/predict', methods=['POST'])
def predict():
    """Classify each uploaded image as "Clean" or "Dirty" and render results.

    Expects one or more files under the multipart field 'file'. Each valid
    image is saved under a unique name, scored by the model, then deleted.
    Returns the rendered results page, or a JSON error with status 400/500
    when no usable input or no model is available.
    """
    if model is None:
        return jsonify({'error': 'Model not loaded. Please check server logs.'}), 500

    if 'file' not in request.files:
        return jsonify({'error': 'No files uploaded'}), 400

    files = request.files.getlist('file')
    if not files or all(f.filename == '' for f in files):
        return jsonify({'error': 'No files selected'}), 400

    results = []
    for file in files:
        file_path = None
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            timestamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
            # FIX: include the sanitized original filename in the saved name.
            # The previous code dropped it, so the saved file lost its
            # extension and simultaneous uploads could collide.
            unique_filename = f"{timestamp}_{filename}"
            file_path = os.path.join(app.config['UPLOAD_FOLDER'], unique_filename)
            file.save(file_path)

            try:
                img_array = preprocess_image(file_path)
                # Single sigmoid output: values above 0.5 mean "Dirty".
                prediction = model.predict(img_array)[0][0]
                label = "Dirty" if prediction > 0.5 else "Clean"
                # Report confidence in the predicted class, not the raw score.
                confidence = prediction if label == "Dirty" else 1 - prediction

                # NOTE(review): the file is removed in `finally` below, so this
                # URL will 404 when the results page loads — confirm intended.
                results.append({
                    'label': label,
                    'confidence': f"{confidence:.2%}",
                    'image_url': f"/static/uploads/{unique_filename}"
                })
            except Exception as e:
                results.append({
                    'label': 'Error',
                    'confidence': 'N/A',
                    'image_url': None,
                    'error': str(e)
                })
            finally:
                # Clean up the uploaded file after processing
                if file_path and os.path.exists(file_path):
                    try:
                        os.remove(file_path)
                        print(f"Deleted uploaded file: {file_path}")
                    except Exception as e:
                        print(f"Error deleting file {file_path}: {e}")
        else:
            results.append({
                'label': 'Error',
                'confidence': 'N/A',
                'image_url': None,
                'error': f"Invalid file type: {file.filename}"
            })

    return render_template('results.html', results=results)
if __name__ == '__main__':
    # Hugging Face Spaces sets the PORT environment variable.
    # Default to 7860 as it's common for HF Spaces apps.
    port = int(os.environ.get('PORT', 7860))
    # SECURITY FIX: debug=True enables the Werkzeug interactive debugger,
    # which permits arbitrary code execution if the server is publicly
    # reachable (an HF Space is). Enable debug only via FLASK_DEBUG=1.
    debug_mode = os.environ.get('FLASK_DEBUG', '0') == '1'
    app.run(host='0.0.0.0', port=port, debug=debug_mode)