# Face-Mask-Detection-Model / tune_model.py
import numpy as np
from tensorflow.keras.preprocessing.image import ImageDataGenerator, load_img, img_to_array
from tensorflow.keras.applications import MobileNetV2
from tensorflow.keras.applications.mobilenet_v2 import preprocess_input
from tensorflow.keras.models import Model
from tensorflow.keras.layers import AveragePooling2D, Dropout, Flatten, Dense, Input
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.regularizers import l2
from tensorflow.keras.callbacks import EarlyStopping
from sklearn.model_selection import train_test_split
import os
from PIL import UnidentifiedImageError
import keras_tuner as kt
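# Note: keras_tuner ships separately from TensorFlow; install it with
# `pip install keras-tuner` if the import above fails.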
# --- Data Loading and Preprocessing ---
data_dir = "dataset"
categories = ["with_mask", "without_mask"]
data = []
labels = []
print("Loading and preprocessing images for tuning...")
for category in categories:
    path = os.path.join(data_dir, category)
    for img_name in os.listdir(path):
        img_path = os.path.join(path, img_name)
        try:
            image = load_img(img_path, target_size=(224, 224))
            image = img_to_array(image)
            image = preprocess_input(image)
            data.append(image)
            labels.append(0 if category == "with_mask" else 1)
        except UnidentifiedImageError:
            print(f"Skipped invalid image file: {img_path}")
        except Exception as e:
            print(f"Error loading image {img_path}: {e}")
print(f"Loaded {len(data)} images.")
data = np.array(data, dtype="float32")
labels = to_categorical(labels)
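# Stratify on the labels so the train/validation split keeps the same
# with_mask / without_mask ratio in both subsets.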
x_train, x_test, y_train, y_test = train_test_split(data, labels, test_size=0.2, stratify=labels, random_state=42)
print(f"Training samples: {len(x_train)}, Validation samples: {len(x_test)}")
# --- Data Augmentation Configuration ---
aug = ImageDataGenerator(
    rotation_range=30,
    zoom_range=0.2,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    horizontal_flip=True,
    brightness_range=[0.7, 1.3],
    channel_shift_range=50,
    fill_mode="nearest"
)
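# These transforms are applied on the fly by aug.flow() during the search below,
# and only to the training batches; the validation arrays stay unaugmented.
# Note that the inputs were already scaled to [-1, 1] by preprocess_input, so
# magnitudes such as channel_shift_range=50 act on that scaled range rather
# than on raw 0-255 pixel values.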
# --- Model Building Function for KerasTuner ---
def build_model(hp):
    base_model = MobileNetV2(weights="imagenet", include_top=False, input_tensor=Input(shape=(224, 224, 3)))
    # Freeze all but the last 20 layers of the base model for fine-tuning.
    for layer in base_model.layers[:-20]:
        layer.trainable = False
    head_model = base_model.output
    head_model = AveragePooling2D(pool_size=(7, 7))(head_model)
    head_model = Flatten()(head_model)
    # Define hyperparameter search spaces
    hp_units = hp.Int('units', min_value=64, max_value=256, step=32, default=128)
    hp_l2_reg = hp.Choice('l2_regularizer', values=[1e-4, 1e-3, 1e-2], default=1e-2)
    hp_dropout = hp.Float('dropout_rate', min_value=0.2, max_value=0.6, step=0.1, default=0.5)
    hp_learning_rate = hp.Choice('learning_rate', values=[1e-5, 5e-5, 1e-4], default=1e-5)
    head_model = Dense(units=hp_units, activation="relu", kernel_regularizer=l2(hp_l2_reg))(head_model)
    head_model = Dropout(hp_dropout)(head_model)
    head_model = Dense(2, activation="softmax")(head_model)
    model = Model(inputs=base_model.input, outputs=head_model)
    model.compile(optimizer=Adam(learning_rate=hp_learning_rate),
                  loss="categorical_crossentropy",
                  metrics=["accuracy"])
    return model
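# Optional sanity check (a sketch, not part of the original tuning flow): build
# one model with the search space's default values to confirm it compiles.
# sanity_model = build_model(kt.HyperParameters())
# sanity_model.summary()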
# --- Hyperparameter Tuning Setup and Execution ---
tuner = kt.Hyperband(
    build_model,
    objective='val_accuracy',
    max_epochs=30,
    factor=3,
    directory='keras_tuner_dir',
    project_name='mask_detector_tuning_run'
)
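# Hyperband runs many short trials and only promotes the best-performing
# configurations to longer runs (factor=3 controls how aggressively trials are
# culled). Results are cached under keras_tuner_dir/mask_detector_tuning_run,
# so delete that folder (or pass overwrite=True) to restart the search from scratch.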
# EarlyStopping callback for each trial during tuning
early_stopping_tuner = EarlyStopping(
    monitor='val_loss',
    patience=7,
    restore_best_weights=True
)
print("\nStarting hyperparameter search. This may take a while...")
tuner.search(aug.flow(x_train, y_train, batch_size=32),
             validation_data=(x_test, y_test),
             steps_per_epoch=len(x_train) // 32,
             epochs=30,
             callbacks=[early_stopping_tuner])
print("\nHyperparameter search complete.")
# Get the best hyperparameters found
best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]
print(f"\n==========================================")
print(f" Best Hyperparameters Found:")
print(f" ------------------------------------------")
print(f" Units in Dense layer: {best_hps.get('units')}")
print(f" L2 Regularizer strength: {best_hps.get('l2_regularizer')}")
print(f" Dropout Rate: {best_hps.get('dropout_rate')}")
print(f" Learning Rate: {best_hps.get('learning_rate')}")
print(f"==========================================\n")
print("Please take note of these hyperparameters and use them to define your model in 'model.py'.")