Ahmedhassan54's picture
Upload 4 files
ae329be verified
raw
history blame
4.52 kB
import tensorflow as tf
from tensorflow.keras import layers, models, callbacks
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import numpy as np
import matplotlib.pyplot as plt
import datetime
from sklearn.metrics import classification_report, confusion_matrix
import seaborn as sns
import os
import zipfile
from google.colab import files
from shutil import move
from pathlib import Path
print("TensorFlow version:", tf.__version__)
uploaded = files.upload()
zip_filename = list(uploaded.keys())[0]
with zipfile.ZipFile(zip_filename, 'r') as zip_ref:
zip_ref.extractall('extracted_dataset')
def organize_dataset(input_dir, output_dir):
os.makedirs(os.path.join(output_dir, 'cat'), exist_ok=True)
os.makedirs(os.path.join(output_dir, 'dog'), exist_ok=True)
for file in Path(input_dir).glob('cat.*.jpg'):
move(str(file), os.path.join(output_dir, 'cat', file.name))
for file in Path(input_dir).glob('dog.*.jpg'):
move(str(file), os.path.join(output_dir, 'dog', file.name))
input_path = 'extracted_dataset/custom_dataset/train'
output_path = 'organized_dataset/train'
organize_dataset(input_path, output_path)
IMG_SIZE = (150, 150)
BATCH_SIZE = 32
train_datagen = ImageDataGenerator(
rescale=1./255,
rotation_range=20,
width_shift_range=0.2,
height_shift_range=0.2,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True,
validation_split=0.2
)
train_generator = train_datagen.flow_from_directory(
'organized_dataset/train',
target_size=IMG_SIZE,
batch_size=BATCH_SIZE,
class_mode='binary',
subset='training',
shuffle=True
)
validation_generator = train_datagen.flow_from_directory(
'organized_dataset/train',
target_size=IMG_SIZE,
batch_size=BATCH_SIZE,
class_mode='binary',
subset='validation',
shuffle=True
)
class_names = list(train_generator.class_indices.keys())
print("\nDetected classes:", class_names)
print("Number of training samples:", train_generator.samples)
print("Number of validation samples:", validation_generator.samples)
plt.figure(figsize=(12, 9))
for i in range(9):
img, label = next(train_generator)
plt.subplot(3, 3, i+1)
plt.imshow(img[i])
plt.title(class_names[int(label[i])])
plt.axis('off')
plt.suptitle("Sample Training Images")
plt.show()
def build_model(input_shape):
model = models.Sequential([
layers.Conv2D(32, (3,3), activation='relu', input_shape=input_shape),
layers.MaxPooling2D((2,2)),
layers.Conv2D(64, (3,3), activation='relu'),
layers.MaxPooling2D((2,2)),
layers.Conv2D(128, (3,3), activation='relu'),
layers.MaxPooling2D((2,2)),
layers.Flatten(),
layers.Dense(512, activation='relu'),
layers.Dropout(0.5),
layers.Dense(1, activation='sigmoid') # Binary output
])
model.compile(
optimizer='adam',
loss='binary_crossentropy',
metrics=['accuracy']
)
return model
model = build_model(input_shape=(IMG_SIZE[0], IMG_SIZE[1], 3))
model.summary()
log_dir = "logs/fit/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
callbacks = [
callbacks.EarlyStopping(patience=5, restore_best_weights=True),
callbacks.ModelCheckpoint('best_model.h5', save_best_only=True),
callbacks.TensorBoard(log_dir=log_dir),
callbacks.ReduceLROnPlateau(factor=0.1, patience=3)
]
history = model.fit(
train_generator,
steps_per_epoch=train_generator.samples // BATCH_SIZE,
epochs=30,
validation_data=validation_generator,
validation_steps=validation_generator.samples // BATCH_SIZE,
callbacks=callbacks
)
plt.figure(figsize=(12, 4))
plt.subplot(1, 2, 1)
plt.plot(history.history['accuracy'], label='Train')
plt.plot(history.history['val_accuracy'], label='Validation')
plt.title('Accuracy')
plt.legend()
plt.subplot(1, 2, 2)
plt.plot(history.history['loss'], label='Train')
plt.plot(history.history['val_loss'], label='Validation')
plt.title('Loss')
plt.legend()
plt.show()
model.save('cat_dog_classifier.h5')
converter = tf.lite.TFLiteConverter.from_keras_model(model)
tflite_model = converter.convert()
with open('cat_dog.tflite', 'wb') as f:
f.write(tflite_model)
print("\nModel saved in HDF5 and TFLite formats")