itzbhav committed on
Commit 3fde58f · verified · 1 Parent(s): e1aa483

Upload 8 files

Files changed (8)
  1. Resnet.ipynb +0 -0
  2. Try1.py +218 -0
  3. alexnet.py +219 -0
  4. app.py +137 -0
  5. best_model.h5 +3 -0
  6. blood_home.jpg +0 -0
  7. lenet.py +201 -0
  8. mobilevnet.py +268 -0
Resnet.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
Try1.py ADDED
@@ -0,0 +1,218 @@
# -------------------------
# 1. Import Libraries
# -------------------------
import os
import glob
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report, confusion_matrix
import itertools

from keras.preprocessing.image import ImageDataGenerator
from keras.models import Model, load_model
from keras.layers import Dense, Dropout
from keras.applications import ResNet50
from keras.applications.resnet50 import preprocess_input
from keras.preprocessing import image

import warnings
warnings.filterwarnings('ignore')

# -------------------------
# 2. Load Dataset
# -------------------------
file_path = 'dataset'

# List all classes
name_class = os.listdir(file_path)
print("Classes:", name_class)

# Get all filepaths
filepaths = list(glob.glob(file_path + '/**/*.*'))
print(f"Total images found: {len(filepaths)}")

# Extract labels from the parent folder of each file
labels = list(map(lambda x: os.path.split(os.path.split(x)[0])[1], filepaths))

# Create DataFrame
filepath_series = pd.Series(filepaths, name='Filepath').astype(str)
labels_series = pd.Series(labels, name='Label')
data = pd.concat([filepath_series, labels_series], axis=1)
data = data.sample(frac=1).reset_index(drop=True)  # shuffle
print(data.head())

# -------------------------
# 3. EDA (Exploratory Data Analysis)
# -------------------------
# Class distribution
plt.figure(figsize=(8, 5))
sns.countplot(x='Label', data=data, order=data['Label'].value_counts().index)
plt.title('Number of Images per Class')
plt.xticks(rotation=45)
plt.show()

# Check image dimensions
sample_img = plt.imread(data['Filepath'][0])
print(f"Sample image shape: {sample_img.shape}")

# Visualize a few images
fig, axes = plt.subplots(2, 4, figsize=(12, 6))
for ax, (img_path, label) in zip(axes.flatten(), zip(data['Filepath'][:8], data['Label'][:8])):
    img = plt.imread(img_path)
    ax.imshow(img)
    ax.set_title(label)
    ax.axis('off')
plt.tight_layout()
plt.show()

# -------------------------
# 4. Train-Test Split
# -------------------------
train, test = train_test_split(data, test_size=0.2, random_state=42)
print(f"Training samples: {len(train)}, Testing samples: {len(test)}")

# -------------------------
# 5. Data Preprocessing and Augmentation
# -------------------------
train_datagen = ImageDataGenerator(
    preprocessing_function=preprocess_input,
    rotation_range=20,
    width_shift_range=0.1,
    height_shift_range=0.1,
    zoom_range=0.1,
    horizontal_flip=True
)

test_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)

train_gen = train_datagen.flow_from_dataframe(
    dataframe=train,
    x_col='Filepath',
    y_col='Label',
    target_size=(256, 256),
    class_mode='categorical',
    batch_size=32,
    shuffle=True,
    seed=42
)

valid_gen = test_datagen.flow_from_dataframe(
    dataframe=test,
    x_col='Filepath',
    y_col='Label',
    target_size=(256, 256),
    class_mode='categorical',
    batch_size=32,
    shuffle=False
)

# -------------------------
# 6. Model Building (Transfer Learning with ResNet50)
# -------------------------
pretrained_model = ResNet50(
    input_shape=(256, 256, 3),
    include_top=False,
    weights='imagenet',
    pooling='avg'
)

pretrained_model.trainable = False

x = Dense(128, activation="relu")(pretrained_model.output)
x = Dropout(0.5)(x)
x = Dense(128, activation="relu")(x)
outputs = Dense(8, activation='softmax')(x)

model = Model(inputs=pretrained_model.input, outputs=outputs)

model.compile(optimizer="adam", loss='categorical_crossentropy', metrics=['accuracy'])

# -------------------------
# 7. Model Training
# -------------------------
history = model.fit(
    train_gen,
    validation_data=valid_gen,
    epochs=20,
)

# -------------------------
# 8. Training Curves
# -------------------------
pd.DataFrame(history.history)[['accuracy', 'val_accuracy']].plot()
plt.title('Training vs Validation Accuracy')
plt.show()

pd.DataFrame(history.history)[['loss', 'val_loss']].plot()
plt.title('Training vs Validation Loss')
plt.show()

# -------------------------
# 9. Evaluation and Confusion Matrix
# -------------------------
# Evaluate
results = model.evaluate(valid_gen, verbose=0)
print(f"Test Loss: {results[0]:.5f}")
print(f"Test Accuracy: {results[1]*100:.2f}%")

# Predictions
predictions = model.predict(valid_gen)
y_pred = np.argmax(predictions, axis=1)

# True labels
y_true = valid_gen.classes

# Labels mapping (index -> class name)
labels_map = train_gen.class_indices
labels_map = dict((v, k) for k, v in labels_map.items())

# Classification report
print(classification_report(y_true, y_pred, target_names=list(labels_map.values())))

# Confusion matrix
cm = confusion_matrix(y_true, y_pred)

plt.figure(figsize=(8, 6))
sns.heatmap(cm, annot=True, fmt='d', cmap='Blues',
            xticklabels=list(labels_map.values()),
            yticklabels=list(labels_map.values()))
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix')
plt.show()

# -------------------------
# 10. Save the Model
# -------------------------
model.save("model_blood_group_detection_resnet.h5")
print("Model saved successfully!")

# -------------------------
# 11. Single Image Prediction Example
# -------------------------
# Load model
model = load_model('model_blood_group_detection_resnet.h5')

# Single image prediction (raw string so the Windows backslashes are not treated as escapes)
img_path = r'C:\Users\ADMIN\Documents\SEM-6\DL PROJECT\dataset\AB+\augmented_cluster_4_4.BMP'  # Update this path as needed
img = image.load_img(img_path, target_size=(256, 256))
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = preprocess_input(x)

preds = model.predict(x)
predicted_class = np.argmax(preds)
confidence = preds[0][predicted_class] * 100

# Get label
predicted_label = labels_map[predicted_class]

# Show image
plt.imshow(image.array_to_img(img))
plt.axis('off')
plt.title(f"Prediction: {predicted_label} ({confidence:.2f}%)")
plt.show()
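
Note that once Try1.py finishes, the saved model is usually reloaded in a fresh session, where train_gen (and therefore labels_map) no longer exists. Below is a minimal sketch of one way to rebuild the same index-to-label mapping without retraining; it assumes the dataset/ folder is still present and relies on Keras assigning class indices in sorted order of the label names.

import os
from keras.models import load_model

model = load_model('model_blood_group_detection_resnet.h5')

# flow_from_dataframe assigns indices to the sorted class names,
# so sorting the folder names should reproduce labels_map.
class_names = sorted(os.listdir('dataset'))
labels_map = {idx: name for idx, name in enumerate(class_names)}
print(labels_map)
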
alexnet.py ADDED
@@ -0,0 +1,219 @@
# ------------------------------------------------------
# 1. Import Libraries
# ------------------------------------------------------

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import os
import cv2
from sklearn.model_selection import train_test_split
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import to_categorical
from keras.applications import MobileNetV2
from keras.layers import Dense, GlobalAveragePooling2D, Dropout
from keras.models import Model, load_model
from keras.preprocessing import image
from keras.applications.mobilenet_v2 import preprocess_input

# ------------------------------------------------------
# 2. Load Dataset
# ------------------------------------------------------

data_dir = 'dataset'  # <-- Replace with your dataset folder
categories = os.listdir(data_dir)

data = []
for category in categories:
    category_path = os.path.join(data_dir, category)
    for img_name in os.listdir(category_path):
        img_path = os.path.join(category_path, img_name)
        data.append((img_path, category))

data = pd.DataFrame(data, columns=['Filepath', 'Label'])

print(f"Total samples: {len(data)}")
print(data.head())

# ------------------------------------------------------
# 3. Exploratory Data Analysis (EDA)
# ------------------------------------------------------

# Class distribution
plt.figure(figsize=(8, 6))
sns.countplot(x='Label', data=data)
plt.title('Blood Group Class Distribution')
plt.xticks(rotation=45)
plt.show()

# Display a few images
plt.figure(figsize=(12, 8))
for i in range(9):
    sample = data.sample(n=1).iloc[0]
    img = cv2.imread(sample['Filepath'])
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    plt.subplot(3, 3, i + 1)
    plt.imshow(img)
    plt.title(sample['Label'])
    plt.axis('off')
plt.tight_layout()
plt.show()

# ------------------------------------------------------
# 4. Train-Validation-Test Split
# ------------------------------------------------------

train, temp = train_test_split(data, test_size=0.3, random_state=42, stratify=data['Label'])
valid, test = train_test_split(temp, test_size=0.5, random_state=42, stratify=temp['Label'])

print(f"Training samples: {len(train)}")
print(f"Validation samples: {len(valid)}")
print(f"Testing samples: {len(test)}")

# ------------------------------------------------------
# 5. Preprocessing (Image Augmentation + Scaling)
# ------------------------------------------------------

train_datagen = ImageDataGenerator(
    preprocessing_function=preprocess_input,
    rotation_range=20,
    zoom_range=0.2,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    horizontal_flip=True
)

valid_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)

target_size = (224, 224)

train_gen = train_datagen.flow_from_dataframe(
    dataframe=train,
    x_col='Filepath',
    y_col='Label',
    target_size=target_size,
    class_mode='categorical',
    batch_size=32,
    shuffle=True,
    seed=42
)

valid_gen = valid_datagen.flow_from_dataframe(
    dataframe=valid,
    x_col='Filepath',
    y_col='Label',
    target_size=target_size,
    class_mode='categorical',
    batch_size=32,
    shuffle=False
)

# ------------------------------------------------------
# 6. Load MobileNetV2 Base Model
# ------------------------------------------------------

base_model = MobileNetV2(weights='imagenet', include_top=False, input_shape=(224, 224, 3))

# ------------------------------------------------------
# 7. Freeze Layers
# ------------------------------------------------------

for layer in base_model.layers:
    layer.trainable = False

# ------------------------------------------------------
# 8. Add Custom Layers
# ------------------------------------------------------

x = base_model.output
x = GlobalAveragePooling2D()(x)
x = Dropout(0.3)(x)
x = Dense(128, activation='relu')(x)
x = Dropout(0.2)(x)
predictions = Dense(len(categories), activation='softmax')(x)

model = Model(inputs=base_model.input, outputs=predictions)

# ------------------------------------------------------
# 9. Compile and Train the Model
# ------------------------------------------------------

model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

history = model.fit(
    train_gen,
    validation_data=valid_gen,
    epochs=20
)

# ------------------------------------------------------
# 10. Save the Model
# ------------------------------------------------------

model.save('bloodgroup_mobilenet_model.h5')
print("Model saved as bloodgroup_mobilenet_model.h5")

# ------------------------------------------------------
# 11. Evaluate the Model
# ------------------------------------------------------

# Accuracy and Loss plots
plt.figure(figsize=(14, 5))

plt.subplot(1, 2, 1)
plt.plot(history.history['accuracy'], label='Train Accuracy')
plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
plt.title('Model Accuracy')
plt.legend()

plt.subplot(1, 2, 2)
plt.plot(history.history['loss'], label='Train Loss')
plt.plot(history.history['val_loss'], label='Validation Loss')
plt.title('Model Loss')
plt.legend()

plt.show()

# ------------------------------------------------------
# 12. Prediction on Single Image (User Input)
# ------------------------------------------------------

import numpy as np
import matplotlib.pyplot as plt
from keras.models import load_model
from keras.preprocessing import image
from keras.applications.mobilenet_v2 import preprocess_input  # must match the preprocessing used during training

# Load the trained model
model = load_model('bloodgroup_mobilenet_model.h5')

# Define the class labels (same alphabetical order Keras assigns during training)
labels = {'A+': 0, 'A-': 1, 'AB+': 2, 'AB-': 3, 'B+': 4, 'B-': 5, 'O+': 6, 'O-': 7}
labels = dict((v, k) for k, v in labels.items())

# Example of loading a single image and making a prediction
img_path = 'dataset/AB+/augmented_cluster_4_4.BMP'

# Preprocess the image to match the model's expected input (MobileNetV2, 224x224)
img = image.load_img(img_path, target_size=(224, 224))
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = preprocess_input(x)

# Make prediction
result = model.predict(x)
predicted_class = np.argmax(result)  # Get the predicted class index

# Map the predicted class to the label
predicted_label = labels[predicted_class]
confidence = result[0][predicted_class] * 100  # Confidence level

# Display the image
plt.imshow(image.array_to_img(image.img_to_array(img) / 255.0))
plt.axis('off')  # Hide axes

# Display the prediction and confidence below the image
plt.title(f"Prediction: {predicted_label} with confidence {confidence:.2f}%")
plt.show()
app.py ADDED
@@ -0,0 +1,137 @@
# Import required libraries
import streamlit as st
import numpy as np
import pandas as pd
from keras.models import load_model
from keras.preprocessing import image
from keras.applications.mobilenet_v2 import preprocess_input  # same preprocessing used at training time
import os
import matplotlib.pyplot as plt
import random

# Set page configuration FIRST
st.set_page_config(page_title="Blood Group Detection", layout="wide")

# Title of the app
st.title("🩸 Blood Group Detection using a Fine-Tuned MobileNetV2 Model")

# Sidebar navigation
st.sidebar.header("Navigation")
selected_option = st.sidebar.selectbox(
    "Select an option:",
    ["Home", "EDA", "Predict Blood Group"]
)

# Load the trained model
@st.cache_resource
def load_trained_model():
    model = load_model('bloodgroup_mobilenet_finetuned.h5')  # Ensure correct path
    return model

model = load_trained_model()

# Define class labels in the same (alphabetical) order Keras assigns class indices during training
class_names = ['A+', 'A-', 'AB+', 'AB-', 'B+', 'B-', 'O+', 'O-']

# Dataset directory (adjust this if needed)
DATASET_DIR = "dataset"  # Example path

# Home page
if selected_option == "Home":
    st.subheader("About the Project")
    st.write("""
    Welcome to the Blood Group Detection App!

    This application uses a deep learning model (a fine-tuned MobileNetV2) to detect blood groups from blood sample images.

    ### 🛠 Technologies Used:
    - Streamlit for the web UI
    - TensorFlow/Keras for deep learning
    - Image processing with computer vision

    **Upload a blood sample image and predict the blood group instantly!**
    """)
    try:
        st.image("blood_home.jpg", caption="Blood Sample Analysis", use_column_width=True)
    except Exception:
        st.warning("Home image not found. (Optional)")

# EDA page
elif selected_option == "EDA":
    st.subheader("Exploratory Data Analysis (EDA)")

    # Check if dataset exists
    if os.path.exists(DATASET_DIR):
        st.write("### 📊 Number of Images per Blood Group:")

        counts = {}
        for class_name in class_names:
            class_path = os.path.join(DATASET_DIR, class_name)
            if os.path.exists(class_path):
                counts[class_name] = len(os.listdir(class_path))
            else:
                counts[class_name] = 0

        df_counts = pd.DataFrame(list(counts.items()), columns=['Blood Group', 'Number of Images'])
        st.dataframe(df_counts)

        # Bar chart
        st.bar_chart(df_counts.set_index('Blood Group'))

        st.write("### 🖼️ Sample Images from Each Class:")

        cols = st.columns(4)  # create 4 columns

        for idx, class_name in enumerate(class_names):
            class_path = os.path.join(DATASET_DIR, class_name)
            if os.path.exists(class_path) and len(os.listdir(class_path)) > 0:
                img_file = random.choice(os.listdir(class_path))
                img_path = os.path.join(class_path, img_file)
                img = image.load_img(img_path, target_size=(64, 64))  # resized
                with cols[idx % 4]:  # arrange in 4 columns
                    st.image(img, caption=class_name, width=150)

        st.write("### 🧩 Image Properties:")
        sample_class = class_names[0]
        sample_path = os.path.join(DATASET_DIR, sample_class, os.listdir(os.path.join(DATASET_DIR, sample_class))[0])
        sample_img = image.load_img(sample_path)
        st.write(f"- **Image shape:** {np.array(sample_img).shape}")
        st.write(f"- **Color channels:** {np.array(sample_img).shape[-1]} (RGB)")

    else:
        st.warning("Dataset not found! Please make sure the 'dataset' folder exists.")

# Prediction page
elif selected_option == "Predict Blood Group":
    st.subheader("Upload an Image to Predict Blood Group")

    uploaded_file = st.file_uploader("Choose a blood sample image...", type=["jpg", "jpeg", "png", "bmp"])

    if uploaded_file is not None:
        # Display uploaded image
        st.image(uploaded_file, caption="Uploaded Image", use_column_width=True)

        # Ensure temp directory exists
        if not os.path.exists('temp'):
            os.makedirs('temp')

        # Save uploaded file temporarily
        temp_file_path = os.path.join("temp", uploaded_file.name)
        with open(temp_file_path, "wb") as f:
            f.write(uploaded_file.getbuffer())

        # Preprocess the image exactly as during training (224x224, MobileNetV2 scaling)
        img = image.load_img(temp_file_path, target_size=(224, 224))
        img_array = image.img_to_array(img)
        img_array = np.expand_dims(img_array, axis=0)
        img_array = preprocess_input(img_array)

        # Predict the blood group
        with st.spinner('Predicting...'):
            prediction = model.predict(img_array)
            predicted_class = class_names[np.argmax(prediction)]

        # Show result
        st.success(f"🧬 Predicted Blood Group: **{predicted_class}**")

        # Remove temporary file
        os.remove(temp_file_path)
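
Because app.py hard-codes the input size, the class count, and the label order, a small standalone check against the saved model can catch mismatches before deployment. This is a rough sketch only; sanity_check.py is a hypothetical helper that is not part of this commit, and it uses only standard Keras model attributes.

# sanity_check.py (hypothetical helper, not part of this commit)
from keras.models import load_model

model = load_model('bloodgroup_mobilenet_finetuned.h5')

# The app resizes uploads to 224x224 and maps predictions onto 8 blood-group labels.
print("Expected input shape:", model.input_shape)            # should be (None, 224, 224, 3)
print("Number of output classes:", model.output_shape[-1])   # should be 8
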
best_model.h5 ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:430d170f8fb637cb7dbb5634502aad6c75800c59ee502e496de84e98e110d768
size 11521400
blood_home.jpg ADDED
lenet.py ADDED
@@ -0,0 +1,201 @@
import os
import glob
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
from keras_preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Dense, Dropout, Conv2D, MaxPooling2D, Flatten
from keras.applications import ResNet50
from keras.applications.resnet50 import preprocess_input
from sklearn.metrics import classification_report

import zipfile

# Define the file name
zip_file = 'dataset.zip'

# Unzip it to a folder (you can choose your own target directory)
with zipfile.ZipFile(zip_file, 'r') as zip_ref:
    zip_ref.extractall('blood_group_dataset')  # Extract to this folder

# Walk through the directory
for root, dirs, files in os.walk('blood_group_dataset'):
    print(root)
    for name in dirs:
        print(' ', name)

# Walk through the directory and collect file paths and labels
filepaths = []
labels = []

for root, dirs, files in os.walk('blood_group_dataset'):
    for dir_name in dirs:  # Iterate through subdirectories (blood group types)
        for file in glob.glob(os.path.join(root, dir_name, '*')):  # Get all files in the subdirectory
            filepaths.append(file)
            labels.append(dir_name)  # Use the subdirectory name as the label

# Create a DataFrame with file paths and labels
filepath = pd.Series(filepaths, name='Filepath').astype(str)
Labels = pd.Series(labels, name='Label')
data = pd.concat([filepath, Labels], axis=1)
data = data.sample(frac=1).reset_index(drop=True)

# Filter out the 'dataset' label (these rows point at directories, not images)
filtered_data = data[data['Label'] != 'dataset'].reset_index(drop=True)

# Visualize class distribution using sns.barplot
counts = filtered_data.Label.value_counts()
sns.barplot(x=counts.index, y=counts)
plt.xlabel('Blood Group Type')
plt.ylabel('Number of Images')
plt.xticks(rotation=90)
plt.title('Class Distribution in Blood Group Dataset')
plt.show()

# Split the filtered data into training and testing sets
train, test = train_test_split(filtered_data, test_size=0.20, random_state=42)

# Visualize some images from the dataset
fig, axes = plt.subplots(nrows=5, ncols=3, figsize=(10, 8), subplot_kw={'xticks': [], 'yticks': []})
for i, ax in enumerate(axes.flat):
    ax.imshow(plt.imread(filtered_data.Filepath[i]))
    ax.set_title(filtered_data.Label[i])
plt.tight_layout()
plt.show()

# Set up ImageDataGenerator for training and validation data
train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)
test_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)

train_gen = train_datagen.flow_from_dataframe(
    dataframe=train,
    x_col='Filepath',
    y_col='Label',
    target_size=(224, 224),  # Adjusted to match the model input size
    class_mode='categorical',
    batch_size=32,
    shuffle=True,
    seed=42
)

valid_gen = test_datagen.flow_from_dataframe(
    dataframe=test,
    x_col='Filepath',
    y_col='Label',
    target_size=(224, 224),  # Adjusted to match the model input size
    class_mode='categorical',
    batch_size=32,
    shuffle=False,
    seed=42
)

# Define the LeNet model
model = Sequential([
    Conv2D(6, kernel_size=(5, 5), activation='relu', input_shape=(224, 224, 3)),
    MaxPooling2D(pool_size=(2, 2)),
    Conv2D(16, kernel_size=(5, 5), activation='relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Flatten(),
    Dense(120, activation='relu'),
    Dense(84, activation='relu'),
    Dense(8, activation='softmax')
])

model.compile(
    optimizer="adam",
    loss='categorical_crossentropy',
    metrics=['accuracy']
)

# Train the model
history = model.fit(
    train_gen,
    validation_data=valid_gen,
    epochs=20
)

# Plot training history: accuracy
pd.DataFrame(history.history)[['accuracy', 'val_accuracy']].plot()
plt.title("Accuracy")
plt.show()

# Plot training history: loss
pd.DataFrame(history.history)[['loss', 'val_loss']].plot()
plt.title("Loss")
plt.show()

# Evaluate the model on test data
results = model.evaluate(valid_gen, verbose=0)
print(f"Test Loss: {results[0]:.5f}")
print(f"Test Accuracy: {results[1]*100:.2f}%")

# Predict labels for test data
pred = model.predict(valid_gen)
pred = np.argmax(pred, axis=1)

# Map predicted indices back to class names
labels = train_gen.class_indices
labels = dict((v, k) for k, v in labels.items())
pred = [labels[k] for k in pred]

# Compare predicted labels with true labels and print a classification report.
# valid_gen was built with shuffle=False, so its order follows the test DataFrame.
y_test = list(test.Label)
y_test = y_test[:len(pred)]  # Truncate y_test to match pred length

print(classification_report(y_test, pred))

model.save("model_blood_group_detection_lenet.keras")

import numpy as np
import matplotlib.pyplot as plt
from keras.models import load_model
from keras.preprocessing import image
from keras.applications.imagenet_utils import preprocess_input  # default 'caffe' mode matches the ResNet50-style preprocessing used during training

# Load the trained model
model = load_model('model_blood_group_detection_lenet.keras')

# Define the class labels
labels = {'A+': 0, 'A-': 1, 'AB+': 2, 'AB-': 3, 'B+': 4, 'B-': 5, 'O+': 6, 'O-': 7}
labels = dict((v, k) for k, v in labels.items())

# Example of loading a single image and making a prediction
img_path = 'augmented_cluster_4_3505.BMP'

# Preprocess the image to match the model's expected input size (224x224)
img = image.load_img(img_path, target_size=(224, 224))
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = preprocess_input(x)  # Ensure this matches the model's preprocessing function

# Make prediction
result = model.predict(x)
predicted_class = np.argmax(result)  # Get the predicted class index

# Map the predicted class to the label
predicted_label = labels[predicted_class]
confidence = result[0][predicted_class] * 100  # Confidence level

# Display the image
plt.imshow(image.array_to_img(image.img_to_array(img) / 255.0))
plt.axis('off')  # Hide axes

# Display the prediction and confidence below the image
plt.title(f"Prediction: {predicted_label} with confidence {confidence:.2f}%")
plt.show()
mobilevnet.py ADDED
@@ -0,0 +1,268 @@
# ------------------------------------------------------
# 1. Import Libraries
# ------------------------------------------------------

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import os
import cv2
import tensorflow as tf
from sklearn.model_selection import train_test_split
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import to_categorical
from keras.applications import MobileNetV2
from keras.layers import Dense, GlobalAveragePooling2D, Dropout
from keras.models import Model, load_model
from keras.preprocessing import image
from keras.applications.mobilenet_v2 import preprocess_input
from keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint

# ------------------------------------------------------
# 2. Load Dataset
# ------------------------------------------------------

data_dir = 'dataset'  # <-- Replace with your dataset folder
categories = os.listdir(data_dir)

data = []
for category in categories:
    category_path = os.path.join(data_dir, category)
    for img_name in os.listdir(category_path):
        img_path = os.path.join(category_path, img_name)
        data.append((img_path, category))

data = pd.DataFrame(data, columns=['Filepath', 'Label'])

print(f"Total samples: {len(data)}")
print(data.head())

# ------------------------------------------------------
# 3. Exploratory Data Analysis (EDA)
# ------------------------------------------------------

# Class distribution
plt.figure(figsize=(8, 6))
sns.countplot(x='Label', data=data)
plt.title('Blood Group Class Distribution')
plt.xticks(rotation=45)
plt.show()

# Display a few images
plt.figure(figsize=(12, 8))
for i in range(9):
    sample = data.sample(n=1).iloc[0]
    img = cv2.imread(sample['Filepath'])
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    plt.subplot(3, 3, i + 1)
    plt.imshow(img)
    plt.title(sample['Label'])
    plt.axis('off')
plt.tight_layout()
plt.show()

# ------------------------------------------------------
# 4. Train-Validation-Test Split
# ------------------------------------------------------

train, temp = train_test_split(data, test_size=0.3, random_state=42, stratify=data['Label'])
valid, test = train_test_split(temp, test_size=0.5, random_state=42, stratify=temp['Label'])

print(f"Training samples: {len(train)}")
print(f"Validation samples: {len(valid)}")
print(f"Testing samples: {len(test)}")

# ------------------------------------------------------
# 5. Preprocessing (Image Augmentation + Scaling)
# ------------------------------------------------------

train_datagen = ImageDataGenerator(
    preprocessing_function=preprocess_input,
    rotation_range=20,
    zoom_range=0.2,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    horizontal_flip=True
)

valid_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)

target_size = (224, 224)

train_gen = train_datagen.flow_from_dataframe(
    dataframe=train,
    x_col='Filepath',
    y_col='Label',
    target_size=target_size,
    class_mode='categorical',
    batch_size=32,
    shuffle=True,
    seed=42
)

valid_gen = valid_datagen.flow_from_dataframe(
    dataframe=valid,
    x_col='Filepath',
    y_col='Label',
    target_size=target_size,
    class_mode='categorical',
    batch_size=32,
    shuffle=False
)

# ------------------------------------------------------
# 6. Load MobileNetV2 Base Model
# ------------------------------------------------------

base_model = MobileNetV2(weights='imagenet', include_top=False, input_shape=(224, 224, 3))

# ------------------------------------------------------
# 7. Freeze Base Layers
# ------------------------------------------------------

for layer in base_model.layers:
    layer.trainable = False

# ------------------------------------------------------
# 8. Define Function to Build Model (for tuning)
# ------------------------------------------------------

def build_model(dropout_rate=0.3, learning_rate=0.001):
    x = base_model.output
    x = GlobalAveragePooling2D()(x)
    x = Dropout(dropout_rate)(x)
    x = Dense(128, activation='relu')(x)
    x = Dropout(dropout_rate)(x)
    predictions = Dense(len(categories), activation='softmax')(x)

    model = Model(inputs=base_model.input, outputs=predictions)

    optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
    model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])

    return model

# ------------------------------------------------------
# 9. Hyperparameter Tuning (Manual)
# ------------------------------------------------------

dropout_rates = [0.3, 0.4]
learning_rates = [0.001, 0.0005]

best_val_accuracy = 0
best_model = None
best_params = {}

for dr in dropout_rates:
    for lr in learning_rates:
        print(f"\nTraining with Dropout: {dr}, Learning Rate: {lr}")

        model = build_model(dropout_rate=dr, learning_rate=lr)

        callbacks = [
            EarlyStopping(monitor='val_accuracy', patience=5, restore_best_weights=True),
            ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=3, verbose=1),
            ModelCheckpoint('best_model.h5', monitor='val_accuracy', save_best_only=True, verbose=1)
        ]

        history = model.fit(
            train_gen,
            validation_data=valid_gen,
            epochs=20,
            callbacks=callbacks,
            verbose=1
        )

        val_acc = max(history.history['val_accuracy'])
        print(f"Validation Accuracy: {val_acc:.4f}")

        if val_acc > best_val_accuracy:
            best_val_accuracy = val_acc
            best_model = model
            best_params = {'dropout_rate': dr, 'learning_rate': lr}

print("\nBest Parameters:", best_params)
print(f"Best Validation Accuracy: {best_val_accuracy:.4f}")

# ------------------------------------------------------
# 10. Fine-tuning (Unfreeze some base layers)
# ------------------------------------------------------

# Unfreeze last 30 layers for fine-tuning
for layer in base_model.layers[-30:]:
    layer.trainable = True

# Recompile
optimizer = tf.keras.optimizers.Adam(learning_rate=best_params['learning_rate'] / 10)
best_model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])

# Train again
history_finetune = best_model.fit(
    train_gen,
    validation_data=valid_gen,
    epochs=10,
    callbacks=[
        EarlyStopping(monitor='val_accuracy', patience=5, restore_best_weights=True),
        ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=3, verbose=1)
    ],
    verbose=1
)

# ------------------------------------------------------
# 11. Save the Final Model
# ------------------------------------------------------

best_model.save('bloodgroup_mobilenet_finetuned.h5')
print("Fine-tuned model saved as bloodgroup_mobilenet_finetuned.h5")

# ------------------------------------------------------
# 12. Evaluate the Final Model
# ------------------------------------------------------

# Accuracy and Loss plots
plt.figure(figsize=(14, 5))

plt.subplot(1, 2, 1)
plt.plot(history_finetune.history['accuracy'], label='Fine-tuned Train Accuracy')
plt.plot(history_finetune.history['val_accuracy'], label='Fine-tuned Validation Accuracy')
plt.title('Fine-tuned Model Accuracy')
plt.legend()

plt.subplot(1, 2, 2)
plt.plot(history_finetune.history['loss'], label='Fine-tuned Train Loss')
plt.plot(history_finetune.history['val_loss'], label='Fine-tuned Validation Loss')
plt.title('Fine-tuned Model Loss')
plt.legend()

plt.show()

# ------------------------------------------------------
# 13. Prediction on Single Image (User Input)
# ------------------------------------------------------

# Load the fine-tuned model
model = load_model('bloodgroup_mobilenet_finetuned.h5')

# Define the class labels
labels = {'A+': 0, 'A-': 1, 'AB+': 2, 'AB-': 3, 'B+': 4, 'B-': 5, 'O+': 6, 'O-': 7}
labels = dict((v, k) for k, v in labels.items())

# Example: Single image prediction
img_path = 'dataset/AB+/augmented_cluster_4_4.BMP'

img = image.load_img(img_path, target_size=(224, 224))
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = preprocess_input(x)

result = model.predict(x)
predicted_class = np.argmax(result)
predicted_label = labels[predicted_class]
confidence = result[0][predicted_class] * 100

plt.imshow(image.array_to_img(image.img_to_array(img) / 255.0))
plt.axis('off')
plt.title(f"Prediction: {predicted_label} with confidence {confidence:.2f}%")
plt.show()
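
mobilevnet.py carves out a held-out test split but only ever evaluates on valid_gen. Below is a minimal sketch of how that untouched test DataFrame could be scored with the same preprocessing; it assumes it runs in the same session, so test, valid_datagen, and best_model are still defined.

# Score the fine-tuned model on the untouched test split (same-session sketch).
test_gen = valid_datagen.flow_from_dataframe(
    dataframe=test,
    x_col='Filepath',
    y_col='Label',
    target_size=(224, 224),
    class_mode='categorical',
    batch_size=32,
    shuffle=False
)

test_loss, test_acc = best_model.evaluate(test_gen, verbose=0)
print(f"Held-out test accuracy: {test_acc*100:.2f}%")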