nisharg nargund committed
Commit dd2c48e · 1 Parent(s): 03be6a6

Upload app.py

Files changed (1)
  1. app.py +157 -0
app.py ADDED
@@ -0,0 +1,157 @@
+ # -*- coding: utf-8 -*-
+ """tumor-classification-using-keras.ipynb
+
+ Automatically generated by Colaboratory.
+
+ Original file is located at
+ https://colab.research.google.com/drive/1EgMc5_zGbuWuvrsGd2sg2bVfXYUQXanx
+
+ # Import Statements
+ """
+
+ from zipfile import ZipFile
+ file_name = "brain_tumor_dataset_kaggle.zip"
+
+ with ZipFile(file_name, 'r') as zip:
+     zip.extractall()
+     print("done")
+
+ import os
+ import keras
+ from keras.models import Sequential
+ from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, BatchNormalization
+ from PIL import Image
+ import numpy as np
+ import pandas as pd
+ import matplotlib.pyplot as plt
+
+ from sklearn.preprocessing import OneHotEncoder
+ encoder = OneHotEncoder()
+ encoder.fit([[0], [1]])
+
+ # 0 - Tumor
+ # 1 - Normal
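+ # For reference, with scikit-learn's default (sorted) categories the encoding comes out as:
+ #   encoder.transform([[0]]).toarray()  ->  [[1., 0.]]   # tumor
+ #   encoder.transform([[1]]).toarray()  ->  [[0., 1.]]   # normal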
+
+ """
+ 1. data list for storing image data in numpy array form
+ 2. paths list for storing paths of all images
+ 3. result list for storing the one-hot encoded target class (normal or tumor)"""
+
+ # This cell fills data/result for images with a tumor
+
+ data = []
+ paths = []
+ result = []
+
+ for r, d, f in os.walk(r'../content/brain_tumor_dataset/yes'):  # r - root dir, d - subdirectories, f - files
+     for file in f:
+         if '.jpg' in file:
+             paths.append(os.path.join(r, file))
+
+ for path in paths:
+     img = Image.open(path)
+     img = img.resize((128, 128))
+     img = np.array(img)
+     if img.shape == (128, 128, 3):
+         data.append(np.array(img))
+         result.append(encoder.transform([[0]]).toarray())
+
+ # This cell fills data/result for images without a tumor
+
+ paths = []
+ for r, d, f in os.walk(r"../content/brain_tumor_dataset/no"):
+     for file in f:
+         if '.jpg' in file:
+             paths.append(os.path.join(r, file))
+
+ for path in paths:
+     img = Image.open(path)
+     img = img.resize((128, 128))
+     img = np.array(img)
+     if img.shape == (128, 128, 3):
+         data.append(np.array(img))
+         result.append(encoder.transform([[1]]).toarray())
+
+ data = np.array(data)
+ data.shape
+
+ result = np.array(result)
+ result = result.reshape(-1, 2)  # collapse the stacked (1, 2) one-hot rows into a (N, 2) array
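+ # At this point (assuming every usable image passed the shape check) data has shape
+ # (N, 128, 128, 3) and result has shape (N, 2); in the original dataset N was 139.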
+
+ from sklearn.model_selection import train_test_split
+ x_train, x_test, y_train, y_test = train_test_split(data, result, test_size=0.1, shuffle=True, random_state=0)
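+ # With test_size=0.1 and N = 139, scikit-learn rounds the test split up, so this
+ # gives roughly 125 training and 14 test images (exact counts depend on N).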
+
+ """# Model Building
+
+ Batch normalization is a technique for training very deep neural networks that standardizes the inputs to a layer for each mini-batch. This has the effect of stabilizing the learning process and dramatically reducing the number of training epochs required to train deep networks.
+ """
+
+ model = Sequential()
+
+ # Block 1: two 32-filter convolutions, batch norm, 2x2 max pooling, dropout
+ model.add(Conv2D(32, kernel_size=(2, 2), input_shape=(128, 128, 3), padding='Same'))
+ model.add(Conv2D(32, kernel_size=(2, 2), activation='relu', padding='Same'))
+
+ model.add(BatchNormalization())
+ model.add(MaxPooling2D(pool_size=(2, 2)))
+ model.add(Dropout(0.25))
+
+ # Block 2: four 64-filter convolutions, batch norm, 2x2 max pooling, dropout
+ model.add(Conv2D(64, kernel_size=(2, 2), activation='relu', padding='Same'))
+ model.add(Conv2D(64, kernel_size=(2, 2), activation='relu', padding='Same'))
+ model.add(Conv2D(64, kernel_size=(2, 2), activation='relu', padding='Same'))
+ model.add(Conv2D(64, kernel_size=(2, 2), activation='relu', padding='Same'))
+
+ model.add(BatchNormalization())
+ model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
+ model.add(Dropout(0.25))
+
+ model.add(Flatten())
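+ # With 'Same' padding the convolutions preserve spatial size, so the two 2x2 poolings
+ # reduce the 128x128 input to 32x32x64; Flatten therefore yields 32*32*64 = 65,536 features.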
+
+ model.add(Dense(512, activation='relu'))
+ model.add(Dropout(0.5))
+ model.add(Dense(2, activation='softmax'))
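+ # The two-way softmax output matches the (N, 2) one-hot targets and the
+ # categorical_crossentropy loss used below.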
+
+ model.compile(loss="categorical_crossentropy", optimizer='Adamax')
+ print(model.summary())
+
+ y_train.shape
+
+ history = model.fit(x_train, y_train, epochs=30, batch_size=40, verbose=1, validation_data=(x_test, y_test))
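+ # history.history holds one value per epoch; with this compile configuration it
+ # contains only the 'loss' and 'val_loss' series plotted in the next cell.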
+
+ """# Plotting Losses"""
+
+ plt.plot(history.history['loss'])
+ plt.plot(history.history['val_loss'])
+ plt.title('Model Loss')
+ plt.ylabel('Loss')
+ plt.xlabel('Epoch')
+ plt.legend(['Train', 'Validation'], loc='upper right')  # 'loss' is the training curve, 'val_loss' the validation curve
+ plt.show()
+
+ """# Just Checking the Model"""
+
+ def names(number):
+     if number == 0:
+         return "It's a Tumor"
+     else:
+         return "No, it's not a tumor"
+
+ from matplotlib.pyplot import imshow
+ img = Image.open(r"../content/brain_tumor_dataset/no/11 no.jpg")
+ x = np.array(img.resize((128, 128)))
+ x = x.reshape(1, 128, 128, 3)
+ res = model.predict_on_batch(x)
+ classification = np.where(res == np.amax(res))[1][0]  # column index of the largest probability, i.e. the predicted class
+ imshow(img)
+ print(str(res[0][classification]*100) + '% Confidence This Is A ' + names(classification))
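+ # Note (not part of the original notebook): np.argmax(res[0]) would select the same
+ # predicted class index with less machinery than the np.where(...) lookup above.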
+
+ from matplotlib.pyplot import imshow
+ img = Image.open(r"../content/brain_tumor_dataset/no/18 no.jpg")
+ x = np.array(img.resize((128, 128)))
+ x = x.reshape(1, 128, 128, 3)
+ res = model.predict_on_batch(x)
+ classification = np.where(res == np.amax(res))[1][0]
+ imshow(img)
+ print(str(res[0][classification]*100) + '% Confidence This Is A ' + names(classification))
+
+ """# That's It !!"""