Ben Wolfson committed
Commit · 067afc1 · 1 Parent(s): f335d81
more updates
This view is limited to 50 files because the commit contains too many changes.
- CNN.ipynb +135 -60
- test/bench_0.jpg +3 -0
- test/bench_105.jpg +3 -0
- test/bench_106.jpg +3 -0
- test/bench_11.jpg +3 -0
- test/bench_115.jpg +3 -0
- test/bench_116.jpg +3 -0
- test/bench_121.jpg +3 -0
- test/bench_13.jpg +3 -0
- test/bench_133.jpg +3 -0
- test/bench_136.jpg +3 -0
- test/bench_149.jpg +3 -0
- test/bench_152.jpg +3 -0
- test/bench_23.jpg +3 -0
- test/bench_24.jpg +3 -0
- test/bench_29.jpg +3 -0
- test/bench_41.jpg +3 -0
- test/bench_5.jpg +3 -0
- test/bench_52.jpg +3 -0
- test/bench_60.jpg +3 -0
- test/bench_62.jpg +3 -0
- test/bench_70.jpg +3 -0
- test/bench_71.jpg +3 -0
- test/bench_72.jpg +3 -0
- test/bench_75.jpg +3 -0
- test/bench_78.jpg +3 -0
- test/bench_79.jpg +3 -0
- test/bench_80.jpg +3 -0
- test/bench_86.jpg +3 -0
- test/bench_89.jpg +3 -0
- test/bench_9.jpg +3 -0
- test/bench_95.jpg +3 -0
- test/bench_96.jpg +3 -0
- test/bench_97.jpg +3 -0
- test/bench_99.jpg +3 -0
- test/deadlift_11.jpg +3 -0
- test/deadlift_12.jpg +3 -0
- test/deadlift_121.jpg +3 -0
- test/deadlift_125.jpg +3 -0
- test/deadlift_134.jpg +3 -0
- test/deadlift_135.jpg +3 -0
- test/deadlift_141.jpg +3 -0
- test/deadlift_157.jpg +3 -0
- test/deadlift_158.jpg +3 -0
- test/deadlift_17.jpg +3 -0
- test/deadlift_171.jpg +3 -0
- test/deadlift_172.jpg +3 -0
- test/deadlift_173.jpg +3 -0
- test/deadlift_182.jpg +3 -0
- test/deadlift_190.jpg +3 -0
CNN.ipynb
CHANGED
@@ -16,6 +16,28 @@
16 |
" os.rename(src, string)"
|
17 |
]
|
18 |
},
|
19 |
{
|
20 |
"cell_type": "markdown",
|
21 |
"metadata": {},
|
@@ -25,17 +47,11 @@
25 |
},
|
26 |
{
|
27 |
"cell_type": "code",
|
28 |
-
"execution_count":
|
29 |
-
"metadata": {
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
"output_type": "stream",
|
34 |
-
"text": [
|
35 |
-
"(549,) ()\n"
|
36 |
-
]
|
37 |
-
}
|
38 |
-
],
|
39 |
"source": [
|
40 |
"from os import listdir\n",
|
41 |
"from numpy import asarray\n",
|
@@ -44,46 +60,93 @@
44 |
"from keras.preprocessing.image import img_to_array\n",
|
45 |
"\n",
|
46 |
"folder = \"train/\"\n",
|
47 |
-
"
|
48 |
"\n",
|
49 |
"for file in listdir(folder):\n",
|
50 |
-
" output = 0
|
51 |
-
" if
|
52 |
-
" output = 1
|
53 |
-
" if
|
54 |
-
" output = 2
|
55 |
" photo = load_img(folder + file, target_size=(150,150))\n",
|
56 |
-
" photo = img_to_array\n",
|
57 |
-
" \n",
|
58 |
-
" photos.append(photo)\n",
|
59 |
" labels.append(output)\n",
|
60 |
-
"photos
|
61 |
-
"labels = asarray(output)\n",
|
62 |
-
"print(photos.shape, labels.shape)\n",
|
63 |
"\n",
|
64 |
-
"
|
65 |
-
"
|
66 |
]
|
67 |
},
|
68 |
{
|
69 |
"cell_type": "code",
|
70 |
-
"execution_count":
|
71 |
"metadata": {},
|
72 |
"outputs": [
|
73 |
{
|
74 |
"name": "stdout",
|
75 |
"output_type": "stream",
|
76 |
"text": [
|
77 |
-
"
|
78 |
]
|
79 |
}
|
80 |
],
|
81 |
"source": [
|
82 |
-
"
|
83 |
-
"photos = load(\"
|
84 |
-
"labels = load(\"
|
85 |
-
"
|
86 |
-
"print(photos.shape, labels.shape)"
|
87 |
]
|
88 |
},
|
89 |
{
|
@@ -133,37 +196,30 @@
133 |
},
|
134 |
{
|
135 |
"cell_type": "code",
|
136 |
-
"execution_count":
|
137 |
"metadata": {},
|
138 |
"outputs": [
|
139 |
{
|
140 |
-
"
|
141 |
-
"
|
142 |
-
"text": [
|
143 |
-
"Found 448 images belonging to 3 classes.\n",
|
144 |
-
"1\n",
|
145 |
-
"Found 101 images belonging to 3 classes.\n"
|
146 |
-
]
|
147 |
-
},
|
148 |
-
{
|
149 |
-
"ename": "InvalidArgumentError",
|
150 |
-
"evalue": " Matrix size-incompatible: In[0]: [128,3], In[1]: [128,1]\n\t [[node gradient_tape/sequential_21/dense_41/MatMul (defined at <ipython-input-70-ca63fab2532d>:115) ]] [Op:__inference_train_function_17586]\n\nFunction call stack:\ntrain_function\n",
|
151 |
"output_type": "error",
|
152 |
"traceback": [
|
153 |
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
|
154 |
-
"\u001b[1;
|
155 |
-
"\u001b[1;32m<ipython-input-
|
156 |
-
"\u001b[1;32m<ipython-input-
|
157 |
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36m_method_wrapper\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 106\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0m_method_wrapper\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 107\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_in_multi_worker_mode\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m \u001b[1;31m# pylint: disable=protected-access\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 108\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mmethod\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 109\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 110\u001b[0m \u001b[1;31m# Running inside `run_distribute_coordinator` already.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
158 |
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[0;32m 1096\u001b[0m batch_size=batch_size):\n\u001b[0;32m 1097\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mon_train_batch_begin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1098\u001b[1;33m \u001b[0mtmp_logs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtrain_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1099\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1100\u001b[0m \u001b[0mcontext\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
159 |
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 778\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 779\u001b[0m \u001b[0mcompiler\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m\"nonXla\"\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 780\u001b[1;33m \u001b[0mresult\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 781\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 782\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_get_tracing_count\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
160 |
-
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m_call\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m
|
161 |
-
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\
|
162 |
-
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;
|
163 |
-
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;
|
164 |
-
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;
|
165 |
-
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\
|
166 |
-
"\u001b[1;
|
167 |
]
|
168 |
}
|
169 |
],
|
@@ -184,8 +240,10 @@
184 |
"from keras.models import Sequential\n",
|
185 |
"from keras.layers import Dense, Dropout, Flatten\n",
|
186 |
"from keras.layers import Conv2D, MaxPooling2D\n",
|
187 |
"\n",
|
188 |
" \n",
|
189 |
"# one block VGG\n",
|
190 |
"\"\"\"\n",
|
191 |
"def define_model():\n",
|
@@ -225,7 +283,7 @@
225 |
" model.add(Dropout(0.2))\n",
|
226 |
" model.add(Flatten())\n",
|
227 |
" model.add(Dense(128, activation='relu'))\n",
|
228 |
-
" model.add(Dense(
|
229 |
" # compile model\n",
|
230 |
" #opt = SGD(lr=0.001, momentum=0.9)\n",
|
231 |
" model.compile(optimizer=keras.optimizers.Adam(), loss='categorical_crossentropy', metrics=['accuracy'])\n",
|
@@ -268,7 +326,22 @@
268 |
" filename = sys.argv[0].split('/')[-1]\n",
|
269 |
" pyplot.savefig(filename + '_plot.png')\n",
|
270 |
" pyplot.close()\n",
|
271 |
-
"
|
272 |
"# run the test harness for evaluating a model\n",
|
273 |
"def run_test_harness():\n",
|
274 |
" # define model\n",
|
@@ -276,10 +349,12 @@
276 |
" # create data generator\n",
|
277 |
" datagen = ImageDataGenerator(rescale=1.0/255.0)\n",
|
278 |
" # prepare iterators\n",
|
279 |
-
" train_it = datagen.flow_from_directory('dataset/train/',\n",
|
280 |
-
"
|
281 |
-
" test_it = datagen.flow_from_directory('dataset/test/',\n",
|
282 |
-
"
|
283 |
" # fit model\n",
|
284 |
" history = model.fit(train_it, steps_per_epoch=len(train_it),\n",
|
285 |
" validation_data=test_it, validation_steps=len(test_it), epochs=20, verbose=0)\n",
|
|
|
16 |
" os.rename(src, string)"
|
17 |
]
|
18 |
},
|
19 |
+
{
|
20 |
+
"cell_type": "code",
|
21 |
+
"execution_count": 97,
|
22 |
+
"metadata": {},
|
23 |
+
"outputs": [
|
24 |
+
{
|
25 |
+
"ename": "SyntaxError",
|
26 |
+
"evalue": "unexpected EOF while parsing (<ipython-input-97-439ef8adfd37>, line 5)",
|
27 |
+
"output_type": "error",
|
28 |
+
"traceback": [
|
29 |
+
"\u001b[1;36m File \u001b[1;32m\"<ipython-input-97-439ef8adfd37>\"\u001b[1;36m, line \u001b[1;32m5\u001b[0m\n\u001b[1;33m \u001b[0m\n\u001b[1;37m ^\u001b[0m\n\u001b[1;31mSyntaxError\u001b[0m\u001b[1;31m:\u001b[0m unexpected EOF while parsing\n"
|
30 |
+
]
|
31 |
+
}
|
32 |
+
],
|
33 |
+
"source": [
|
34 |
+
"import os\n",
|
35 |
+
"from os import path\n",
|
36 |
+
"\n",
|
37 |
+
"for count in enumerate(os.listdir(\"dataset/test/deadlift\")):\n",
|
38 |
+
" "
|
39 |
+
]
|
40 |
+
},
|
41 |
{
|
42 |
"cell_type": "markdown",
|
43 |
"metadata": {},
|
47 |
},
|
48 |
{
|
49 |
"cell_type": "code",
|
50 |
+
"execution_count": 8,
|
51 |
+
"metadata": {
|
52 |
+
"scrolled": false
|
53 |
+
},
|
54 |
+
"outputs": [],
|
55 |
"source": [
|
56 |
"from os import listdir\n",
|
57 |
"from numpy import asarray\n",
|
60 |
"from keras.preprocessing.image import img_to_array\n",
|
61 |
"\n",
|
62 |
"folder = \"train/\"\n",
|
63 |
+
"\n",
|
64 |
+
"photos = []\n",
|
65 |
+
"labels = []\n",
|
66 |
"\n",
|
67 |
"for file in listdir(folder):\n",
|
68 |
+
" output = 0\n",
|
69 |
+
" if \"squat\" in file:\n",
|
70 |
+
" output = 1\n",
|
71 |
+
" if \"deadlift\" in file:\n",
|
72 |
+
" output = 2\n",
|
73 |
" photo = load_img(folder + file, target_size=(150,150))\n",
|
74 |
+
" photo = img_to_array(photo)\n",
|
75 |
" labels.append(output)\n",
|
76 |
+
" photos.append(photo)\n",
|
77 |
"\n",
|
78 |
+
"photos = asarray(photos)\n",
|
79 |
+
"labels = asarray(labels)\n",
|
80 |
+
"save(\"train_photos.npy\", photos)\n",
|
81 |
+
"save(\"train_labels.npy\", labels)"
|
82 |
]
|
83 |
},
|
84 |
{
|
85 |
"cell_type": "code",
|
86 |
+
"execution_count": 11,
|
87 |
"metadata": {},
|
88 |
"outputs": [
|
89 |
{
|
90 |
"name": "stdout",
|
91 |
"output_type": "stream",
|
92 |
"text": [
|
93 |
+
"[[[255. 255. 255.]\n",
|
94 |
+
" [255. 255. 255.]\n",
|
95 |
+
" [255. 255. 255.]\n",
|
96 |
+
" ...\n",
|
97 |
+
" [255. 255. 255.]\n",
|
98 |
+
" [255. 255. 255.]\n",
|
99 |
+
" [255. 255. 255.]]\n",
|
100 |
+
"\n",
|
101 |
+
" [[255. 255. 255.]\n",
|
102 |
+
" [255. 255. 255.]\n",
|
103 |
+
" [255. 255. 255.]\n",
|
104 |
+
" ...\n",
|
105 |
+
" [255. 255. 255.]\n",
|
106 |
+
" [255. 255. 255.]\n",
|
107 |
+
" [255. 255. 255.]]\n",
|
108 |
+
"\n",
|
109 |
+
" [[255. 255. 255.]\n",
|
110 |
+
" [255. 255. 255.]\n",
|
111 |
+
" [255. 255. 255.]\n",
|
112 |
+
" ...\n",
|
113 |
+
" [255. 255. 255.]\n",
|
114 |
+
" [255. 255. 255.]\n",
|
115 |
+
" [255. 255. 255.]]\n",
|
116 |
+
"\n",
|
117 |
+
" ...\n",
|
118 |
+
"\n",
|
119 |
+
" [[255. 255. 255.]\n",
|
120 |
+
" [255. 255. 255.]\n",
|
121 |
+
" [255. 255. 255.]\n",
|
122 |
+
" ...\n",
|
123 |
+
" [255. 255. 255.]\n",
|
124 |
+
" [255. 255. 255.]\n",
|
125 |
+
" [255. 255. 255.]]\n",
|
126 |
+
"\n",
|
127 |
+
" [[255. 255. 255.]\n",
|
128 |
+
" [255. 255. 255.]\n",
|
129 |
+
" [255. 255. 255.]\n",
|
130 |
+
" ...\n",
|
131 |
+
" [255. 255. 255.]\n",
|
132 |
+
" [255. 255. 255.]\n",
|
133 |
+
" [255. 255. 255.]]\n",
|
134 |
+
"\n",
|
135 |
+
" [[255. 255. 255.]\n",
|
136 |
+
" [255. 255. 255.]\n",
|
137 |
+
" [255. 255. 255.]\n",
|
138 |
+
" ...\n",
|
139 |
+
" [255. 255. 255.]\n",
|
140 |
+
" [255. 255. 255.]\n",
|
141 |
+
" [255. 255. 255.]]]\n"
|
142 |
]
|
143 |
}
|
144 |
],
|
145 |
"source": [
|
146 |
+
"import numpy as np\n",
|
147 |
+
"photos = np.load(\"test_photos.npy\",allow_pickle=True)\n",
|
148 |
+
"labels = np.load(\"test_labels.npy\",allow_pickle=True)\n",
|
149 |
+
"print(np.array(photos[0]))"
|
150 |
]
|
151 |
},
|
152 |
{
|
196 |
},
|
197 |
{
|
198 |
"cell_type": "code",
|
199 |
+
"execution_count": 15,
|
200 |
"metadata": {},
|
201 |
"outputs": [
|
202 |
{
|
203 |
+
"ename": "ValueError",
|
204 |
+
"evalue": "in user code:\n\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:806 train_function *\n return step_function(self, iterator)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:796 step_function **\n outputs = model.distribute_strategy.run(run_step, args=(data,))\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:1211 run\n return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2585 call_for_each_replica\n return self._call_for_each_replica(fn, args, kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2945 _call_for_each_replica\n return fn(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:789 run_step **\n outputs = model.train_step(data)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:748 train_step\n loss = self.compiled_loss(\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\compile_utils.py:204 __call__\n loss_value = loss_obj(y_t, y_p, sample_weight=sw)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:149 __call__\n losses = ag_call(y_true, y_pred)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:253 call **\n return ag_fn(y_true, y_pred, **self._fn_kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:1535 categorical_crossentropy\n return K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\backend.py:4687 categorical_crossentropy\n target.shape.assert_is_compatible_with(output.shape)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\tensor_shape.py:1134 assert_is_compatible_with\n raise ValueError(\"Shapes %s and %s are incompatible\" % (self, other))\n\n ValueError: Shapes (None, 1) and (None, 3) are incompatible\n",
|
205 |
"output_type": "error",
|
206 |
"traceback": [
|
207 |
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
|
208 |
+
"\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
|
209 |
+
"\u001b[1;32m<ipython-input-15-883c4c3058a6>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 140\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 141\u001b[0m \u001b[1;31m# entry point, run the test harness\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 142\u001b[1;33m \u001b[0mrun_test_harness\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
|
210 |
+
"\u001b[1;32m<ipython-input-15-883c4c3058a6>\u001b[0m in \u001b[0;36mrun_test_harness\u001b[1;34m()\u001b[0m\n\u001b[0;32m 131\u001b[0m \u001b[0mtest_it\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mdatagen\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mflow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mtest_photos_\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mtest_labels\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 132\u001b[0m \u001b[1;31m# fit model\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 133\u001b[1;33m history = model.fit(train_it, steps_per_epoch=len(train_it),\n\u001b[0m\u001b[0;32m 134\u001b[0m validation_data=test_it, validation_steps=len(test_it), epochs=20, verbose=0)\n\u001b[0;32m 135\u001b[0m \u001b[1;31m# evaluate model\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
211 |
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36m_method_wrapper\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 106\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0m_method_wrapper\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 107\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_in_multi_worker_mode\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m \u001b[1;31m# pylint: disable=protected-access\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 108\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mmethod\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 109\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 110\u001b[0m \u001b[1;31m# Running inside `run_distribute_coordinator` already.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
212 |
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[0;32m 1096\u001b[0m batch_size=batch_size):\n\u001b[0;32m 1097\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mon_train_batch_begin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1098\u001b[1;33m \u001b[0mtmp_logs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtrain_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1099\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1100\u001b[0m \u001b[0mcontext\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
213 |
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 778\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 779\u001b[0m \u001b[0mcompiler\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m\"nonXla\"\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 780\u001b[1;33m \u001b[0mresult\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 781\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 782\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_get_tracing_count\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
214 |
+
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m_call\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 821\u001b[0m \u001b[1;31m# This is the first call of __call__, so we have to initialize.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 822\u001b[0m \u001b[0minitializers\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 823\u001b[1;33m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_initialize\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwds\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0madd_initializers_to\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0minitializers\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 824\u001b[0m \u001b[1;32mfinally\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 825\u001b[0m \u001b[1;31m# At this point we know that the initialization is complete (or less\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
215 |
+
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m_initialize\u001b[1;34m(self, args, kwds, add_initializers_to)\u001b[0m\n\u001b[0;32m 694\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_graph_deleter\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mFunctionDeleter\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_lifted_initializer_graph\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 695\u001b[0m self._concrete_stateful_fn = (\n\u001b[1;32m--> 696\u001b[1;33m self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\n\u001b[0m\u001b[0;32m 697\u001b[0m *args, **kwds))\n\u001b[0;32m 698\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
|
216 |
+
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_get_concrete_function_internal_garbage_collected\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 2853\u001b[0m \u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2854\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_lock\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 2855\u001b[1;33m \u001b[0mgraph_function\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0m_\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0m_\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_maybe_define_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2856\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2857\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
|
217 |
+
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_maybe_define_function\u001b[1;34m(self, args, kwargs)\u001b[0m\n\u001b[0;32m 3211\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3212\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_function_cache\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmissed\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0madd\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mcall_context_key\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 3213\u001b[1;33m \u001b[0mgraph_function\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_create_graph_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 3214\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_function_cache\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mprimary\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mcache_key\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3215\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
218 |
+
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_create_graph_function\u001b[1;34m(self, args, kwargs, override_flat_arg_shapes)\u001b[0m\n\u001b[0;32m 3063\u001b[0m \u001b[0marg_names\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mbase_arg_names\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0mmissing_arg_names\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3064\u001b[0m graph_function = ConcreteFunction(\n\u001b[1;32m-> 3065\u001b[1;33m func_graph_module.func_graph_from_py_func(\n\u001b[0m\u001b[0;32m 3066\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_name\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3067\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_python_function\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
219 |
+
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\func_graph.py\u001b[0m in \u001b[0;36mfunc_graph_from_py_func\u001b[1;34m(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\u001b[0m\n\u001b[0;32m 984\u001b[0m \u001b[0m_\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0moriginal_func\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtf_decorator\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0munwrap\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mpython_func\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 985\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 986\u001b[1;33m \u001b[0mfunc_outputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mpython_func\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0mfunc_args\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mfunc_kwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 987\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 988\u001b[0m \u001b[1;31m# invariant: `func_outputs` contains only Tensors, CompositeTensors,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
220 |
+
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36mwrapped_fn\u001b[1;34m(*args, **kwds)\u001b[0m\n\u001b[0;32m 598\u001b[0m \u001b[1;31m# __wrapped__ allows AutoGraph to swap in a converted function. We give\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 599\u001b[0m \u001b[1;31m# the function a weak reference to itself to avoid a reference cycle.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 600\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mweak_wrapped_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m__wrapped__\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 601\u001b[0m \u001b[0mweak_wrapped_fn\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mweakref\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mref\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mwrapped_fn\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 602\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
|
221 |
+
"\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\func_graph.py\u001b[0m in \u001b[0;36mwrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 971\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mException\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m:\u001b[0m \u001b[1;31m# pylint:disable=broad-except\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 972\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m\"ag_error_metadata\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 973\u001b[1;33m \u001b[1;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mag_error_metadata\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mto_exception\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 974\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 975\u001b[0m \u001b[1;32mraise\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
|
222 |
+
"\u001b[1;31mValueError\u001b[0m: in user code:\n\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:806 train_function *\n return step_function(self, iterator)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:796 step_function **\n outputs = model.distribute_strategy.run(run_step, args=(data,))\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:1211 run\n return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2585 call_for_each_replica\n return self._call_for_each_replica(fn, args, kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2945 _call_for_each_replica\n return fn(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:789 run_step **\n outputs = model.train_step(data)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:748 train_step\n loss = self.compiled_loss(\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\compile_utils.py:204 __call__\n loss_value = loss_obj(y_t, y_p, sample_weight=sw)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:149 __call__\n losses = ag_call(y_true, y_pred)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:253 call **\n return ag_fn(y_true, y_pred, **self._fn_kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:1535 categorical_crossentropy\n return K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\backend.py:4687 categorical_crossentropy\n target.shape.assert_is_compatible_with(output.shape)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\tensor_shape.py:1134 assert_is_compatible_with\n raise ValueError(\"Shapes %s and %s are incompatible\" % (self, other))\n\n ValueError: Shapes (None, 1) and (None, 3) are incompatible\n"
|
223 |
]
|
224 |
}
|
225 |
],
|
240 |
"from keras.models import Sequential\n",
|
241 |
"from keras.layers import Dense, Dropout, Flatten\n",
|
242 |
"from keras.layers import Conv2D, MaxPooling2D\n",
|
243 |
+
"import numpy as np\n",
|
244 |
"\n",
|
245 |
" \n",
|
246 |
+
"\n",
|
247 |
"# one block VGG\n",
|
248 |
"\"\"\"\n",
|
249 |
"def define_model():\n",
|
|
283 |
" model.add(Dropout(0.2))\n",
|
284 |
" model.add(Flatten())\n",
|
285 |
" model.add(Dense(128, activation='relu'))\n",
|
286 |
+
" model.add(Dense(3, activation='softmax'))\n",
|
287 |
" # compile model\n",
|
288 |
" #opt = SGD(lr=0.001, momentum=0.9)\n",
|
289 |
" model.compile(optimizer=keras.optimizers.Adam(), loss='categorical_crossentropy', metrics=['accuracy'])\n",
|
326 |
" filename = sys.argv[0].split('/')[-1]\n",
|
327 |
" pyplot.savefig(filename + '_plot.png')\n",
|
328 |
" pyplot.close()\n",
|
329 |
+
"\n",
|
330 |
+
"import skimage.transform\n",
|
331 |
+
"new_shape = (200,200,3)\n",
|
332 |
+
"train_photos = np.load(\"train_photos.npy\",allow_pickle=True)\n",
|
333 |
+
"train_labels = np.load(\"train_labels.npy\",allow_pickle=True)\n",
|
334 |
+
"test_photos = np.load(\"test_photos.npy\",allow_pickle=True)\n",
|
335 |
+
"test_labels = np.load(\"test_labels.npy\",allow_pickle=True)\n",
|
336 |
+
"\n",
|
337 |
+
"train_photos_ = np.empty(shape=(train_photos.shape[0],)+new_shape)\n",
|
338 |
+
"for idx in range(train_photos.shape[0]):\n",
|
339 |
+
" \n",
|
340 |
+
" train_photos_[idx] = skimage.transform.resize(train_photos[idx], new_shape)\n",
|
341 |
+
"test_photos_ = np.empty(shape=(test_photos.shape[0],)+new_shape)\n",
|
342 |
+
"for idx in range(test_photos.shape[0]):\n",
|
343 |
+
" test_photos_[idx] = skimage.transform.resize(test_photos[idx], new_shape)\n",
|
344 |
+
" \n",
|
345 |
"# run the test harness for evaluating a model\n",
|
346 |
"def run_test_harness():\n",
|
347 |
" # define model\n",
|
349 |
" # create data generator\n",
|
350 |
" datagen = ImageDataGenerator(rescale=1.0/255.0)\n",
|
351 |
" # prepare iterators\n",
|
352 |
+
" #train_it = datagen.flow_from_directory('dataset/train/',\n",
|
353 |
+
" # class_mode='categorical', batch_size=128, target_size=(150, 150))\n",
|
354 |
+
" #test_it = datagen.flow_from_directory('dataset/test/',\n",
|
355 |
+
" # class_mode='categorical', batch_size=128, target_size=(150, 150))\n",
|
356 |
+
" train_it = datagen.flow(train_photos_, train_labels)\n",
|
357 |
+
" test_it = datagen.flow(test_photos_, test_labels)\n",
|
358 |
" # fit model\n",
|
359 |
" history = model.fit(train_it, steps_per_epoch=len(train_it),\n",
|
360 |
" validation_data=test_it, validation_steps=len(test_it), epochs=20, verbose=0)\n",
|
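Note on the ValueError recorded in the new cell outputs above ("Shapes (None, 1) and (None, 3) are incompatible"): it arises because integer labels (0/1/2) are passed to datagen.flow() while the model now ends in Dense(3, activation='softmax') and is compiled with categorical_crossentropy. Below is a minimal sketch of one common fix, one-hot encoding the labels before building the iterators; it assumes the notebook's arrays (train_photos_, train_labels, test_photos_, test_labels) are in scope and that keras.utils.to_categorical is available. This is an illustration, not part of the commit.

# Sketch only (assumes the notebook's variable names and a Keras 2.x install).
from keras.utils import to_categorical
from keras.preprocessing.image import ImageDataGenerator

# Integer labels 0/1/2 -> one-hot vectors of length 3, so the target shape
# (None, 3) matches the Dense(3, activation='softmax') output layer.
train_labels_cat = to_categorical(train_labels, num_classes=3)
test_labels_cat = to_categorical(test_labels, num_classes=3)

datagen = ImageDataGenerator(rescale=1.0/255.0)
train_it = datagen.flow(train_photos_, train_labels_cat)
test_it = datagen.flow(test_photos_, test_labels_cat)

Alternatively, the integer labels can be kept as-is by compiling the model with loss='sparse_categorical_crossentropy' instead of categorical_crossentropy.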
test/bench_0.jpg ADDED (Git LFS)
test/bench_105.jpg ADDED (Git LFS)
test/bench_106.jpg ADDED (Git LFS)
test/bench_11.jpg ADDED (Git LFS)
test/bench_115.jpg ADDED (Git LFS)
test/bench_116.jpg ADDED (Git LFS)
test/bench_121.jpg ADDED (Git LFS)
test/bench_13.jpg ADDED (Git LFS)
test/bench_133.jpg ADDED (Git LFS)
test/bench_136.jpg ADDED (Git LFS)
test/bench_149.jpg ADDED (Git LFS)
test/bench_152.jpg ADDED (Git LFS)
test/bench_23.jpg ADDED (Git LFS)
test/bench_24.jpg ADDED (Git LFS)
test/bench_29.jpg ADDED (Git LFS)
test/bench_41.jpg ADDED (Git LFS)
test/bench_5.jpg ADDED (Git LFS)
test/bench_52.jpg ADDED (Git LFS)
test/bench_60.jpg ADDED (Git LFS)
test/bench_62.jpg ADDED (Git LFS)
test/bench_70.jpg ADDED (Git LFS)
test/bench_71.jpg ADDED (Git LFS)
test/bench_72.jpg ADDED (Git LFS)
test/bench_75.jpg ADDED (Git LFS)
test/bench_78.jpg ADDED (Git LFS)
test/bench_79.jpg ADDED (Git LFS)
test/bench_80.jpg ADDED (Git LFS)
test/bench_86.jpg ADDED (Git LFS)
test/bench_89.jpg ADDED (Git LFS)
test/bench_9.jpg ADDED (Git LFS)
test/bench_95.jpg ADDED (Git LFS)
test/bench_96.jpg ADDED (Git LFS)
test/bench_97.jpg ADDED (Git LFS)
test/bench_99.jpg ADDED (Git LFS)
test/deadlift_11.jpg ADDED (Git LFS)
test/deadlift_12.jpg ADDED (Git LFS)
test/deadlift_121.jpg ADDED (Git LFS)
test/deadlift_125.jpg ADDED (Git LFS)
test/deadlift_134.jpg ADDED (Git LFS)
test/deadlift_135.jpg ADDED (Git LFS)
test/deadlift_141.jpg ADDED (Git LFS)
test/deadlift_157.jpg ADDED (Git LFS)
test/deadlift_158.jpg ADDED (Git LFS)
test/deadlift_17.jpg ADDED (Git LFS)
test/deadlift_171.jpg ADDED (Git LFS)
test/deadlift_172.jpg ADDED (Git LFS)
test/deadlift_173.jpg ADDED (Git LFS)
test/deadlift_182.jpg ADDED (Git LFS)
test/deadlift_190.jpg ADDED (Git LFS)