Ben Wolfson committed on
Commit
a657511
·
1 Parent(s): 3a9ad40

Updated cnn

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. CNN.ipynb +255 -11
  2. dataset/test/bench/bench_0.jpg +3 -0
  3. dataset/test/bench/bench_105.jpg +3 -0
  4. dataset/test/bench/bench_106.jpg +3 -0
  5. dataset/test/bench/bench_11.jpg +3 -0
  6. dataset/test/bench/bench_115.jpg +3 -0
  7. dataset/test/bench/bench_116.jpg +3 -0
  8. dataset/test/bench/bench_121.jpg +3 -0
  9. dataset/test/bench/bench_13.jpg +3 -0
  10. dataset/test/bench/bench_133.jpg +3 -0
  11. dataset/test/bench/bench_136.jpg +3 -0
  12. dataset/test/bench/bench_149.jpg +3 -0
  13. dataset/test/bench/bench_152.jpg +3 -0
  14. dataset/test/bench/bench_23.jpg +3 -0
  15. dataset/test/bench/bench_24.jpg +3 -0
  16. dataset/test/bench/bench_29.jpg +3 -0
  17. dataset/test/bench/bench_41.jpg +3 -0
  18. dataset/test/bench/bench_5.jpg +3 -0
  19. dataset/test/bench/bench_52.jpg +3 -0
  20. dataset/test/bench/bench_60.jpg +3 -0
  21. dataset/test/bench/bench_62.jpg +3 -0
  22. dataset/test/bench/bench_70.jpg +3 -0
  23. dataset/test/bench/bench_71.jpg +3 -0
  24. dataset/test/bench/bench_72.jpg +3 -0
  25. dataset/test/bench/bench_75.jpg +3 -0
  26. dataset/test/bench/bench_78.jpg +3 -0
  27. dataset/test/bench/bench_79.jpg +3 -0
  28. dataset/test/bench/bench_80.jpg +3 -0
  29. dataset/test/bench/bench_86.jpg +3 -0
  30. dataset/test/bench/bench_89.jpg +3 -0
  31. dataset/test/bench/bench_9.jpg +3 -0
  32. dataset/test/bench/bench_95.jpg +3 -0
  33. dataset/test/bench/bench_96.jpg +3 -0
  34. dataset/test/bench/bench_97.jpg +3 -0
  35. dataset/test/bench/bench_99.jpg +3 -0
  36. dataset/test/deadlift/deadlift_11.jpg +3 -0
  37. dataset/test/deadlift/deadlift_12.jpg +3 -0
  38. dataset/test/deadlift/deadlift_121.jpg +3 -0
  39. dataset/test/deadlift/deadlift_125.jpg +3 -0
  40. dataset/test/deadlift/deadlift_134.jpg +3 -0
  41. dataset/test/deadlift/deadlift_135.jpg +3 -0
  42. dataset/test/deadlift/deadlift_141.jpg +3 -0
  43. dataset/test/deadlift/deadlift_157.jpg +3 -0
  44. dataset/test/deadlift/deadlift_158.jpg +3 -0
  45. dataset/test/deadlift/deadlift_17.jpg +3 -0
  46. dataset/test/deadlift/deadlift_171.jpg +3 -0
  47. dataset/test/deadlift/deadlift_172.jpg +3 -0
  48. dataset/test/deadlift/deadlift_173.jpg +3 -0
  49. dataset/test/deadlift/deadlift_182.jpg +3 -0
  50. dataset/test/deadlift/deadlift_190.jpg +3 -0
CNN.ipynb CHANGED
@@ -2,31 +2,275 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 5,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
9
- "# Dataset Building Imports\n",
10
- "from requests import exceptions\n",
11
- "import argparse\n",
12
- "import requests\n",
13
- "import cv2\n",
14
- "import os"
 
 
 
 
 
 
 
 
 
15
  ]
16
  },
17
  {
18
  "cell_type": "code",
19
- "execution_count": null,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
  "metadata": {},
21
  "outputs": [],
22
- "source": []
 
 
 
 
 
 
 
 
 
 
 
23
  },
24
  {
25
  "cell_type": "code",
26
- "execution_count": null,
27
  "metadata": {},
28
  "outputs": [],
29
- "source": []
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
  },
31
  {
32
  "cell_type": "code",
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 36,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
9
+ "# File renaming for consistency\n",
10
+ "import os\n",
11
+ "from os import path\n",
12
+ "\n",
13
+ "for count, filename in enumerate(os.listdir(\"deadlift2\")):\n",
14
+ " src = \"deadlift2/\" + filename\n",
15
+ " string = \"deadlift2/deadlift_\" + str(count) + \".jpg\"\n",
16
+ " os.rename(src, string)"
17
+ ]
18
+ },
19
+ {
20
+ "cell_type": "markdown",
21
+ "metadata": {},
22
+ "source": [
23
+ "https://machinelearningmastery.com/how-to-develop-a-convolutional-neural-network-to-classify-photos-of-dogs-and-cats/"
24
  ]
25
  },
26
  {
27
  "cell_type": "code",
28
+ "execution_count": 37,
29
+ "metadata": {},
30
+ "outputs": [
31
+ {
32
+ "name": "stdout",
33
+ "output_type": "stream",
34
+ "text": [
35
+ "(549,) ()\n"
36
+ ]
37
+ }
38
+ ],
39
+ "source": [
40
+ "from os import listdir\n",
41
+ "from numpy import asarray\n",
42
+ "from numpy import save\n",
43
+ "from keras.preprocessing.image import load_img\n",
44
+ "from keras.preprocessing.image import img_to_array\n",
45
+ "\n",
46
+ "folder = \"train/\"\n",
47
+ "photos, labels = list(), list()\n",
48
+ "\n",
49
+ "for file in listdir(folder):\n",
50
+ " output = 0.0\n",
51
+ " if file.startswith(\"squat\"):\n",
52
+ " output = 1.0\n",
53
+ " if file.startswith(\"deadlift\"):\n",
54
+ " output = 2.0\n",
55
+ " photo = load_img(folder + file, target_size=(150,150))\n",
56
+ " photo = img_to_array\n",
57
+ " \n",
58
+ " photos.append(photo)\n",
59
+ " labels.append(output)\n",
60
+ "photos = asarray(photos)\n",
61
+ "labels = asarray(output)\n",
62
+ "print(photos.shape, labels.shape)\n",
63
+ "\n",
64
+ "save(\"exercise_photos.npy\", photos)\n",
65
+ "save(\"exercise_labels.npy\", photos)"
66
+ ]
67
+ },
68
+ {
69
+ "cell_type": "code",
70
+ "execution_count": 39,
71
+ "metadata": {},
72
+ "outputs": [
73
+ {
74
+ "name": "stdout",
75
+ "output_type": "stream",
76
+ "text": [
77
+ "(549,) (549,)\n"
78
+ ]
79
+ }
80
+ ],
81
+ "source": [
82
+ "from numpy import load\n",
83
+ "photos = load(\"exercise_photos.npy\",allow_pickle=True)\n",
84
+ "labels = load(\"exercise_labels.npy\",allow_pickle=True)\n",
85
+ "\n",
86
+ "print(photos.shape, labels.shape)"
87
+ ]
88
+ },
89
+ {
90
+ "cell_type": "code",
91
+ "execution_count": 42,
92
  "metadata": {},
93
  "outputs": [],
94
+ "source": [
95
+ "# Directory Generation\n",
96
+ "from os import makedirs\n",
97
+ "dataset_home = \"dataset/\"\n",
98
+ "subdirs = [\"train/\", \"test/\"]\n",
99
+ "for subdir in subdirs:\n",
100
+ " # create label subdirectories\n",
101
+ " labeldirs = [\"bench/\", \"squat/\", \"deadlift/\"]\n",
102
+ " for labldir in labeldirs:\n",
103
+ " newdir = dataset_home + subdir + labldir\n",
104
+ " makedirs(newdir)"
105
+ ]
106
  },
107
  {
108
  "cell_type": "code",
109
+ "execution_count": 46,
110
  "metadata": {},
111
  "outputs": [],
112
+ "source": [
113
+ "# Segment into testing and training images\n",
114
+ "import random\n",
115
+ "from shutil import copyfile\n",
116
+ "random.seed(1)\n",
117
+ "ratio = 0.2\n",
118
+ "dataset_home = \"dataset/\"\n",
119
+ "src_directory = \"images/\"\n",
120
+ "for file in listdir(src_directory):\n",
121
+ " src = src_directory + '/' + file\n",
122
+ " dst_dir = \"train/\"\n",
123
+ " if random.random() < ratio:\n",
124
+ " dst_dir = \"test/\"\n",
125
+ " if file.startswith(\"bench\"):\n",
126
+ " dst = dataset_home + dst_dir + \"bench/\" + file\n",
127
+ " elif file.startswith(\"squat\"):\n",
128
+ " dst = dataset_home + dst_dir + \"squat/\" + file\n",
129
+ " else:\n",
130
+ " dst = dataset_home + dst_dir + \"deadlift/\" + file\n",
131
+ " copyfile(src, dst) "
132
+ ]
133
+ },
134
+ {
135
+ "cell_type": "code",
136
+ "execution_count": 58,
137
+ "metadata": {},
138
+ "outputs": [
139
+ {
140
+ "name": "stdout",
141
+ "output_type": "stream",
142
+ "text": [
143
+ "Found 448 images belonging to 3 classes.\n",
144
+ "Found 101 images belonging to 3 classes.\n"
145
+ ]
146
+ },
147
+ {
148
+ "ename": "ValueError",
149
+ "evalue": "in user code:\n\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:806 train_function *\n return step_function(self, iterator)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:796 step_function **\n outputs = model.distribute_strategy.run(run_step, args=(data,))\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:1211 run\n return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2585 call_for_each_replica\n return self._call_for_each_replica(fn, args, kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2945 _call_for_each_replica\n return fn(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:789 run_step **\n outputs = model.train_step(data)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:748 train_step\n loss = self.compiled_loss(\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\compile_utils.py:204 __call__\n loss_value = loss_obj(y_t, y_p, sample_weight=sw)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:149 __call__\n losses = ag_call(y_true, y_pred)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:253 call **\n return ag_fn(y_true, y_pred, **self._fn_kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:1535 categorical_crossentropy\n return K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n 
c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\backend.py:4687 categorical_crossentropy\n target.shape.assert_is_compatible_with(output.shape)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\tensor_shape.py:1134 assert_is_compatible_with\n raise ValueError(\"Shapes %s and %s are incompatible\" % (self, other))\n\n ValueError: Shapes (None, 1) and (None, 10) are incompatible\n",
150
+ "output_type": "error",
151
+ "traceback": [
152
+ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
153
+ "\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
154
+ "\u001b[1;32m<ipython-input-58-08697642c99a>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 99\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 100\u001b[0m \u001b[1;31m# entry point, run the test harness\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 101\u001b[1;33m \u001b[0mrun_test_harness\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
155
+ "\u001b[1;32m<ipython-input-58-08697642c99a>\u001b[0m in \u001b[0;36mrun_test_harness\u001b[1;34m()\u001b[0m\n\u001b[0;32m 90\u001b[0m class_mode='binary', batch_size=64, target_size=(150, 150))\n\u001b[0;32m 91\u001b[0m \u001b[1;31m# fit model\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 92\u001b[1;33m history = model.fit(train_it, steps_per_epoch=len(train_it),\n\u001b[0m\u001b[0;32m 93\u001b[0m validation_data=test_it, validation_steps=len(test_it), epochs=20, verbose=0)\n\u001b[0;32m 94\u001b[0m \u001b[1;31m# evaluate model\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
156
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36m_method_wrapper\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 106\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0m_method_wrapper\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 107\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_in_multi_worker_mode\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m \u001b[1;31m# pylint: disable=protected-access\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 108\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mmethod\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 109\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 110\u001b[0m \u001b[1;31m# Running inside `run_distribute_coordinator` already.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
157
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[0;32m 1096\u001b[0m batch_size=batch_size):\n\u001b[0;32m 1097\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mon_train_batch_begin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1098\u001b[1;33m \u001b[0mtmp_logs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtrain_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1099\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1100\u001b[0m \u001b[0mcontext\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
158
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 778\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 779\u001b[0m \u001b[0mcompiler\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m\"nonXla\"\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 780\u001b[1;33m \u001b[0mresult\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 781\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 782\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_get_tracing_count\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
159
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m_call\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 821\u001b[0m \u001b[1;31m# This is the first call of __call__, so we have to initialize.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 822\u001b[0m \u001b[0minitializers\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 823\u001b[1;33m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_initialize\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwds\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0madd_initializers_to\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0minitializers\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 824\u001b[0m \u001b[1;32mfinally\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 825\u001b[0m \u001b[1;31m# At this point we know that the initialization is complete (or less\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
160
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m_initialize\u001b[1;34m(self, args, kwds, add_initializers_to)\u001b[0m\n\u001b[0;32m 694\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_graph_deleter\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mFunctionDeleter\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_lifted_initializer_graph\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 695\u001b[0m self._concrete_stateful_fn = (\n\u001b[1;32m--> 696\u001b[1;33m self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\n\u001b[0m\u001b[0;32m 697\u001b[0m *args, **kwds))\n\u001b[0;32m 698\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
161
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_get_concrete_function_internal_garbage_collected\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 2853\u001b[0m \u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2854\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_lock\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 2855\u001b[1;33m \u001b[0mgraph_function\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0m_\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0m_\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_maybe_define_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2856\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2857\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
162
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_maybe_define_function\u001b[1;34m(self, args, kwargs)\u001b[0m\n\u001b[0;32m 3211\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3212\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_function_cache\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmissed\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0madd\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mcall_context_key\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 3213\u001b[1;33m \u001b[0mgraph_function\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_create_graph_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 3214\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_function_cache\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mprimary\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mcache_key\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3215\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
163
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_create_graph_function\u001b[1;34m(self, args, kwargs, override_flat_arg_shapes)\u001b[0m\n\u001b[0;32m 3063\u001b[0m \u001b[0marg_names\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mbase_arg_names\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0mmissing_arg_names\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3064\u001b[0m graph_function = ConcreteFunction(\n\u001b[1;32m-> 3065\u001b[1;33m func_graph_module.func_graph_from_py_func(\n\u001b[0m\u001b[0;32m 3066\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_name\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3067\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_python_function\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
164
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\func_graph.py\u001b[0m in \u001b[0;36mfunc_graph_from_py_func\u001b[1;34m(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\u001b[0m\n\u001b[0;32m 984\u001b[0m \u001b[0m_\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0moriginal_func\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtf_decorator\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0munwrap\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mpython_func\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 985\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 986\u001b[1;33m \u001b[0mfunc_outputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mpython_func\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0mfunc_args\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mfunc_kwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 987\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 988\u001b[0m \u001b[1;31m# invariant: `func_outputs` contains only Tensors, CompositeTensors,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
165
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36mwrapped_fn\u001b[1;34m(*args, **kwds)\u001b[0m\n\u001b[0;32m 598\u001b[0m \u001b[1;31m# __wrapped__ allows AutoGraph to swap in a converted function. We give\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 599\u001b[0m \u001b[1;31m# the function a weak reference to itself to avoid a reference cycle.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 600\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mweak_wrapped_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m__wrapped__\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 601\u001b[0m \u001b[0mweak_wrapped_fn\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mweakref\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mref\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mwrapped_fn\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 602\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
166
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\func_graph.py\u001b[0m in \u001b[0;36mwrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 971\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mException\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m:\u001b[0m \u001b[1;31m# pylint:disable=broad-except\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 972\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m\"ag_error_metadata\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 973\u001b[1;33m \u001b[1;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mag_error_metadata\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mto_exception\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 974\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 975\u001b[0m \u001b[1;32mraise\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
167
+ "\u001b[1;31mValueError\u001b[0m: in user code:\n\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:806 train_function *\n return step_function(self, iterator)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:796 step_function **\n outputs = model.distribute_strategy.run(run_step, args=(data,))\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:1211 run\n return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2585 call_for_each_replica\n return self._call_for_each_replica(fn, args, kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2945 _call_for_each_replica\n return fn(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:789 run_step **\n outputs = model.train_step(data)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:748 train_step\n loss = self.compiled_loss(\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\compile_utils.py:204 __call__\n loss_value = loss_obj(y_t, y_p, sample_weight=sw)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:149 __call__\n losses = ag_call(y_true, y_pred)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:253 call **\n return ag_fn(y_true, y_pred, **self._fn_kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:1535 categorical_crossentropy\n return K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n 
c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\backend.py:4687 categorical_crossentropy\n target.shape.assert_is_compatible_with(output.shape)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\tensor_shape.py:1134 assert_is_compatible_with\n raise ValueError(\"Shapes %s and %s are incompatible\" % (self, other))\n\n ValueError: Shapes (None, 1) and (None, 10) are incompatible\n"
168
+ ]
169
+ }
170
+ ],
171
+ "source": [
172
+ "# Baseline CNN Model\n",
173
+ "import sys\n",
174
+ "from matplotlib import pyplot\n",
175
+ "import keras\n",
176
+ "from keras.utils import to_categorical\n",
177
+ "from keras.models import Sequential\n",
178
+ "from keras.layers import Conv2D\n",
179
+ "from keras.layers import MaxPooling2D\n",
180
+ "from keras.layers import Dense\n",
181
+ "from keras.layers import Flatten\n",
182
+ "from keras.layers import Dropout\n",
183
+ "from keras.optimizers import SGD\n",
184
+ "from keras.preprocessing.image import ImageDataGenerator\n",
185
+ " \n",
186
+ "# one block VGG\n",
187
+ "\"\"\"\n",
188
+ "def define_model():\n",
189
+ " model = Sequential()\n",
190
+ " model.add(Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same', input_shape=(150, 150, 3)))\n",
191
+ " model.add(MaxPooling2D((2, 2)))\n",
192
+ " model.add(Flatten())\n",
193
+ " model.add(Dense(128, activation='relu', kernel_initializer='he_uniform'))\n",
194
+ " model.add(Dense(1, activation='sigmoid'))\n",
195
+ " # compile model\n",
196
+ " opt = SGD(lr=0.001, momentum=0.9)\n",
197
+ " model.compile(optimizer=opt, loss='binary_crossentropy', metrics=['accuracy'])\n",
198
+ " return model\n",
199
+ "\"\"\"\n",
200
+ "\"\"\"\n",
201
+ "# two block VGG\n",
202
+ "def define_model():\n",
203
+ " model = Sequential()\n",
204
+ " model.add(Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same', input_shape=(150, 150, 3)))\n",
205
+ " model.add(MaxPooling2D((2, 2)))\n",
206
+ " model.add(Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'))\n",
207
+ " model.add(MaxPooling2D((2, 2)))\n",
208
+ " model.add(Flatten())\n",
209
+ " model.add(Dense(128, activation='relu', kernel_initializer='he_uniform'))\n",
210
+ " model.add(Dense(1, activation='sigmoid'))\n",
211
+ " # compile model\n",
212
+ " opt = SGD(lr=0.001, momentum=0.9)\n",
213
+ " model.compile(optimizer=opt, loss='binary_crossentropy', metrics=['accuracy'])\n",
214
+ " return model\n",
215
+ "\"\"\"\n",
216
+ "# three block VGG\n",
217
+ "def define_model():\n",
218
+ "\n",
219
+ " cnn1 = Sequential()\n",
220
+ " cnn1.add(Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))\n",
221
+ " cnn1.add(MaxPooling2D((2, 2)))\n",
222
+ " cnn1.add(Dropout(0.2))\n",
223
+ "\n",
224
+ " cnn1.add(Flatten())\n",
225
+ "\n",
226
+ " cnn1.add(Dense(128, activation='relu'))\n",
227
+ " cnn1.add(Dense(10, activation='softmax'))\n",
228
+ "\n",
229
+ " cnn1.compile(loss=keras.losses.categorical_crossentropy,\n",
230
+ " optimizer=keras.optimizers.Adam(),\n",
231
+ " metrics=['accuracy'])\n",
232
+ " return cnn1\n",
233
+ "\n",
234
+ "# plot diagnostic learning curves\n",
235
+ "def summarize_diagnostics(history):\n",
236
+ " # plot loss\n",
237
+ " pyplot.subplot(211)\n",
238
+ " pyplot.title('Cross Entropy Loss')\n",
239
+ " pyplot.plot(history.history['loss'], color='blue', label='train')\n",
240
+ " pyplot.plot(history.history['val_loss'], color='orange', label='test')\n",
241
+ " # plot accuracy\n",
242
+ " pyplot.subplot(212)\n",
243
+ " pyplot.title('Classification Accuracy')\n",
244
+ " pyplot.plot(history.history['accuracy'], color='blue', label='train')\n",
245
+ " pyplot.plot(history.history['val_accuracy'], color='orange', label='test')\n",
246
+ " # save plot to file\n",
247
+ " filename = sys.argv[0].split('/')[-1]\n",
248
+ " pyplot.savefig(filename + '_plot.png')\n",
249
+ " pyplot.close()\n",
250
+ " \n",
251
+ "# run the test harness for evaluating a model\n",
252
+ "def run_test_harness():\n",
253
+ " # define model\n",
254
+ " model = define_model()\n",
255
+ " # create data generator\n",
256
+ " datagen = ImageDataGenerator(rescale=1.0/255.0)\n",
257
+ " # prepare iterators\n",
258
+ " train_it = datagen.flow_from_directory('dataset/train/',\n",
259
+ " class_mode='binary', batch_size=64, target_size=(150, 150))\n",
260
+ " test_it = datagen.flow_from_directory('dataset/test/',\n",
261
+ " class_mode='binary', batch_size=64, target_size=(150, 150))\n",
262
+ " # fit model\n",
263
+ " history = model.fit(train_it, steps_per_epoch=len(train_it),\n",
264
+ " validation_data=test_it, validation_steps=len(test_it), epochs=20, verbose=0)\n",
265
+ " # evaluate model\n",
266
+ " _, acc = model.evaluate_generator(test_it, steps=len(test_it), verbose=0)\n",
267
+ " print('> %.3f' % (acc * 100.0))\n",
268
+ " # learning curves\n",
269
+ " summarize_diagnostics(history)\n",
270
+ " \n",
271
+ "# entry point, run the test harness\n",
272
+ "run_test_harness()\n"
273
+ ]
274
  },
275
  {
276
  "cell_type": "code",
dataset/test/bench/bench_0.jpg ADDED

Git LFS Details

  • SHA256: 4b0b791a39a2df1a5294f3d54c1fdddb643d3b51c73d9b3537b87e3fd22474f6
  • Pointer size: 130 Bytes
  • Size of remote file: 29.8 kB
dataset/test/bench/bench_105.jpg ADDED

Git LFS Details

  • SHA256: a39ca10295973fb7a71fc7710728ee0f8baa6519fafc017cd382c1ef79e6d20c
  • Pointer size: 131 Bytes
  • Size of remote file: 415 kB
dataset/test/bench/bench_106.jpg ADDED

Git LFS Details

  • SHA256: e9db12c24ae45319e10bf7700e2b15b5aa5a32b613753ebf57fc7f975b78d984
  • Pointer size: 130 Bytes
  • Size of remote file: 31 kB
dataset/test/bench/bench_11.jpg ADDED

Git LFS Details

  • SHA256: d9fe27cf53b4be2fd679193386fa885da390d393b09747ad0930b848f085b382
  • Pointer size: 130 Bytes
  • Size of remote file: 14.4 kB
dataset/test/bench/bench_115.jpg ADDED

Git LFS Details

  • SHA256: 0bd6b764ff6a06853290a52b5e9a9b5b77d07d2d70894ba8f38d9db25bba238e
  • Pointer size: 130 Bytes
  • Size of remote file: 82.8 kB
dataset/test/bench/bench_116.jpg ADDED

Git LFS Details

  • SHA256: 1c63a49b1fb5e5e0240a98ae07ec407f7915478b9429831247a48723f54e3a59
  • Pointer size: 131 Bytes
  • Size of remote file: 106 kB
dataset/test/bench/bench_121.jpg ADDED

Git LFS Details

  • SHA256: a0c9bc41f54efe31077caa956ce9c6a7a2703c688736acc708a54ed2bf1e58c5
  • Pointer size: 131 Bytes
  • Size of remote file: 207 kB
dataset/test/bench/bench_13.jpg ADDED

Git LFS Details

  • SHA256: fb16051bb18f1b21da60dcbb2868d7806f5dd2070ff8e7bc8dd371927300fd9c
  • Pointer size: 130 Bytes
  • Size of remote file: 17.4 kB
dataset/test/bench/bench_133.jpg ADDED

Git LFS Details

  • SHA256: 12efcfa7d19d9ca787e12a7383a7e5eab7987bf28502169969c309fba34b0faa
  • Pointer size: 130 Bytes
  • Size of remote file: 10.1 kB
dataset/test/bench/bench_136.jpg ADDED

Git LFS Details

  • SHA256: 95e39a0a7f10bd7161470f9c12e3e79a654c64e16773e566db8c26482cd33b03
  • Pointer size: 130 Bytes
  • Size of remote file: 17 kB
dataset/test/bench/bench_149.jpg ADDED

Git LFS Details

  • SHA256: 9eb6b5867d372a7bca74cd7a6e829603edddae2a90102d6a9c79a7a0f3ee873a
  • Pointer size: 131 Bytes
  • Size of remote file: 103 kB
dataset/test/bench/bench_152.jpg ADDED

Git LFS Details

  • SHA256: 3a7fb56f4b6a9c027496904dd261ebd7bac99415e00ce2fa5c8e34043bb71d77
  • Pointer size: 130 Bytes
  • Size of remote file: 42.4 kB
dataset/test/bench/bench_23.jpg ADDED

Git LFS Details

  • SHA256: 8a71c9f3ae849fa4f843a717388c4a9d7bbed6fa4295594bec5291688f1c620d
  • Pointer size: 132 Bytes
  • Size of remote file: 1.79 MB
dataset/test/bench/bench_24.jpg ADDED

Git LFS Details

  • SHA256: cfb9439e02d978addff6f04e1f7d18c77e6c98b45858a107d4069a0d493c3e0e
  • Pointer size: 130 Bytes
  • Size of remote file: 90.8 kB
dataset/test/bench/bench_29.jpg ADDED

Git LFS Details

  • SHA256: eac795a0498472f96daeb8ae6c4335647a49c6ae6fb356a9452d3378440c4d28
  • Pointer size: 130 Bytes
  • Size of remote file: 48.6 kB
dataset/test/bench/bench_41.jpg ADDED

Git LFS Details

  • SHA256: 8a71c9f3ae849fa4f843a717388c4a9d7bbed6fa4295594bec5291688f1c620d
  • Pointer size: 132 Bytes
  • Size of remote file: 1.79 MB
dataset/test/bench/bench_5.jpg ADDED

Git LFS Details

  • SHA256: 2c9ec02084bddb414be276130dbc693b9cea89352131a4bbc2e56c36b9481287
  • Pointer size: 130 Bytes
  • Size of remote file: 59.5 kB
dataset/test/bench/bench_52.jpg ADDED

Git LFS Details

  • SHA256: 231ff0572e2400418281208cadb49a68d4534e3db8d5c25d24b0df257aca963e
  • Pointer size: 130 Bytes
  • Size of remote file: 20.8 kB
dataset/test/bench/bench_60.jpg ADDED

Git LFS Details

  • SHA256: 2b27599f255873d5e724ac5041c9da0b431b43266a99758ec92fc9d3087ccf88
  • Pointer size: 129 Bytes
  • Size of remote file: 8.12 kB
dataset/test/bench/bench_62.jpg ADDED

Git LFS Details

  • SHA256: c1c06194cf660e3f8b5c8e09b39c25ae8c1a06f4c74385fb15c47ebb6e287228
  • Pointer size: 130 Bytes
  • Size of remote file: 21.2 kB
dataset/test/bench/bench_70.jpg ADDED

Git LFS Details

  • SHA256: cb6fe432697a60bc7429492a775d3b180b177567f34757b53e91eaf21d204e4f
  • Pointer size: 130 Bytes
  • Size of remote file: 35.9 kB
dataset/test/bench/bench_71.jpg ADDED

Git LFS Details

  • SHA256: 96cde83dd6c63f15baac906c71fe483983baf8efc8f58b201237bcba8066a719
  • Pointer size: 131 Bytes
  • Size of remote file: 153 kB
dataset/test/bench/bench_72.jpg ADDED

Git LFS Details

  • SHA256: 37a82c9981ebc1010eecc37ae5f9b5d077166be6bc9f925e518ba32f814d5c21
  • Pointer size: 130 Bytes
  • Size of remote file: 80 kB
dataset/test/bench/bench_75.jpg ADDED

Git LFS Details

  • SHA256: afecbf1711e529dc2516cebd977cc08af7b2bce1157eaedd633cdc5e9af8ee04
  • Pointer size: 130 Bytes
  • Size of remote file: 25.1 kB
dataset/test/bench/bench_78.jpg ADDED

Git LFS Details

  • SHA256: 36550de9438a729894f38308a6a22d50d4e64422c0527e24c0b6fbab25f0b2c4
  • Pointer size: 130 Bytes
  • Size of remote file: 77.3 kB
dataset/test/bench/bench_79.jpg ADDED

Git LFS Details

  • SHA256: 25a1da893e1a979e635c0900f951c1ed875a7b25ce608b6a6f2078eb33ec5138
  • Pointer size: 130 Bytes
  • Size of remote file: 76.2 kB
dataset/test/bench/bench_80.jpg ADDED

Git LFS Details

  • SHA256: 58d10712c5a57ffa5ac2cc52ab2a96da616d739bdefea0db7808c22d79536ca7
  • Pointer size: 130 Bytes
  • Size of remote file: 46.8 kB
dataset/test/bench/bench_86.jpg ADDED

Git LFS Details

  • SHA256: bb9aaca1e09d5ce4b1069c0be1701fa5d200a38b93b43f6e66a50a6303508f9c
  • Pointer size: 130 Bytes
  • Size of remote file: 28.7 kB
dataset/test/bench/bench_89.jpg ADDED

Git LFS Details

  • SHA256: ba6f68162768c507d0e49f39e06cd5cfd22ceaf7001dffd495d423e22d0a88a4
  • Pointer size: 130 Bytes
  • Size of remote file: 34.1 kB
dataset/test/bench/bench_9.jpg ADDED

Git LFS Details

  • SHA256: 1a90291798b36ce34216980ddea08e208aaef3f6193bd72ccbdc8edc81a8c65c
  • Pointer size: 130 Bytes
  • Size of remote file: 57.1 kB
dataset/test/bench/bench_95.jpg ADDED

Git LFS Details

  • SHA256: cb6fe432697a60bc7429492a775d3b180b177567f34757b53e91eaf21d204e4f
  • Pointer size: 130 Bytes
  • Size of remote file: 35.9 kB
dataset/test/bench/bench_96.jpg ADDED

Git LFS Details

  • SHA256: 50b3565007af6738da991b5cefbce80dbb9e0e472968084755bc3e046c2d18de
  • Pointer size: 131 Bytes
  • Size of remote file: 355 kB
dataset/test/bench/bench_97.jpg ADDED

Git LFS Details

  • SHA256: 166560f6cad9a1f16c887cbfb48366c3a366f99796ee4dc9dc6e361943ce8a87
  • Pointer size: 131 Bytes
  • Size of remote file: 182 kB
dataset/test/bench/bench_99.jpg ADDED

Git LFS Details

  • SHA256: 752ff0a652d1a5da0079263d31c3366a1c8807996cc59b2efaad0871fd09a8a7
  • Pointer size: 130 Bytes
  • Size of remote file: 64.2 kB
dataset/test/deadlift/deadlift_11.jpg ADDED

Git LFS Details

  • SHA256: fe31fcd348c39642eb555f7bb80fd1b0f40f925be3707efa4df2dea878b4d365
  • Pointer size: 130 Bytes
  • Size of remote file: 45 kB
dataset/test/deadlift/deadlift_12.jpg ADDED

Git LFS Details

  • SHA256: f0249fd6b97cb70cbf7e7d8549092c50210798b6562760c9a131136e0aacb168
  • Pointer size: 131 Bytes
  • Size of remote file: 101 kB
dataset/test/deadlift/deadlift_121.jpg ADDED

Git LFS Details

  • SHA256: bcbba5b6f9e88fa8a8445fc4355ace15abb4a1388b748d8ee95c3242ea9ee812
  • Pointer size: 130 Bytes
  • Size of remote file: 71.5 kB
dataset/test/deadlift/deadlift_125.jpg ADDED

Git LFS Details

  • SHA256: 92017be321c8d2e9b75dcfb9c3dc6d9df7c8f18a2c94a926acd46b6524452389
  • Pointer size: 130 Bytes
  • Size of remote file: 22.8 kB
dataset/test/deadlift/deadlift_134.jpg ADDED

Git LFS Details

  • SHA256: c83e5fb2fe5e4c6a1c0e538bcd80751d40cf71b78bd3751c770b87719964260c
  • Pointer size: 130 Bytes
  • Size of remote file: 47.5 kB
dataset/test/deadlift/deadlift_135.jpg ADDED

Git LFS Details

  • SHA256: e3173c7959fce95c73c67f92dfdb2c6f49b6adb4f46460e6f75c26090a06d789
  • Pointer size: 131 Bytes
  • Size of remote file: 128 kB
dataset/test/deadlift/deadlift_141.jpg ADDED

Git LFS Details

  • SHA256: e4ea5200e1ee75d4e71909a5f1608ada107d5f27d30acd86c743cedadfc4dba8
  • Pointer size: 131 Bytes
  • Size of remote file: 103 kB
dataset/test/deadlift/deadlift_157.jpg ADDED

Git LFS Details

  • SHA256: 74dec4d8058fdbbd709f06320f3f5244d1e946eb1373b57568093622b1490c02
  • Pointer size: 130 Bytes
  • Size of remote file: 19.7 kB
dataset/test/deadlift/deadlift_158.jpg ADDED

Git LFS Details

  • SHA256: 81193560d167f6d67c746ea50716a469c7c785c3929f7cdcb1821aa3feb381e1
  • Pointer size: 131 Bytes
  • Size of remote file: 157 kB
dataset/test/deadlift/deadlift_17.jpg ADDED

Git LFS Details

  • SHA256: ac0d4ea1a5ae4669c9b71376c654853306c775ad8fec54db807e8ec1224ecd81
  • Pointer size: 129 Bytes
  • Size of remote file: 9.63 kB
dataset/test/deadlift/deadlift_171.jpg ADDED

Git LFS Details

  • SHA256: 809ba1e8bda3dabe0d5240de4714ccc5cd7d17d8e3da0d4f0bf77687ed18a12c
  • Pointer size: 130 Bytes
  • Size of remote file: 40.7 kB
dataset/test/deadlift/deadlift_172.jpg ADDED

Git LFS Details

  • SHA256: 54971e5bd78a2b761e0bdf5a2a0e940e2cbe6504487e2c630f6d610aa9b10508
  • Pointer size: 130 Bytes
  • Size of remote file: 33.6 kB
dataset/test/deadlift/deadlift_173.jpg ADDED

Git LFS Details

  • SHA256: 4757d82a1a7174323da141140bea7c5b0d6856cb6ea78c919ce2080b3c086d25
  • Pointer size: 132 Bytes
  • Size of remote file: 2.75 MB
dataset/test/deadlift/deadlift_182.jpg ADDED

Git LFS Details

  • SHA256: 59a8a1ad607b78ac1745d7d9f856568c0e99efbe0bde5954e11173b65dca8b62
  • Pointer size: 130 Bytes
  • Size of remote file: 95.3 kB
dataset/test/deadlift/deadlift_190.jpg ADDED

Git LFS Details

  • SHA256: 3fdb73e7ca43e90b52e91485a09951e522bb309f0d70a75e4363e8a203a328d4
  • Pointer size: 131 Bytes
  • Size of remote file: 169 kB