Ben Wolfson committed on
Commit 067afc1 · 1 Parent(s): f335d81

more updates

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full set.
CNN.ipynb CHANGED
@@ -16,6 +16,28 @@
16
  " os.rename(src, string)"
17
  ]
18
  },
19
  {
20
  "cell_type": "markdown",
21
  "metadata": {},
@@ -25,17 +47,11 @@
25
  },
26
  {
27
  "cell_type": "code",
28
- "execution_count": 37,
29
- "metadata": {},
30
- "outputs": [
31
- {
32
- "name": "stdout",
33
- "output_type": "stream",
34
- "text": [
35
- "(549,) ()\n"
36
- ]
37
- }
38
- ],
39
  "source": [
40
  "from os import listdir\n",
41
  "from numpy import asarray\n",
@@ -44,46 +60,93 @@
44
  "from keras.preprocessing.image import img_to_array\n",
45
  "\n",
46
  "folder = \"train/\"\n",
47
- "photos, labels = list(), list()\n",
 
 
48
  "\n",
49
  "for file in listdir(folder):\n",
50
- " output = 0.0\n",
51
- " if file.startswith(\"squat\"):\n",
52
- " output = 1.0\n",
53
- " if file.startswith(\"deadlift\"):\n",
54
- " output = 2.0\n",
55
  " photo = load_img(folder + file, target_size=(150,150))\n",
56
- " photo = img_to_array\n",
57
- " \n",
58
- " photos.append(photo)\n",
59
  " labels.append(output)\n",
60
- "photos = asarray(photos)\n",
61
- "labels = asarray(output)\n",
62
- "print(photos.shape, labels.shape)\n",
63
  "\n",
64
- "save(\"exercise_photos.npy\", photos)\n",
65
- "save(\"exercise_labels.npy\", photos)"
 
 
66
  ]
67
  },
68
  {
69
  "cell_type": "code",
70
- "execution_count": 39,
71
  "metadata": {},
72
  "outputs": [
73
  {
74
  "name": "stdout",
75
  "output_type": "stream",
76
  "text": [
77
- "(549,) (549,)\n"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
78
  ]
79
  }
80
  ],
81
  "source": [
82
- "from numpy import load\n",
83
- "photos = load(\"exercise_photos.npy\",allow_pickle=True)\n",
84
- "labels = load(\"exercise_labels.npy\",allow_pickle=True)\n",
85
- "\n",
86
- "print(photos.shape, labels.shape)"
87
  ]
88
  },
89
  {
@@ -133,37 +196,30 @@
133
  },
134
  {
135
  "cell_type": "code",
136
- "execution_count": 70,
137
  "metadata": {},
138
  "outputs": [
139
  {
140
- "name": "stdout",
141
- "output_type": "stream",
142
- "text": [
143
- "Found 448 images belonging to 3 classes.\n",
144
- "1\n",
145
- "Found 101 images belonging to 3 classes.\n"
146
- ]
147
- },
148
- {
149
- "ename": "InvalidArgumentError",
150
- "evalue": " Matrix size-incompatible: In[0]: [128,3], In[1]: [128,1]\n\t [[node gradient_tape/sequential_21/dense_41/MatMul (defined at <ipython-input-70-ca63fab2532d>:115) ]] [Op:__inference_train_function_17586]\n\nFunction call stack:\ntrain_function\n",
151
  "output_type": "error",
152
  "traceback": [
153
  "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
154
- "\u001b[1;31mInvalidArgumentError\u001b[0m Traceback (most recent call last)",
155
- "\u001b[1;32m<ipython-input-70-ca63fab2532d>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 122\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 123\u001b[0m \u001b[1;31m# entry point, run the test harness\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 124\u001b[1;33m \u001b[0mrun_test_harness\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
156
- "\u001b[1;32m<ipython-input-70-ca63fab2532d>\u001b[0m in \u001b[0;36mrun_test_harness\u001b[1;34m()\u001b[0m\n\u001b[0;32m 113\u001b[0m class_mode='categorical', batch_size=128, target_size=(150, 150))\n\u001b[0;32m 114\u001b[0m \u001b[1;31m# fit model\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 115\u001b[1;33m history = model.fit(train_it, steps_per_epoch=len(train_it),\n\u001b[0m\u001b[0;32m 116\u001b[0m validation_data=test_it, validation_steps=len(test_it), epochs=20, verbose=0)\n\u001b[0;32m 117\u001b[0m \u001b[1;31m# evaluate model\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
157
  "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36m_method_wrapper\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 106\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0m_method_wrapper\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 107\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_in_multi_worker_mode\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m \u001b[1;31m# pylint: disable=protected-access\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 108\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mmethod\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 109\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 110\u001b[0m \u001b[1;31m# Running inside `run_distribute_coordinator` already.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
158
  "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[0;32m 1096\u001b[0m batch_size=batch_size):\n\u001b[0;32m 1097\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mon_train_batch_begin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1098\u001b[1;33m \u001b[0mtmp_logs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtrain_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1099\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1100\u001b[0m \u001b[0mcontext\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
159
  "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 778\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 779\u001b[0m \u001b[0mcompiler\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m\"nonXla\"\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 780\u001b[1;33m \u001b[0mresult\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 781\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 782\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_get_tracing_count\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
160
- "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m_call\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 838\u001b[0m \u001b[1;31m# Lifting succeeded, so variables are initialized and we can run the\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 839\u001b[0m \u001b[1;31m# stateless function.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 840\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_stateless_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 841\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 842\u001b[0m \u001b[0mcanon_args\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mcanon_kwds\u001b[0m \u001b[1;33m=\u001b[0m\u001b[0;31m \u001b[0m\u001b[0;31m\\\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
161
- "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 2827\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_lock\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2828\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_maybe_define_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 2829\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_filtered_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;31m# pylint: disable=protected-access\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2830\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2831\u001b[0m \u001b[1;33m@\u001b[0m\u001b[0mproperty\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
162
- "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_filtered_call\u001b[1;34m(self, args, kwargs, cancellation_manager)\u001b[0m\n\u001b[0;32m 1841\u001b[0m \u001b[0;31m`\u001b[0m\u001b[0margs\u001b[0m\u001b[0;31m`\u001b[0m \u001b[1;32mand\u001b[0m\u001b[0;31m \u001b[0m\u001b[0;31m`\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;31m`\u001b[0m\u001b[1;33m.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1842\u001b[0m \"\"\"\n\u001b[1;32m-> 1843\u001b[1;33m return self._call_flat(\n\u001b[0m\u001b[0;32m 1844\u001b[0m [t for t in nest.flatten((args, kwargs), expand_composites=True)\n\u001b[0;32m 1845\u001b[0m if isinstance(t, (ops.Tensor,\n",
163
- "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_call_flat\u001b[1;34m(self, args, captured_inputs, cancellation_manager)\u001b[0m\n\u001b[0;32m 1921\u001b[0m and executing_eagerly):\n\u001b[0;32m 1922\u001b[0m \u001b[1;31m# No tape is watching; skip to running the function.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1923\u001b[1;33m return self._build_call_outputs(self._inference_function.call(\n\u001b[0m\u001b[0;32m 1924\u001b[0m ctx, args, cancellation_manager=cancellation_manager))\n\u001b[0;32m 1925\u001b[0m forward_backward = self._select_forward_and_backward_functions(\n",
164
- "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36mcall\u001b[1;34m(self, ctx, args, cancellation_manager)\u001b[0m\n\u001b[0;32m 543\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0m_InterpolateFunctionError\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 544\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mcancellation_manager\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 545\u001b[1;33m outputs = execute.execute(\n\u001b[0m\u001b[0;32m 546\u001b[0m \u001b[0mstr\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msignature\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mname\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 547\u001b[0m \u001b[0mnum_outputs\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_num_outputs\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
165
- "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\execute.py\u001b[0m in \u001b[0;36mquick_execute\u001b[1;34m(op_name, num_outputs, inputs, attrs, ctx, name)\u001b[0m\n\u001b[0;32m 57\u001b[0m \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 58\u001b[0m \u001b[0mctx\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mensure_initialized\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 59\u001b[1;33m tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,\n\u001b[0m\u001b[0;32m 60\u001b[0m inputs, attrs, num_outputs)\n\u001b[0;32m 61\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mcore\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_NotOkStatusException\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
166
- "\u001b[1;31mInvalidArgumentError\u001b[0m: Matrix size-incompatible: In[0]: [128,3], In[1]: [128,1]\n\t [[node gradient_tape/sequential_21/dense_41/MatMul (defined at <ipython-input-70-ca63fab2532d>:115) ]] [Op:__inference_train_function_17586]\n\nFunction call stack:\ntrain_function\n"
 
 
167
  ]
168
  }
169
  ],
@@ -184,8 +240,10 @@
184
  "from keras.models import Sequential\n",
185
  "from keras.layers import Dense, Dropout, Flatten\n",
186
  "from keras.layers import Conv2D, MaxPooling2D\n",
 
187
  "\n",
188
  " \n",
 
189
  "# one block VGG\n",
190
  "\"\"\"\n",
191
  "def define_model():\n",
@@ -225,7 +283,7 @@
225
  " model.add(Dropout(0.2))\n",
226
  " model.add(Flatten())\n",
227
  " model.add(Dense(128, activation='relu'))\n",
228
- " model.add(Dense(1, activation='softmax'))\n",
229
  " # compile model\n",
230
  " #opt = SGD(lr=0.001, momentum=0.9)\n",
231
  " model.compile(optimizer=keras.optimizers.Adam(), loss='categorical_crossentropy', metrics=['accuracy'])\n",
@@ -268,7 +326,22 @@
268
  " filename = sys.argv[0].split('/')[-1]\n",
269
  " pyplot.savefig(filename + '_plot.png')\n",
270
  " pyplot.close()\n",
271
- " \n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
272
  "# run the test harness for evaluating a model\n",
273
  "def run_test_harness():\n",
274
  " # define model\n",
@@ -276,10 +349,12 @@
276
  " # create data generator\n",
277
  " datagen = ImageDataGenerator(rescale=1.0/255.0)\n",
278
  " # prepare iterators\n",
279
- " train_it = datagen.flow_from_directory('dataset/train/',\n",
280
- " class_mode='categorical', batch_size=128, target_size=(150, 150))\n",
281
- " test_it = datagen.flow_from_directory('dataset/test/',\n",
282
- " class_mode='categorical', batch_size=128, target_size=(150, 150))\n",
 
 
283
  " # fit model\n",
284
  " history = model.fit(train_it, steps_per_epoch=len(train_it),\n",
285
  " validation_data=test_it, validation_steps=len(test_it), epochs=20, verbose=0)\n",
 
16
  " os.rename(src, string)"
17
  ]
18
  },
19
+ {
20
+ "cell_type": "code",
21
+ "execution_count": 97,
22
+ "metadata": {},
23
+ "outputs": [
24
+ {
25
+ "ename": "SyntaxError",
26
+ "evalue": "unexpected EOF while parsing (<ipython-input-97-439ef8adfd37>, line 5)",
27
+ "output_type": "error",
28
+ "traceback": [
29
+ "\u001b[1;36m File \u001b[1;32m\"<ipython-input-97-439ef8adfd37>\"\u001b[1;36m, line \u001b[1;32m5\u001b[0m\n\u001b[1;33m \u001b[0m\n\u001b[1;37m ^\u001b[0m\n\u001b[1;31mSyntaxError\u001b[0m\u001b[1;31m:\u001b[0m unexpected EOF while parsing\n"
30
+ ]
31
+ }
32
+ ],
33
+ "source": [
34
+ "import os\n",
35
+ "from os import path\n",
36
+ "\n",
37
+ "for count in enumerate(os.listdir(\"dataset/test/deadlift\")):\n",
38
+ " "
39
+ ]
40
+ },
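The new cell above was committed unfinished, which is why it raises a SyntaxError (the for loop has an empty body). A minimal sketch of what such a loop might look like, assuming the intent is simply to walk the test/deadlift files with an index (the directory path and the print are assumptions, not part of the commit):

    import os

    folder = "dataset/test/deadlift"
    # enumerate() yields (index, filename) pairs, so unpack both
    for count, filename in enumerate(os.listdir(folder)):
        print(count, filename)  # inspect, rename, or count files here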
41
  {
42
  "cell_type": "markdown",
43
  "metadata": {},
 
47
  },
48
  {
49
  "cell_type": "code",
50
+ "execution_count": 8,
51
+ "metadata": {
52
+ "scrolled": false
53
+ },
54
+ "outputs": [],
 
 
 
 
 
 
55
  "source": [
56
  "from os import listdir\n",
57
  "from numpy import asarray\n",
 
60
  "from keras.preprocessing.image import img_to_array\n",
61
  "\n",
62
  "folder = \"train/\"\n",
63
+ "\n",
64
+ "photos = []\n",
65
+ "labels = []\n",
66
  "\n",
67
  "for file in listdir(folder):\n",
68
+ " output = 0\n",
69
+ " if \"squat\" in file:\n",
70
+ " output = 1\n",
71
+ " if \"deadlift\" in file:\n",
72
+ " output = 2\n",
73
  " photo = load_img(folder + file, target_size=(150,150))\n",
74
+ " photo = img_to_array(photo)\n",
 
 
75
  " labels.append(output)\n",
76
+ " photos.append(photo)\n",
 
 
77
  "\n",
78
+ "photos = asarray(photos)\n",
79
+ "labels = asarray(labels)\n",
80
+ "save(\"train_photos.npy\", photos)\n",
81
+ "save(\"train_labels.npy\", labels)"
82
  ]
83
  },
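Note that the labels written to train_labels.npy above are plain integers (0, 1, 2), while the model later in the notebook is compiled with categorical_crossentropy, which expects one-hot targets; this mismatch is exactly what the "Shapes (None, 1) and (None, 3) are incompatible" error further down complains about. A minimal sketch of one-hot encoding the saved labels, assuming the .npy file produced by this cell:

    import numpy as np
    from keras.utils import to_categorical

    labels = np.load("train_labels.npy", allow_pickle=True)
    labels_onehot = to_categorical(labels, num_classes=3)  # shape (N, 3)
    print(labels_onehot.shape)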
84
  {
85
  "cell_type": "code",
86
+ "execution_count": 11,
87
  "metadata": {},
88
  "outputs": [
89
  {
90
  "name": "stdout",
91
  "output_type": "stream",
92
  "text": [
93
+ "[[[255. 255. 255.]\n",
94
+ " [255. 255. 255.]\n",
95
+ " [255. 255. 255.]\n",
96
+ " ...\n",
97
+ " [255. 255. 255.]\n",
98
+ " [255. 255. 255.]\n",
99
+ " [255. 255. 255.]]\n",
100
+ "\n",
101
+ " [[255. 255. 255.]\n",
102
+ " [255. 255. 255.]\n",
103
+ " [255. 255. 255.]\n",
104
+ " ...\n",
105
+ " [255. 255. 255.]\n",
106
+ " [255. 255. 255.]\n",
107
+ " [255. 255. 255.]]\n",
108
+ "\n",
109
+ " [[255. 255. 255.]\n",
110
+ " [255. 255. 255.]\n",
111
+ " [255. 255. 255.]\n",
112
+ " ...\n",
113
+ " [255. 255. 255.]\n",
114
+ " [255. 255. 255.]\n",
115
+ " [255. 255. 255.]]\n",
116
+ "\n",
117
+ " ...\n",
118
+ "\n",
119
+ " [[255. 255. 255.]\n",
120
+ " [255. 255. 255.]\n",
121
+ " [255. 255. 255.]\n",
122
+ " ...\n",
123
+ " [255. 255. 255.]\n",
124
+ " [255. 255. 255.]\n",
125
+ " [255. 255. 255.]]\n",
126
+ "\n",
127
+ " [[255. 255. 255.]\n",
128
+ " [255. 255. 255.]\n",
129
+ " [255. 255. 255.]\n",
130
+ " ...\n",
131
+ " [255. 255. 255.]\n",
132
+ " [255. 255. 255.]\n",
133
+ " [255. 255. 255.]]\n",
134
+ "\n",
135
+ " [[255. 255. 255.]\n",
136
+ " [255. 255. 255.]\n",
137
+ " [255. 255. 255.]\n",
138
+ " ...\n",
139
+ " [255. 255. 255.]\n",
140
+ " [255. 255. 255.]\n",
141
+ " [255. 255. 255.]]]\n"
142
  ]
143
  }
144
  ],
145
  "source": [
146
+ "import numpy as np\n",
147
+ "photos = np.load(\"test_photos.npy\",allow_pickle=True)\n",
148
+ "labels = np.load(\"test_labels.npy\",allow_pickle=True)\n",
149
+ "print(np.array(photos[0]))"
 
150
  ]
151
  },
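Printing a raw pixel array is a weak sanity check; shapes and the label distribution say more. A minimal sketch, assuming the same test_photos.npy / test_labels.npy files loaded above:

    import numpy as np

    photos = np.load("test_photos.npy", allow_pickle=True)
    labels = np.load("test_labels.npy", allow_pickle=True)
    print(photos.shape, labels.shape)             # e.g. (N, 150, 150, 3) (N,)
    print(np.unique(labels, return_counts=True))  # images per class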
152
  {
 
196
  },
197
  {
198
  "cell_type": "code",
199
+ "execution_count": 15,
200
  "metadata": {},
201
  "outputs": [
202
  {
203
+ "ename": "ValueError",
204
+ "evalue": "in user code:\n\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:806 train_function *\n return step_function(self, iterator)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:796 step_function **\n outputs = model.distribute_strategy.run(run_step, args=(data,))\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:1211 run\n return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2585 call_for_each_replica\n return self._call_for_each_replica(fn, args, kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2945 _call_for_each_replica\n return fn(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:789 run_step **\n outputs = model.train_step(data)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:748 train_step\n loss = self.compiled_loss(\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\compile_utils.py:204 __call__\n loss_value = loss_obj(y_t, y_p, sample_weight=sw)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:149 __call__\n losses = ag_call(y_true, y_pred)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:253 call **\n return ag_fn(y_true, y_pred, **self._fn_kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:1535 categorical_crossentropy\n return K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\backend.py:4687 categorical_crossentropy\n target.shape.assert_is_compatible_with(output.shape)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\tensor_shape.py:1134 assert_is_compatible_with\n raise ValueError(\"Shapes %s and %s are incompatible\" % (self, other))\n\n ValueError: Shapes (None, 1) and (None, 3) are incompatible\n",
205
  "output_type": "error",
206
  "traceback": [
207
  "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
208
+ "\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
209
+ "\u001b[1;32m<ipython-input-15-883c4c3058a6>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 140\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 141\u001b[0m \u001b[1;31m# entry point, run the test harness\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 142\u001b[1;33m \u001b[0mrun_test_harness\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
210
+ "\u001b[1;32m<ipython-input-15-883c4c3058a6>\u001b[0m in \u001b[0;36mrun_test_harness\u001b[1;34m()\u001b[0m\n\u001b[0;32m 131\u001b[0m \u001b[0mtest_it\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mdatagen\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mflow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mtest_photos_\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mtest_labels\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 132\u001b[0m \u001b[1;31m# fit model\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 133\u001b[1;33m history = model.fit(train_it, steps_per_epoch=len(train_it),\n\u001b[0m\u001b[0;32m 134\u001b[0m validation_data=test_it, validation_steps=len(test_it), epochs=20, verbose=0)\n\u001b[0;32m 135\u001b[0m \u001b[1;31m# evaluate model\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
211
  "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36m_method_wrapper\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 106\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0m_method_wrapper\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 107\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_in_multi_worker_mode\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m \u001b[1;31m# pylint: disable=protected-access\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 108\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mmethod\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 109\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 110\u001b[0m \u001b[1;31m# Running inside `run_distribute_coordinator` already.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
212
  "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[0;32m 1096\u001b[0m batch_size=batch_size):\n\u001b[0;32m 1097\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mon_train_batch_begin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1098\u001b[1;33m \u001b[0mtmp_logs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtrain_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1099\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1100\u001b[0m \u001b[0mcontext\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
213
  "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 778\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 779\u001b[0m \u001b[0mcompiler\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m\"nonXla\"\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 780\u001b[1;33m \u001b[0mresult\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 781\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 782\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_get_tracing_count\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
214
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m_call\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 821\u001b[0m \u001b[1;31m# This is the first call of __call__, so we have to initialize.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 822\u001b[0m \u001b[0minitializers\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 823\u001b[1;33m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_initialize\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwds\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0madd_initializers_to\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0minitializers\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 824\u001b[0m \u001b[1;32mfinally\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 825\u001b[0m \u001b[1;31m# At this point we know that the initialization is complete (or less\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
215
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m_initialize\u001b[1;34m(self, args, kwds, add_initializers_to)\u001b[0m\n\u001b[0;32m 694\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_graph_deleter\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mFunctionDeleter\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_lifted_initializer_graph\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 695\u001b[0m self._concrete_stateful_fn = (\n\u001b[1;32m--> 696\u001b[1;33m self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access\n\u001b[0m\u001b[0;32m 697\u001b[0m *args, **kwds))\n\u001b[0;32m 698\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
216
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_get_concrete_function_internal_garbage_collected\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 2853\u001b[0m \u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2854\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_lock\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 2855\u001b[1;33m \u001b[0mgraph_function\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0m_\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0m_\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_maybe_define_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2856\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2857\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
217
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_maybe_define_function\u001b[1;34m(self, args, kwargs)\u001b[0m\n\u001b[0;32m 3211\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3212\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_function_cache\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmissed\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0madd\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mcall_context_key\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 3213\u001b[1;33m \u001b[0mgraph_function\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_create_graph_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 3214\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_function_cache\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mprimary\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mcache_key\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3215\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mgraph_function\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
218
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_create_graph_function\u001b[1;34m(self, args, kwargs, override_flat_arg_shapes)\u001b[0m\n\u001b[0;32m 3063\u001b[0m \u001b[0marg_names\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mbase_arg_names\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0mmissing_arg_names\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3064\u001b[0m graph_function = ConcreteFunction(\n\u001b[1;32m-> 3065\u001b[1;33m func_graph_module.func_graph_from_py_func(\n\u001b[0m\u001b[0;32m 3066\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_name\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3067\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_python_function\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
219
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\func_graph.py\u001b[0m in \u001b[0;36mfunc_graph_from_py_func\u001b[1;34m(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)\u001b[0m\n\u001b[0;32m 984\u001b[0m \u001b[0m_\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0moriginal_func\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtf_decorator\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0munwrap\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mpython_func\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 985\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 986\u001b[1;33m \u001b[0mfunc_outputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mpython_func\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0mfunc_args\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mfunc_kwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 987\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 988\u001b[0m \u001b[1;31m# invariant: `func_outputs` contains only Tensors, CompositeTensors,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
220
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36mwrapped_fn\u001b[1;34m(*args, **kwds)\u001b[0m\n\u001b[0;32m 598\u001b[0m \u001b[1;31m# __wrapped__ allows AutoGraph to swap in a converted function. We give\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 599\u001b[0m \u001b[1;31m# the function a weak reference to itself to avoid a reference cycle.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 600\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mweak_wrapped_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m__wrapped__\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 601\u001b[0m \u001b[0mweak_wrapped_fn\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mweakref\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mref\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mwrapped_fn\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 602\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
221
+ "\u001b[1;32mc:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\func_graph.py\u001b[0m in \u001b[0;36mwrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 971\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mException\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m:\u001b[0m \u001b[1;31m# pylint:disable=broad-except\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 972\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m\"ag_error_metadata\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 973\u001b[1;33m \u001b[1;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mag_error_metadata\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mto_exception\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 974\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 975\u001b[0m \u001b[1;32mraise\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
222
+ "\u001b[1;31mValueError\u001b[0m: in user code:\n\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:806 train_function *\n return step_function(self, iterator)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:796 step_function **\n outputs = model.distribute_strategy.run(run_step, args=(data,))\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:1211 run\n return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2585 call_for_each_replica\n return self._call_for_each_replica(fn, args, kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\distribute\\distribute_lib.py:2945 _call_for_each_replica\n return fn(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:789 run_step **\n outputs = model.train_step(data)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:748 train_step\n loss = self.compiled_loss(\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\compile_utils.py:204 __call__\n loss_value = loss_obj(y_t, y_p, sample_weight=sw)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:149 __call__\n losses = ag_call(y_true, y_pred)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:253 call **\n return ag_fn(y_true, y_pred, **self._fn_kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\losses.py:1535 categorical_crossentropy\n return K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py:201 wrapper\n return target(*args, **kwargs)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\keras\\backend.py:4687 categorical_crossentropy\n target.shape.assert_is_compatible_with(output.shape)\n c:\\python38-64\\lib\\site-packages\\tensorflow\\python\\framework\\tensor_shape.py:1134 assert_is_compatible_with\n raise ValueError(\"Shapes %s and %s are incompatible\" % (self, other))\n\n ValueError: Shapes (None, 1) and (None, 3) are incompatible\n"
223
  ]
224
  }
225
  ],
 
240
  "from keras.models import Sequential\n",
241
  "from keras.layers import Dense, Dropout, Flatten\n",
242
  "from keras.layers import Conv2D, MaxPooling2D\n",
243
+ "import numpy as np\n",
244
  "\n",
245
  " \n",
246
+ "\n",
247
  "# one block VGG\n",
248
  "\"\"\"\n",
249
  "def define_model():\n",
 
283
  " model.add(Dropout(0.2))\n",
284
  " model.add(Flatten())\n",
285
  " model.add(Dense(128, activation='relu'))\n",
286
+ " model.add(Dense(3, activation='softmax'))\n",
287
  " # compile model\n",
288
  " #opt = SGD(lr=0.001, momentum=0.9)\n",
289
  " model.compile(optimizer=keras.optimizers.Adam(), loss='categorical_crossentropy', metrics=['accuracy'])\n",
 
326
  " filename = sys.argv[0].split('/')[-1]\n",
327
  " pyplot.savefig(filename + '_plot.png')\n",
328
  " pyplot.close()\n",
329
+ "\n",
330
+ "import skimage.transform\n",
331
+ "new_shape = (200,200,3)\n",
332
+ "train_photos = np.load(\"train_photos.npy\",allow_pickle=True)\n",
333
+ "train_labels = np.load(\"train_labels.npy\",allow_pickle=True)\n",
334
+ "test_photos = np.load(\"test_photos.npy\",allow_pickle=True)\n",
335
+ "test_labels = np.load(\"test_labels.npy\",allow_pickle=True)\n",
336
+ "\n",
337
+ "train_photos_ = np.empty(shape=(train_photos.shape[0],)+new_shape)\n",
338
+ "for idx in range(train_photos.shape[0]):\n",
339
+ " \n",
340
+ " train_photos_[idx] = skimage.transform.resize(train_photos[idx], new_shape)\n",
341
+ "test_photos_ = np.empty(shape=(test_photos.shape[0],)+new_shape)\n",
342
+ "for idx in range(test_photos.shape[0]):\n",
343
+ " test_photos_[idx] = skimage.transform.resize(test_photos[idx], new_shape)\n",
344
+ " \n",
345
  "# run the test harness for evaluating a model\n",
346
  "def run_test_harness():\n",
347
  " # define model\n",
 
349
  " # create data generator\n",
350
  " datagen = ImageDataGenerator(rescale=1.0/255.0)\n",
351
  " # prepare iterators\n",
352
+ " #train_it = datagen.flow_from_directory('dataset/train/',\n",
353
+ " # class_mode='categorical', batch_size=128, target_size=(150, 150))\n",
354
+ " #test_it = datagen.flow_from_directory('dataset/test/',\n",
355
+ " # class_mode='categorical', batch_size=128, target_size=(150, 150))\n",
356
+ " train_it = datagen.flow(train_photos_, train_labels)\n",
357
+ " test_it = datagen.flow(test_photos_, test_labels)\n",
358
  " # fit model\n",
359
  " history = model.fit(train_it, steps_per_epoch=len(train_it),\n",
360
  " validation_data=test_it, validation_steps=len(test_it), epochs=20, verbose=0)\n",
test/bench_0.jpg ADDED

Git LFS Details

  • SHA256: 4b0b791a39a2df1a5294f3d54c1fdddb643d3b51c73d9b3537b87e3fd22474f6
  • Pointer size: 130 Bytes
  • Size of remote file: 29.8 kB
test/bench_105.jpg ADDED

Git LFS Details

  • SHA256: a39ca10295973fb7a71fc7710728ee0f8baa6519fafc017cd382c1ef79e6d20c
  • Pointer size: 131 Bytes
  • Size of remote file: 415 kB
test/bench_106.jpg ADDED

Git LFS Details

  • SHA256: e9db12c24ae45319e10bf7700e2b15b5aa5a32b613753ebf57fc7f975b78d984
  • Pointer size: 130 Bytes
  • Size of remote file: 31 kB
test/bench_11.jpg ADDED

Git LFS Details

  • SHA256: d9fe27cf53b4be2fd679193386fa885da390d393b09747ad0930b848f085b382
  • Pointer size: 130 Bytes
  • Size of remote file: 14.4 kB
test/bench_115.jpg ADDED

Git LFS Details

  • SHA256: 0bd6b764ff6a06853290a52b5e9a9b5b77d07d2d70894ba8f38d9db25bba238e
  • Pointer size: 130 Bytes
  • Size of remote file: 82.8 kB
test/bench_116.jpg ADDED

Git LFS Details

  • SHA256: 1c63a49b1fb5e5e0240a98ae07ec407f7915478b9429831247a48723f54e3a59
  • Pointer size: 131 Bytes
  • Size of remote file: 106 kB
test/bench_121.jpg ADDED

Git LFS Details

  • SHA256: a0c9bc41f54efe31077caa956ce9c6a7a2703c688736acc708a54ed2bf1e58c5
  • Pointer size: 131 Bytes
  • Size of remote file: 207 kB
test/bench_13.jpg ADDED

Git LFS Details

  • SHA256: fb16051bb18f1b21da60dcbb2868d7806f5dd2070ff8e7bc8dd371927300fd9c
  • Pointer size: 130 Bytes
  • Size of remote file: 17.4 kB
test/bench_133.jpg ADDED

Git LFS Details

  • SHA256: 12efcfa7d19d9ca787e12a7383a7e5eab7987bf28502169969c309fba34b0faa
  • Pointer size: 130 Bytes
  • Size of remote file: 10.1 kB
test/bench_136.jpg ADDED

Git LFS Details

  • SHA256: 95e39a0a7f10bd7161470f9c12e3e79a654c64e16773e566db8c26482cd33b03
  • Pointer size: 130 Bytes
  • Size of remote file: 17 kB
test/bench_149.jpg ADDED

Git LFS Details

  • SHA256: 9eb6b5867d372a7bca74cd7a6e829603edddae2a90102d6a9c79a7a0f3ee873a
  • Pointer size: 131 Bytes
  • Size of remote file: 103 kB
test/bench_152.jpg ADDED

Git LFS Details

  • SHA256: 3a7fb56f4b6a9c027496904dd261ebd7bac99415e00ce2fa5c8e34043bb71d77
  • Pointer size: 130 Bytes
  • Size of remote file: 42.4 kB
test/bench_23.jpg ADDED

Git LFS Details

  • SHA256: 8a71c9f3ae849fa4f843a717388c4a9d7bbed6fa4295594bec5291688f1c620d
  • Pointer size: 132 Bytes
  • Size of remote file: 1.79 MB
test/bench_24.jpg ADDED

Git LFS Details

  • SHA256: cfb9439e02d978addff6f04e1f7d18c77e6c98b45858a107d4069a0d493c3e0e
  • Pointer size: 130 Bytes
  • Size of remote file: 90.8 kB
test/bench_29.jpg ADDED

Git LFS Details

  • SHA256: eac795a0498472f96daeb8ae6c4335647a49c6ae6fb356a9452d3378440c4d28
  • Pointer size: 130 Bytes
  • Size of remote file: 48.6 kB
test/bench_41.jpg ADDED

Git LFS Details

  • SHA256: 8a71c9f3ae849fa4f843a717388c4a9d7bbed6fa4295594bec5291688f1c620d
  • Pointer size: 132 Bytes
  • Size of remote file: 1.79 MB
test/bench_5.jpg ADDED

Git LFS Details

  • SHA256: 2c9ec02084bddb414be276130dbc693b9cea89352131a4bbc2e56c36b9481287
  • Pointer size: 130 Bytes
  • Size of remote file: 59.5 kB
test/bench_52.jpg ADDED

Git LFS Details

  • SHA256: 231ff0572e2400418281208cadb49a68d4534e3db8d5c25d24b0df257aca963e
  • Pointer size: 130 Bytes
  • Size of remote file: 20.8 kB
test/bench_60.jpg ADDED

Git LFS Details

  • SHA256: 2b27599f255873d5e724ac5041c9da0b431b43266a99758ec92fc9d3087ccf88
  • Pointer size: 129 Bytes
  • Size of remote file: 8.12 kB
test/bench_62.jpg ADDED

Git LFS Details

  • SHA256: c1c06194cf660e3f8b5c8e09b39c25ae8c1a06f4c74385fb15c47ebb6e287228
  • Pointer size: 130 Bytes
  • Size of remote file: 21.2 kB
test/bench_70.jpg ADDED

Git LFS Details

  • SHA256: cb6fe432697a60bc7429492a775d3b180b177567f34757b53e91eaf21d204e4f
  • Pointer size: 130 Bytes
  • Size of remote file: 35.9 kB
test/bench_71.jpg ADDED

Git LFS Details

  • SHA256: 96cde83dd6c63f15baac906c71fe483983baf8efc8f58b201237bcba8066a719
  • Pointer size: 131 Bytes
  • Size of remote file: 153 kB
test/bench_72.jpg ADDED

Git LFS Details

  • SHA256: 37a82c9981ebc1010eecc37ae5f9b5d077166be6bc9f925e518ba32f814d5c21
  • Pointer size: 130 Bytes
  • Size of remote file: 80 kB
test/bench_75.jpg ADDED

Git LFS Details

  • SHA256: afecbf1711e529dc2516cebd977cc08af7b2bce1157eaedd633cdc5e9af8ee04
  • Pointer size: 130 Bytes
  • Size of remote file: 25.1 kB
test/bench_78.jpg ADDED

Git LFS Details

  • SHA256: 36550de9438a729894f38308a6a22d50d4e64422c0527e24c0b6fbab25f0b2c4
  • Pointer size: 130 Bytes
  • Size of remote file: 77.3 kB
test/bench_79.jpg ADDED

Git LFS Details

  • SHA256: 25a1da893e1a979e635c0900f951c1ed875a7b25ce608b6a6f2078eb33ec5138
  • Pointer size: 130 Bytes
  • Size of remote file: 76.2 kB
test/bench_80.jpg ADDED

Git LFS Details

  • SHA256: 58d10712c5a57ffa5ac2cc52ab2a96da616d739bdefea0db7808c22d79536ca7
  • Pointer size: 130 Bytes
  • Size of remote file: 46.8 kB
test/bench_86.jpg ADDED

Git LFS Details

  • SHA256: bb9aaca1e09d5ce4b1069c0be1701fa5d200a38b93b43f6e66a50a6303508f9c
  • Pointer size: 130 Bytes
  • Size of remote file: 28.7 kB
test/bench_89.jpg ADDED

Git LFS Details

  • SHA256: ba6f68162768c507d0e49f39e06cd5cfd22ceaf7001dffd495d423e22d0a88a4
  • Pointer size: 130 Bytes
  • Size of remote file: 34.1 kB
test/bench_9.jpg ADDED

Git LFS Details

  • SHA256: 1a90291798b36ce34216980ddea08e208aaef3f6193bd72ccbdc8edc81a8c65c
  • Pointer size: 130 Bytes
  • Size of remote file: 57.1 kB
test/bench_95.jpg ADDED

Git LFS Details

  • SHA256: cb6fe432697a60bc7429492a775d3b180b177567f34757b53e91eaf21d204e4f
  • Pointer size: 130 Bytes
  • Size of remote file: 35.9 kB
test/bench_96.jpg ADDED

Git LFS Details

  • SHA256: 50b3565007af6738da991b5cefbce80dbb9e0e472968084755bc3e046c2d18de
  • Pointer size: 131 Bytes
  • Size of remote file: 355 kB
test/bench_97.jpg ADDED

Git LFS Details

  • SHA256: 166560f6cad9a1f16c887cbfb48366c3a366f99796ee4dc9dc6e361943ce8a87
  • Pointer size: 131 Bytes
  • Size of remote file: 182 kB
test/bench_99.jpg ADDED

Git LFS Details

  • SHA256: 752ff0a652d1a5da0079263d31c3366a1c8807996cc59b2efaad0871fd09a8a7
  • Pointer size: 130 Bytes
  • Size of remote file: 64.2 kB
test/deadlift_11.jpg ADDED

Git LFS Details

  • SHA256: fe31fcd348c39642eb555f7bb80fd1b0f40f925be3707efa4df2dea878b4d365
  • Pointer size: 130 Bytes
  • Size of remote file: 45 kB
test/deadlift_12.jpg ADDED

Git LFS Details

  • SHA256: f0249fd6b97cb70cbf7e7d8549092c50210798b6562760c9a131136e0aacb168
  • Pointer size: 131 Bytes
  • Size of remote file: 101 kB
test/deadlift_121.jpg ADDED

Git LFS Details

  • SHA256: bcbba5b6f9e88fa8a8445fc4355ace15abb4a1388b748d8ee95c3242ea9ee812
  • Pointer size: 130 Bytes
  • Size of remote file: 71.5 kB
test/deadlift_125.jpg ADDED

Git LFS Details

  • SHA256: 92017be321c8d2e9b75dcfb9c3dc6d9df7c8f18a2c94a926acd46b6524452389
  • Pointer size: 130 Bytes
  • Size of remote file: 22.8 kB
test/deadlift_134.jpg ADDED

Git LFS Details

  • SHA256: c83e5fb2fe5e4c6a1c0e538bcd80751d40cf71b78bd3751c770b87719964260c
  • Pointer size: 130 Bytes
  • Size of remote file: 47.5 kB
test/deadlift_135.jpg ADDED

Git LFS Details

  • SHA256: e3173c7959fce95c73c67f92dfdb2c6f49b6adb4f46460e6f75c26090a06d789
  • Pointer size: 131 Bytes
  • Size of remote file: 128 kB
test/deadlift_141.jpg ADDED

Git LFS Details

  • SHA256: e4ea5200e1ee75d4e71909a5f1608ada107d5f27d30acd86c743cedadfc4dba8
  • Pointer size: 131 Bytes
  • Size of remote file: 103 kB
test/deadlift_157.jpg ADDED

Git LFS Details

  • SHA256: 74dec4d8058fdbbd709f06320f3f5244d1e946eb1373b57568093622b1490c02
  • Pointer size: 130 Bytes
  • Size of remote file: 19.7 kB
test/deadlift_158.jpg ADDED

Git LFS Details

  • SHA256: 81193560d167f6d67c746ea50716a469c7c785c3929f7cdcb1821aa3feb381e1
  • Pointer size: 131 Bytes
  • Size of remote file: 157 kB
test/deadlift_17.jpg ADDED

Git LFS Details

  • SHA256: ac0d4ea1a5ae4669c9b71376c654853306c775ad8fec54db807e8ec1224ecd81
  • Pointer size: 129 Bytes
  • Size of remote file: 9.63 kB
test/deadlift_171.jpg ADDED

Git LFS Details

  • SHA256: 809ba1e8bda3dabe0d5240de4714ccc5cd7d17d8e3da0d4f0bf77687ed18a12c
  • Pointer size: 130 Bytes
  • Size of remote file: 40.7 kB
test/deadlift_172.jpg ADDED

Git LFS Details

  • SHA256: 54971e5bd78a2b761e0bdf5a2a0e940e2cbe6504487e2c630f6d610aa9b10508
  • Pointer size: 130 Bytes
  • Size of remote file: 33.6 kB
test/deadlift_173.jpg ADDED

Git LFS Details

  • SHA256: 4757d82a1a7174323da141140bea7c5b0d6856cb6ea78c919ce2080b3c086d25
  • Pointer size: 132 Bytes
  • Size of remote file: 2.75 MB
test/deadlift_182.jpg ADDED

Git LFS Details

  • SHA256: 59a8a1ad607b78ac1745d7d9f856568c0e99efbe0bde5954e11173b65dca8b62
  • Pointer size: 130 Bytes
  • Size of remote file: 95.3 kB
test/deadlift_190.jpg ADDED

Git LFS Details

  • SHA256: 3fdb73e7ca43e90b52e91485a09951e522bb309f0d70a75e4363e8a203a328d4
  • Pointer size: 131 Bytes
  • Size of remote file: 169 kB