Upload 64 files
This view is limited to 50 files because it contains too many changes.
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_motion_xset/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_motion_xset/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_motion_xset/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_motion_xset/log.txt +665 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_xset/config.yaml +61 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_xset/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_xset/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_xset/log.txt +665 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_motion_xset/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_motion_xset/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_motion_xset/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_motion_xset/log.txt +665 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_xset/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_xset/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_xset/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_xset/log.txt +665 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_motion_xsub/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_motion_xsub/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_motion_xsub/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_motion_xsub/log.txt +746 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_xsub/config.yaml +61 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_xsub/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_xsub/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_xsub/log.txt +757 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_motion_xsub/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_motion_xsub/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_motion_xsub/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_motion_xsub/log.txt +746 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_xsub/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_xsub/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_xsub/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_xsub/log.txt +746 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/log.txt +626 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/log.txt +626 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_motion_xsub/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_motion_xsub/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_motion_xsub/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_motion_xsub/log.txt +626 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_xsub/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_xsub/decouple_gcn.py +235 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_xsub/eval_results/best_acc.pkl +3 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_xsub/log.txt +626 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xview/ntu_bone_motion_xview/config.yaml +59 -0
- ckpt/Others/DC-GCN+ADG/ntu60_xview/ntu_bone_motion_xview/decouple_gcn.py +235 -0
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_motion_xset/config.yaml
ADDED
@@ -0,0 +1,59 @@
Experiment_name: ntu120_bone_motion_xset
base_lr: 0.1
batch_size: 64
config: ./config/ntu120_xset/train_bone_motion.yaml
device:
- 2
- 3
eval_interval: 5
feeder: feeders.feeder.Feeder
groups: 8
ignore_weights: []
keep_rate: 0.9
log_interval: 100
model: model.decouple_gcn.Model
model_args:
  block_size: 41
  graph: graph.ntu_rgb_d.Graph
  graph_args:
    labeling_mode: spatial
  groups: 16
  num_class: 120
  num_person: 2
  num_point: 25
model_saved_name: ./save_models/ntu120_bone_motion_xset
nesterov: true
num_epoch: 100
num_worker: 32
only_train_epoch: 1
only_train_part: true
optimizer: SGD
phase: train
print_log: true
save_interval: 2
save_score: false
seed: 1
show_topk:
- 1
- 5
start_epoch: 0
step:
- 60
- 80
test_batch_size: 64
test_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone_motion.npy
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
train_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone_motion.npy
  debug: false
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
  normalization: false
  random_choose: false
  random_move: false
  random_shift: false
  window_size: -1
warm_up_epoch: 0
weight_decay: 0.0001
weights: null
work_dir: ./work_dir/ntu120_bone_motion_xset
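
The config above is plain YAML, so it can be loaded and resolved into the model class directly. A minimal sketch of that step (not part of the upload; it assumes PyYAML is installed, the repository root is on PYTHONPATH, and a CUDA device is available, since the model file below pins some parameters to 'cuda'):

import yaml

def import_class(name):
    # Same dotted-path resolver used inside decouple_gcn.py below.
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod

with open('config.yaml') as f:  # path to the config shown above
    cfg = yaml.safe_load(f)

Model = import_class(cfg['model'])   # -> model.decouple_gcn.Model
model = Model(**cfg['model_args'])   # num_class=120, num_point=25, groups=16, ...

Note that model_args carries its own groups: 16 for the decoupled adjacency, while the top-level groups: 8 is a separate training-script option.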
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_motion_xset/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import math
from model.dropSke import DropBlock_Ske
from model.dropT import DropBlockT_1d


def import_class(name):
    # Resolve a dotted path such as 'graph.ntu_rgb_d.Graph' to the class object.
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod


# Note: the un-underscored nn.init.normal / constant / kaiming_normal calls
# below are the deprecated aliases; they still run on current PyTorch but warn.
def conv_branch_init(conv):
    weight = conv.weight
    n = weight.size(0)
    k1 = weight.size(1)
    k2 = weight.size(2)
    nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
    nn.init.constant(conv.bias, 0)


def conv_init(conv):
    nn.init.kaiming_normal(conv.weight, mode='fan_out')
    nn.init.constant(conv.bias, 0)


def bn_init(bn, scale):
    nn.init.constant(bn.weight, scale)
    nn.init.constant(bn.bias, 0)


class unit_tcn(nn.Module):
    # Temporal convolution (kernel_size x 1 over frames) + BN, followed by
    # spatial (DropBlock_Ske) and temporal (DropBlockT_1d) DropGraph.
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
        super(unit_tcn, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

        self.dropS = DropBlock_Ske(num_point=num_point)
        self.dropT = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob, A):
        x = self.bn(self.conv(x))
        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
        return x


class unit_tcn_skip(nn.Module):
    # Plain temporal conv + BN used on the residual path when shapes change.
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
        super(unit_tcn_skip, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

    def forward(self, x):
        x = self.bn(self.conv(x))
        return x


class unit_gcn(nn.Module):
    # Decoupled spatial graph convolution: each channel group gets its own
    # learnable adjacency (DecoupleA), one per graph subset.
    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
        super(unit_gcn, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_point = num_point
        self.groups = groups
        self.num_subset = num_subset
        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)

        if in_channels != out_channels:
            self.down = nn.Sequential(
                nn.Conv2d(in_channels, out_channels, 1),
                nn.BatchNorm2d(out_channels)
            )
        else:
            self.down = lambda x: x

        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                bn_init(m, 1)
        bn_init(self.bn, 1e-6)

        self.Linear_weight = nn.Parameter(torch.zeros(
            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
            0.5 / (out_channels * num_subset)))

        self.Linear_bias = nn.Parameter(torch.zeros(
            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.constant(self.Linear_bias, 1e-6)

        eye_array = []
        for i in range(out_channels):
            eye_array.append(torch.eye(num_point))
        self.eyes = nn.Parameter(torch.tensor(torch.stack(
            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]

    def norm(self, A):
        # Column-normalize each channel's adjacency: A <- A @ D^-1,
        # with D the diagonal of (column sums + 0.001).
        b, c, h, w = A.size()
        A = A.view(c, self.num_point, self.num_point)
        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
        D_list_12 = (D_list + 0.001)**(-1)
        D_12 = self.eyes * D_list_12
        A = torch.bmm(A, D_12).view(b, c, h, w)
        return A

    def forward(self, x0):
        learn_A = self.DecoupleA.repeat(
            1, self.out_channels // self.groups, 1, 1)
        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)

        x = torch.einsum(
            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
        x = x + self.Linear_bias
        x = self.bn0(x)

        n, kc, t, v = x.size()
        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))

        x = self.bn(x)
        x += self.down(x0)
        x = self.relu(x)
        return x


class TCN_GCN_unit(nn.Module):
    # One GCN -> TCN block with a DropGraph-regularized residual connection.
    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
        super(TCN_GCN_unit, self).__init__()
        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
        self.tcn1 = unit_tcn(out_channels, out_channels,
                             stride=stride, num_point=num_point)
        self.relu = nn.ReLU()

        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)

        if not residual:
            self.residual = lambda x: 0

        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x

        else:
            self.residual = unit_tcn_skip(
                in_channels, out_channels, kernel_size=1, stride=stride)
        self.dropSke = DropBlock_Ske(num_point=num_point)
        self.dropT_skip = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob):
        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
        return self.relu(x)


class Model(nn.Module):
    # Ten-block DC-GCN backbone; DropGraph (keep_prob < 1) is only applied
    # in the last four blocks (l7-l10).
    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
        super(Model, self).__init__()

        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)

        A = self.graph.A
        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)

        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
                               block_size, residual=False)
        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l5 = TCN_GCN_unit(
            64, 128, A, groups, num_point, block_size, stride=2)
        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l8 = TCN_GCN_unit(128, 256, A, groups,
                               num_point, block_size, stride=2)
        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)

        self.fc = nn.Linear(256, num_class)
        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
        bn_init(self.data_bn, 1)

    def forward(self, x, keep_prob=0.9):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(
            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        x = self.l1(x, 1.0)
        x = self.l2(x, 1.0)
        x = self.l3(x, 1.0)
        x = self.l4(x, 1.0)
        x = self.l5(x, 1.0)
        x = self.l6(x, 1.0)
        x = self.l7(x, keep_prob)
        x = self.l8(x, keep_prob)
        x = self.l9(x, keep_prob)
        x = self.l10(x, keep_prob)

        # N*M,C,T,V
        c_new = x.size(1)
        x = x.reshape(N, M, c_new, -1)
        x = x.mean(3).mean(1)

        return self.fc(x)
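
The core of the file is unit_gcn, whose norm() column-normalizes each channel's learned adjacency (A <- A @ D^-1, with D the diagonal of column sums plus a 0.001 stabilizer). A self-contained sketch of just that step, rewritten with plain torch for illustration (the standalone function is mine, not the repository's; shapes match the code above):

import torch

def column_normalize(A, eps=0.001):
    # A: (c, V, V) stack of per-channel adjacency matrices.
    col_sums = A.sum(dim=1, keepdim=True)                               # (c, 1, V), as in norm()
    D_inv = torch.diag_embed((col_sums + eps).reciprocal().squeeze(1))  # (c, V, V) diagonal
    return torch.bmm(A, D_inv)                                          # scale column j by 1/deg(j)

A = torch.rand(4, 25, 25)        # e.g. 4 channels, 25 joints
A_hat = column_normalize(A)
print(A_hat.sum(dim=1)[0])       # each column now sums to ~1

Right-multiplying by the inverse-degree diagonal keeps the aggregation over neighbors at roughly unit scale per joint, which is why the learned DecoupleA can be trained freely without the activations blowing up.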
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_motion_xset/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5a40bb22e41beb6e992d7435ddd1c8bc4c1dcb696d63f95763226fc85d45adec
size 34946665
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_motion_xset/log.txt
ADDED
@@ -0,0 +1,665 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[ Tue Sep 13 18:24:44 2022 ] Parameters:
|
2 |
+
{'work_dir': './work_dir/ntu120_bone_motion_xset', 'model_saved_name': './save_models/ntu120_bone_motion_xset', 'Experiment_name': 'ntu120_bone_motion_xset', 'config': './config/ntu120_xset/train_bone_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [2, 3], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
|
3 |
+
|
4 |
+
[ Tue Sep 13 18:24:44 2022 ] Training epoch: 1
|
5 |
+
[ Tue Sep 13 18:25:39 2022 ] Batch(99/162) done. Loss: 3.8043 lr:0.100000
|
6 |
+
[ Tue Sep 13 18:26:06 2022 ] Eval epoch: 1
|
7 |
+
[ Tue Sep 13 18:29:00 2022 ] Mean test loss of 930 batches: 5.199268817901611.
|
8 |
+
[ Tue Sep 13 18:29:01 2022 ] Top1: 4.51%
|
9 |
+
[ Tue Sep 13 18:29:01 2022 ] Top5: 15.33%
|
10 |
+
[ Tue Sep 13 18:29:01 2022 ] Training epoch: 2
|
11 |
+
[ Tue Sep 13 18:29:25 2022 ] Batch(37/162) done. Loss: 3.4914 lr:0.100000
|
12 |
+
[ Tue Sep 13 18:30:17 2022 ] Batch(137/162) done. Loss: 3.1869 lr:0.100000
|
13 |
+
[ Tue Sep 13 18:30:30 2022 ] Eval epoch: 2
|
14 |
+
[ Tue Sep 13 18:33:25 2022 ] Mean test loss of 930 batches: 4.734589576721191.
|
15 |
+
[ Tue Sep 13 18:33:26 2022 ] Top1: 8.77%
|
16 |
+
[ Tue Sep 13 18:33:26 2022 ] Top5: 27.13%
|
17 |
+
[ Tue Sep 13 18:33:26 2022 ] Training epoch: 3
|
18 |
+
[ Tue Sep 13 18:34:10 2022 ] Batch(75/162) done. Loss: 2.8329 lr:0.100000
|
19 |
+
[ Tue Sep 13 18:34:55 2022 ] Eval epoch: 3
|
20 |
+
[ Tue Sep 13 18:37:50 2022 ] Mean test loss of 930 batches: 4.638702392578125.
|
21 |
+
[ Tue Sep 13 18:37:51 2022 ] Top1: 11.38%
|
22 |
+
[ Tue Sep 13 18:37:51 2022 ] Top5: 32.37%
|
23 |
+
[ Tue Sep 13 18:37:51 2022 ] Training epoch: 4
|
24 |
+
[ Tue Sep 13 18:38:02 2022 ] Batch(13/162) done. Loss: 2.4933 lr:0.100000
|
25 |
+
[ Tue Sep 13 18:38:55 2022 ] Batch(113/162) done. Loss: 2.5471 lr:0.100000
|
26 |
+
[ Tue Sep 13 18:39:20 2022 ] Eval epoch: 4
|
27 |
+
[ Tue Sep 13 18:42:15 2022 ] Mean test loss of 930 batches: 4.334815979003906.
|
28 |
+
[ Tue Sep 13 18:42:15 2022 ] Top1: 16.17%
|
29 |
+
[ Tue Sep 13 18:42:16 2022 ] Top5: 38.91%
|
30 |
+
[ Tue Sep 13 18:42:16 2022 ] Training epoch: 5
|
31 |
+
[ Tue Sep 13 18:42:47 2022 ] Batch(51/162) done. Loss: 1.9323 lr:0.100000
|
32 |
+
[ Tue Sep 13 18:43:40 2022 ] Batch(151/162) done. Loss: 1.8449 lr:0.100000
|
33 |
+
[ Tue Sep 13 18:43:45 2022 ] Eval epoch: 5
|
34 |
+
[ Tue Sep 13 18:46:39 2022 ] Mean test loss of 930 batches: 4.109243869781494.
|
35 |
+
[ Tue Sep 13 18:46:40 2022 ] Top1: 15.85%
|
36 |
+
[ Tue Sep 13 18:46:41 2022 ] Top5: 40.64%
|
37 |
+
[ Tue Sep 13 18:46:41 2022 ] Training epoch: 6
|
38 |
+
[ Tue Sep 13 18:47:32 2022 ] Batch(89/162) done. Loss: 1.7456 lr:0.100000
|
39 |
+
[ Tue Sep 13 18:48:10 2022 ] Eval epoch: 6
|
40 |
+
[ Tue Sep 13 18:51:04 2022 ] Mean test loss of 930 batches: 4.857425212860107.
|
41 |
+
[ Tue Sep 13 18:51:05 2022 ] Top1: 12.64%
|
42 |
+
[ Tue Sep 13 18:51:05 2022 ] Top5: 35.87%
|
43 |
+
[ Tue Sep 13 18:51:06 2022 ] Training epoch: 7
|
44 |
+
[ Tue Sep 13 18:51:23 2022 ] Batch(27/162) done. Loss: 1.4699 lr:0.100000
|
45 |
+
[ Tue Sep 13 18:52:16 2022 ] Batch(127/162) done. Loss: 1.3623 lr:0.100000
|
46 |
+
[ Tue Sep 13 18:52:35 2022 ] Eval epoch: 7
|
47 |
+
[ Tue Sep 13 18:55:28 2022 ] Mean test loss of 930 batches: 3.478107213973999.
|
48 |
+
[ Tue Sep 13 18:55:29 2022 ] Top1: 25.40%
|
49 |
+
[ Tue Sep 13 18:55:29 2022 ] Top5: 52.54%
|
50 |
+
[ Tue Sep 13 18:55:30 2022 ] Training epoch: 8
|
51 |
+
[ Tue Sep 13 18:56:08 2022 ] Batch(65/162) done. Loss: 1.5486 lr:0.100000
|
52 |
+
[ Tue Sep 13 18:56:59 2022 ] Eval epoch: 8
|
53 |
+
[ Tue Sep 13 18:59:52 2022 ] Mean test loss of 930 batches: 4.49910306930542.
|
54 |
+
[ Tue Sep 13 18:59:53 2022 ] Top1: 15.79%
|
55 |
+
[ Tue Sep 13 18:59:53 2022 ] Top5: 42.98%
|
56 |
+
[ Tue Sep 13 18:59:53 2022 ] Training epoch: 9
|
57 |
+
[ Tue Sep 13 18:59:58 2022 ] Batch(3/162) done. Loss: 1.4525 lr:0.100000
|
58 |
+
[ Tue Sep 13 19:00:51 2022 ] Batch(103/162) done. Loss: 1.3946 lr:0.100000
|
59 |
+
[ Tue Sep 13 19:01:22 2022 ] Eval epoch: 9
|
60 |
+
[ Tue Sep 13 19:04:16 2022 ] Mean test loss of 930 batches: 4.081279277801514.
|
61 |
+
[ Tue Sep 13 19:04:17 2022 ] Top1: 18.95%
|
62 |
+
[ Tue Sep 13 19:04:17 2022 ] Top5: 51.57%
|
63 |
+
[ Tue Sep 13 19:04:17 2022 ] Training epoch: 10
|
64 |
+
[ Tue Sep 13 19:04:43 2022 ] Batch(41/162) done. Loss: 1.3731 lr:0.100000
|
65 |
+
[ Tue Sep 13 19:05:36 2022 ] Batch(141/162) done. Loss: 1.2596 lr:0.100000
|
66 |
+
[ Tue Sep 13 19:05:47 2022 ] Eval epoch: 10
|
67 |
+
[ Tue Sep 13 19:08:42 2022 ] Mean test loss of 930 batches: 4.165366172790527.
|
68 |
+
[ Tue Sep 13 19:08:43 2022 ] Top1: 23.41%
|
69 |
+
[ Tue Sep 13 19:08:43 2022 ] Top5: 54.06%
|
70 |
+
[ Tue Sep 13 19:08:43 2022 ] Training epoch: 11
|
71 |
+
[ Tue Sep 13 19:09:29 2022 ] Batch(79/162) done. Loss: 1.6100 lr:0.100000
|
72 |
+
[ Tue Sep 13 19:10:12 2022 ] Eval epoch: 11
|
73 |
+
[ Tue Sep 13 19:13:06 2022 ] Mean test loss of 930 batches: 3.0280818939208984.
|
74 |
+
[ Tue Sep 13 19:13:06 2022 ] Top1: 33.52%
|
75 |
+
[ Tue Sep 13 19:13:07 2022 ] Top5: 66.71%
|
76 |
+
[ Tue Sep 13 19:13:07 2022 ] Training epoch: 12
|
77 |
+
[ Tue Sep 13 19:13:20 2022 ] Batch(17/162) done. Loss: 1.0361 lr:0.100000
|
78 |
+
[ Tue Sep 13 19:14:13 2022 ] Batch(117/162) done. Loss: 1.5490 lr:0.100000
|
79 |
+
[ Tue Sep 13 19:14:36 2022 ] Eval epoch: 12
|
80 |
+
[ Tue Sep 13 19:17:30 2022 ] Mean test loss of 930 batches: 3.2200257778167725.
|
81 |
+
[ Tue Sep 13 19:17:30 2022 ] Top1: 31.72%
|
82 |
+
[ Tue Sep 13 19:17:31 2022 ] Top5: 62.12%
|
83 |
+
[ Tue Sep 13 19:17:31 2022 ] Training epoch: 13
|
84 |
+
[ Tue Sep 13 19:18:03 2022 ] Batch(55/162) done. Loss: 1.3301 lr:0.100000
|
85 |
+
[ Tue Sep 13 19:18:56 2022 ] Batch(155/162) done. Loss: 0.8611 lr:0.100000
|
86 |
+
[ Tue Sep 13 19:19:00 2022 ] Eval epoch: 13
|
87 |
+
[ Tue Sep 13 19:21:53 2022 ] Mean test loss of 930 batches: 3.0923314094543457.
|
88 |
+
[ Tue Sep 13 19:21:54 2022 ] Top1: 30.39%
|
89 |
+
[ Tue Sep 13 19:21:54 2022 ] Top5: 62.35%
|
90 |
+
[ Tue Sep 13 19:21:55 2022 ] Training epoch: 14
|
91 |
+
[ Tue Sep 13 19:22:48 2022 ] Batch(93/162) done. Loss: 1.0040 lr:0.100000
|
92 |
+
[ Tue Sep 13 19:23:24 2022 ] Eval epoch: 14
|
93 |
+
[ Tue Sep 13 19:26:17 2022 ] Mean test loss of 930 batches: 2.770219087600708.
|
94 |
+
[ Tue Sep 13 19:26:18 2022 ] Top1: 38.51%
|
95 |
+
[ Tue Sep 13 19:26:19 2022 ] Top5: 70.18%
|
96 |
+
[ Tue Sep 13 19:26:19 2022 ] Training epoch: 15
|
97 |
+
[ Tue Sep 13 19:26:39 2022 ] Batch(31/162) done. Loss: 1.2515 lr:0.100000
|
98 |
+
[ Tue Sep 13 19:27:32 2022 ] Batch(131/162) done. Loss: 1.3614 lr:0.100000
|
99 |
+
[ Tue Sep 13 19:27:48 2022 ] Eval epoch: 15
|
100 |
+
[ Tue Sep 13 19:30:42 2022 ] Mean test loss of 930 batches: 2.787174940109253.
|
101 |
+
[ Tue Sep 13 19:30:42 2022 ] Top1: 35.36%
|
102 |
+
[ Tue Sep 13 19:30:43 2022 ] Top5: 68.80%
|
103 |
+
[ Tue Sep 13 19:30:43 2022 ] Training epoch: 16
|
104 |
+
[ Tue Sep 13 19:31:23 2022 ] Batch(69/162) done. Loss: 0.7219 lr:0.100000
|
105 |
+
[ Tue Sep 13 19:32:12 2022 ] Eval epoch: 16
|
106 |
+
[ Tue Sep 13 19:35:06 2022 ] Mean test loss of 930 batches: 4.6869659423828125.
|
107 |
+
[ Tue Sep 13 19:35:07 2022 ] Top1: 26.56%
|
108 |
+
[ Tue Sep 13 19:35:07 2022 ] Top5: 52.24%
|
109 |
+
[ Tue Sep 13 19:35:07 2022 ] Training epoch: 17
|
110 |
+
[ Tue Sep 13 19:35:15 2022 ] Batch(7/162) done. Loss: 0.4922 lr:0.100000
|
111 |
+
[ Tue Sep 13 19:36:08 2022 ] Batch(107/162) done. Loss: 0.9194 lr:0.100000
|
112 |
+
[ Tue Sep 13 19:36:37 2022 ] Eval epoch: 17
|
113 |
+
[ Tue Sep 13 19:39:31 2022 ] Mean test loss of 930 batches: 3.368222236633301.
|
114 |
+
[ Tue Sep 13 19:39:31 2022 ] Top1: 30.47%
|
115 |
+
[ Tue Sep 13 19:39:32 2022 ] Top5: 61.70%
|
116 |
+
[ Tue Sep 13 19:39:32 2022 ] Training epoch: 18
|
117 |
+
[ Tue Sep 13 19:39:59 2022 ] Batch(45/162) done. Loss: 0.9355 lr:0.100000
|
118 |
+
[ Tue Sep 13 19:40:52 2022 ] Batch(145/162) done. Loss: 1.0187 lr:0.100000
|
119 |
+
[ Tue Sep 13 19:41:01 2022 ] Eval epoch: 18
|
120 |
+
[ Tue Sep 13 19:43:55 2022 ] Mean test loss of 930 batches: 3.0660781860351562.
|
121 |
+
[ Tue Sep 13 19:43:56 2022 ] Top1: 36.15%
|
122 |
+
[ Tue Sep 13 19:43:56 2022 ] Top5: 70.34%
|
123 |
+
[ Tue Sep 13 19:43:57 2022 ] Training epoch: 19
|
124 |
+
[ Tue Sep 13 19:44:44 2022 ] Batch(83/162) done. Loss: 1.0415 lr:0.100000
|
125 |
+
[ Tue Sep 13 19:45:26 2022 ] Eval epoch: 19
|
126 |
+
[ Tue Sep 13 19:48:19 2022 ] Mean test loss of 930 batches: 3.684891939163208.
|
127 |
+
[ Tue Sep 13 19:48:20 2022 ] Top1: 32.52%
|
128 |
+
[ Tue Sep 13 19:48:20 2022 ] Top5: 63.57%
|
129 |
+
[ Tue Sep 13 19:48:21 2022 ] Training epoch: 20
|
130 |
+
[ Tue Sep 13 19:48:35 2022 ] Batch(21/162) done. Loss: 0.7119 lr:0.100000
|
131 |
+
[ Tue Sep 13 19:49:28 2022 ] Batch(121/162) done. Loss: 0.8407 lr:0.100000
|
132 |
+
[ Tue Sep 13 19:49:49 2022 ] Eval epoch: 20
|
133 |
+
[ Tue Sep 13 19:52:43 2022 ] Mean test loss of 930 batches: 3.269587993621826.
|
134 |
+
[ Tue Sep 13 19:52:44 2022 ] Top1: 36.59%
|
135 |
+
[ Tue Sep 13 19:52:45 2022 ] Top5: 68.75%
|
136 |
+
[ Tue Sep 13 19:52:45 2022 ] Training epoch: 21
|
137 |
+
[ Tue Sep 13 19:53:19 2022 ] Batch(59/162) done. Loss: 0.8355 lr:0.100000
|
138 |
+
[ Tue Sep 13 19:54:12 2022 ] Batch(159/162) done. Loss: 0.9426 lr:0.100000
|
139 |
+
[ Tue Sep 13 19:54:14 2022 ] Eval epoch: 21
|
140 |
+
[ Tue Sep 13 19:57:08 2022 ] Mean test loss of 930 batches: 3.8170042037963867.
|
141 |
+
[ Tue Sep 13 19:57:08 2022 ] Top1: 35.14%
|
142 |
+
[ Tue Sep 13 19:57:09 2022 ] Top5: 65.00%
|
143 |
+
[ Tue Sep 13 19:57:09 2022 ] Training epoch: 22
|
144 |
+
[ Tue Sep 13 19:58:04 2022 ] Batch(97/162) done. Loss: 0.9230 lr:0.100000
|
145 |
+
[ Tue Sep 13 19:58:38 2022 ] Eval epoch: 22
|
146 |
+
[ Tue Sep 13 20:01:32 2022 ] Mean test loss of 930 batches: 2.589581251144409.
|
147 |
+
[ Tue Sep 13 20:01:33 2022 ] Top1: 45.12%
|
148 |
+
[ Tue Sep 13 20:01:33 2022 ] Top5: 75.72%
|
149 |
+
[ Tue Sep 13 20:01:34 2022 ] Training epoch: 23
|
150 |
+
[ Tue Sep 13 20:01:56 2022 ] Batch(35/162) done. Loss: 0.6825 lr:0.100000
|
151 |
+
[ Tue Sep 13 20:02:48 2022 ] Batch(135/162) done. Loss: 0.6183 lr:0.100000
|
152 |
+
[ Tue Sep 13 20:03:02 2022 ] Eval epoch: 23
|
153 |
+
[ Tue Sep 13 20:05:55 2022 ] Mean test loss of 930 batches: 2.98416805267334.
|
154 |
+
[ Tue Sep 13 20:05:56 2022 ] Top1: 39.40%
|
155 |
+
[ Tue Sep 13 20:05:56 2022 ] Top5: 69.77%
|
156 |
+
[ Tue Sep 13 20:05:56 2022 ] Training epoch: 24
|
157 |
+
[ Tue Sep 13 20:06:39 2022 ] Batch(73/162) done. Loss: 0.5535 lr:0.100000
|
158 |
+
[ Tue Sep 13 20:07:25 2022 ] Eval epoch: 24
|
159 |
+
[ Tue Sep 13 20:10:19 2022 ] Mean test loss of 930 batches: 4.608972072601318.
|
160 |
+
[ Tue Sep 13 20:10:20 2022 ] Top1: 32.01%
|
161 |
+
[ Tue Sep 13 20:10:20 2022 ] Top5: 65.21%
|
162 |
+
[ Tue Sep 13 20:10:20 2022 ] Training epoch: 25
|
163 |
+
[ Tue Sep 13 20:10:30 2022 ] Batch(11/162) done. Loss: 0.5998 lr:0.100000
|
164 |
+
[ Tue Sep 13 20:11:23 2022 ] Batch(111/162) done. Loss: 0.6096 lr:0.100000
|
165 |
+
[ Tue Sep 13 20:11:49 2022 ] Eval epoch: 25
|
166 |
+
[ Tue Sep 13 20:14:43 2022 ] Mean test loss of 930 batches: 4.218852519989014.
|
167 |
+
[ Tue Sep 13 20:14:44 2022 ] Top1: 31.82%
|
168 |
+
[ Tue Sep 13 20:14:44 2022 ] Top5: 62.13%
|
169 |
+
[ Tue Sep 13 20:14:45 2022 ] Training epoch: 26
|
170 |
+
[ Tue Sep 13 20:15:14 2022 ] Batch(49/162) done. Loss: 0.4509 lr:0.100000
|
171 |
+
[ Tue Sep 13 20:16:07 2022 ] Batch(149/162) done. Loss: 0.5795 lr:0.100000
|
172 |
+
[ Tue Sep 13 20:16:14 2022 ] Eval epoch: 26
|
173 |
+
[ Tue Sep 13 20:19:07 2022 ] Mean test loss of 930 batches: 4.979351997375488.
|
174 |
+
[ Tue Sep 13 20:19:08 2022 ] Top1: 31.84%
|
175 |
+
[ Tue Sep 13 20:19:08 2022 ] Top5: 64.70%
|
176 |
+
[ Tue Sep 13 20:19:09 2022 ] Training epoch: 27
|
177 |
+
[ Tue Sep 13 20:19:58 2022 ] Batch(87/162) done. Loss: 1.0953 lr:0.100000
|
178 |
+
[ Tue Sep 13 20:20:37 2022 ] Eval epoch: 27
|
179 |
+
[ Tue Sep 13 20:23:32 2022 ] Mean test loss of 930 batches: 3.376075267791748.
|
180 |
+
[ Tue Sep 13 20:23:33 2022 ] Top1: 36.73%
|
181 |
+
[ Tue Sep 13 20:23:33 2022 ] Top5: 68.99%
|
182 |
+
[ Tue Sep 13 20:23:34 2022 ] Training epoch: 28
|
183 |
+
[ Tue Sep 13 20:23:50 2022 ] Batch(25/162) done. Loss: 0.5626 lr:0.100000
|
184 |
+
[ Tue Sep 13 20:24:43 2022 ] Batch(125/162) done. Loss: 0.4676 lr:0.100000
|
185 |
+
[ Tue Sep 13 20:25:02 2022 ] Eval epoch: 28
|
186 |
+
[ Tue Sep 13 20:27:56 2022 ] Mean test loss of 930 batches: 2.888674020767212.
|
187 |
+
[ Tue Sep 13 20:27:57 2022 ] Top1: 39.02%
|
188 |
+
[ Tue Sep 13 20:27:57 2022 ] Top5: 72.62%
|
189 |
+
[ Tue Sep 13 20:27:58 2022 ] Training epoch: 29
|
190 |
+
[ Tue Sep 13 20:28:34 2022 ] Batch(63/162) done. Loss: 0.6923 lr:0.100000
|
191 |
+
[ Tue Sep 13 20:29:26 2022 ] Eval epoch: 29
|
192 |
+
[ Tue Sep 13 20:32:20 2022 ] Mean test loss of 930 batches: 8.909499168395996.
|
193 |
+
[ Tue Sep 13 20:32:20 2022 ] Top1: 16.27%
|
194 |
+
[ Tue Sep 13 20:32:21 2022 ] Top5: 36.47%
|
195 |
+
[ Tue Sep 13 20:32:21 2022 ] Training epoch: 30
|
196 |
+
[ Tue Sep 13 20:32:25 2022 ] Batch(1/162) done. Loss: 0.4918 lr:0.100000
|
197 |
+
[ Tue Sep 13 20:33:18 2022 ] Batch(101/162) done. Loss: 0.4076 lr:0.100000
|
198 |
+
[ Tue Sep 13 20:33:50 2022 ] Eval epoch: 30
|
199 |
+
[ Tue Sep 13 20:36:44 2022 ] Mean test loss of 930 batches: 3.943101167678833.
|
200 |
+
[ Tue Sep 13 20:36:44 2022 ] Top1: 38.68%
|
201 |
+
[ Tue Sep 13 20:36:44 2022 ] Top5: 70.67%
|
202 |
+
[ Tue Sep 13 20:36:45 2022 ] Training epoch: 31
|
203 |
+
[ Tue Sep 13 20:37:09 2022 ] Batch(39/162) done. Loss: 0.4131 lr:0.100000
|
204 |
+
[ Tue Sep 13 20:38:02 2022 ] Batch(139/162) done. Loss: 0.5514 lr:0.100000
|
205 |
+
[ Tue Sep 13 20:38:13 2022 ] Eval epoch: 31
|
206 |
+
[ Tue Sep 13 20:41:07 2022 ] Mean test loss of 930 batches: 6.147310256958008.
|
207 |
+
[ Tue Sep 13 20:41:08 2022 ] Top1: 29.80%
|
208 |
+
[ Tue Sep 13 20:41:08 2022 ] Top5: 62.57%
|
209 |
+
[ Tue Sep 13 20:41:09 2022 ] Training epoch: 32
|
210 |
+
[ Tue Sep 13 20:41:53 2022 ] Batch(77/162) done. Loss: 0.4296 lr:0.100000
|
211 |
+
[ Tue Sep 13 20:42:38 2022 ] Eval epoch: 32
|
212 |
+
[ Tue Sep 13 20:45:32 2022 ] Mean test loss of 930 batches: 2.972085475921631.
|
213 |
+
[ Tue Sep 13 20:45:32 2022 ] Top1: 45.22%
|
214 |
+
[ Tue Sep 13 20:45:33 2022 ] Top5: 75.38%
|
215 |
+
[ Tue Sep 13 20:45:33 2022 ] Training epoch: 33
|
216 |
+
[ Tue Sep 13 20:45:44 2022 ] Batch(15/162) done. Loss: 0.1958 lr:0.100000
|
217 |
+
[ Tue Sep 13 20:46:37 2022 ] Batch(115/162) done. Loss: 0.6321 lr:0.100000
|
218 |
+
[ Tue Sep 13 20:47:02 2022 ] Eval epoch: 33
|
219 |
+
[ Tue Sep 13 20:49:55 2022 ] Mean test loss of 930 batches: 3.6302051544189453.
|
220 |
+
[ Tue Sep 13 20:49:56 2022 ] Top1: 35.10%
|
221 |
+
[ Tue Sep 13 20:49:56 2022 ] Top5: 68.23%
|
222 |
+
[ Tue Sep 13 20:49:56 2022 ] Training epoch: 34
|
223 |
+
[ Tue Sep 13 20:50:28 2022 ] Batch(53/162) done. Loss: 0.3220 lr:0.100000
|
224 |
+
[ Tue Sep 13 20:51:21 2022 ] Batch(153/162) done. Loss: 0.4241 lr:0.100000
|
225 |
+
[ Tue Sep 13 20:51:25 2022 ] Eval epoch: 34
|
226 |
+
[ Tue Sep 13 20:54:19 2022 ] Mean test loss of 930 batches: 5.966952323913574.
|
227 |
+
[ Tue Sep 13 20:54:20 2022 ] Top1: 23.96%
|
228 |
+
[ Tue Sep 13 20:54:20 2022 ] Top5: 54.38%
|
229 |
+
[ Tue Sep 13 20:54:21 2022 ] Training epoch: 35
|
230 |
+
[ Tue Sep 13 20:55:12 2022 ] Batch(91/162) done. Loss: 0.2236 lr:0.100000
|
231 |
+
[ Tue Sep 13 20:55:50 2022 ] Eval epoch: 35
|
232 |
+
[ Tue Sep 13 20:58:44 2022 ] Mean test loss of 930 batches: 3.8734185695648193.
|
233 |
+
[ Tue Sep 13 20:58:44 2022 ] Top1: 36.80%
|
234 |
+
[ Tue Sep 13 20:58:45 2022 ] Top5: 68.49%
|
235 |
+
[ Tue Sep 13 20:58:45 2022 ] Training epoch: 36
|
236 |
+
[ Tue Sep 13 20:59:04 2022 ] Batch(29/162) done. Loss: 0.3525 lr:0.100000
|
237 |
+
[ Tue Sep 13 20:59:57 2022 ] Batch(129/162) done. Loss: 0.6794 lr:0.100000
|
238 |
+
[ Tue Sep 13 21:00:14 2022 ] Eval epoch: 36
|
239 |
+
[ Tue Sep 13 21:03:08 2022 ] Mean test loss of 930 batches: 3.38310170173645.
|
240 |
+
[ Tue Sep 13 21:03:08 2022 ] Top1: 36.34%
|
241 |
+
[ Tue Sep 13 21:03:09 2022 ] Top5: 66.81%
|
242 |
+
[ Tue Sep 13 21:03:09 2022 ] Training epoch: 37
|
243 |
+
[ Tue Sep 13 21:03:48 2022 ] Batch(67/162) done. Loss: 0.3958 lr:0.100000
|
244 |
+
[ Tue Sep 13 21:04:38 2022 ] Eval epoch: 37
|
245 |
+
[ Tue Sep 13 21:07:31 2022 ] Mean test loss of 930 batches: 3.379849672317505.
|
246 |
+
[ Tue Sep 13 21:07:32 2022 ] Top1: 40.11%
|
247 |
+
[ Tue Sep 13 21:07:32 2022 ] Top5: 72.22%
|
248 |
+
[ Tue Sep 13 21:07:33 2022 ] Training epoch: 38
|
249 |
+
[ Tue Sep 13 21:07:39 2022 ] Batch(5/162) done. Loss: 0.1820 lr:0.100000
|
250 |
+
[ Tue Sep 13 21:08:31 2022 ] Batch(105/162) done. Loss: 0.3579 lr:0.100000
|
251 |
+
[ Tue Sep 13 21:09:01 2022 ] Eval epoch: 38
|
252 |
+
[ Tue Sep 13 21:11:55 2022 ] Mean test loss of 930 batches: 5.3892083168029785.
|
253 |
+
[ Tue Sep 13 21:11:55 2022 ] Top1: 29.35%
|
254 |
+
[ Tue Sep 13 21:11:56 2022 ] Top5: 54.10%
|
255 |
+
[ Tue Sep 13 21:11:56 2022 ] Training epoch: 39
|
256 |
+
[ Tue Sep 13 21:12:22 2022 ] Batch(43/162) done. Loss: 0.3955 lr:0.100000
|
257 |
+
[ Tue Sep 13 21:13:15 2022 ] Batch(143/162) done. Loss: 0.2325 lr:0.100000
|
258 |
+
[ Tue Sep 13 21:13:25 2022 ] Eval epoch: 39
|
259 |
+
[ Tue Sep 13 21:16:18 2022 ] Mean test loss of 930 batches: 9.454630851745605.
|
260 |
+
[ Tue Sep 13 21:16:19 2022 ] Top1: 19.79%
|
261 |
+
[ Tue Sep 13 21:16:19 2022 ] Top5: 45.30%
|
262 |
+
[ Tue Sep 13 21:16:20 2022 ] Training epoch: 40
|
263 |
+
[ Tue Sep 13 21:17:06 2022 ] Batch(81/162) done. Loss: 0.4193 lr:0.100000
|
264 |
+
[ Tue Sep 13 21:17:48 2022 ] Eval epoch: 40
|
265 |
+
[ Tue Sep 13 21:20:41 2022 ] Mean test loss of 930 batches: 10.120402336120605.
|
266 |
+
[ Tue Sep 13 21:20:42 2022 ] Top1: 16.91%
|
267 |
+
[ Tue Sep 13 21:20:42 2022 ] Top5: 38.84%
|
268 |
+
[ Tue Sep 13 21:20:42 2022 ] Training epoch: 41
|
269 |
+
[ Tue Sep 13 21:20:56 2022 ] Batch(19/162) done. Loss: 0.3093 lr:0.100000
|
270 |
+
[ Tue Sep 13 21:21:49 2022 ] Batch(119/162) done. Loss: 0.3140 lr:0.100000
|
271 |
+
[ Tue Sep 13 21:22:11 2022 ] Eval epoch: 41
|
272 |
+
[ Tue Sep 13 21:25:05 2022 ] Mean test loss of 930 batches: 4.387808322906494.
|
273 |
+
[ Tue Sep 13 21:25:06 2022 ] Top1: 32.27%
|
274 |
+
[ Tue Sep 13 21:25:06 2022 ] Top5: 63.54%
|
275 |
+
[ Tue Sep 13 21:25:07 2022 ] Training epoch: 42
|
276 |
+
[ Tue Sep 13 21:25:40 2022 ] Batch(57/162) done. Loss: 0.1977 lr:0.100000
|
277 |
+
[ Tue Sep 13 21:26:33 2022 ] Batch(157/162) done. Loss: 0.7468 lr:0.100000
|
278 |
+
[ Tue Sep 13 21:26:35 2022 ] Eval epoch: 42
|
279 |
+
[ Tue Sep 13 21:29:29 2022 ] Mean test loss of 930 batches: 4.2767720222473145.
|
280 |
+
[ Tue Sep 13 21:29:29 2022 ] Top1: 36.36%
|
281 |
+
[ Tue Sep 13 21:29:30 2022 ] Top5: 66.10%
|
282 |
+
[ Tue Sep 13 21:29:30 2022 ] Training epoch: 43
|
283 |
+
[ Tue Sep 13 21:30:23 2022 ] Batch(95/162) done. Loss: 0.4878 lr:0.100000
|
284 |
+
[ Tue Sep 13 21:30:58 2022 ] Eval epoch: 43
|
285 |
+
[ Tue Sep 13 21:33:52 2022 ] Mean test loss of 930 batches: 5.679744243621826.
|
286 |
+
[ Tue Sep 13 21:33:52 2022 ] Top1: 27.87%
|
287 |
+
[ Tue Sep 13 21:33:53 2022 ] Top5: 60.73%
|
288 |
+
[ Tue Sep 13 21:33:53 2022 ] Training epoch: 44
|
289 |
+
[ Tue Sep 13 21:34:14 2022 ] Batch(33/162) done. Loss: 0.2142 lr:0.100000
|
290 |
+
[ Tue Sep 13 21:35:06 2022 ] Batch(133/162) done. Loss: 0.4588 lr:0.100000
|
291 |
+
[ Tue Sep 13 21:35:21 2022 ] Eval epoch: 44
|
292 |
+
[ Tue Sep 13 21:38:15 2022 ] Mean test loss of 930 batches: 4.639147758483887.
|
293 |
+
[ Tue Sep 13 21:38:16 2022 ] Top1: 36.21%
|
294 |
+
[ Tue Sep 13 21:38:16 2022 ] Top5: 67.00%
|
295 |
+
[ Tue Sep 13 21:38:16 2022 ] Training epoch: 45
|
296 |
+
[ Tue Sep 13 21:38:57 2022 ] Batch(71/162) done. Loss: 0.4394 lr:0.100000
|
297 |
+
[ Tue Sep 13 21:39:45 2022 ] Eval epoch: 45
|
298 |
+
[ Tue Sep 13 21:42:39 2022 ] Mean test loss of 930 batches: 3.90327787399292.
|
299 |
+
[ Tue Sep 13 21:42:39 2022 ] Top1: 38.62%
|
300 |
+
[ Tue Sep 13 21:42:40 2022 ] Top5: 69.91%
|
301 |
+
[ Tue Sep 13 21:42:40 2022 ] Training epoch: 46
|
302 |
+
[ Tue Sep 13 21:42:48 2022 ] Batch(9/162) done. Loss: 0.3888 lr:0.100000
|
303 |
+
[ Tue Sep 13 21:43:41 2022 ] Batch(109/162) done. Loss: 0.2235 lr:0.100000
|
304 |
+
[ Tue Sep 13 21:44:08 2022 ] Eval epoch: 46
|
305 |
+
[ Tue Sep 13 21:47:02 2022 ] Mean test loss of 930 batches: 6.589352130889893.
|
306 |
+
[ Tue Sep 13 21:47:02 2022 ] Top1: 25.58%
|
307 |
+
[ Tue Sep 13 21:47:03 2022 ] Top5: 54.44%
|
308 |
+
[ Tue Sep 13 21:47:03 2022 ] Training epoch: 47
|
309 |
+
[ Tue Sep 13 21:47:31 2022 ] Batch(47/162) done. Loss: 0.3766 lr:0.100000
|
310 |
+
[ Tue Sep 13 21:48:24 2022 ] Batch(147/162) done. Loss: 0.5963 lr:0.100000
|
311 |
+
[ Tue Sep 13 21:48:32 2022 ] Eval epoch: 47
|
312 |
+
[ Tue Sep 13 21:51:26 2022 ] Mean test loss of 930 batches: 6.4680495262146.
|
313 |
+
[ Tue Sep 13 21:51:26 2022 ] Top1: 29.32%
|
314 |
+
[ Tue Sep 13 21:51:27 2022 ] Top5: 57.34%
|
315 |
+
[ Tue Sep 13 21:51:27 2022 ] Training epoch: 48
|
316 |
+
[ Tue Sep 13 21:52:15 2022 ] Batch(85/162) done. Loss: 0.2175 lr:0.100000
|
317 |
+
[ Tue Sep 13 21:52:56 2022 ] Eval epoch: 48
|
318 |
+
[ Tue Sep 13 21:55:50 2022 ] Mean test loss of 930 batches: 5.157181739807129.
|
319 |
+
[ Tue Sep 13 21:55:50 2022 ] Top1: 34.15%
|
320 |
+
[ Tue Sep 13 21:55:51 2022 ] Top5: 67.10%
|
321 |
+
[ Tue Sep 13 21:55:51 2022 ] Training epoch: 49
|
322 |
+
[ Tue Sep 13 21:56:06 2022 ] Batch(23/162) done. Loss: 0.3487 lr:0.100000
|
323 |
+
[ Tue Sep 13 21:56:59 2022 ] Batch(123/162) done. Loss: 0.2986 lr:0.100000
|
324 |
+
[ Tue Sep 13 21:57:19 2022 ] Eval epoch: 49
|
325 |
+
[ Tue Sep 13 22:00:13 2022 ] Mean test loss of 930 batches: 3.3797988891601562.
|
326 |
+
[ Tue Sep 13 22:00:14 2022 ] Top1: 43.03%
|
327 |
+
[ Tue Sep 13 22:00:14 2022 ] Top5: 71.34%
|
328 |
+
[ Tue Sep 13 22:00:15 2022 ] Training epoch: 50
|
329 |
+
[ Tue Sep 13 22:00:50 2022 ] Batch(61/162) done. Loss: 0.2856 lr:0.100000
|
330 |
+
[ Tue Sep 13 22:01:43 2022 ] Batch(161/162) done. Loss: 0.2811 lr:0.100000
|
331 |
+
[ Tue Sep 13 22:01:43 2022 ] Eval epoch: 50
|
332 |
+
[ Tue Sep 13 22:04:37 2022 ] Mean test loss of 930 batches: 6.036594390869141.
|
333 |
+
[ Tue Sep 13 22:04:38 2022 ] Top1: 26.76%
|
334 |
+
[ Tue Sep 13 22:04:38 2022 ] Top5: 54.91%
|
335 |
+
[ Tue Sep 13 22:04:38 2022 ] Training epoch: 51
|
336 |
+
[ Tue Sep 13 22:05:34 2022 ] Batch(99/162) done. Loss: 0.3003 lr:0.100000
|
337 |
+
[ Tue Sep 13 22:06:07 2022 ] Eval epoch: 51
|
338 |
+
[ Tue Sep 13 22:09:00 2022 ] Mean test loss of 930 batches: 3.1611266136169434.
|
339 |
+
[ Tue Sep 13 22:09:01 2022 ] Top1: 45.07%
|
340 |
+
[ Tue Sep 13 22:09:01 2022 ] Top5: 73.64%
|
341 |
+
[ Tue Sep 13 22:09:01 2022 ] Training epoch: 52
|
342 |
+
[ Tue Sep 13 22:09:25 2022 ] Batch(37/162) done. Loss: 0.3267 lr:0.100000
|
343 |
+
[ Tue Sep 13 22:10:17 2022 ] Batch(137/162) done. Loss: 0.3280 lr:0.100000
|
344 |
+
[ Tue Sep 13 22:10:30 2022 ] Eval epoch: 52
|
345 |
+
[ Tue Sep 13 22:13:24 2022 ] Mean test loss of 930 batches: 3.928891658782959.
|
346 |
+
[ Tue Sep 13 22:13:25 2022 ] Top1: 42.29%
|
347 |
+
[ Tue Sep 13 22:13:25 2022 ] Top5: 73.62%
|
348 |
+
[ Tue Sep 13 22:13:25 2022 ] Training epoch: 53
|
349 |
+
[ Tue Sep 13 22:14:08 2022 ] Batch(75/162) done. Loss: 0.2201 lr:0.100000
|
350 |
+
[ Tue Sep 13 22:14:54 2022 ] Eval epoch: 53
|
351 |
+
[ Tue Sep 13 22:17:48 2022 ] Mean test loss of 930 batches: 5.713135242462158.
|
352 |
+
[ Tue Sep 13 22:17:48 2022 ] Top1: 26.39%
|
353 |
+
[ Tue Sep 13 22:17:48 2022 ] Top5: 50.29%
|
354 |
+
[ Tue Sep 13 22:17:49 2022 ] Training epoch: 54
|
355 |
+
[ Tue Sep 13 22:17:59 2022 ] Batch(13/162) done. Loss: 0.1089 lr:0.100000
|
356 |
+
[ Tue Sep 13 22:18:52 2022 ] Batch(113/162) done. Loss: 0.3991 lr:0.100000
|
357 |
+
[ Tue Sep 13 22:19:18 2022 ] Eval epoch: 54
|
358 |
+
[ Tue Sep 13 22:22:11 2022 ] Mean test loss of 930 batches: 4.794914722442627.
|
359 |
+
[ Tue Sep 13 22:22:12 2022 ] Top1: 34.96%
|
360 |
+
[ Tue Sep 13 22:22:12 2022 ] Top5: 64.15%
|
361 |
+
[ Tue Sep 13 22:22:13 2022 ] Training epoch: 55
|
362 |
+
[ Tue Sep 13 22:22:43 2022 ] Batch(51/162) done. Loss: 0.2343 lr:0.100000
|
363 |
+
[ Tue Sep 13 22:23:36 2022 ] Batch(151/162) done. Loss: 0.2005 lr:0.100000
|
364 |
+
[ Tue Sep 13 22:23:41 2022 ] Eval epoch: 55
|
365 |
+
[ Tue Sep 13 22:26:35 2022 ] Mean test loss of 930 batches: 11.087225914001465.
|
366 |
+
[ Tue Sep 13 22:26:36 2022 ] Top1: 21.75%
|
367 |
+
[ Tue Sep 13 22:26:36 2022 ] Top5: 43.36%
|
368 |
+
[ Tue Sep 13 22:26:36 2022 ] Training epoch: 56
|
369 |
+
[ Tue Sep 13 22:27:27 2022 ] Batch(89/162) done. Loss: 0.3925 lr:0.100000
|
370 |
+
[ Tue Sep 13 22:28:05 2022 ] Eval epoch: 56
|
371 |
+
[ Tue Sep 13 22:30:59 2022 ] Mean test loss of 930 batches: 4.006224632263184.
|
372 |
+
[ Tue Sep 13 22:30:59 2022 ] Top1: 40.97%
|
373 |
+
[ Tue Sep 13 22:31:00 2022 ] Top5: 70.19%
|
374 |
+
[ Tue Sep 13 22:31:00 2022 ] Training epoch: 57
|
375 |
+
[ Tue Sep 13 22:31:18 2022 ] Batch(27/162) done. Loss: 0.1392 lr:0.100000
|
376 |
+
[ Tue Sep 13 22:32:11 2022 ] Batch(127/162) done. Loss: 0.3604 lr:0.100000
|
377 |
+
[ Tue Sep 13 22:32:29 2022 ] Eval epoch: 57
|
378 |
+
[ Tue Sep 13 22:35:23 2022 ] Mean test loss of 930 batches: 5.908833026885986.
|
379 |
+
[ Tue Sep 13 22:35:23 2022 ] Top1: 32.79%
|
380 |
+
[ Tue Sep 13 22:35:24 2022 ] Top5: 61.89%
|
381 |
+
[ Tue Sep 13 22:35:24 2022 ] Training epoch: 58
|
382 |
+
[ Tue Sep 13 22:36:01 2022 ] Batch(65/162) done. Loss: 0.3371 lr:0.100000
|
383 |
+
[ Tue Sep 13 22:36:52 2022 ] Eval epoch: 58
|
384 |
+
[ Tue Sep 13 22:39:46 2022 ] Mean test loss of 930 batches: 5.860087871551514.
|
385 |
+
[ Tue Sep 13 22:39:47 2022 ] Top1: 36.03%
|
386 |
+
[ Tue Sep 13 22:39:47 2022 ] Top5: 67.77%
|
387 |
+
[ Tue Sep 13 22:39:47 2022 ] Training epoch: 59
|
388 |
+
[ Tue Sep 13 22:39:53 2022 ] Batch(3/162) done. Loss: 0.4174 lr:0.100000
|
389 |
+
[ Tue Sep 13 22:40:45 2022 ] Batch(103/162) done. Loss: 0.0923 lr:0.100000
|
390 |
+
[ Tue Sep 13 22:41:16 2022 ] Eval epoch: 59
|
391 |
+
[ Tue Sep 13 22:44:09 2022 ] Mean test loss of 930 batches: 9.257292747497559.
|
392 |
+
[ Tue Sep 13 22:44:10 2022 ] Top1: 20.87%
|
393 |
+
[ Tue Sep 13 22:44:10 2022 ] Top5: 44.52%
|
394 |
+
[ Tue Sep 13 22:44:11 2022 ] Training epoch: 60
|
395 |
+
[ Tue Sep 13 22:44:36 2022 ] Batch(41/162) done. Loss: 0.1475 lr:0.100000
|
396 |
+
[ Tue Sep 13 22:45:29 2022 ] Batch(141/162) done. Loss: 0.2011 lr:0.100000
|
397 |
+
[ Tue Sep 13 22:45:39 2022 ] Eval epoch: 60
|
398 |
+
[ Tue Sep 13 22:48:33 2022 ] Mean test loss of 930 batches: 6.781947135925293.
|
399 |
+
[ Tue Sep 13 22:48:33 2022 ] Top1: 29.67%
|
400 |
+
[ Tue Sep 13 22:48:34 2022 ] Top5: 57.45%
|
401 |
+
[ Tue Sep 13 22:48:34 2022 ] Training epoch: 61
|
402 |
+
[ Tue Sep 13 22:49:20 2022 ] Batch(79/162) done. Loss: 0.1082 lr:0.010000
|
403 |
+
[ Tue Sep 13 22:50:03 2022 ] Eval epoch: 61
|
404 |
+
[ Tue Sep 13 22:52:57 2022 ] Mean test loss of 930 batches: 2.992205858230591.
|
405 |
+
[ Tue Sep 13 22:52:58 2022 ] Top1: 53.45%
|
406 |
+
[ Tue Sep 13 22:52:58 2022 ] Top5: 80.64%
|
407 |
+
[ Tue Sep 13 22:52:59 2022 ] Training epoch: 62
|
408 |
+
[ Tue Sep 13 22:53:11 2022 ] Batch(17/162) done. Loss: 0.0523 lr:0.010000
|
409 |
+
[ Tue Sep 13 22:54:04 2022 ] Batch(117/162) done. Loss: 0.0956 lr:0.010000
|
410 |
+
[ Tue Sep 13 22:54:28 2022 ] Eval epoch: 62
|
411 |
+
[ Tue Sep 13 22:57:21 2022 ] Mean test loss of 930 batches: 3.067354917526245.
|
412 |
+
[ Tue Sep 13 22:57:21 2022 ] Top1: 53.46%
|
413 |
+
[ Tue Sep 13 22:57:22 2022 ] Top5: 80.63%
|
414 |
+
[ Tue Sep 13 22:57:22 2022 ] Training epoch: 63
|
415 |
+
[ Tue Sep 13 22:57:55 2022 ] Batch(55/162) done. Loss: 0.0558 lr:0.010000
|
416 |
+
[ Tue Sep 13 22:58:48 2022 ] Batch(155/162) done. Loss: 0.0274 lr:0.010000
|
417 |
+
[ Tue Sep 13 22:58:51 2022 ] Eval epoch: 63
|
418 |
+
[ Tue Sep 13 23:01:45 2022 ] Mean test loss of 930 batches: 3.045286178588867.
|
419 |
+
[ Tue Sep 13 23:01:45 2022 ] Top1: 54.33%
|
420 |
+
[ Tue Sep 13 23:01:46 2022 ] Top5: 81.25%
|
421 |
+
[ Tue Sep 13 23:01:46 2022 ] Training epoch: 64
|
422 |
+
[ Tue Sep 13 23:02:38 2022 ] Batch(93/162) done. Loss: 0.0190 lr:0.010000
|
423 |
+
[ Tue Sep 13 23:03:15 2022 ] Eval epoch: 64
|
424 |
+
[ Tue Sep 13 23:06:09 2022 ] Mean test loss of 930 batches: 3.025174140930176.
|
425 |
+
[ Tue Sep 13 23:06:09 2022 ] Top1: 54.49%
|
426 |
+
[ Tue Sep 13 23:06:09 2022 ] Top5: 81.48%
|
427 |
+
[ Tue Sep 13 23:06:10 2022 ] Training epoch: 65
|
428 |
+
[ Tue Sep 13 23:06:29 2022 ] Batch(31/162) done. Loss: 0.0290 lr:0.010000
|
429 |
+
[ Tue Sep 13 23:07:22 2022 ] Batch(131/162) done. Loss: 0.0235 lr:0.010000
|
430 |
+
[ Tue Sep 13 23:07:38 2022 ] Eval epoch: 65
|
431 |
+
[ Tue Sep 13 23:10:33 2022 ] Mean test loss of 930 batches: 3.1589932441711426.
|
432 |
+
[ Tue Sep 13 23:10:33 2022 ] Top1: 52.91%
|
433 |
+
[ Tue Sep 13 23:10:34 2022 ] Top5: 80.61%
|
434 |
+
[ Tue Sep 13 23:10:34 2022 ] Training epoch: 66
|
435 |
+
[ Tue Sep 13 23:11:14 2022 ] Batch(69/162) done. Loss: 0.0282 lr:0.010000
|
436 |
+
[ Tue Sep 13 23:12:03 2022 ] Eval epoch: 66
|
437 |
+
[ Tue Sep 13 23:14:56 2022 ] Mean test loss of 930 batches: 3.023002862930298.
|
438 |
+
[ Tue Sep 13 23:14:57 2022 ] Top1: 54.05%
|
439 |
+
[ Tue Sep 13 23:14:57 2022 ] Top5: 81.27%
|
440 |
+
[ Tue Sep 13 23:14:57 2022 ] Training epoch: 67
|
441 |
+
[ Tue Sep 13 23:15:05 2022 ] Batch(7/162) done. Loss: 0.0235 lr:0.010000
|
442 |
+
[ Tue Sep 13 23:15:57 2022 ] Batch(107/162) done. Loss: 0.1305 lr:0.010000
|
443 |
+
[ Tue Sep 13 23:16:26 2022 ] Eval epoch: 67
|
444 |
+
[ Tue Sep 13 23:19:20 2022 ] Mean test loss of 930 batches: 3.1956241130828857.
|
445 |
+
[ Tue Sep 13 23:19:20 2022 ] Top1: 52.66%
|
446 |
+
[ Tue Sep 13 23:19:21 2022 ] Top5: 80.47%
|
447 |
+
[ Tue Sep 13 23:19:21 2022 ] Training epoch: 68
|
448 |
+
[ Tue Sep 13 23:19:48 2022 ] Batch(45/162) done. Loss: 0.0245 lr:0.010000
|
449 |
+
[ Tue Sep 13 23:20:41 2022 ] Batch(145/162) done. Loss: 0.0290 lr:0.010000
|
450 |
+
[ Tue Sep 13 23:20:50 2022 ] Eval epoch: 68
|
451 |
+
[ Tue Sep 13 23:23:44 2022 ] Mean test loss of 930 batches: 3.221503734588623.
|
452 |
+
[ Tue Sep 13 23:23:44 2022 ] Top1: 52.79%
|
453 |
+
[ Tue Sep 13 23:23:45 2022 ] Top5: 80.55%
|
454 |
+
[ Tue Sep 13 23:23:45 2022 ] Training epoch: 69
|
455 |
+
[ Tue Sep 13 23:24:33 2022 ] Batch(83/162) done. Loss: 0.0249 lr:0.010000
|
456 |
+
[ Tue Sep 13 23:25:15 2022 ] Eval epoch: 69
|
457 |
+
[ Tue Sep 13 23:28:08 2022 ] Mean test loss of 930 batches: 3.1967437267303467.
|
458 |
+
[ Tue Sep 13 23:28:09 2022 ] Top1: 53.74%
|
459 |
+
[ Tue Sep 13 23:28:09 2022 ] Top5: 80.80%
|
460 |
+
[ Tue Sep 13 23:28:10 2022 ] Training epoch: 70
|
461 |
+
[ Tue Sep 13 23:28:25 2022 ] Batch(21/162) done. Loss: 0.0995 lr:0.010000
|
462 |
+
[ Tue Sep 13 23:29:17 2022 ] Batch(121/162) done. Loss: 0.0304 lr:0.010000
|
463 |
+
[ Tue Sep 13 23:29:38 2022 ] Eval epoch: 70
|
464 |
+
[ Tue Sep 13 23:32:31 2022 ] Mean test loss of 930 batches: 3.089479923248291.
|
465 |
+
[ Tue Sep 13 23:32:32 2022 ] Top1: 54.28%
|
466 |
+
[ Tue Sep 13 23:32:32 2022 ] Top5: 81.39%
|
467 |
+
[ Tue Sep 13 23:32:32 2022 ] Training epoch: 71
|
468 |
+
[ Tue Sep 13 23:33:07 2022 ] Batch(59/162) done. Loss: 0.0558 lr:0.010000
|
469 |
+
[ Tue Sep 13 23:34:00 2022 ] Batch(159/162) done. Loss: 0.0395 lr:0.010000
|
470 |
+
[ Tue Sep 13 23:34:02 2022 ] Eval epoch: 71
|
471 |
+
[ Tue Sep 13 23:36:55 2022 ] Mean test loss of 930 batches: 3.0850040912628174.
|
472 |
+
[ Tue Sep 13 23:36:55 2022 ] Top1: 54.34%
|
473 |
+
[ Tue Sep 13 23:36:56 2022 ] Top5: 81.43%
|
474 |
+
[ Tue Sep 13 23:36:56 2022 ] Training epoch: 72
|
475 |
+
[ Tue Sep 13 23:37:51 2022 ] Batch(97/162) done. Loss: 0.0396 lr:0.010000
|
476 |
+
[ Tue Sep 13 23:38:25 2022 ] Eval epoch: 72
|
477 |
+
[ Tue Sep 13 23:41:18 2022 ] Mean test loss of 930 batches: 3.267887592315674.
|
478 |
+
[ Tue Sep 13 23:41:19 2022 ] Top1: 53.87%
|
479 |
+
[ Tue Sep 13 23:41:19 2022 ] Top5: 80.92%
|
480 |
+
[ Tue Sep 13 23:41:19 2022 ] Training epoch: 73
|
481 |
+
[ Tue Sep 13 23:41:42 2022 ] Batch(35/162) done. Loss: 0.0307 lr:0.010000
|
482 |
+
[ Tue Sep 13 23:42:34 2022 ] Batch(135/162) done. Loss: 0.0440 lr:0.010000
|
483 |
+
[ Tue Sep 13 23:42:48 2022 ] Eval epoch: 73
|
484 |
+
[ Tue Sep 13 23:45:42 2022 ] Mean test loss of 930 batches: 3.163264036178589.
|
485 |
+
[ Tue Sep 13 23:45:42 2022 ] Top1: 54.46%
|
486 |
+
[ Tue Sep 13 23:45:43 2022 ] Top5: 81.37%
|
487 |
+
[ Tue Sep 13 23:45:43 2022 ] Training epoch: 74
|
488 |
+
[ Tue Sep 13 23:46:25 2022 ] Batch(73/162) done. Loss: 0.0482 lr:0.010000
[ Tue Sep 13 23:47:12 2022 ] Eval epoch: 74
[ Tue Sep 13 23:50:05 2022 ] Mean test loss of 930 batches: 3.155078172683716.
[ Tue Sep 13 23:50:05 2022 ] Top1: 54.46%
[ Tue Sep 13 23:50:06 2022 ] Top5: 81.15%
[ Tue Sep 13 23:50:06 2022 ] Training epoch: 75
[ Tue Sep 13 23:50:16 2022 ] Batch(11/162) done. Loss: 0.0118 lr:0.010000
[ Tue Sep 13 23:51:09 2022 ] Batch(111/162) done. Loss: 0.0536 lr:0.010000
[ Tue Sep 13 23:51:35 2022 ] Eval epoch: 75
[ Tue Sep 13 23:54:28 2022 ] Mean test loss of 930 batches: 3.2689106464385986.
[ Tue Sep 13 23:54:29 2022 ] Top1: 53.18%
[ Tue Sep 13 23:54:29 2022 ] Top5: 80.74%
[ Tue Sep 13 23:54:29 2022 ] Training epoch: 76
[ Tue Sep 13 23:54:59 2022 ] Batch(49/162) done. Loss: 0.0446 lr:0.010000
[ Tue Sep 13 23:55:52 2022 ] Batch(149/162) done. Loss: 0.0947 lr:0.010000
[ Tue Sep 13 23:55:58 2022 ] Eval epoch: 76
[ Tue Sep 13 23:58:52 2022 ] Mean test loss of 930 batches: 3.3122920989990234.
[ Tue Sep 13 23:58:52 2022 ] Top1: 53.77%
[ Tue Sep 13 23:58:53 2022 ] Top5: 80.86%
[ Tue Sep 13 23:58:53 2022 ] Training epoch: 77
[ Tue Sep 13 23:59:43 2022 ] Batch(87/162) done. Loss: 0.0506 lr:0.010000
[ Wed Sep 14 00:00:22 2022 ] Eval epoch: 77
[ Wed Sep 14 00:03:15 2022 ] Mean test loss of 930 batches: 3.2514548301696777.
[ Wed Sep 14 00:03:16 2022 ] Top1: 54.14%
[ Wed Sep 14 00:03:16 2022 ] Top5: 81.17%
[ Wed Sep 14 00:03:16 2022 ] Training epoch: 78
[ Wed Sep 14 00:03:33 2022 ] Batch(25/162) done. Loss: 0.0222 lr:0.010000
[ Wed Sep 14 00:04:26 2022 ] Batch(125/162) done. Loss: 0.0258 lr:0.010000
[ Wed Sep 14 00:04:45 2022 ] Eval epoch: 78
[ Wed Sep 14 00:07:39 2022 ] Mean test loss of 930 batches: 3.293084144592285.
[ Wed Sep 14 00:07:39 2022 ] Top1: 53.73%
[ Wed Sep 14 00:07:40 2022 ] Top5: 80.95%
[ Wed Sep 14 00:07:40 2022 ] Training epoch: 79
[ Wed Sep 14 00:08:17 2022 ] Batch(63/162) done. Loss: 0.0131 lr:0.010000
[ Wed Sep 14 00:09:09 2022 ] Eval epoch: 79
[ Wed Sep 14 00:12:02 2022 ] Mean test loss of 930 batches: 3.262202739715576.
[ Wed Sep 14 00:12:03 2022 ] Top1: 53.55%
[ Wed Sep 14 00:12:03 2022 ] Top5: 80.66%
[ Wed Sep 14 00:12:03 2022 ] Training epoch: 80
[ Wed Sep 14 00:12:07 2022 ] Batch(1/162) done. Loss: 0.0178 lr:0.010000
[ Wed Sep 14 00:13:00 2022 ] Batch(101/162) done. Loss: 0.0411 lr:0.010000
[ Wed Sep 14 00:13:32 2022 ] Eval epoch: 80
[ Wed Sep 14 00:16:26 2022 ] Mean test loss of 930 batches: 3.2297379970550537.
[ Wed Sep 14 00:16:27 2022 ] Top1: 54.15%
[ Wed Sep 14 00:16:27 2022 ] Top5: 81.03%
[ Wed Sep 14 00:16:27 2022 ] Training epoch: 81
[ Wed Sep 14 00:16:52 2022 ] Batch(39/162) done. Loss: 0.0240 lr:0.001000
[ Wed Sep 14 00:17:45 2022 ] Batch(139/162) done. Loss: 0.0447 lr:0.001000
[ Wed Sep 14 00:17:56 2022 ] Eval epoch: 81
[ Wed Sep 14 00:20:50 2022 ] Mean test loss of 930 batches: 3.254833698272705.
[ Wed Sep 14 00:20:50 2022 ] Top1: 54.14%
[ Wed Sep 14 00:20:51 2022 ] Top5: 81.11%
[ Wed Sep 14 00:20:51 2022 ] Training epoch: 82
[ Wed Sep 14 00:21:35 2022 ] Batch(77/162) done. Loss: 0.0049 lr:0.001000
[ Wed Sep 14 00:22:20 2022 ] Eval epoch: 82
[ Wed Sep 14 00:25:13 2022 ] Mean test loss of 930 batches: 3.3861968517303467.
[ Wed Sep 14 00:25:14 2022 ] Top1: 53.41%
[ Wed Sep 14 00:25:14 2022 ] Top5: 80.56%
[ Wed Sep 14 00:25:14 2022 ] Training epoch: 83
[ Wed Sep 14 00:25:26 2022 ] Batch(15/162) done. Loss: 0.0557 lr:0.001000
[ Wed Sep 14 00:26:19 2022 ] Batch(115/162) done. Loss: 0.0230 lr:0.001000
[ Wed Sep 14 00:26:43 2022 ] Eval epoch: 83
[ Wed Sep 14 00:29:36 2022 ] Mean test loss of 930 batches: 3.257885456085205.
[ Wed Sep 14 00:29:37 2022 ] Top1: 54.50%
[ Wed Sep 14 00:29:37 2022 ] Top5: 81.17%
[ Wed Sep 14 00:29:37 2022 ] Training epoch: 84
[ Wed Sep 14 00:30:09 2022 ] Batch(53/162) done. Loss: 0.0179 lr:0.001000
[ Wed Sep 14 00:31:02 2022 ] Batch(153/162) done. Loss: 0.0429 lr:0.001000
[ Wed Sep 14 00:31:07 2022 ] Eval epoch: 84
[ Wed Sep 14 00:34:00 2022 ] Mean test loss of 930 batches: 3.3241426944732666.
[ Wed Sep 14 00:34:00 2022 ] Top1: 54.11%
[ Wed Sep 14 00:34:01 2022 ] Top5: 81.14%
[ Wed Sep 14 00:34:01 2022 ] Training epoch: 85
[ Wed Sep 14 00:34:53 2022 ] Batch(91/162) done. Loss: 0.0074 lr:0.001000
[ Wed Sep 14 00:35:30 2022 ] Eval epoch: 85
[ Wed Sep 14 00:38:23 2022 ] Mean test loss of 930 batches: 3.2798843383789062.
[ Wed Sep 14 00:38:23 2022 ] Top1: 54.26%
[ Wed Sep 14 00:38:24 2022 ] Top5: 81.27%
[ Wed Sep 14 00:38:24 2022 ] Training epoch: 86
[ Wed Sep 14 00:38:43 2022 ] Batch(29/162) done. Loss: 0.0892 lr:0.001000
[ Wed Sep 14 00:39:35 2022 ] Batch(129/162) done. Loss: 0.0756 lr:0.001000
[ Wed Sep 14 00:39:53 2022 ] Eval epoch: 86
[ Wed Sep 14 00:42:46 2022 ] Mean test loss of 930 batches: 3.2872209548950195.
[ Wed Sep 14 00:42:46 2022 ] Top1: 54.41%
[ Wed Sep 14 00:42:47 2022 ] Top5: 81.32%
[ Wed Sep 14 00:42:47 2022 ] Training epoch: 87
[ Wed Sep 14 00:43:26 2022 ] Batch(67/162) done. Loss: 0.0296 lr:0.001000
[ Wed Sep 14 00:44:16 2022 ] Eval epoch: 87
[ Wed Sep 14 00:47:09 2022 ] Mean test loss of 930 batches: 3.325481653213501.
[ Wed Sep 14 00:47:09 2022 ] Top1: 53.61%
[ Wed Sep 14 00:47:10 2022 ] Top5: 80.88%
[ Wed Sep 14 00:47:10 2022 ] Training epoch: 88
[ Wed Sep 14 00:47:16 2022 ] Batch(5/162) done. Loss: 0.0617 lr:0.001000
[ Wed Sep 14 00:48:09 2022 ] Batch(105/162) done. Loss: 0.0109 lr:0.001000
[ Wed Sep 14 00:48:39 2022 ] Eval epoch: 88
[ Wed Sep 14 00:51:32 2022 ] Mean test loss of 930 batches: 3.276432752609253.
[ Wed Sep 14 00:51:33 2022 ] Top1: 54.77%
[ Wed Sep 14 00:51:33 2022 ] Top5: 81.54%
[ Wed Sep 14 00:51:33 2022 ] Training epoch: 89
[ Wed Sep 14 00:51:59 2022 ] Batch(43/162) done. Loss: 0.0557 lr:0.001000
[ Wed Sep 14 00:52:52 2022 ] Batch(143/162) done. Loss: 0.0207 lr:0.001000
[ Wed Sep 14 00:53:02 2022 ] Eval epoch: 89
[ Wed Sep 14 00:55:56 2022 ] Mean test loss of 930 batches: 3.3057165145874023.
[ Wed Sep 14 00:55:57 2022 ] Top1: 53.95%
[ Wed Sep 14 00:55:57 2022 ] Top5: 81.01%
[ Wed Sep 14 00:55:57 2022 ] Training epoch: 90
[ Wed Sep 14 00:56:44 2022 ] Batch(81/162) done. Loss: 0.0615 lr:0.001000
[ Wed Sep 14 00:57:26 2022 ] Eval epoch: 90
[ Wed Sep 14 01:00:20 2022 ] Mean test loss of 930 batches: 3.2415735721588135.
[ Wed Sep 14 01:00:21 2022 ] Top1: 54.78%
[ Wed Sep 14 01:00:21 2022 ] Top5: 81.43%
[ Wed Sep 14 01:00:22 2022 ] Training epoch: 91
[ Wed Sep 14 01:00:35 2022 ] Batch(19/162) done. Loss: 0.0479 lr:0.001000
[ Wed Sep 14 01:01:28 2022 ] Batch(119/162) done. Loss: 0.0326 lr:0.001000
[ Wed Sep 14 01:01:51 2022 ] Eval epoch: 91
[ Wed Sep 14 01:04:44 2022 ] Mean test loss of 930 batches: 3.2997372150421143.
[ Wed Sep 14 01:04:44 2022 ] Top1: 54.55%
[ Wed Sep 14 01:04:45 2022 ] Top5: 81.29%
[ Wed Sep 14 01:04:45 2022 ] Training epoch: 92
[ Wed Sep 14 01:05:19 2022 ] Batch(57/162) done. Loss: 0.0322 lr:0.001000
[ Wed Sep 14 01:06:12 2022 ] Batch(157/162) done. Loss: 0.0710 lr:0.001000
[ Wed Sep 14 01:06:14 2022 ] Eval epoch: 92
[ Wed Sep 14 01:09:07 2022 ] Mean test loss of 930 batches: 3.306330919265747.
[ Wed Sep 14 01:09:08 2022 ] Top1: 53.68%
[ Wed Sep 14 01:09:08 2022 ] Top5: 80.86%
[ Wed Sep 14 01:09:08 2022 ] Training epoch: 93
[ Wed Sep 14 01:10:02 2022 ] Batch(95/162) done. Loss: 0.0152 lr:0.001000
[ Wed Sep 14 01:10:38 2022 ] Eval epoch: 93
[ Wed Sep 14 01:13:32 2022 ] Mean test loss of 930 batches: 3.4323089122772217.
[ Wed Sep 14 01:13:32 2022 ] Top1: 52.69%
[ Wed Sep 14 01:13:32 2022 ] Top5: 80.27%
[ Wed Sep 14 01:13:33 2022 ] Training epoch: 94
[ Wed Sep 14 01:13:54 2022 ] Batch(33/162) done. Loss: 0.0922 lr:0.001000
[ Wed Sep 14 01:14:47 2022 ] Batch(133/162) done. Loss: 0.0339 lr:0.001000
[ Wed Sep 14 01:15:02 2022 ] Eval epoch: 94
[ Wed Sep 14 01:17:55 2022 ] Mean test loss of 930 batches: 3.324550151824951.
[ Wed Sep 14 01:17:55 2022 ] Top1: 54.07%
[ Wed Sep 14 01:17:56 2022 ] Top5: 80.87%
[ Wed Sep 14 01:17:56 2022 ] Training epoch: 95
[ Wed Sep 14 01:18:37 2022 ] Batch(71/162) done. Loss: 0.0160 lr:0.001000
[ Wed Sep 14 01:19:25 2022 ] Eval epoch: 95
[ Wed Sep 14 01:22:18 2022 ] Mean test loss of 930 batches: 3.3670194149017334.
[ Wed Sep 14 01:22:19 2022 ] Top1: 52.92%
[ Wed Sep 14 01:22:19 2022 ] Top5: 80.41%
[ Wed Sep 14 01:22:20 2022 ] Training epoch: 96
[ Wed Sep 14 01:22:28 2022 ] Batch(9/162) done. Loss: 0.0091 lr:0.001000
[ Wed Sep 14 01:23:21 2022 ] Batch(109/162) done. Loss: 0.0671 lr:0.001000
[ Wed Sep 14 01:23:48 2022 ] Eval epoch: 96
[ Wed Sep 14 01:26:42 2022 ] Mean test loss of 930 batches: 3.3024790287017822.
[ Wed Sep 14 01:26:43 2022 ] Top1: 53.83%
[ Wed Sep 14 01:26:43 2022 ] Top5: 81.13%
[ Wed Sep 14 01:26:44 2022 ] Training epoch: 97
[ Wed Sep 14 01:27:12 2022 ] Batch(47/162) done. Loss: 0.0389 lr:0.001000
[ Wed Sep 14 01:28:05 2022 ] Batch(147/162) done. Loss: 0.0605 lr:0.001000
[ Wed Sep 14 01:28:13 2022 ] Eval epoch: 97
[ Wed Sep 14 01:31:06 2022 ] Mean test loss of 930 batches: 3.321824789047241.
[ Wed Sep 14 01:31:06 2022 ] Top1: 53.47%
[ Wed Sep 14 01:31:07 2022 ] Top5: 80.94%
[ Wed Sep 14 01:31:07 2022 ] Training epoch: 98
[ Wed Sep 14 01:31:56 2022 ] Batch(85/162) done. Loss: 0.0401 lr:0.001000
[ Wed Sep 14 01:32:36 2022 ] Eval epoch: 98
[ Wed Sep 14 01:35:29 2022 ] Mean test loss of 930 batches: 3.3567888736724854.
[ Wed Sep 14 01:35:29 2022 ] Top1: 54.22%
[ Wed Sep 14 01:35:30 2022 ] Top5: 81.11%
[ Wed Sep 14 01:35:30 2022 ] Training epoch: 99
[ Wed Sep 14 01:35:46 2022 ] Batch(23/162) done. Loss: 0.0865 lr:0.001000
[ Wed Sep 14 01:36:39 2022 ] Batch(123/162) done. Loss: 0.0793 lr:0.001000
[ Wed Sep 14 01:36:59 2022 ] Eval epoch: 99
[ Wed Sep 14 01:39:53 2022 ] Mean test loss of 930 batches: 3.289196491241455.
[ Wed Sep 14 01:39:54 2022 ] Top1: 53.88%
[ Wed Sep 14 01:39:54 2022 ] Top5: 80.97%
[ Wed Sep 14 01:39:54 2022 ] Training epoch: 100
[ Wed Sep 14 01:40:31 2022 ] Batch(61/162) done. Loss: 0.0328 lr:0.001000
[ Wed Sep 14 01:41:24 2022 ] Batch(161/162) done. Loss: 0.0169 lr:0.001000
[ Wed Sep 14 01:41:24 2022 ] Eval epoch: 100
[ Wed Sep 14 01:44:18 2022 ] Mean test loss of 930 batches: 3.32149076461792.
[ Wed Sep 14 01:44:18 2022 ] Top1: 53.94%
[ Wed Sep 14 01:44:19 2022 ] Top5: 80.92%
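The log format is uniform (`[ timestamp ] key: value` lines), so per-epoch test accuracy can be recovered mechanically, e.g. to visualize the jumps at the learning-rate steps (epochs 61 and 81). A minimal sketch; `parse_top1` and its regexes are illustrative helpers, not part of this repo:

```python
import re

def parse_top1(log_path):
    """Collect (epoch, top1_percent) pairs from a log in the format above."""
    pairs, epoch = [], None
    with open(log_path) as f:
        for line in f:
            m = re.search(r'Eval epoch: (\d+)', line)
            if m:
                epoch = int(m.group(1))  # remember which eval the next Top1 belongs to
                continue
            m = re.search(r'Top1: ([\d.]+)%', line)
            if m and epoch is not None:
                pairs.append((epoch, float(m.group(1))))
                epoch = None
    return pairs
```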
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_xset/config.yaml
ADDED
@@ -0,0 +1,61 @@
Experiment_name: ntu120_bone_xset
base_lr: 0.1
batch_size: 64
config: ./config/ntu120_xset/train_bone.yaml
device:
- 0
- 1
- 2
- 3
eval_interval: 5
feeder: feeders.feeder.Feeder
groups: 8
ignore_weights: []
keep_rate: 0.9
log_interval: 100
model: model.decouple_gcn.Model
model_args:
  block_size: 41
  graph: graph.ntu_rgb_d.Graph
  graph_args:
    labeling_mode: spatial
  groups: 16
  num_class: 120
  num_person: 2
  num_point: 25
model_saved_name: ./save_models/ntu120_bone_xset
nesterov: true
num_epoch: 100
num_worker: 32
only_train_epoch: 1
only_train_part: true
optimizer: SGD
phase: train
print_log: true
save_interval: 2
save_score: false
seed: 1
show_topk:
- 1
- 5
start_epoch: 0
step:
- 60
- 80
test_batch_size: 64
test_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone.npy
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
train_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone.npy
  debug: false
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
  normalization: false
  random_choose: false
  random_move: false
  random_shift: false
  window_size: -1
warm_up_epoch: 0
weight_decay: 0.0001
weights: null
work_dir: ./work_dir/ntu120_bone_xset
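This config mirrors the `Parameters:` dump in the log below, and `model_args` maps directly onto the `Model` constructor in `decouple_gcn.py`. A minimal loading sketch, assuming it runs from the DC-GCN code root (so `model.decouple_gcn` and `graph.ntu_rgb_d` are importable); the variable names are illustrative:

```python
import yaml

with open('ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_xset/config.yaml') as f:
    cfg = yaml.safe_load(f)

# Resolve the dotted class path the same way the repo's import_class helper does.
module_name, class_name = cfg['model'].rsplit('.', 1)
Model = getattr(__import__(module_name, fromlist=[class_name]), class_name)
model = Model(**cfg['model_args'])  # num_class=120, num_point=25, groups=16, ...
```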
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_xset/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import math
from model.dropSke import DropBlock_Ske
from model.dropT import DropBlockT_1d


def import_class(name):
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod


def conv_branch_init(conv):
    weight = conv.weight
    n = weight.size(0)
    k1 = weight.size(1)
    k2 = weight.size(2)
    nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
    nn.init.constant(conv.bias, 0)


def conv_init(conv):
    nn.init.kaiming_normal(conv.weight, mode='fan_out')
    nn.init.constant(conv.bias, 0)


def bn_init(bn, scale):
    nn.init.constant(bn.weight, scale)
    nn.init.constant(bn.bias, 0)


class unit_tcn(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
        super(unit_tcn, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

        self.dropS = DropBlock_Ske(num_point=num_point)
        self.dropT = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob, A):
        x = self.bn(self.conv(x))
        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
        return x


class unit_tcn_skip(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
        super(unit_tcn_skip, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

    def forward(self, x):
        x = self.bn(self.conv(x))
        return x


class unit_gcn(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
        super(unit_gcn, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_point = num_point
        self.groups = groups
        self.num_subset = num_subset
        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)

        if in_channels != out_channels:
            self.down = nn.Sequential(
                nn.Conv2d(in_channels, out_channels, 1),
                nn.BatchNorm2d(out_channels)
            )
        else:
            self.down = lambda x: x

        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                bn_init(m, 1)
        bn_init(self.bn, 1e-6)

        self.Linear_weight = nn.Parameter(torch.zeros(
            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
            0.5 / (out_channels * num_subset)))

        self.Linear_bias = nn.Parameter(torch.zeros(
            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.constant(self.Linear_bias, 1e-6)

        eye_array = []
        for i in range(out_channels):
            eye_array.append(torch.eye(num_point))
        self.eyes = nn.Parameter(torch.tensor(torch.stack(
            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]

    def norm(self, A):
        b, c, h, w = A.size()
        A = A.view(c, self.num_point, self.num_point)
        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
        D_list_12 = (D_list + 0.001)**(-1)
        D_12 = self.eyes * D_list_12
        A = torch.bmm(A, D_12).view(b, c, h, w)
        return A

    def forward(self, x0):
        learn_A = self.DecoupleA.repeat(
            1, self.out_channels // self.groups, 1, 1)
        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)

        x = torch.einsum(
            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
        x = x + self.Linear_bias
        x = self.bn0(x)

        n, kc, t, v = x.size()
        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))

        x = self.bn(x)
        x += self.down(x0)
        x = self.relu(x)
        return x


class TCN_GCN_unit(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
        super(TCN_GCN_unit, self).__init__()
        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
        self.tcn1 = unit_tcn(out_channels, out_channels,
                             stride=stride, num_point=num_point)
        self.relu = nn.ReLU()

        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)

        if not residual:
            self.residual = lambda x: 0

        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x

        else:
            self.residual = unit_tcn_skip(
                in_channels, out_channels, kernel_size=1, stride=stride)
        self.dropSke = DropBlock_Ske(num_point=num_point)
        self.dropT_skip = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob):
        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
        return self.relu(x)


class Model(nn.Module):
    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
        super(Model, self).__init__()

        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)

        A = self.graph.A
        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)

        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
                               block_size, residual=False)
        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l5 = TCN_GCN_unit(
            64, 128, A, groups, num_point, block_size, stride=2)
        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l8 = TCN_GCN_unit(128, 256, A, groups,
                               num_point, block_size, stride=2)
        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)

        self.fc = nn.Linear(256, num_class)
        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
        bn_init(self.data_bn, 1)

    def forward(self, x, keep_prob=0.9):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(
            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        x = self.l1(x, 1.0)
        x = self.l2(x, 1.0)
        x = self.l3(x, 1.0)
        x = self.l4(x, 1.0)
        x = self.l5(x, 1.0)
        x = self.l6(x, 1.0)
        x = self.l7(x, keep_prob)
        x = self.l8(x, keep_prob)
        x = self.l9(x, keep_prob)
        x = self.l10(x, keep_prob)

        # N*M,C,T,V
        c_new = x.size(1)
        x = x.reshape(N, M, c_new, -1)
        x = x.mean(3).mean(1)

        return self.fc(x)
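A quick shape check of the module above with random input. This is a sketch, assuming a CUDA device is available (several buffers are hard-coded to `device='cuda'`) and the DC-GCN repo's `graph` package is on the path; the constructor arguments follow the config listed above:

```python
import torch
from model.decouple_gcn import Model  # the file listed above

net = Model(num_class=120, num_point=25, num_person=2, groups=16,
            block_size=41, graph='graph.ntu_rgb_d.Graph',
            graph_args={'labeling_mode': 'spatial'}).cuda()

# Input layout is (N, C, T, V, M): batch, xyz channels, frames, joints, bodies.
x = torch.randn(2, 3, 64, 25, 2, device='cuda')
logits = net(x, keep_prob=0.9)
print(logits.shape)  # torch.Size([2, 120])
```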
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_xset/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4cc39dedd1c7b93b150d9bb906c078558113b002b4b6788a3dd9298b21a6549f
size 34946665
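The three lines above are a Git LFS pointer (spec header, content hash, byte size), not the pickle itself; the 35 MB object must be fetched first, e.g. with `git lfs pull`. A minimal reading sketch; the layout of the unpickled object is not documented here, so only its type is inspected:

```python
import pickle

path = 'ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_xset/eval_results/best_acc.pkl'
with open(path, 'rb') as f:  # assumes the LFS object has already been pulled
    scores = pickle.load(f)
print(type(scores))
```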
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_bone_xset/log.txt
ADDED
@@ -0,0 +1,665 @@
[ Wed Sep 14 13:01:27 2022 ] Parameters:
{'work_dir': './work_dir/ntu120_bone_xset', 'model_saved_name': './save_models/ntu120_bone_xset', 'Experiment_name': 'ntu120_bone_xset', 'config': './config/ntu120_xset/train_bone.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [0, 1, 2, 3], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}

[ Wed Sep 14 13:01:27 2022 ] Training epoch: 1
[ Wed Sep 14 13:02:04 2022 ] Batch(99/162) done. Loss: 3.9355 lr:0.100000
[ Wed Sep 14 13:02:19 2022 ] Eval epoch: 1
[ Wed Sep 14 13:04:04 2022 ] Mean test loss of 930 batches: 5.392993927001953.
[ Wed Sep 14 13:04:04 2022 ] Top1: 4.91%
[ Wed Sep 14 13:04:05 2022 ] Top5: 17.70%
[ Wed Sep 14 13:04:05 2022 ] Training epoch: 2
[ Wed Sep 14 13:04:20 2022 ] Batch(37/162) done. Loss: 3.5251 lr:0.100000
[ Wed Sep 14 13:04:50 2022 ] Batch(137/162) done. Loss: 3.2359 lr:0.100000
[ Wed Sep 14 13:04:58 2022 ] Eval epoch: 2
[ Wed Sep 14 13:06:44 2022 ] Mean test loss of 930 batches: 4.477888584136963.
[ Wed Sep 14 13:06:45 2022 ] Top1: 9.46%
[ Wed Sep 14 13:06:45 2022 ] Top5: 28.49%
[ Wed Sep 14 13:06:45 2022 ] Training epoch: 3
[ Wed Sep 14 13:07:12 2022 ] Batch(75/162) done. Loss: 2.8039 lr:0.100000
[ Wed Sep 14 13:07:38 2022 ] Eval epoch: 3
[ Wed Sep 14 13:09:23 2022 ] Mean test loss of 930 batches: 4.6684160232543945.
[ Wed Sep 14 13:09:24 2022 ] Top1: 8.49%
[ Wed Sep 14 13:09:24 2022 ] Top5: 29.99%
[ Wed Sep 14 13:09:24 2022 ] Training epoch: 4
[ Wed Sep 14 13:09:33 2022 ] Batch(13/162) done. Loss: 2.8484 lr:0.100000
[ Wed Sep 14 13:10:03 2022 ] Batch(113/162) done. Loss: 3.0293 lr:0.100000
[ Wed Sep 14 13:10:18 2022 ] Eval epoch: 4
[ Wed Sep 14 13:12:04 2022 ] Mean test loss of 930 batches: 4.1727519035339355.
[ Wed Sep 14 13:12:05 2022 ] Top1: 12.35%
[ Wed Sep 14 13:12:05 2022 ] Top5: 37.33%
[ Wed Sep 14 13:12:06 2022 ] Training epoch: 5
[ Wed Sep 14 13:12:25 2022 ] Batch(51/162) done. Loss: 2.1966 lr:0.100000
[ Wed Sep 14 13:12:55 2022 ] Batch(151/162) done. Loss: 2.2492 lr:0.100000
[ Wed Sep 14 13:12:58 2022 ] Eval epoch: 5
[ Wed Sep 14 13:14:44 2022 ] Mean test loss of 930 batches: 4.021293640136719.
[ Wed Sep 14 13:14:45 2022 ] Top1: 16.43%
[ Wed Sep 14 13:14:45 2022 ] Top5: 41.27%
[ Wed Sep 14 13:14:45 2022 ] Training epoch: 6
[ Wed Sep 14 13:15:16 2022 ] Batch(89/162) done. Loss: 1.6511 lr:0.100000
[ Wed Sep 14 13:15:38 2022 ] Eval epoch: 6
[ Wed Sep 14 13:17:24 2022 ] Mean test loss of 930 batches: 3.7663354873657227.
[ Wed Sep 14 13:17:24 2022 ] Top1: 19.50%
[ Wed Sep 14 13:17:25 2022 ] Top5: 45.88%
[ Wed Sep 14 13:17:25 2022 ] Training epoch: 7
[ Wed Sep 14 13:17:38 2022 ] Batch(27/162) done. Loss: 1.9444 lr:0.100000
[ Wed Sep 14 13:18:08 2022 ] Batch(127/162) done. Loss: 1.4796 lr:0.100000
[ Wed Sep 14 13:18:19 2022 ] Eval epoch: 7
[ Wed Sep 14 13:20:05 2022 ] Mean test loss of 930 batches: 3.9329254627227783.
[ Wed Sep 14 13:20:06 2022 ] Top1: 17.07%
[ Wed Sep 14 13:20:06 2022 ] Top5: 44.80%
[ Wed Sep 14 13:20:06 2022 ] Training epoch: 8
[ Wed Sep 14 13:20:30 2022 ] Batch(65/162) done. Loss: 1.8294 lr:0.100000
[ Wed Sep 14 13:21:00 2022 ] Eval epoch: 8
[ Wed Sep 14 13:22:45 2022 ] Mean test loss of 930 batches: 3.2654027938842773.
[ Wed Sep 14 13:22:46 2022 ] Top1: 26.08%
[ Wed Sep 14 13:22:46 2022 ] Top5: 53.97%
[ Wed Sep 14 13:22:47 2022 ] Training epoch: 9
[ Wed Sep 14 13:22:52 2022 ] Batch(3/162) done. Loss: 1.6582 lr:0.100000
[ Wed Sep 14 13:23:22 2022 ] Batch(103/162) done. Loss: 1.8690 lr:0.100000
[ Wed Sep 14 13:23:40 2022 ] Eval epoch: 9
[ Wed Sep 14 13:25:26 2022 ] Mean test loss of 930 batches: 3.132371187210083.
[ Wed Sep 14 13:25:27 2022 ] Top1: 28.38%
[ Wed Sep 14 13:25:27 2022 ] Top5: 57.74%
[ Wed Sep 14 13:25:27 2022 ] Training epoch: 10
[ Wed Sep 14 13:25:44 2022 ] Batch(41/162) done. Loss: 1.3916 lr:0.100000
[ Wed Sep 14 13:26:14 2022 ] Batch(141/162) done. Loss: 1.4474 lr:0.100000
[ Wed Sep 14 13:26:20 2022 ] Eval epoch: 10
[ Wed Sep 14 13:28:06 2022 ] Mean test loss of 930 batches: 3.1850171089172363.
[ Wed Sep 14 13:28:06 2022 ] Top1: 29.50%
[ Wed Sep 14 13:28:07 2022 ] Top5: 59.01%
[ Wed Sep 14 13:28:07 2022 ] Training epoch: 11
[ Wed Sep 14 13:28:35 2022 ] Batch(79/162) done. Loss: 1.5578 lr:0.100000
[ Wed Sep 14 13:29:00 2022 ] Eval epoch: 11
[ Wed Sep 14 13:30:45 2022 ] Mean test loss of 930 batches: 3.093348264694214.
[ Wed Sep 14 13:30:46 2022 ] Top1: 32.56%
[ Wed Sep 14 13:30:46 2022 ] Top5: 66.21%
[ Wed Sep 14 13:30:46 2022 ] Training epoch: 12
[ Wed Sep 14 13:30:56 2022 ] Batch(17/162) done. Loss: 1.2190 lr:0.100000
[ Wed Sep 14 13:31:26 2022 ] Batch(117/162) done. Loss: 1.4434 lr:0.100000
[ Wed Sep 14 13:31:39 2022 ] Eval epoch: 12
[ Wed Sep 14 13:33:25 2022 ] Mean test loss of 930 batches: 2.7024426460266113.
[ Wed Sep 14 13:33:25 2022 ] Top1: 34.69%
[ Wed Sep 14 13:33:26 2022 ] Top5: 65.19%
[ Wed Sep 14 13:33:26 2022 ] Training epoch: 13
[ Wed Sep 14 13:33:47 2022 ] Batch(55/162) done. Loss: 1.7219 lr:0.100000
[ Wed Sep 14 13:34:17 2022 ] Batch(155/162) done. Loss: 1.1206 lr:0.100000
[ Wed Sep 14 13:34:19 2022 ] Eval epoch: 13
[ Wed Sep 14 13:36:05 2022 ] Mean test loss of 930 batches: 2.4375510215759277.
[ Wed Sep 14 13:36:06 2022 ] Top1: 38.73%
[ Wed Sep 14 13:36:06 2022 ] Top5: 70.53%
[ Wed Sep 14 13:36:06 2022 ] Training epoch: 14
[ Wed Sep 14 13:36:39 2022 ] Batch(93/162) done. Loss: 1.0880 lr:0.100000
[ Wed Sep 14 13:37:00 2022 ] Eval epoch: 14
[ Wed Sep 14 13:38:45 2022 ] Mean test loss of 930 batches: 3.0172321796417236.
[ Wed Sep 14 13:38:45 2022 ] Top1: 34.33%
[ Wed Sep 14 13:38:46 2022 ] Top5: 68.36%
[ Wed Sep 14 13:38:46 2022 ] Training epoch: 15
[ Wed Sep 14 13:39:00 2022 ] Batch(31/162) done. Loss: 1.3558 lr:0.100000
[ Wed Sep 14 13:39:29 2022 ] Batch(131/162) done. Loss: 1.3403 lr:0.100000
[ Wed Sep 14 13:39:39 2022 ] Eval epoch: 15
[ Wed Sep 14 13:41:25 2022 ] Mean test loss of 930 batches: 2.841313362121582.
[ Wed Sep 14 13:41:25 2022 ] Top1: 35.99%
[ Wed Sep 14 13:41:25 2022 ] Top5: 68.93%
[ Wed Sep 14 13:41:26 2022 ] Training epoch: 16
[ Wed Sep 14 13:41:51 2022 ] Batch(69/162) done. Loss: 0.9169 lr:0.100000
[ Wed Sep 14 13:42:19 2022 ] Eval epoch: 16
[ Wed Sep 14 13:44:04 2022 ] Mean test loss of 930 batches: 2.3747642040252686.
[ Wed Sep 14 13:44:05 2022 ] Top1: 41.51%
[ Wed Sep 14 13:44:05 2022 ] Top5: 73.76%
[ Wed Sep 14 13:44:05 2022 ] Training epoch: 17
[ Wed Sep 14 13:44:12 2022 ] Batch(7/162) done. Loss: 0.6578 lr:0.100000
[ Wed Sep 14 13:44:42 2022 ] Batch(107/162) done. Loss: 1.0512 lr:0.100000
[ Wed Sep 14 13:44:59 2022 ] Eval epoch: 17
[ Wed Sep 14 13:46:44 2022 ] Mean test loss of 930 batches: 2.545225143432617.
[ Wed Sep 14 13:46:45 2022 ] Top1: 40.27%
[ Wed Sep 14 13:46:45 2022 ] Top5: 73.80%
[ Wed Sep 14 13:46:45 2022 ] Training epoch: 18
[ Wed Sep 14 13:47:03 2022 ] Batch(45/162) done. Loss: 1.1284 lr:0.100000
[ Wed Sep 14 13:47:33 2022 ] Batch(145/162) done. Loss: 1.1653 lr:0.100000
[ Wed Sep 14 13:47:39 2022 ] Eval epoch: 18
[ Wed Sep 14 13:49:24 2022 ] Mean test loss of 930 batches: 3.1756742000579834.
[ Wed Sep 14 13:49:25 2022 ] Top1: 37.12%
[ Wed Sep 14 13:49:25 2022 ] Top5: 69.36%
[ Wed Sep 14 13:49:25 2022 ] Training epoch: 19
[ Wed Sep 14 13:49:54 2022 ] Batch(83/162) done. Loss: 1.1490 lr:0.100000
[ Wed Sep 14 13:50:18 2022 ] Eval epoch: 19
[ Wed Sep 14 13:52:03 2022 ] Mean test loss of 930 batches: 2.689011573791504.
[ Wed Sep 14 13:52:04 2022 ] Top1: 37.80%
[ Wed Sep 14 13:52:04 2022 ] Top5: 70.91%
[ Wed Sep 14 13:52:04 2022 ] Training epoch: 20
[ Wed Sep 14 13:52:15 2022 ] Batch(21/162) done. Loss: 0.6293 lr:0.100000
[ Wed Sep 14 13:52:45 2022 ] Batch(121/162) done. Loss: 1.2528 lr:0.100000
[ Wed Sep 14 13:52:57 2022 ] Eval epoch: 20
[ Wed Sep 14 13:54:43 2022 ] Mean test loss of 930 batches: 2.447504758834839.
[ Wed Sep 14 13:54:43 2022 ] Top1: 43.61%
[ Wed Sep 14 13:54:44 2022 ] Top5: 75.39%
[ Wed Sep 14 13:54:44 2022 ] Training epoch: 21
[ Wed Sep 14 13:55:06 2022 ] Batch(59/162) done. Loss: 1.0095 lr:0.100000
[ Wed Sep 14 13:55:37 2022 ] Batch(159/162) done. Loss: 1.2219 lr:0.100000
[ Wed Sep 14 13:55:38 2022 ] Eval epoch: 21
[ Wed Sep 14 13:57:24 2022 ] Mean test loss of 930 batches: 2.4144906997680664.
[ Wed Sep 14 13:57:24 2022 ] Top1: 42.64%
[ Wed Sep 14 13:57:25 2022 ] Top5: 73.40%
[ Wed Sep 14 13:57:25 2022 ] Training epoch: 22
[ Wed Sep 14 13:57:58 2022 ] Batch(97/162) done. Loss: 0.9796 lr:0.100000
[ Wed Sep 14 13:58:18 2022 ] Eval epoch: 22
[ Wed Sep 14 14:00:04 2022 ] Mean test loss of 930 batches: 2.3580615520477295.
[ Wed Sep 14 14:00:05 2022 ] Top1: 44.59%
[ Wed Sep 14 14:00:05 2022 ] Top5: 76.79%
[ Wed Sep 14 14:00:06 2022 ] Training epoch: 23
[ Wed Sep 14 14:00:20 2022 ] Batch(35/162) done. Loss: 0.7063 lr:0.100000
[ Wed Sep 14 14:00:50 2022 ] Batch(135/162) done. Loss: 0.8184 lr:0.100000
[ Wed Sep 14 14:00:58 2022 ] Eval epoch: 23
[ Wed Sep 14 14:02:44 2022 ] Mean test loss of 930 batches: 7.181952476501465.
[ Wed Sep 14 14:02:45 2022 ] Top1: 23.00%
[ Wed Sep 14 14:02:45 2022 ] Top5: 48.61%
[ Wed Sep 14 14:02:46 2022 ] Training epoch: 24
[ Wed Sep 14 14:03:12 2022 ] Batch(73/162) done. Loss: 0.6685 lr:0.100000
[ Wed Sep 14 14:03:39 2022 ] Eval epoch: 24
[ Wed Sep 14 14:05:24 2022 ] Mean test loss of 930 batches: 2.6629889011383057.
[ Wed Sep 14 14:05:25 2022 ] Top1: 41.00%
[ Wed Sep 14 14:05:25 2022 ] Top5: 73.77%
[ Wed Sep 14 14:05:25 2022 ] Training epoch: 25
[ Wed Sep 14 14:05:33 2022 ] Batch(11/162) done. Loss: 0.7258 lr:0.100000
[ Wed Sep 14 14:06:03 2022 ] Batch(111/162) done. Loss: 0.5101 lr:0.100000
[ Wed Sep 14 14:06:19 2022 ] Eval epoch: 25
[ Wed Sep 14 14:08:04 2022 ] Mean test loss of 930 batches: 2.775555372238159.
[ Wed Sep 14 14:08:04 2022 ] Top1: 43.26%
[ Wed Sep 14 14:08:04 2022 ] Top5: 73.96%
[ Wed Sep 14 14:08:05 2022 ] Training epoch: 26
[ Wed Sep 14 14:08:24 2022 ] Batch(49/162) done. Loss: 0.6546 lr:0.100000
[ Wed Sep 14 14:08:54 2022 ] Batch(149/162) done. Loss: 0.5799 lr:0.100000
[ Wed Sep 14 14:08:58 2022 ] Eval epoch: 26
[ Wed Sep 14 14:10:44 2022 ] Mean test loss of 930 batches: 2.257340431213379.
[ Wed Sep 14 14:10:44 2022 ] Top1: 47.48%
[ Wed Sep 14 14:10:45 2022 ] Top5: 78.46%
[ Wed Sep 14 14:10:45 2022 ] Training epoch: 27
[ Wed Sep 14 14:11:15 2022 ] Batch(87/162) done. Loss: 1.2185 lr:0.100000
[ Wed Sep 14 14:11:38 2022 ] Eval epoch: 27
[ Wed Sep 14 14:13:23 2022 ] Mean test loss of 930 batches: 2.4034464359283447.
[ Wed Sep 14 14:13:24 2022 ] Top1: 44.35%
[ Wed Sep 14 14:13:24 2022 ] Top5: 77.35%
[ Wed Sep 14 14:13:24 2022 ] Training epoch: 28
[ Wed Sep 14 14:13:36 2022 ] Batch(25/162) done. Loss: 0.7420 lr:0.100000
[ Wed Sep 14 14:14:07 2022 ] Batch(125/162) done. Loss: 0.7080 lr:0.100000
[ Wed Sep 14 14:14:18 2022 ] Eval epoch: 28
[ Wed Sep 14 14:16:05 2022 ] Mean test loss of 930 batches: 2.270505428314209.
[ Wed Sep 14 14:16:05 2022 ] Top1: 47.75%
[ Wed Sep 14 14:16:06 2022 ] Top5: 79.27%
[ Wed Sep 14 14:16:06 2022 ] Training epoch: 29
[ Wed Sep 14 14:16:28 2022 ] Batch(63/162) done. Loss: 0.6288 lr:0.100000
[ Wed Sep 14 14:16:58 2022 ] Eval epoch: 29
[ Wed Sep 14 14:18:44 2022 ] Mean test loss of 930 batches: 2.4446792602539062.
[ Wed Sep 14 14:18:45 2022 ] Top1: 46.85%
[ Wed Sep 14 14:18:45 2022 ] Top5: 77.90%
[ Wed Sep 14 14:18:45 2022 ] Training epoch: 30
[ Wed Sep 14 14:18:50 2022 ] Batch(1/162) done. Loss: 0.4565 lr:0.100000
[ Wed Sep 14 14:19:20 2022 ] Batch(101/162) done. Loss: 0.3626 lr:0.100000
[ Wed Sep 14 14:19:38 2022 ] Eval epoch: 30
[ Wed Sep 14 14:21:24 2022 ] Mean test loss of 930 batches: 3.0422732830047607.
[ Wed Sep 14 14:21:25 2022 ] Top1: 43.09%
[ Wed Sep 14 14:21:25 2022 ] Top5: 75.57%
[ Wed Sep 14 14:21:25 2022 ] Training epoch: 31
[ Wed Sep 14 14:21:41 2022 ] Batch(39/162) done. Loss: 0.4943 lr:0.100000
[ Wed Sep 14 14:22:11 2022 ] Batch(139/162) done. Loss: 0.6466 lr:0.100000
[ Wed Sep 14 14:22:19 2022 ] Eval epoch: 31
[ Wed Sep 14 14:24:03 2022 ] Mean test loss of 930 batches: 2.4501664638519287.
[ Wed Sep 14 14:24:04 2022 ] Top1: 47.30%
[ Wed Sep 14 14:24:04 2022 ] Top5: 77.81%
[ Wed Sep 14 14:24:04 2022 ] Training epoch: 32
[ Wed Sep 14 14:24:32 2022 ] Batch(77/162) done. Loss: 0.5112 lr:0.100000
[ Wed Sep 14 14:24:58 2022 ] Eval epoch: 32
[ Wed Sep 14 14:26:43 2022 ] Mean test loss of 930 batches: 2.6631052494049072.
[ Wed Sep 14 14:26:44 2022 ] Top1: 46.35%
[ Wed Sep 14 14:26:44 2022 ] Top5: 76.90%
[ Wed Sep 14 14:26:45 2022 ] Training epoch: 33
[ Wed Sep 14 14:26:53 2022 ] Batch(15/162) done. Loss: 0.6779 lr:0.100000
[ Wed Sep 14 14:27:23 2022 ] Batch(115/162) done. Loss: 0.7528 lr:0.100000
[ Wed Sep 14 14:27:37 2022 ] Eval epoch: 33
[ Wed Sep 14 14:29:22 2022 ] Mean test loss of 930 batches: 2.6296942234039307.
[ Wed Sep 14 14:29:23 2022 ] Top1: 45.78%
[ Wed Sep 14 14:29:23 2022 ] Top5: 76.56%
[ Wed Sep 14 14:29:24 2022 ] Training epoch: 34
[ Wed Sep 14 14:29:43 2022 ] Batch(53/162) done. Loss: 0.3927 lr:0.100000
[ Wed Sep 14 14:30:13 2022 ] Batch(153/162) done. Loss: 0.5658 lr:0.100000
[ Wed Sep 14 14:30:16 2022 ] Eval epoch: 34
[ Wed Sep 14 14:32:01 2022 ] Mean test loss of 930 batches: 2.6114041805267334.
[ Wed Sep 14 14:32:02 2022 ] Top1: 47.60%
[ Wed Sep 14 14:32:02 2022 ] Top5: 77.87%
[ Wed Sep 14 14:32:02 2022 ] Training epoch: 35
[ Wed Sep 14 14:32:34 2022 ] Batch(91/162) done. Loss: 0.4018 lr:0.100000
[ Wed Sep 14 14:32:55 2022 ] Eval epoch: 35
[ Wed Sep 14 14:34:40 2022 ] Mean test loss of 930 batches: 2.69102144241333.
[ Wed Sep 14 14:34:41 2022 ] Top1: 47.25%
[ Wed Sep 14 14:34:41 2022 ] Top5: 78.07%
[ Wed Sep 14 14:34:42 2022 ] Training epoch: 36
[ Wed Sep 14 14:34:55 2022 ] Batch(29/162) done. Loss: 0.3885 lr:0.100000
[ Wed Sep 14 14:35:25 2022 ] Batch(129/162) done. Loss: 0.6378 lr:0.100000
[ Wed Sep 14 14:35:35 2022 ] Eval epoch: 36
[ Wed Sep 14 14:37:21 2022 ] Mean test loss of 930 batches: 2.399557590484619.
[ Wed Sep 14 14:37:21 2022 ] Top1: 49.29%
[ Wed Sep 14 14:37:22 2022 ] Top5: 80.08%
[ Wed Sep 14 14:37:22 2022 ] Training epoch: 37
[ Wed Sep 14 14:37:46 2022 ] Batch(67/162) done. Loss: 0.6209 lr:0.100000
[ Wed Sep 14 14:38:15 2022 ] Eval epoch: 37
[ Wed Sep 14 14:39:59 2022 ] Mean test loss of 930 batches: 2.537379741668701.
[ Wed Sep 14 14:40:00 2022 ] Top1: 47.78%
[ Wed Sep 14 14:40:00 2022 ] Top5: 78.34%
[ Wed Sep 14 14:40:00 2022 ] Training epoch: 38
[ Wed Sep 14 14:40:06 2022 ] Batch(5/162) done. Loss: 0.2133 lr:0.100000
[ Wed Sep 14 14:40:36 2022 ] Batch(105/162) done. Loss: 0.6252 lr:0.100000
[ Wed Sep 14 14:40:53 2022 ] Eval epoch: 38
[ Wed Sep 14 14:42:39 2022 ] Mean test loss of 930 batches: 2.464905261993408.
[ Wed Sep 14 14:42:39 2022 ] Top1: 50.67%
[ Wed Sep 14 14:42:40 2022 ] Top5: 80.14%
[ Wed Sep 14 14:42:40 2022 ] Training epoch: 39
[ Wed Sep 14 14:42:57 2022 ] Batch(43/162) done. Loss: 0.6274 lr:0.100000
[ Wed Sep 14 14:43:28 2022 ] Batch(143/162) done. Loss: 0.3125 lr:0.100000
[ Wed Sep 14 14:43:34 2022 ] Eval epoch: 39
[ Wed Sep 14 14:45:18 2022 ] Mean test loss of 930 batches: 2.442579507827759.
[ Wed Sep 14 14:45:19 2022 ] Top1: 48.40%
[ Wed Sep 14 14:45:19 2022 ] Top5: 79.57%
[ Wed Sep 14 14:45:20 2022 ] Training epoch: 40
[ Wed Sep 14 14:45:48 2022 ] Batch(81/162) done. Loss: 0.3952 lr:0.100000
[ Wed Sep 14 14:46:13 2022 ] Eval epoch: 40
[ Wed Sep 14 14:47:58 2022 ] Mean test loss of 930 batches: 2.7350897789001465.
[ Wed Sep 14 14:47:59 2022 ] Top1: 49.77%
[ Wed Sep 14 14:47:59 2022 ] Top5: 79.67%
[ Wed Sep 14 14:47:59 2022 ] Training epoch: 41
[ Wed Sep 14 14:48:10 2022 ] Batch(19/162) done. Loss: 0.4260 lr:0.100000
[ Wed Sep 14 14:48:40 2022 ] Batch(119/162) done. Loss: 0.5597 lr:0.100000
[ Wed Sep 14 14:48:53 2022 ] Eval epoch: 41
[ Wed Sep 14 14:50:38 2022 ] Mean test loss of 930 batches: 2.485272169113159.
[ Wed Sep 14 14:50:39 2022 ] Top1: 48.08%
[ Wed Sep 14 14:50:39 2022 ] Top5: 79.36%
[ Wed Sep 14 14:50:39 2022 ] Training epoch: 42
[ Wed Sep 14 14:51:01 2022 ] Batch(57/162) done. Loss: 0.3172 lr:0.100000
[ Wed Sep 14 14:51:31 2022 ] Batch(157/162) done. Loss: 0.5246 lr:0.100000
[ Wed Sep 14 14:51:33 2022 ] Eval epoch: 42
[ Wed Sep 14 14:53:18 2022 ] Mean test loss of 930 batches: 3.099501132965088.
[ Wed Sep 14 14:53:18 2022 ] Top1: 45.55%
[ Wed Sep 14 14:53:19 2022 ] Top5: 76.71%
[ Wed Sep 14 14:53:19 2022 ] Training epoch: 43
[ Wed Sep 14 14:53:52 2022 ] Batch(95/162) done. Loss: 0.6298 lr:0.100000
[ Wed Sep 14 14:54:13 2022 ] Eval epoch: 43
[ Wed Sep 14 14:55:58 2022 ] Mean test loss of 930 batches: 2.642946720123291.
[ Wed Sep 14 14:55:58 2022 ] Top1: 47.10%
[ Wed Sep 14 14:55:59 2022 ] Top5: 77.90%
[ Wed Sep 14 14:55:59 2022 ] Training epoch: 44
[ Wed Sep 14 14:56:13 2022 ] Batch(33/162) done. Loss: 0.3098 lr:0.100000
[ Wed Sep 14 14:56:43 2022 ] Batch(133/162) done. Loss: 0.4637 lr:0.100000
[ Wed Sep 14 14:56:52 2022 ] Eval epoch: 44
[ Wed Sep 14 14:58:37 2022 ] Mean test loss of 930 batches: 2.9123451709747314.
[ Wed Sep 14 14:58:38 2022 ] Top1: 48.74%
[ Wed Sep 14 14:58:38 2022 ] Top5: 78.19%
[ Wed Sep 14 14:58:39 2022 ] Training epoch: 45
[ Wed Sep 14 14:59:04 2022 ] Batch(71/162) done. Loss: 0.3870 lr:0.100000
[ Wed Sep 14 14:59:32 2022 ] Eval epoch: 45
[ Wed Sep 14 15:01:18 2022 ] Mean test loss of 930 batches: 2.6414036750793457.
[ Wed Sep 14 15:01:18 2022 ] Top1: 48.72%
[ Wed Sep 14 15:01:18 2022 ] Top5: 79.11%
[ Wed Sep 14 15:01:19 2022 ] Training epoch: 46
[ Wed Sep 14 15:01:26 2022 ] Batch(9/162) done. Loss: 0.4615 lr:0.100000
[ Wed Sep 14 15:01:56 2022 ] Batch(109/162) done. Loss: 0.4647 lr:0.100000
[ Wed Sep 14 15:02:12 2022 ] Eval epoch: 46
[ Wed Sep 14 15:03:57 2022 ] Mean test loss of 930 batches: 2.5460541248321533.
[ Wed Sep 14 15:03:58 2022 ] Top1: 48.67%
[ Wed Sep 14 15:03:58 2022 ] Top5: 78.74%
[ Wed Sep 14 15:03:58 2022 ] Training epoch: 47
[ Wed Sep 14 15:04:17 2022 ] Batch(47/162) done. Loss: 0.3628 lr:0.100000
[ Wed Sep 14 15:04:47 2022 ] Batch(147/162) done. Loss: 0.4634 lr:0.100000
[ Wed Sep 14 15:04:51 2022 ] Eval epoch: 47
[ Wed Sep 14 15:06:37 2022 ] Mean test loss of 930 batches: 2.6031250953674316.
[ Wed Sep 14 15:06:38 2022 ] Top1: 51.11%
[ Wed Sep 14 15:06:38 2022 ] Top5: 80.63%
[ Wed Sep 14 15:06:38 2022 ] Training epoch: 48
[ Wed Sep 14 15:07:08 2022 ] Batch(85/162) done. Loss: 0.4638 lr:0.100000
[ Wed Sep 14 15:07:32 2022 ] Eval epoch: 48
[ Wed Sep 14 15:09:17 2022 ] Mean test loss of 930 batches: 2.731889247894287.
[ Wed Sep 14 15:09:17 2022 ] Top1: 48.96%
[ Wed Sep 14 15:09:18 2022 ] Top5: 78.49%
[ Wed Sep 14 15:09:18 2022 ] Training epoch: 49
[ Wed Sep 14 15:09:29 2022 ] Batch(23/162) done. Loss: 0.3121 lr:0.100000
[ Wed Sep 14 15:09:59 2022 ] Batch(123/162) done. Loss: 0.5220 lr:0.100000
[ Wed Sep 14 15:10:11 2022 ] Eval epoch: 49
[ Wed Sep 14 15:11:56 2022 ] Mean test loss of 930 batches: 2.6333515644073486.
[ Wed Sep 14 15:11:57 2022 ] Top1: 50.57%
[ Wed Sep 14 15:11:57 2022 ] Top5: 79.49%
[ Wed Sep 14 15:11:57 2022 ] Training epoch: 50
[ Wed Sep 14 15:12:20 2022 ] Batch(61/162) done. Loss: 0.4016 lr:0.100000
[ Wed Sep 14 15:12:50 2022 ] Batch(161/162) done. Loss: 0.3247 lr:0.100000
[ Wed Sep 14 15:12:51 2022 ] Eval epoch: 50
[ Wed Sep 14 15:14:35 2022 ] Mean test loss of 930 batches: 2.8169643878936768.
[ Wed Sep 14 15:14:36 2022 ] Top1: 47.59%
[ Wed Sep 14 15:14:36 2022 ] Top5: 79.00%
[ Wed Sep 14 15:14:37 2022 ] Training epoch: 51
[ Wed Sep 14 15:15:11 2022 ] Batch(99/162) done. Loss: 0.2663 lr:0.100000
[ Wed Sep 14 15:15:30 2022 ] Eval epoch: 51
[ Wed Sep 14 15:17:15 2022 ] Mean test loss of 930 batches: 2.810101270675659.
[ Wed Sep 14 15:17:16 2022 ] Top1: 47.55%
[ Wed Sep 14 15:17:16 2022 ] Top5: 76.58%
[ Wed Sep 14 15:17:16 2022 ] Training epoch: 52
[ Wed Sep 14 15:17:32 2022 ] Batch(37/162) done. Loss: 0.3549 lr:0.100000
[ Wed Sep 14 15:18:02 2022 ] Batch(137/162) done. Loss: 0.1950 lr:0.100000
[ Wed Sep 14 15:18:09 2022 ] Eval epoch: 52
[ Wed Sep 14 15:19:55 2022 ] Mean test loss of 930 batches: 2.8436119556427.
[ Wed Sep 14 15:19:55 2022 ] Top1: 47.13%
[ Wed Sep 14 15:19:56 2022 ] Top5: 77.29%
[ Wed Sep 14 15:19:56 2022 ] Training epoch: 53
[ Wed Sep 14 15:20:23 2022 ] Batch(75/162) done. Loss: 0.2384 lr:0.100000
[ Wed Sep 14 15:20:49 2022 ] Eval epoch: 53
[ Wed Sep 14 15:22:34 2022 ] Mean test loss of 930 batches: 2.886247158050537.
[ Wed Sep 14 15:22:34 2022 ] Top1: 50.27%
[ Wed Sep 14 15:22:35 2022 ] Top5: 78.67%
[ Wed Sep 14 15:22:35 2022 ] Training epoch: 54
[ Wed Sep 14 15:22:43 2022 ] Batch(13/162) done. Loss: 0.2992 lr:0.100000
[ Wed Sep 14 15:23:13 2022 ] Batch(113/162) done. Loss: 0.5068 lr:0.100000
[ Wed Sep 14 15:23:28 2022 ] Eval epoch: 54
[ Wed Sep 14 15:25:14 2022 ] Mean test loss of 930 batches: 3.0723490715026855.
[ Wed Sep 14 15:25:14 2022 ] Top1: 44.91%
[ Wed Sep 14 15:25:15 2022 ] Top5: 75.83%
[ Wed Sep 14 15:25:15 2022 ] Training epoch: 55
[ Wed Sep 14 15:25:35 2022 ] Batch(51/162) done. Loss: 0.3286 lr:0.100000
[ Wed Sep 14 15:26:05 2022 ] Batch(151/162) done. Loss: 0.3133 lr:0.100000
[ Wed Sep 14 15:26:08 2022 ] Eval epoch: 55
[ Wed Sep 14 15:27:54 2022 ] Mean test loss of 930 batches: 3.1500563621520996.
[ Wed Sep 14 15:27:54 2022 ] Top1: 48.24%
[ Wed Sep 14 15:27:54 2022 ] Top5: 78.41%
[ Wed Sep 14 15:27:55 2022 ] Training epoch: 56
[ Wed Sep 14 15:28:26 2022 ] Batch(89/162) done. Loss: 0.6380 lr:0.100000
[ Wed Sep 14 15:28:48 2022 ] Eval epoch: 56
[ Wed Sep 14 15:30:33 2022 ] Mean test loss of 930 batches: 2.7924721240997314.
[ Wed Sep 14 15:30:33 2022 ] Top1: 49.68%
[ Wed Sep 14 15:30:34 2022 ] Top5: 78.68%
[ Wed Sep 14 15:30:34 2022 ] Training epoch: 57
[ Wed Sep 14 15:30:47 2022 ] Batch(27/162) done. Loss: 0.2815 lr:0.100000
[ Wed Sep 14 15:31:17 2022 ] Batch(127/162) done. Loss: 0.5505 lr:0.100000
[ Wed Sep 14 15:31:27 2022 ] Eval epoch: 57
[ Wed Sep 14 15:33:13 2022 ] Mean test loss of 930 batches: 3.1129703521728516.
[ Wed Sep 14 15:33:14 2022 ] Top1: 46.56%
[ Wed Sep 14 15:33:15 2022 ] Top5: 77.12%
[ Wed Sep 14 15:33:15 2022 ] Training epoch: 58
[ Wed Sep 14 15:33:39 2022 ] Batch(65/162) done. Loss: 0.4868 lr:0.100000
[ Wed Sep 14 15:34:08 2022 ] Eval epoch: 58
[ Wed Sep 14 15:35:53 2022 ] Mean test loss of 930 batches: 3231.03759765625.
[ Wed Sep 14 15:35:53 2022 ] Top1: 0.88%
[ Wed Sep 14 15:35:54 2022 ] Top5: 4.78%
[ Wed Sep 14 15:35:54 2022 ] Training epoch: 59
[ Wed Sep 14 15:35:59 2022 ] Batch(3/162) done. Loss: 0.4287 lr:0.100000
[ Wed Sep 14 15:36:29 2022 ] Batch(103/162) done. Loss: 0.3732 lr:0.100000
[ Wed Sep 14 15:36:47 2022 ] Eval epoch: 59
[ Wed Sep 14 15:38:32 2022 ] Mean test loss of 930 batches: 2.689409017562866.
[ Wed Sep 14 15:38:33 2022 ] Top1: 49.12%
[ Wed Sep 14 15:38:33 2022 ] Top5: 78.96%
[ Wed Sep 14 15:38:33 2022 ] Training epoch: 60
[ Wed Sep 14 15:38:50 2022 ] Batch(41/162) done. Loss: 0.5933 lr:0.100000
[ Wed Sep 14 15:39:20 2022 ] Batch(141/162) done. Loss: 0.2460 lr:0.100000
[ Wed Sep 14 15:39:26 2022 ] Eval epoch: 60
[ Wed Sep 14 15:41:11 2022 ] Mean test loss of 930 batches: 2.883899211883545.
[ Wed Sep 14 15:41:12 2022 ] Top1: 48.02%
[ Wed Sep 14 15:41:12 2022 ] Top5: 78.20%
[ Wed Sep 14 15:41:13 2022 ] Training epoch: 61
[ Wed Sep 14 15:41:41 2022 ] Batch(79/162) done. Loss: 0.1999 lr:0.010000
[ Wed Sep 14 15:42:06 2022 ] Eval epoch: 61
[ Wed Sep 14 15:43:51 2022 ] Mean test loss of 930 batches: 2.3885531425476074.
[ Wed Sep 14 15:43:52 2022 ] Top1: 56.03%
[ Wed Sep 14 15:43:52 2022 ] Top5: 84.00%
[ Wed Sep 14 15:43:53 2022 ] Training epoch: 62
[ Wed Sep 14 15:44:02 2022 ] Batch(17/162) done. Loss: 0.0761 lr:0.010000
[ Wed Sep 14 15:44:32 2022 ] Batch(117/162) done. Loss: 0.2031 lr:0.010000
[ Wed Sep 14 15:44:46 2022 ] Eval epoch: 62
[ Wed Sep 14 15:46:32 2022 ] Mean test loss of 930 batches: 2.4345755577087402.
[ Wed Sep 14 15:46:32 2022 ] Top1: 56.35%
[ Wed Sep 14 15:46:33 2022 ] Top5: 83.88%
[ Wed Sep 14 15:46:33 2022 ] Training epoch: 63
[ Wed Sep 14 15:46:54 2022 ] Batch(55/162) done. Loss: 0.0445 lr:0.010000
[ Wed Sep 14 15:47:24 2022 ] Batch(155/162) done. Loss: 0.0461 lr:0.010000
[ Wed Sep 14 15:47:26 2022 ] Eval epoch: 63
[ Wed Sep 14 15:49:12 2022 ] Mean test loss of 930 batches: 2.4465322494506836.
[ Wed Sep 14 15:49:13 2022 ] Top1: 56.64%
[ Wed Sep 14 15:49:13 2022 ] Top5: 83.96%
[ Wed Sep 14 15:49:14 2022 ] Training epoch: 64
[ Wed Sep 14 15:49:45 2022 ] Batch(93/162) done. Loss: 0.0613 lr:0.010000
[ Wed Sep 14 15:50:06 2022 ] Eval epoch: 64
[ Wed Sep 14 15:51:52 2022 ] Mean test loss of 930 batches: 2.425952434539795.
[ Wed Sep 14 15:51:52 2022 ] Top1: 56.74%
[ Wed Sep 14 15:51:53 2022 ] Top5: 84.13%
[ Wed Sep 14 15:51:53 2022 ] Training epoch: 65
[ Wed Sep 14 15:52:06 2022 ] Batch(31/162) done. Loss: 0.0433 lr:0.010000
[ Wed Sep 14 15:52:37 2022 ] Batch(131/162) done. Loss: 0.0838 lr:0.010000
[ Wed Sep 14 15:52:46 2022 ] Eval epoch: 65
[ Wed Sep 14 15:54:31 2022 ] Mean test loss of 930 batches: 2.4919207096099854.
[ Wed Sep 14 15:54:32 2022 ] Top1: 56.39%
[ Wed Sep 14 15:54:32 2022 ] Top5: 83.99%
[ Wed Sep 14 15:54:33 2022 ] Training epoch: 66
[ Wed Sep 14 15:54:57 2022 ] Batch(69/162) done. Loss: 0.1099 lr:0.010000
[ Wed Sep 14 15:55:25 2022 ] Eval epoch: 66
[ Wed Sep 14 15:57:11 2022 ] Mean test loss of 930 batches: 2.545520305633545.
[ Wed Sep 14 15:57:11 2022 ] Top1: 56.37%
[ Wed Sep 14 15:57:12 2022 ] Top5: 84.07%
[ Wed Sep 14 15:57:12 2022 ] Training epoch: 67
[ Wed Sep 14 15:57:19 2022 ] Batch(7/162) done. Loss: 0.0940 lr:0.010000
[ Wed Sep 14 15:57:48 2022 ] Batch(107/162) done. Loss: 0.0599 lr:0.010000
[ Wed Sep 14 15:58:05 2022 ] Eval epoch: 67
[ Wed Sep 14 15:59:50 2022 ] Mean test loss of 930 batches: 2.5489892959594727.
[ Wed Sep 14 15:59:51 2022 ] Top1: 56.97%
[ Wed Sep 14 15:59:51 2022 ] Top5: 84.13%
[ Wed Sep 14 15:59:52 2022 ] Training epoch: 68
[ Wed Sep 14 16:00:09 2022 ] Batch(45/162) done. Loss: 0.0327 lr:0.010000
[ Wed Sep 14 16:00:39 2022 ] Batch(145/162) done. Loss: 0.0760 lr:0.010000
[ Wed Sep 14 16:00:44 2022 ] Eval epoch: 68
[ Wed Sep 14 16:02:30 2022 ] Mean test loss of 930 batches: 2.5511016845703125.
[ Wed Sep 14 16:02:30 2022 ] Top1: 56.73%
[ Wed Sep 14 16:02:31 2022 ] Top5: 84.09%
[ Wed Sep 14 16:02:31 2022 ] Training epoch: 69
[ Wed Sep 14 16:03:00 2022 ] Batch(83/162) done. Loss: 0.0340 lr:0.010000
[ Wed Sep 14 16:03:24 2022 ] Eval epoch: 69
[ Wed Sep 14 16:05:09 2022 ] Mean test loss of 930 batches: 2.63191819190979.
[ Wed Sep 14 16:05:10 2022 ] Top1: 56.40%
[ Wed Sep 14 16:05:10 2022 ] Top5: 84.01%
[ Wed Sep 14 16:05:10 2022 ] Training epoch: 70
[ Wed Sep 14 16:05:21 2022 ] Batch(21/162) done. Loss: 0.0284 lr:0.010000
[ Wed Sep 14 16:05:51 2022 ] Batch(121/162) done. Loss: 0.0410 lr:0.010000
[ Wed Sep 14 16:06:03 2022 ] Eval epoch: 70
[ Wed Sep 14 16:07:50 2022 ] Mean test loss of 930 batches: 2.535250425338745.
[ Wed Sep 14 16:07:50 2022 ] Top1: 57.11%
[ Wed Sep 14 16:07:50 2022 ] Top5: 84.39%
[ Wed Sep 14 16:07:51 2022 ] Training epoch: 71
[ Wed Sep 14 16:08:13 2022 ] Batch(59/162) done. Loss: 0.0551 lr:0.010000
[ Wed Sep 14 16:08:43 2022 ] Batch(159/162) done. Loss: 0.1404 lr:0.010000
[ Wed Sep 14 16:08:44 2022 ] Eval epoch: 71
[ Wed Sep 14 16:10:29 2022 ] Mean test loss of 930 batches: 2.6160361766815186.
[ Wed Sep 14 16:10:29 2022 ] Top1: 56.59%
[ Wed Sep 14 16:10:30 2022 ] Top5: 83.92%
[ Wed Sep 14 16:10:30 2022 ] Training epoch: 72
[ Wed Sep 14 16:11:03 2022 ] Batch(97/162) done. Loss: 0.0723 lr:0.010000
[ Wed Sep 14 16:11:23 2022 ] Eval epoch: 72
[ Wed Sep 14 16:13:08 2022 ] Mean test loss of 930 batches: 2.5643980503082275.
[ Wed Sep 14 16:13:09 2022 ] Top1: 57.26%
[ Wed Sep 14 16:13:09 2022 ] Top5: 84.50%
[ Wed Sep 14 16:13:09 2022 ] Training epoch: 73
[ Wed Sep 14 16:13:24 2022 ] Batch(35/162) done. Loss: 0.1179 lr:0.010000
[ Wed Sep 14 16:13:55 2022 ] Batch(135/162) done. Loss: 0.0451 lr:0.010000
[ Wed Sep 14 16:14:03 2022 ] Eval epoch: 73
[ Wed Sep 14 16:15:49 2022 ] Mean test loss of 930 batches: 2.588409662246704.
[ Wed Sep 14 16:15:49 2022 ] Top1: 57.01%
[ Wed Sep 14 16:15:49 2022 ] Top5: 84.40%
[ Wed Sep 14 16:15:50 2022 ] Training epoch: 74
[ Wed Sep 14 16:16:16 2022 ] Batch(73/162) done. Loss: 0.0769 lr:0.010000
[ Wed Sep 14 16:16:43 2022 ] Eval epoch: 74
[ Wed Sep 14 16:18:28 2022 ] Mean test loss of 930 batches: 2.6296372413635254.
[ Wed Sep 14 16:18:29 2022 ] Top1: 57.02%
[ Wed Sep 14 16:18:29 2022 ] Top5: 84.30%
[ Wed Sep 14 16:18:29 2022 ] Training epoch: 75
[ Wed Sep 14 16:18:37 2022 ] Batch(11/162) done. Loss: 0.0481 lr:0.010000
[ Wed Sep 14 16:19:07 2022 ] Batch(111/162) done. Loss: 0.0518 lr:0.010000
[ Wed Sep 14 16:19:22 2022 ] Eval epoch: 75
[ Wed Sep 14 16:21:07 2022 ] Mean test loss of 930 batches: 2.7006022930145264.
[ Wed Sep 14 16:21:07 2022 ] Top1: 56.80%
[ Wed Sep 14 16:21:08 2022 ] Top5: 83.95%
[ Wed Sep 14 16:21:08 2022 ] Training epoch: 76
[ Wed Sep 14 16:21:27 2022 ] Batch(49/162) done. Loss: 0.0650 lr:0.010000
[ Wed Sep 14 16:21:58 2022 ] Batch(149/162) done. Loss: 0.1161 lr:0.010000
[ Wed Sep 14 16:22:02 2022 ] Eval epoch: 76
[ Wed Sep 14 16:23:47 2022 ] Mean test loss of 930 batches: 2.6388227939605713.
[ Wed Sep 14 16:23:47 2022 ] Top1: 56.99%
[ Wed Sep 14 16:23:48 2022 ] Top5: 84.26%
[ Wed Sep 14 16:23:48 2022 ] Training epoch: 77
[ Wed Sep 14 16:24:19 2022 ] Batch(87/162) done. Loss: 0.0455 lr:0.010000
[ Wed Sep 14 16:24:41 2022 ] Eval epoch: 77
[ Wed Sep 14 16:26:27 2022 ] Mean test loss of 930 batches: 2.7077810764312744.
[ Wed Sep 14 16:26:27 2022 ] Top1: 56.39%
[ Wed Sep 14 16:26:27 2022 ] Top5: 83.60%
[ Wed Sep 14 16:26:28 2022 ] Training epoch: 78
[ Wed Sep 14 16:26:39 2022 ] Batch(25/162) done. Loss: 0.0488 lr:0.010000
[ Wed Sep 14 16:27:09 2022 ] Batch(125/162) done. Loss: 0.0851 lr:0.010000
[ Wed Sep 14 16:27:21 2022 ] Eval epoch: 78
[ Wed Sep 14 16:29:06 2022 ] Mean test loss of 930 batches: 2.682739734649658.
[ Wed Sep 14 16:29:06 2022 ] Top1: 56.73%
[ Wed Sep 14 16:29:06 2022 ] Top5: 84.07%
[ Wed Sep 14 16:29:07 2022 ] Training epoch: 79
[ Wed Sep 14 16:29:30 2022 ] Batch(63/162) done. Loss: 0.0371 lr:0.010000
[ Wed Sep 14 16:30:00 2022 ] Eval epoch: 79
[ Wed Sep 14 16:31:45 2022 ] Mean test loss of 930 batches: 2.8168888092041016.
[ Wed Sep 14 16:31:46 2022 ] Top1: 56.25%
[ Wed Sep 14 16:31:46 2022 ] Top5: 83.55%
[ Wed Sep 14 16:31:46 2022 ] Training epoch: 80
[ Wed Sep 14 16:31:51 2022 ] Batch(1/162) done. Loss: 0.0522 lr:0.010000
[ Wed Sep 14 16:32:21 2022 ] Batch(101/162) done. Loss: 0.0506 lr:0.010000
[ Wed Sep 14 16:32:40 2022 ] Eval epoch: 80
[ Wed Sep 14 16:34:25 2022 ] Mean test loss of 930 batches: 2.7519114017486572.
[ Wed Sep 14 16:34:26 2022 ] Top1: 56.65%
[ Wed Sep 14 16:34:26 2022 ] Top5: 84.02%
[ Wed Sep 14 16:34:26 2022 ] Training epoch: 81
|
534 |
+
[ Wed Sep 14 16:34:42 2022 ] Batch(39/162) done. Loss: 0.0524 lr:0.001000
|
535 |
+
[ Wed Sep 14 16:35:12 2022 ] Batch(139/162) done. Loss: 0.1031 lr:0.001000
|
536 |
+
[ Wed Sep 14 16:35:19 2022 ] Eval epoch: 81
|
537 |
+
[ Wed Sep 14 16:37:04 2022 ] Mean test loss of 930 batches: 2.7525532245635986.
|
538 |
+
[ Wed Sep 14 16:37:05 2022 ] Top1: 56.75%
|
539 |
+
[ Wed Sep 14 16:37:05 2022 ] Top5: 84.01%
|
540 |
+
[ Wed Sep 14 16:37:06 2022 ] Training epoch: 82
|
541 |
+
[ Wed Sep 14 16:37:33 2022 ] Batch(77/162) done. Loss: 0.0096 lr:0.001000
|
542 |
+
[ Wed Sep 14 16:37:59 2022 ] Eval epoch: 82
|
543 |
+
[ Wed Sep 14 16:39:44 2022 ] Mean test loss of 930 batches: 2.7929162979125977.
|
544 |
+
[ Wed Sep 14 16:39:44 2022 ] Top1: 56.84%
|
545 |
+
[ Wed Sep 14 16:39:45 2022 ] Top5: 83.93%
|
546 |
+
[ Wed Sep 14 16:39:45 2022 ] Training epoch: 83
|
547 |
+
[ Wed Sep 14 16:39:54 2022 ] Batch(15/162) done. Loss: 0.0441 lr:0.001000
|
548 |
+
[ Wed Sep 14 16:40:24 2022 ] Batch(115/162) done. Loss: 0.0524 lr:0.001000
|
549 |
+
[ Wed Sep 14 16:40:39 2022 ] Eval epoch: 83
|
550 |
+
[ Wed Sep 14 16:42:23 2022 ] Mean test loss of 930 batches: 2.7270166873931885.
|
551 |
+
[ Wed Sep 14 16:42:24 2022 ] Top1: 56.75%
|
552 |
+
[ Wed Sep 14 16:42:24 2022 ] Top5: 83.99%
|
553 |
+
[ Wed Sep 14 16:42:24 2022 ] Training epoch: 84
|
554 |
+
[ Wed Sep 14 16:42:45 2022 ] Batch(53/162) done. Loss: 0.0641 lr:0.001000
|
555 |
+
[ Wed Sep 14 16:43:15 2022 ] Batch(153/162) done. Loss: 0.0311 lr:0.001000
|
556 |
+
[ Wed Sep 14 16:43:18 2022 ] Eval epoch: 84
|
557 |
+
[ Wed Sep 14 16:45:04 2022 ] Mean test loss of 930 batches: 2.7539007663726807.
|
558 |
+
[ Wed Sep 14 16:45:05 2022 ] Top1: 56.78%
|
559 |
+
[ Wed Sep 14 16:45:05 2022 ] Top5: 83.91%
|
560 |
+
[ Wed Sep 14 16:45:05 2022 ] Training epoch: 85
|
561 |
+
[ Wed Sep 14 16:45:37 2022 ] Batch(91/162) done. Loss: 0.0335 lr:0.001000
|
562 |
+
[ Wed Sep 14 16:45:58 2022 ] Eval epoch: 85
|
563 |
+
[ Wed Sep 14 16:47:44 2022 ] Mean test loss of 930 batches: 2.699486017227173.
|
564 |
+
[ Wed Sep 14 16:47:44 2022 ] Top1: 56.91%
|
565 |
+
[ Wed Sep 14 16:47:45 2022 ] Top5: 84.25%
|
566 |
+
[ Wed Sep 14 16:47:45 2022 ] Training epoch: 86
|
567 |
+
[ Wed Sep 14 16:47:58 2022 ] Batch(29/162) done. Loss: 0.1003 lr:0.001000
|
568 |
+
[ Wed Sep 14 16:48:28 2022 ] Batch(129/162) done. Loss: 0.1065 lr:0.001000
|
569 |
+
[ Wed Sep 14 16:48:38 2022 ] Eval epoch: 86
|
570 |
+
[ Wed Sep 14 16:50:23 2022 ] Mean test loss of 930 batches: 2.7011775970458984.
|
571 |
+
[ Wed Sep 14 16:50:23 2022 ] Top1: 57.23%
|
572 |
+
[ Wed Sep 14 16:50:24 2022 ] Top5: 84.35%
|
573 |
+
[ Wed Sep 14 16:50:24 2022 ] Training epoch: 87
|
574 |
+
[ Wed Sep 14 16:50:49 2022 ] Batch(67/162) done. Loss: 0.0922 lr:0.001000
|
575 |
+
[ Wed Sep 14 16:51:17 2022 ] Eval epoch: 87
|
576 |
+
[ Wed Sep 14 16:53:02 2022 ] Mean test loss of 930 batches: 2.7254478931427.
|
577 |
+
[ Wed Sep 14 16:53:03 2022 ] Top1: 56.81%
|
578 |
+
[ Wed Sep 14 16:53:03 2022 ] Top5: 84.22%
|
579 |
+
[ Wed Sep 14 16:53:04 2022 ] Training epoch: 88
|
580 |
+
[ Wed Sep 14 16:53:10 2022 ] Batch(5/162) done. Loss: 0.0968 lr:0.001000
|
581 |
+
[ Wed Sep 14 16:53:40 2022 ] Batch(105/162) done. Loss: 0.1023 lr:0.001000
|
582 |
+
[ Wed Sep 14 16:53:57 2022 ] Eval epoch: 88
|
583 |
+
[ Wed Sep 14 16:55:42 2022 ] Mean test loss of 930 batches: 2.6768813133239746.
|
584 |
+
[ Wed Sep 14 16:55:43 2022 ] Top1: 57.08%
|
585 |
+
[ Wed Sep 14 16:55:43 2022 ] Top5: 84.17%
|
586 |
+
[ Wed Sep 14 16:55:43 2022 ] Training epoch: 89
|
587 |
+
[ Wed Sep 14 16:56:00 2022 ] Batch(43/162) done. Loss: 0.0105 lr:0.001000
|
588 |
+
[ Wed Sep 14 16:56:31 2022 ] Batch(143/162) done. Loss: 0.0479 lr:0.001000
|
589 |
+
[ Wed Sep 14 16:56:37 2022 ] Eval epoch: 89
|
590 |
+
[ Wed Sep 14 16:58:23 2022 ] Mean test loss of 930 batches: 2.690788984298706.
|
591 |
+
[ Wed Sep 14 16:58:23 2022 ] Top1: 56.82%
|
592 |
+
[ Wed Sep 14 16:58:24 2022 ] Top5: 84.20%
|
593 |
+
[ Wed Sep 14 16:58:24 2022 ] Training epoch: 90
|
594 |
+
[ Wed Sep 14 16:58:53 2022 ] Batch(81/162) done. Loss: 0.0452 lr:0.001000
|
595 |
+
[ Wed Sep 14 16:59:18 2022 ] Eval epoch: 90
|
596 |
+
[ Wed Sep 14 17:01:03 2022 ] Mean test loss of 930 batches: 2.699519157409668.
|
597 |
+
[ Wed Sep 14 17:01:03 2022 ] Top1: 56.97%
|
598 |
+
[ Wed Sep 14 17:01:04 2022 ] Top5: 84.16%
|
599 |
+
[ Wed Sep 14 17:01:04 2022 ] Training epoch: 91
|
600 |
+
[ Wed Sep 14 17:01:14 2022 ] Batch(19/162) done. Loss: 0.0271 lr:0.001000
|
601 |
+
[ Wed Sep 14 17:01:44 2022 ] Batch(119/162) done. Loss: 0.0189 lr:0.001000
|
602 |
+
[ Wed Sep 14 17:01:58 2022 ] Eval epoch: 91
|
603 |
+
[ Wed Sep 14 17:03:43 2022 ] Mean test loss of 930 batches: 2.772402048110962.
|
604 |
+
[ Wed Sep 14 17:03:43 2022 ] Top1: 56.94%
|
605 |
+
[ Wed Sep 14 17:03:44 2022 ] Top5: 84.07%
|
606 |
+
[ Wed Sep 14 17:03:44 2022 ] Training epoch: 92
|
607 |
+
[ Wed Sep 14 17:04:05 2022 ] Batch(57/162) done. Loss: 0.0531 lr:0.001000
|
608 |
+
[ Wed Sep 14 17:04:36 2022 ] Batch(157/162) done. Loss: 0.1023 lr:0.001000
|
609 |
+
[ Wed Sep 14 17:04:37 2022 ] Eval epoch: 92
|
610 |
+
[ Wed Sep 14 17:06:23 2022 ] Mean test loss of 930 batches: 2.7590200901031494.
|
611 |
+
[ Wed Sep 14 17:06:23 2022 ] Top1: 56.91%
|
612 |
+
[ Wed Sep 14 17:06:24 2022 ] Top5: 83.99%
|
613 |
+
[ Wed Sep 14 17:06:24 2022 ] Training epoch: 93
|
614 |
+
[ Wed Sep 14 17:06:57 2022 ] Batch(95/162) done. Loss: 0.0669 lr:0.001000
|
615 |
+
[ Wed Sep 14 17:07:17 2022 ] Eval epoch: 93
|
616 |
+
[ Wed Sep 14 17:09:02 2022 ] Mean test loss of 930 batches: 2.83962082862854.
|
617 |
+
[ Wed Sep 14 17:09:03 2022 ] Top1: 56.15%
|
618 |
+
[ Wed Sep 14 17:09:03 2022 ] Top5: 83.64%
|
619 |
+
[ Wed Sep 14 17:09:03 2022 ] Training epoch: 94
|
620 |
+
[ Wed Sep 14 17:09:18 2022 ] Batch(33/162) done. Loss: 0.0561 lr:0.001000
|
621 |
+
[ Wed Sep 14 17:09:48 2022 ] Batch(133/162) done. Loss: 0.0357 lr:0.001000
|
622 |
+
[ Wed Sep 14 17:09:57 2022 ] Eval epoch: 94
|
623 |
+
[ Wed Sep 14 17:11:42 2022 ] Mean test loss of 930 batches: 2.825902223587036.
|
624 |
+
[ Wed Sep 14 17:11:42 2022 ] Top1: 56.73%
|
625 |
+
[ Wed Sep 14 17:11:42 2022 ] Top5: 83.91%
|
626 |
+
[ Wed Sep 14 17:11:43 2022 ] Training epoch: 95
|
627 |
+
[ Wed Sep 14 17:12:09 2022 ] Batch(71/162) done. Loss: 0.0574 lr:0.001000
|
628 |
+
[ Wed Sep 14 17:12:36 2022 ] Eval epoch: 95
|
629 |
+
[ Wed Sep 14 17:14:21 2022 ] Mean test loss of 930 batches: 2.796349287033081.
|
630 |
+
[ Wed Sep 14 17:14:22 2022 ] Top1: 56.57%
|
631 |
+
[ Wed Sep 14 17:14:22 2022 ] Top5: 83.89%
|
632 |
+
[ Wed Sep 14 17:14:23 2022 ] Training epoch: 96
|
633 |
+
[ Wed Sep 14 17:14:29 2022 ] Batch(9/162) done. Loss: 0.0458 lr:0.001000
|
634 |
+
[ Wed Sep 14 17:14:59 2022 ] Batch(109/162) done. Loss: 0.0299 lr:0.001000
|
635 |
+
[ Wed Sep 14 17:15:15 2022 ] Eval epoch: 96
|
636 |
+
[ Wed Sep 14 17:17:01 2022 ] Mean test loss of 930 batches: 2.685493230819702.
|
637 |
+
[ Wed Sep 14 17:17:02 2022 ] Top1: 56.98%
|
638 |
+
[ Wed Sep 14 17:17:02 2022 ] Top5: 84.24%
|
639 |
+
[ Wed Sep 14 17:17:03 2022 ] Training epoch: 97
|
640 |
+
[ Wed Sep 14 17:17:21 2022 ] Batch(47/162) done. Loss: 0.0455 lr:0.001000
|
641 |
+
[ Wed Sep 14 17:17:51 2022 ] Batch(147/162) done. Loss: 0.0995 lr:0.001000
|
642 |
+
[ Wed Sep 14 17:17:56 2022 ] Eval epoch: 97
|
643 |
+
[ Wed Sep 14 17:19:41 2022 ] Mean test loss of 930 batches: 2.769493818283081.
|
644 |
+
[ Wed Sep 14 17:19:42 2022 ] Top1: 56.75%
|
645 |
+
[ Wed Sep 14 17:19:42 2022 ] Top5: 83.97%
|
646 |
+
[ Wed Sep 14 17:19:43 2022 ] Training epoch: 98
|
647 |
+
[ Wed Sep 14 17:20:12 2022 ] Batch(85/162) done. Loss: 0.0896 lr:0.001000
|
648 |
+
[ Wed Sep 14 17:20:36 2022 ] Eval epoch: 98
|
649 |
+
[ Wed Sep 14 17:22:22 2022 ] Mean test loss of 930 batches: 2.7722766399383545.
|
650 |
+
[ Wed Sep 14 17:22:22 2022 ] Top1: 56.70%
|
651 |
+
[ Wed Sep 14 17:22:23 2022 ] Top5: 83.93%
|
652 |
+
[ Wed Sep 14 17:22:23 2022 ] Training epoch: 99
|
653 |
+
[ Wed Sep 14 17:22:34 2022 ] Batch(23/162) done. Loss: 0.0967 lr:0.001000
|
654 |
+
[ Wed Sep 14 17:23:04 2022 ] Batch(123/162) done. Loss: 0.0458 lr:0.001000
|
655 |
+
[ Wed Sep 14 17:23:16 2022 ] Eval epoch: 99
|
656 |
+
[ Wed Sep 14 17:25:01 2022 ] Mean test loss of 930 batches: 2.800001621246338.
|
657 |
+
[ Wed Sep 14 17:25:02 2022 ] Top1: 56.59%
|
658 |
+
[ Wed Sep 14 17:25:02 2022 ] Top5: 83.88%
|
659 |
+
[ Wed Sep 14 17:25:02 2022 ] Training epoch: 100
|
660 |
+
[ Wed Sep 14 17:25:25 2022 ] Batch(61/162) done. Loss: 0.0443 lr:0.001000
|
661 |
+
[ Wed Sep 14 17:25:55 2022 ] Batch(161/162) done. Loss: 0.0364 lr:0.001000
|
662 |
+
[ Wed Sep 14 17:25:55 2022 ] Eval epoch: 100
|
663 |
+
[ Wed Sep 14 17:27:41 2022 ] Mean test loss of 930 batches: 2.7916245460510254.
|
664 |
+
[ Wed Sep 14 17:27:41 2022 ] Top1: 56.77%
|
665 |
+
[ Wed Sep 14 17:27:41 2022 ] Top5: 83.95%
|
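The run above finishes at Top1 56.77% / Top5 83.95% after 100 epochs, and the second learning-rate step (0.01 to 0.001 at epoch 81, from step: [60, 80] in the config below) is visible in the lr column. Every log.txt in this commit uses the same line format, so the best evaluation epoch can be recovered with a short scan. A minimal sketch, standard library only; the file path is illustrative:

import re

top1_re = re.compile(r'\] Top1: ([0-9.]+)%')
epoch_re = re.compile(r'\] Eval epoch: (\d+)')

best_acc, best_epoch, epoch = 0.0, None, None
with open('log.txt') as f:  # point at any log.txt from this commit
    for line in f:
        m = epoch_re.search(line)
        if m:
            epoch = int(m.group(1))  # remember which epoch is being evaluated
        m = top1_re.search(line)
        if m and float(m.group(1)) > best_acc:
            best_acc, best_epoch = float(m.group(1)), epoch
print('best Top1 %.2f%% at epoch %s' % (best_acc, best_epoch))

On the excerpt above this would report epoch 72 (Top1: 57.26%).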
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_motion_xset/config.yaml
ADDED
@@ -0,0 +1,59 @@
Experiment_name: ntu120_joint_motion_xset
base_lr: 0.1
batch_size: 64
config: ./config/ntu120_xset/train_joint_motion.yaml
device:
- 6
- 7
eval_interval: 5
feeder: feeders.feeder.Feeder
groups: 8
ignore_weights: []
keep_rate: 0.9
log_interval: 100
model: model.decouple_gcn.Model
model_args:
  block_size: 41
  graph: graph.ntu_rgb_d.Graph
  graph_args:
    labeling_mode: spatial
  groups: 16
  num_class: 120
  num_person: 2
  num_point: 25
model_saved_name: ./save_models/ntu120_joint_motion_xset
nesterov: true
num_epoch: 100
num_worker: 32
only_train_epoch: 1
only_train_part: true
optimizer: SGD
phase: train
print_log: true
save_interval: 2
save_score: false
seed: 1
show_topk:
- 1
- 5
start_epoch: 0
step:
- 60
- 80
test_batch_size: 64
test_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint_motion.npy
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
train_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint_motion.npy
  debug: false
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
  normalization: false
  random_choose: false
  random_move: false
  random_shift: false
  window_size: -1
warm_up_epoch: 0
weight_decay: 0.0001
weights: null
work_dir: ./work_dir/ntu120_joint_motion_xset
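This config is the flat hyperparameter dictionary that the training script echoes into the Parameters line at the top of the matching log.txt. A minimal loading sketch, assuming PyYAML; the variable names are illustrative rather than the repository's actual loader:

import yaml

with open('config.yaml') as f:  # the file shown above
    cfg = yaml.safe_load(f)

print(cfg['base_lr'], cfg['step'])  # 0.1 [60, 80]
model_args = cfg['model_args']      # keyword arguments for model.decouple_gcn.Model
print(model_args['graph_args'])     # {'labeling_mode': 'spatial'}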
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_motion_xset/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import math
from model.dropSke import DropBlock_Ske
from model.dropT import DropBlockT_1d


def import_class(name):
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod


def conv_branch_init(conv):
    weight = conv.weight
    n = weight.size(0)
    k1 = weight.size(1)
    k2 = weight.size(2)
    nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
    nn.init.constant(conv.bias, 0)


def conv_init(conv):
    nn.init.kaiming_normal(conv.weight, mode='fan_out')
    nn.init.constant(conv.bias, 0)


def bn_init(bn, scale):
    nn.init.constant(bn.weight, scale)
    nn.init.constant(bn.bias, 0)


class unit_tcn(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
        super(unit_tcn, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

        self.dropS = DropBlock_Ske(num_point=num_point)
        self.dropT = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob, A):
        x = self.bn(self.conv(x))
        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
        return x


class unit_tcn_skip(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
        super(unit_tcn_skip, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

    def forward(self, x):
        x = self.bn(self.conv(x))
        return x


class unit_gcn(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
        super(unit_gcn, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_point = num_point
        self.groups = groups
        self.num_subset = num_subset
        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)

        if in_channels != out_channels:
            self.down = nn.Sequential(
                nn.Conv2d(in_channels, out_channels, 1),
                nn.BatchNorm2d(out_channels)
            )
        else:
            self.down = lambda x: x

        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                bn_init(m, 1)
        bn_init(self.bn, 1e-6)

        self.Linear_weight = nn.Parameter(torch.zeros(
            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
            0.5 / (out_channels * num_subset)))

        self.Linear_bias = nn.Parameter(torch.zeros(
            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.constant(self.Linear_bias, 1e-6)

        eye_array = []
        for i in range(out_channels):
            eye_array.append(torch.eye(num_point))
        self.eyes = nn.Parameter(torch.tensor(torch.stack(
            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]

    def norm(self, A):
        b, c, h, w = A.size()
        A = A.view(c, self.num_point, self.num_point)
        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
        D_list_12 = (D_list + 0.001)**(-1)
        D_12 = self.eyes * D_list_12
        A = torch.bmm(A, D_12).view(b, c, h, w)
        return A

    def forward(self, x0):
        learn_A = self.DecoupleA.repeat(
            1, self.out_channels // self.groups, 1, 1)
        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)

        x = torch.einsum(
            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
        x = x + self.Linear_bias
        x = self.bn0(x)

        n, kc, t, v = x.size()
        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))

        x = self.bn(x)
        x += self.down(x0)
        x = self.relu(x)
        return x


class TCN_GCN_unit(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
        super(TCN_GCN_unit, self).__init__()
        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
        self.tcn1 = unit_tcn(out_channels, out_channels,
                             stride=stride, num_point=num_point)
        self.relu = nn.ReLU()

        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)

        if not residual:
            self.residual = lambda x: 0

        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x

        else:
            self.residual = unit_tcn_skip(
                in_channels, out_channels, kernel_size=1, stride=stride)
        self.dropSke = DropBlock_Ske(num_point=num_point)
        self.dropT_skip = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob):
        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
        return self.relu(x)


class Model(nn.Module):
    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
        super(Model, self).__init__()

        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)

        A = self.graph.A
        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)

        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
                               block_size, residual=False)
        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l5 = TCN_GCN_unit(
            64, 128, A, groups, num_point, block_size, stride=2)
        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l8 = TCN_GCN_unit(128, 256, A, groups,
                               num_point, block_size, stride=2)
        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)

        self.fc = nn.Linear(256, num_class)
        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
        bn_init(self.data_bn, 1)

    def forward(self, x, keep_prob=0.9):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(
            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        x = self.l1(x, 1.0)
        x = self.l2(x, 1.0)
        x = self.l3(x, 1.0)
        x = self.l4(x, 1.0)
        x = self.l5(x, 1.0)
        x = self.l6(x, 1.0)
        x = self.l7(x, keep_prob)
        x = self.l8(x, keep_prob)
        x = self.l9(x, keep_prob)
        x = self.l10(x, keep_prob)

        # N*M,C,T,V
        c_new = x.size(1)
        x = x.reshape(N, M, c_new, -1)
        x = x.mean(3).mean(1)

        return self.fc(x)
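Model consumes the model_args block from the config above and expects skeleton input of shape (N, C, T, V, M): batch, coordinates, frames, joints, persons. A minimal smoke-test sketch; it assumes the repository's model and graph packages are on the import path and that a CUDA device is available, since several parameters above are created with device='cuda':

import torch
from model.decouple_gcn import Model

# model_args taken from the config above
model = Model(num_class=120, num_point=25, num_person=2, groups=16,
              block_size=41, graph='graph.ntu_rgb_d.Graph',
              graph_args={'labeling_mode': 'spatial'}).cuda()

# two clips: 3 coordinates, 64 frames, 25 joints, 2 persons each
x = torch.randn(2, 3, 64, 25, 2).cuda()
logits = model(x, keep_prob=0.9)  # DropGraph is applied only in layers l7-l10
print(logits.shape)               # torch.Size([2, 120])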
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_motion_xset/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cfa1cf0029564599e84c2ba25684a77dd065f91c6669170c0e43d0a99c77e693
size 34946665
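best_acc.pkl is checked in as a Git LFS pointer rather than the pickle itself: the oid is the SHA-256 of the real blob and size is its byte length (about 35 MB here); running git lfs pull replaces the pointer with the actual file. A minimal sketch for verifying a downloaded copy against the pointer, standard library only; the path is illustrative:

import hashlib

def sha256_of(path, chunk=1 << 20):
    # stream the file so the ~35 MB pickle never has to fit in one read
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

assert sha256_of('eval_results/best_acc.pkl') == \
    'cfa1cf0029564599e84c2ba25684a77dd065f91c6669170c0e43d0a99c77e693'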
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_motion_xset/log.txt
ADDED
@@ -0,0 +1,665 @@
1 |
+
[ Tue Sep 13 18:24:57 2022 ] Parameters:
|
2 |
+
{'work_dir': './work_dir/ntu120_joint_motion_xset', 'model_saved_name': './save_models/ntu120_joint_motion_xset', 'Experiment_name': 'ntu120_joint_motion_xset', 'config': './config/ntu120_xset/train_joint_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [6, 7], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
|
3 |
+
|
4 |
+
[ Tue Sep 13 18:24:57 2022 ] Training epoch: 1
|
5 |
+
[ Tue Sep 13 18:25:50 2022 ] Batch(99/162) done. Loss: 3.6515 lr:0.100000
|
6 |
+
[ Tue Sep 13 18:26:18 2022 ] Eval epoch: 1
|
7 |
+
[ Tue Sep 13 18:29:16 2022 ] Mean test loss of 930 batches: 5.235726833343506.
|
8 |
+
[ Tue Sep 13 18:29:16 2022 ] Top1: 2.53%
|
9 |
+
[ Tue Sep 13 18:29:17 2022 ] Top5: 12.36%
|
10 |
+
[ Tue Sep 13 18:29:17 2022 ] Training epoch: 2
|
11 |
+
[ Tue Sep 13 18:29:40 2022 ] Batch(37/162) done. Loss: 3.5670 lr:0.100000
|
12 |
+
[ Tue Sep 13 18:30:33 2022 ] Batch(137/162) done. Loss: 3.3648 lr:0.100000
|
13 |
+
[ Tue Sep 13 18:30:46 2022 ] Eval epoch: 2
|
14 |
+
[ Tue Sep 13 18:33:42 2022 ] Mean test loss of 930 batches: 4.729725360870361.
|
15 |
+
[ Tue Sep 13 18:33:43 2022 ] Top1: 8.87%
|
16 |
+
[ Tue Sep 13 18:33:43 2022 ] Top5: 24.11%
|
17 |
+
[ Tue Sep 13 18:33:43 2022 ] Training epoch: 3
|
18 |
+
[ Tue Sep 13 18:34:26 2022 ] Batch(75/162) done. Loss: 2.6142 lr:0.100000
|
19 |
+
[ Tue Sep 13 18:35:13 2022 ] Eval epoch: 3
|
20 |
+
[ Tue Sep 13 18:38:09 2022 ] Mean test loss of 930 batches: 5.563536643981934.
|
21 |
+
[ Tue Sep 13 18:38:09 2022 ] Top1: 11.57%
|
22 |
+
[ Tue Sep 13 18:38:09 2022 ] Top5: 34.96%
|
23 |
+
[ Tue Sep 13 18:38:10 2022 ] Training epoch: 4
|
24 |
+
[ Tue Sep 13 18:38:20 2022 ] Batch(13/162) done. Loss: 2.3234 lr:0.100000
|
25 |
+
[ Tue Sep 13 18:39:13 2022 ] Batch(113/162) done. Loss: 2.2909 lr:0.100000
|
26 |
+
[ Tue Sep 13 18:39:39 2022 ] Eval epoch: 4
|
27 |
+
[ Tue Sep 13 18:42:34 2022 ] Mean test loss of 930 batches: 5.212625980377197.
|
28 |
+
[ Tue Sep 13 18:42:35 2022 ] Top1: 14.93%
|
29 |
+
[ Tue Sep 13 18:42:35 2022 ] Top5: 39.18%
|
30 |
+
[ Tue Sep 13 18:42:35 2022 ] Training epoch: 5
|
31 |
+
[ Tue Sep 13 18:43:06 2022 ] Batch(51/162) done. Loss: 1.8288 lr:0.100000
|
32 |
+
[ Tue Sep 13 18:43:59 2022 ] Batch(151/162) done. Loss: 1.8895 lr:0.100000
|
33 |
+
[ Tue Sep 13 18:44:05 2022 ] Eval epoch: 5
|
34 |
+
[ Tue Sep 13 18:47:00 2022 ] Mean test loss of 930 batches: 4.0911173820495605.
|
35 |
+
[ Tue Sep 13 18:47:01 2022 ] Top1: 18.45%
|
36 |
+
[ Tue Sep 13 18:47:01 2022 ] Top5: 46.17%
|
37 |
+
[ Tue Sep 13 18:47:02 2022 ] Training epoch: 6
|
38 |
+
[ Tue Sep 13 18:47:52 2022 ] Batch(89/162) done. Loss: 1.3659 lr:0.100000
|
39 |
+
[ Tue Sep 13 18:48:31 2022 ] Eval epoch: 6
|
40 |
+
[ Tue Sep 13 18:51:27 2022 ] Mean test loss of 930 batches: 5.5289459228515625.
|
41 |
+
[ Tue Sep 13 18:51:28 2022 ] Top1: 18.95%
|
42 |
+
[ Tue Sep 13 18:51:28 2022 ] Top5: 45.14%
|
43 |
+
[ Tue Sep 13 18:51:28 2022 ] Training epoch: 7
|
44 |
+
[ Tue Sep 13 18:51:46 2022 ] Batch(27/162) done. Loss: 1.2843 lr:0.100000
|
45 |
+
[ Tue Sep 13 18:52:39 2022 ] Batch(127/162) done. Loss: 1.2709 lr:0.100000
|
46 |
+
[ Tue Sep 13 18:52:58 2022 ] Eval epoch: 7
|
47 |
+
[ Tue Sep 13 18:55:54 2022 ] Mean test loss of 930 batches: 3.4792916774749756.
|
48 |
+
[ Tue Sep 13 18:55:54 2022 ] Top1: 24.92%
|
49 |
+
[ Tue Sep 13 18:55:55 2022 ] Top5: 52.68%
|
50 |
+
[ Tue Sep 13 18:55:55 2022 ] Training epoch: 8
|
51 |
+
[ Tue Sep 13 18:56:33 2022 ] Batch(65/162) done. Loss: 1.8981 lr:0.100000
|
52 |
+
[ Tue Sep 13 18:57:25 2022 ] Eval epoch: 8
|
53 |
+
[ Tue Sep 13 19:00:21 2022 ] Mean test loss of 930 batches: 3.5104548931121826.
|
54 |
+
[ Tue Sep 13 19:00:21 2022 ] Top1: 27.18%
|
55 |
+
[ Tue Sep 13 19:00:21 2022 ] Top5: 55.61%
|
56 |
+
[ Tue Sep 13 19:00:22 2022 ] Training epoch: 9
|
57 |
+
[ Tue Sep 13 19:00:27 2022 ] Batch(3/162) done. Loss: 1.4355 lr:0.100000
|
58 |
+
[ Tue Sep 13 19:01:20 2022 ] Batch(103/162) done. Loss: 1.3561 lr:0.100000
|
59 |
+
[ Tue Sep 13 19:01:51 2022 ] Eval epoch: 9
|
60 |
+
[ Tue Sep 13 19:04:48 2022 ] Mean test loss of 930 batches: 3.0909206867218018.
|
61 |
+
[ Tue Sep 13 19:04:48 2022 ] Top1: 30.46%
|
62 |
+
[ Tue Sep 13 19:04:49 2022 ] Top5: 60.58%
|
63 |
+
[ Tue Sep 13 19:04:49 2022 ] Training epoch: 10
|
64 |
+
[ Tue Sep 13 19:05:14 2022 ] Batch(41/162) done. Loss: 1.1506 lr:0.100000
|
65 |
+
[ Tue Sep 13 19:06:07 2022 ] Batch(141/162) done. Loss: 1.2948 lr:0.100000
|
66 |
+
[ Tue Sep 13 19:06:18 2022 ] Eval epoch: 10
|
67 |
+
[ Tue Sep 13 19:09:14 2022 ] Mean test loss of 930 batches: 3.8743605613708496.
|
68 |
+
[ Tue Sep 13 19:09:15 2022 ] Top1: 25.93%
|
69 |
+
[ Tue Sep 13 19:09:15 2022 ] Top5: 53.59%
|
70 |
+
[ Tue Sep 13 19:09:16 2022 ] Training epoch: 11
|
71 |
+
[ Tue Sep 13 19:10:01 2022 ] Batch(79/162) done. Loss: 1.3390 lr:0.100000
|
72 |
+
[ Tue Sep 13 19:10:45 2022 ] Eval epoch: 11
|
73 |
+
[ Tue Sep 13 19:13:41 2022 ] Mean test loss of 930 batches: 3.0287907123565674.
|
74 |
+
[ Tue Sep 13 19:13:41 2022 ] Top1: 30.51%
|
75 |
+
[ Tue Sep 13 19:13:42 2022 ] Top5: 63.40%
|
76 |
+
[ Tue Sep 13 19:13:42 2022 ] Training epoch: 12
|
77 |
+
[ Tue Sep 13 19:13:54 2022 ] Batch(17/162) done. Loss: 1.0993 lr:0.100000
|
78 |
+
[ Tue Sep 13 19:14:48 2022 ] Batch(117/162) done. Loss: 1.5553 lr:0.100000
|
79 |
+
[ Tue Sep 13 19:15:11 2022 ] Eval epoch: 12
|
80 |
+
[ Tue Sep 13 19:18:08 2022 ] Mean test loss of 930 batches: 2.681297540664673.
|
81 |
+
[ Tue Sep 13 19:18:08 2022 ] Top1: 36.16%
|
82 |
+
[ Tue Sep 13 19:18:09 2022 ] Top5: 65.88%
|
83 |
+
[ Tue Sep 13 19:18:09 2022 ] Training epoch: 13
|
84 |
+
[ Tue Sep 13 19:18:41 2022 ] Batch(55/162) done. Loss: 1.2107 lr:0.100000
|
85 |
+
[ Tue Sep 13 19:19:35 2022 ] Batch(155/162) done. Loss: 1.1203 lr:0.100000
|
86 |
+
[ Tue Sep 13 19:19:38 2022 ] Eval epoch: 13
|
87 |
+
[ Tue Sep 13 19:22:34 2022 ] Mean test loss of 930 batches: 3.446503162384033.
|
88 |
+
[ Tue Sep 13 19:22:34 2022 ] Top1: 32.32%
|
89 |
+
[ Tue Sep 13 19:22:35 2022 ] Top5: 63.73%
|
90 |
+
[ Tue Sep 13 19:22:35 2022 ] Training epoch: 14
|
91 |
+
[ Tue Sep 13 19:23:28 2022 ] Batch(93/162) done. Loss: 1.0726 lr:0.100000
|
92 |
+
[ Tue Sep 13 19:24:05 2022 ] Eval epoch: 14
|
93 |
+
[ Tue Sep 13 19:27:01 2022 ] Mean test loss of 930 batches: 2.7903835773468018.
|
94 |
+
[ Tue Sep 13 19:27:01 2022 ] Top1: 39.31%
|
95 |
+
[ Tue Sep 13 19:27:02 2022 ] Top5: 69.23%
|
96 |
+
[ Tue Sep 13 19:27:02 2022 ] Training epoch: 15
|
97 |
+
[ Tue Sep 13 19:27:22 2022 ] Batch(31/162) done. Loss: 1.3534 lr:0.100000
|
98 |
+
[ Tue Sep 13 19:28:15 2022 ] Batch(131/162) done. Loss: 1.4148 lr:0.100000
|
99 |
+
[ Tue Sep 13 19:28:32 2022 ] Eval epoch: 15
|
100 |
+
[ Tue Sep 13 19:31:27 2022 ] Mean test loss of 930 batches: 3.082577705383301.
|
101 |
+
[ Tue Sep 13 19:31:28 2022 ] Top1: 37.26%
|
102 |
+
[ Tue Sep 13 19:31:28 2022 ] Top5: 68.58%
|
103 |
+
[ Tue Sep 13 19:31:28 2022 ] Training epoch: 16
|
104 |
+
[ Tue Sep 13 19:32:09 2022 ] Batch(69/162) done. Loss: 0.9850 lr:0.100000
|
105 |
+
[ Tue Sep 13 19:32:58 2022 ] Eval epoch: 16
|
106 |
+
[ Tue Sep 13 19:35:53 2022 ] Mean test loss of 930 batches: 2.5744996070861816.
|
107 |
+
[ Tue Sep 13 19:35:54 2022 ] Top1: 40.64%
|
108 |
+
[ Tue Sep 13 19:35:54 2022 ] Top5: 72.34%
|
109 |
+
[ Tue Sep 13 19:35:55 2022 ] Training epoch: 17
|
110 |
+
[ Tue Sep 13 19:36:01 2022 ] Batch(7/162) done. Loss: 0.6388 lr:0.100000
|
111 |
+
[ Tue Sep 13 19:36:55 2022 ] Batch(107/162) done. Loss: 0.7803 lr:0.100000
|
112 |
+
[ Tue Sep 13 19:37:24 2022 ] Eval epoch: 17
|
113 |
+
[ Tue Sep 13 19:40:20 2022 ] Mean test loss of 930 batches: 2.6255459785461426.
|
114 |
+
[ Tue Sep 13 19:40:20 2022 ] Top1: 41.27%
|
115 |
+
[ Tue Sep 13 19:40:21 2022 ] Top5: 73.48%
|
116 |
+
[ Tue Sep 13 19:40:21 2022 ] Training epoch: 18
|
117 |
+
[ Tue Sep 13 19:40:48 2022 ] Batch(45/162) done. Loss: 0.6380 lr:0.100000
|
118 |
+
[ Tue Sep 13 19:41:42 2022 ] Batch(145/162) done. Loss: 0.9929 lr:0.100000
|
119 |
+
[ Tue Sep 13 19:41:51 2022 ] Eval epoch: 18
|
120 |
+
[ Tue Sep 13 19:44:47 2022 ] Mean test loss of 930 batches: 2.5052144527435303.
|
121 |
+
[ Tue Sep 13 19:44:47 2022 ] Top1: 43.61%
|
122 |
+
[ Tue Sep 13 19:44:47 2022 ] Top5: 74.95%
|
123 |
+
[ Tue Sep 13 19:44:48 2022 ] Training epoch: 19
|
124 |
+
[ Tue Sep 13 19:45:35 2022 ] Batch(83/162) done. Loss: 0.9538 lr:0.100000
|
125 |
+
[ Tue Sep 13 19:46:17 2022 ] Eval epoch: 19
|
126 |
+
[ Tue Sep 13 19:49:12 2022 ] Mean test loss of 930 batches: 3.247473955154419.
|
127 |
+
[ Tue Sep 13 19:49:13 2022 ] Top1: 34.91%
|
128 |
+
[ Tue Sep 13 19:49:13 2022 ] Top5: 65.97%
|
129 |
+
[ Tue Sep 13 19:49:13 2022 ] Training epoch: 20
|
130 |
+
[ Tue Sep 13 19:49:28 2022 ] Batch(21/162) done. Loss: 0.8027 lr:0.100000
|
131 |
+
[ Tue Sep 13 19:50:21 2022 ] Batch(121/162) done. Loss: 0.9911 lr:0.100000
|
132 |
+
[ Tue Sep 13 19:50:43 2022 ] Eval epoch: 20
|
133 |
+
[ Tue Sep 13 19:53:39 2022 ] Mean test loss of 930 batches: 5.476712226867676.
|
134 |
+
[ Tue Sep 13 19:53:39 2022 ] Top1: 24.59%
|
135 |
+
[ Tue Sep 13 19:53:40 2022 ] Top5: 53.93%
|
136 |
+
[ Tue Sep 13 19:53:40 2022 ] Training epoch: 21
|
137 |
+
[ Tue Sep 13 19:54:15 2022 ] Batch(59/162) done. Loss: 0.8640 lr:0.100000
|
138 |
+
[ Tue Sep 13 19:55:08 2022 ] Batch(159/162) done. Loss: 0.8501 lr:0.100000
|
139 |
+
[ Tue Sep 13 19:55:09 2022 ] Eval epoch: 21
|
140 |
+
[ Tue Sep 13 19:58:05 2022 ] Mean test loss of 930 batches: 2.893510580062866.
|
141 |
+
[ Tue Sep 13 19:58:06 2022 ] Top1: 41.86%
|
142 |
+
[ Tue Sep 13 19:58:06 2022 ] Top5: 73.83%
|
143 |
+
[ Tue Sep 13 19:58:07 2022 ] Training epoch: 22
|
144 |
+
[ Tue Sep 13 19:59:02 2022 ] Batch(97/162) done. Loss: 0.8610 lr:0.100000
|
145 |
+
[ Tue Sep 13 19:59:36 2022 ] Eval epoch: 22
|
146 |
+
[ Tue Sep 13 20:02:33 2022 ] Mean test loss of 930 batches: 2.469637870788574.
|
147 |
+
[ Tue Sep 13 20:02:34 2022 ] Top1: 43.61%
|
148 |
+
[ Tue Sep 13 20:02:34 2022 ] Top5: 77.11%
|
149 |
+
[ Tue Sep 13 20:02:35 2022 ] Training epoch: 23
|
150 |
+
[ Tue Sep 13 20:02:57 2022 ] Batch(35/162) done. Loss: 0.8158 lr:0.100000
|
151 |
+
[ Tue Sep 13 20:03:50 2022 ] Batch(135/162) done. Loss: 0.5242 lr:0.100000
|
152 |
+
[ Tue Sep 13 20:04:05 2022 ] Eval epoch: 23
|
153 |
+
[ Tue Sep 13 20:07:01 2022 ] Mean test loss of 930 batches: 2.4923813343048096.
|
154 |
+
[ Tue Sep 13 20:07:01 2022 ] Top1: 44.61%
|
155 |
+
[ Tue Sep 13 20:07:02 2022 ] Top5: 76.41%
|
156 |
+
[ Tue Sep 13 20:07:02 2022 ] Training epoch: 24
|
157 |
+
[ Tue Sep 13 20:07:45 2022 ] Batch(73/162) done. Loss: 0.4336 lr:0.100000
|
158 |
+
[ Tue Sep 13 20:08:32 2022 ] Eval epoch: 24
|
159 |
+
[ Tue Sep 13 20:11:28 2022 ] Mean test loss of 930 batches: 2.9429335594177246.
|
160 |
+
[ Tue Sep 13 20:11:29 2022 ] Top1: 42.74%
|
161 |
+
[ Tue Sep 13 20:11:29 2022 ] Top5: 74.01%
|
162 |
+
[ Tue Sep 13 20:11:29 2022 ] Training epoch: 25
|
163 |
+
[ Tue Sep 13 20:11:39 2022 ] Batch(11/162) done. Loss: 0.6002 lr:0.100000
|
164 |
+
[ Tue Sep 13 20:12:32 2022 ] Batch(111/162) done. Loss: 0.4435 lr:0.100000
|
165 |
+
[ Tue Sep 13 20:12:59 2022 ] Eval epoch: 25
|
166 |
+
[ Tue Sep 13 20:15:55 2022 ] Mean test loss of 930 batches: 3.080543279647827.
|
167 |
+
[ Tue Sep 13 20:15:55 2022 ] Top1: 40.29%
|
168 |
+
[ Tue Sep 13 20:15:55 2022 ] Top5: 70.90%
|
169 |
+
[ Tue Sep 13 20:15:56 2022 ] Training epoch: 26
|
170 |
+
[ Tue Sep 13 20:16:25 2022 ] Batch(49/162) done. Loss: 0.3770 lr:0.100000
|
171 |
+
[ Tue Sep 13 20:17:18 2022 ] Batch(149/162) done. Loss: 0.3701 lr:0.100000
|
172 |
+
[ Tue Sep 13 20:17:25 2022 ] Eval epoch: 26
|
173 |
+
[ Tue Sep 13 20:20:21 2022 ] Mean test loss of 930 batches: 3.1447954177856445.
|
174 |
+
[ Tue Sep 13 20:20:21 2022 ] Top1: 40.04%
|
175 |
+
[ Tue Sep 13 20:20:22 2022 ] Top5: 71.77%
|
176 |
+
[ Tue Sep 13 20:20:22 2022 ] Training epoch: 27
|
177 |
+
[ Tue Sep 13 20:21:12 2022 ] Batch(87/162) done. Loss: 1.1478 lr:0.100000
|
178 |
+
[ Tue Sep 13 20:21:51 2022 ] Eval epoch: 27
|
179 |
+
[ Tue Sep 13 20:24:48 2022 ] Mean test loss of 930 batches: 3.4919514656066895.
|
180 |
+
[ Tue Sep 13 20:24:48 2022 ] Top1: 40.34%
|
181 |
+
[ Tue Sep 13 20:24:49 2022 ] Top5: 72.57%
|
182 |
+
[ Tue Sep 13 20:24:49 2022 ] Training epoch: 28
|
183 |
+
[ Tue Sep 13 20:25:06 2022 ] Batch(25/162) done. Loss: 0.5289 lr:0.100000
|
184 |
+
[ Tue Sep 13 20:26:00 2022 ] Batch(125/162) done. Loss: 0.3959 lr:0.100000
|
185 |
+
[ Tue Sep 13 20:26:19 2022 ] Eval epoch: 28
|
186 |
+
[ Tue Sep 13 20:29:16 2022 ] Mean test loss of 930 batches: 3.084304094314575.
|
187 |
+
[ Tue Sep 13 20:29:16 2022 ] Top1: 41.98%
|
188 |
+
[ Tue Sep 13 20:29:17 2022 ] Top5: 71.62%
|
189 |
+
[ Tue Sep 13 20:29:17 2022 ] Training epoch: 29
|
190 |
+
[ Tue Sep 13 20:29:54 2022 ] Batch(63/162) done. Loss: 0.5615 lr:0.100000
|
191 |
+
[ Tue Sep 13 20:30:47 2022 ] Eval epoch: 29
|
192 |
+
[ Tue Sep 13 20:33:42 2022 ] Mean test loss of 930 batches: 4.129952430725098.
|
193 |
+
[ Tue Sep 13 20:33:43 2022 ] Top1: 34.69%
|
194 |
+
[ Tue Sep 13 20:33:43 2022 ] Top5: 63.24%
|
195 |
+
[ Tue Sep 13 20:33:43 2022 ] Training epoch: 30
|
196 |
+
[ Tue Sep 13 20:33:47 2022 ] Batch(1/162) done. Loss: 0.3964 lr:0.100000
|
197 |
+
[ Tue Sep 13 20:34:41 2022 ] Batch(101/162) done. Loss: 0.6367 lr:0.100000
|
198 |
+
[ Tue Sep 13 20:35:13 2022 ] Eval epoch: 30
|
199 |
+
[ Tue Sep 13 20:38:08 2022 ] Mean test loss of 930 batches: 3.9512550830841064.
|
200 |
+
[ Tue Sep 13 20:38:09 2022 ] Top1: 38.94%
|
201 |
+
[ Tue Sep 13 20:38:09 2022 ] Top5: 70.13%
|
202 |
+
[ Tue Sep 13 20:38:09 2022 ] Training epoch: 31
|
203 |
+
[ Tue Sep 13 20:38:34 2022 ] Batch(39/162) done. Loss: 0.3871 lr:0.100000
|
204 |
+
[ Tue Sep 13 20:39:27 2022 ] Batch(139/162) done. Loss: 0.5418 lr:0.100000
|
205 |
+
[ Tue Sep 13 20:39:39 2022 ] Eval epoch: 31
|
206 |
+
[ Tue Sep 13 20:42:34 2022 ] Mean test loss of 930 batches: 3.003483533859253.
|
207 |
+
[ Tue Sep 13 20:42:35 2022 ] Top1: 44.00%
|
208 |
+
[ Tue Sep 13 20:42:35 2022 ] Top5: 75.59%
|
209 |
+
[ Tue Sep 13 20:42:35 2022 ] Training epoch: 32
|
210 |
+
[ Tue Sep 13 20:43:20 2022 ] Batch(77/162) done. Loss: 0.4624 lr:0.100000
|
211 |
+
[ Tue Sep 13 20:44:05 2022 ] Eval epoch: 32
|
212 |
+
[ Tue Sep 13 20:47:01 2022 ] Mean test loss of 930 batches: 2.6536295413970947.
|
213 |
+
[ Tue Sep 13 20:47:02 2022 ] Top1: 47.46%
|
214 |
+
[ Tue Sep 13 20:47:02 2022 ] Top5: 77.98%
|
215 |
+
[ Tue Sep 13 20:47:02 2022 ] Training epoch: 33
|
216 |
+
[ Tue Sep 13 20:47:14 2022 ] Batch(15/162) done. Loss: 0.2946 lr:0.100000
|
217 |
+
[ Tue Sep 13 20:48:07 2022 ] Batch(115/162) done. Loss: 0.8799 lr:0.100000
|
218 |
+
[ Tue Sep 13 20:48:32 2022 ] Eval epoch: 33
|
219 |
+
[ Tue Sep 13 20:51:28 2022 ] Mean test loss of 930 batches: 4.496440410614014.
|
220 |
+
[ Tue Sep 13 20:51:28 2022 ] Top1: 34.02%
|
221 |
+
[ Tue Sep 13 20:51:29 2022 ] Top5: 64.93%
|
222 |
+
[ Tue Sep 13 20:51:29 2022 ] Training epoch: 34
|
223 |
+
[ Tue Sep 13 20:52:01 2022 ] Batch(53/162) done. Loss: 0.4099 lr:0.100000
|
224 |
+
[ Tue Sep 13 20:52:54 2022 ] Batch(153/162) done. Loss: 0.2651 lr:0.100000
|
225 |
+
[ Tue Sep 13 20:52:58 2022 ] Eval epoch: 34
|
226 |
+
[ Tue Sep 13 20:55:54 2022 ] Mean test loss of 930 batches: 3.864461898803711.
|
227 |
+
[ Tue Sep 13 20:55:54 2022 ] Top1: 41.27%
|
228 |
+
[ Tue Sep 13 20:55:55 2022 ] Top5: 70.98%
|
229 |
+
[ Tue Sep 13 20:55:55 2022 ] Training epoch: 35
|
230 |
+
[ Tue Sep 13 20:56:47 2022 ] Batch(91/162) done. Loss: 0.1810 lr:0.100000
|
231 |
+
[ Tue Sep 13 20:57:25 2022 ] Eval epoch: 35
|
232 |
+
[ Tue Sep 13 21:00:20 2022 ] Mean test loss of 930 batches: 3.152431011199951.
|
233 |
+
[ Tue Sep 13 21:00:21 2022 ] Top1: 40.42%
|
234 |
+
[ Tue Sep 13 21:00:21 2022 ] Top5: 70.64%
|
235 |
+
[ Tue Sep 13 21:00:21 2022 ] Training epoch: 36
|
236 |
+
[ Tue Sep 13 21:00:40 2022 ] Batch(29/162) done. Loss: 0.3657 lr:0.100000
|
237 |
+
[ Tue Sep 13 21:01:34 2022 ] Batch(129/162) done. Loss: 0.4337 lr:0.100000
|
238 |
+
[ Tue Sep 13 21:01:51 2022 ] Eval epoch: 36
|
239 |
+
[ Tue Sep 13 21:04:47 2022 ] Mean test loss of 930 batches: 3.287761926651001.
|
240 |
+
[ Tue Sep 13 21:04:47 2022 ] Top1: 45.88%
|
241 |
+
[ Tue Sep 13 21:04:48 2022 ] Top5: 76.60%
|
242 |
+
[ Tue Sep 13 21:04:48 2022 ] Training epoch: 37
|
243 |
+
[ Tue Sep 13 21:05:27 2022 ] Batch(67/162) done. Loss: 0.2672 lr:0.100000
|
244 |
+
[ Tue Sep 13 21:06:17 2022 ] Eval epoch: 37
|
245 |
+
[ Tue Sep 13 21:09:13 2022 ] Mean test loss of 930 batches: 3.207329273223877.
|
246 |
+
[ Tue Sep 13 21:09:13 2022 ] Top1: 47.81%
|
247 |
+
[ Tue Sep 13 21:09:14 2022 ] Top5: 78.13%
|
248 |
+
[ Tue Sep 13 21:09:14 2022 ] Training epoch: 38
|
249 |
+
[ Tue Sep 13 21:09:20 2022 ] Batch(5/162) done. Loss: 0.1246 lr:0.100000
|
250 |
+
[ Tue Sep 13 21:10:13 2022 ] Batch(105/162) done. Loss: 0.3872 lr:0.100000
|
251 |
+
[ Tue Sep 13 21:10:43 2022 ] Eval epoch: 38
|
252 |
+
[ Tue Sep 13 21:13:39 2022 ] Mean test loss of 930 batches: 3.925532341003418.
|
253 |
+
[ Tue Sep 13 21:13:39 2022 ] Top1: 41.53%
|
254 |
+
[ Tue Sep 13 21:13:40 2022 ] Top5: 72.47%
|
255 |
+
[ Tue Sep 13 21:13:40 2022 ] Training epoch: 39
|
256 |
+
[ Tue Sep 13 21:14:06 2022 ] Batch(43/162) done. Loss: 0.4519 lr:0.100000
|
257 |
+
[ Tue Sep 13 21:14:59 2022 ] Batch(143/162) done. Loss: 0.3730 lr:0.100000
|
258 |
+
[ Tue Sep 13 21:15:09 2022 ] Eval epoch: 39
|
259 |
+
[ Tue Sep 13 21:18:05 2022 ] Mean test loss of 930 batches: 3.4361915588378906.
|
260 |
+
[ Tue Sep 13 21:18:05 2022 ] Top1: 41.13%
|
261 |
+
[ Tue Sep 13 21:18:06 2022 ] Top5: 72.29%
|
262 |
+
[ Tue Sep 13 21:18:06 2022 ] Training epoch: 40
|
263 |
+
[ Tue Sep 13 21:18:52 2022 ] Batch(81/162) done. Loss: 0.3425 lr:0.100000
|
264 |
+
[ Tue Sep 13 21:19:35 2022 ] Eval epoch: 40
|
265 |
+
[ Tue Sep 13 21:22:30 2022 ] Mean test loss of 930 batches: 3.311009407043457.
|
266 |
+
[ Tue Sep 13 21:22:31 2022 ] Top1: 44.99%
|
267 |
+
[ Tue Sep 13 21:22:31 2022 ] Top5: 72.37%
|
268 |
+
[ Tue Sep 13 21:22:31 2022 ] Training epoch: 41
|
269 |
+
[ Tue Sep 13 21:22:45 2022 ] Batch(19/162) done. Loss: 0.2725 lr:0.100000
|
270 |
+
[ Tue Sep 13 21:23:38 2022 ] Batch(119/162) done. Loss: 0.2891 lr:0.100000
|
271 |
+
[ Tue Sep 13 21:24:01 2022 ] Eval epoch: 41
|
272 |
+
[ Tue Sep 13 21:26:56 2022 ] Mean test loss of 930 batches: 3.1727330684661865.
|
273 |
+
[ Tue Sep 13 21:26:57 2022 ] Top1: 46.30%
|
274 |
+
[ Tue Sep 13 21:26:57 2022 ] Top5: 74.79%
|
275 |
+
[ Tue Sep 13 21:26:58 2022 ] Training epoch: 42
|
276 |
+
[ Tue Sep 13 21:27:31 2022 ] Batch(57/162) done. Loss: 0.2355 lr:0.100000
|
277 |
+
[ Tue Sep 13 21:28:24 2022 ] Batch(157/162) done. Loss: 0.3807 lr:0.100000
|
278 |
+
[ Tue Sep 13 21:28:27 2022 ] Eval epoch: 42
|
279 |
+
[ Tue Sep 13 21:31:22 2022 ] Mean test loss of 930 batches: 5.087271690368652.
|
280 |
+
[ Tue Sep 13 21:31:22 2022 ] Top1: 36.97%
|
281 |
+
[ Tue Sep 13 21:31:23 2022 ] Top5: 65.04%
|
282 |
+
[ Tue Sep 13 21:31:23 2022 ] Training epoch: 43
|
283 |
+
[ Tue Sep 13 21:32:17 2022 ] Batch(95/162) done. Loss: 0.5819 lr:0.100000
|
284 |
+
[ Tue Sep 13 21:32:52 2022 ] Eval epoch: 43
|
285 |
+
[ Tue Sep 13 21:35:48 2022 ] Mean test loss of 930 batches: 4.4214887619018555.
|
286 |
+
[ Tue Sep 13 21:35:48 2022 ] Top1: 32.79%
|
287 |
+
[ Tue Sep 13 21:35:49 2022 ] Top5: 61.90%
|
288 |
+
[ Tue Sep 13 21:35:49 2022 ] Training epoch: 44
|
289 |
+
[ Tue Sep 13 21:36:10 2022 ] Batch(33/162) done. Loss: 0.4286 lr:0.100000
|
290 |
+
[ Tue Sep 13 21:37:03 2022 ] Batch(133/162) done. Loss: 0.4537 lr:0.100000
|
291 |
+
[ Tue Sep 13 21:37:18 2022 ] Eval epoch: 44
|
292 |
+
[ Tue Sep 13 21:40:14 2022 ] Mean test loss of 930 batches: 3.6953487396240234.
|
293 |
+
[ Tue Sep 13 21:40:14 2022 ] Top1: 44.60%
|
294 |
+
[ Tue Sep 13 21:40:14 2022 ] Top5: 75.06%
|
295 |
+
[ Tue Sep 13 21:40:15 2022 ] Training epoch: 45
|
296 |
+
[ Tue Sep 13 21:40:55 2022 ] Batch(71/162) done. Loss: 0.3127 lr:0.100000
|
297 |
+
[ Tue Sep 13 21:41:44 2022 ] Eval epoch: 45
|
298 |
+
[ Tue Sep 13 21:44:39 2022 ] Mean test loss of 930 batches: 4.165959358215332.
|
299 |
+
[ Tue Sep 13 21:44:40 2022 ] Top1: 41.86%
|
300 |
+
[ Tue Sep 13 21:44:40 2022 ] Top5: 70.45%
|
301 |
+
[ Tue Sep 13 21:44:40 2022 ] Training epoch: 46
|
302 |
+
[ Tue Sep 13 21:44:48 2022 ] Batch(9/162) done. Loss: 0.3008 lr:0.100000
|
303 |
+
[ Tue Sep 13 21:45:42 2022 ] Batch(109/162) done. Loss: 0.3582 lr:0.100000
|
304 |
+
[ Tue Sep 13 21:46:10 2022 ] Eval epoch: 46
|
305 |
+
[ Tue Sep 13 21:49:06 2022 ] Mean test loss of 930 batches: 4.4879326820373535.
|
306 |
+
[ Tue Sep 13 21:49:06 2022 ] Top1: 39.41%
|
307 |
+
[ Tue Sep 13 21:49:07 2022 ] Top5: 70.74%
|
308 |
+
[ Tue Sep 13 21:49:07 2022 ] Training epoch: 47
|
309 |
+
[ Tue Sep 13 21:49:35 2022 ] Batch(47/162) done. Loss: 0.3825 lr:0.100000
|
310 |
+
[ Tue Sep 13 21:50:28 2022 ] Batch(147/162) done. Loss: 0.2699 lr:0.100000
|
311 |
+
[ Tue Sep 13 21:50:36 2022 ] Eval epoch: 47
|
312 |
+
[ Tue Sep 13 21:53:32 2022 ] Mean test loss of 930 batches: 3.048259735107422.
|
313 |
+
[ Tue Sep 13 21:53:32 2022 ] Top1: 48.36%
|
314 |
+
[ Tue Sep 13 21:53:33 2022 ] Top5: 77.78%
|
315 |
+
[ Tue Sep 13 21:53:33 2022 ] Training epoch: 48
|
316 |
+
[ Tue Sep 13 21:54:21 2022 ] Batch(85/162) done. Loss: 0.1663 lr:0.100000
|
317 |
+
[ Tue Sep 13 21:55:02 2022 ] Eval epoch: 48
|
318 |
+
[ Tue Sep 13 21:57:58 2022 ] Mean test loss of 930 batches: 3.4193732738494873.
|
319 |
+
[ Tue Sep 13 21:57:58 2022 ] Top1: 47.52%
|
320 |
+
[ Tue Sep 13 21:57:59 2022 ] Top5: 76.02%
|
321 |
+
[ Tue Sep 13 21:57:59 2022 ] Training epoch: 49
|
322 |
+
[ Tue Sep 13 21:58:14 2022 ] Batch(23/162) done. Loss: 0.2840 lr:0.100000
|
323 |
+
[ Tue Sep 13 21:59:07 2022 ] Batch(123/162) done. Loss: 0.2047 lr:0.100000
|
324 |
+
[ Tue Sep 13 21:59:28 2022 ] Eval epoch: 49
|
325 |
+
[ Tue Sep 13 22:02:24 2022 ] Mean test loss of 930 batches: 3.1765494346618652.
|
326 |
+
[ Tue Sep 13 22:02:25 2022 ] Top1: 49.31%
|
327 |
+
[ Tue Sep 13 22:02:25 2022 ] Top5: 77.90%
|
328 |
+
[ Tue Sep 13 22:02:26 2022 ] Training epoch: 50
|
329 |
+
[ Tue Sep 13 22:03:02 2022 ] Batch(61/162) done. Loss: 0.2112 lr:0.100000
|
330 |
+
[ Tue Sep 13 22:03:55 2022 ] Batch(161/162) done. Loss: 0.2298 lr:0.100000
|
331 |
+
[ Tue Sep 13 22:03:56 2022 ] Eval epoch: 50
|
332 |
+
[ Tue Sep 13 22:06:51 2022 ] Mean test loss of 930 batches: 4.108170986175537.
|
333 |
+
[ Tue Sep 13 22:06:52 2022 ] Top1: 41.71%
|
334 |
+
[ Tue Sep 13 22:06:52 2022 ] Top5: 70.49%
|
335 |
+
[ Tue Sep 13 22:06:52 2022 ] Training epoch: 51
|
336 |
+
[ Tue Sep 13 22:07:48 2022 ] Batch(99/162) done. Loss: 0.2841 lr:0.100000
|
337 |
+
[ Tue Sep 13 22:08:22 2022 ] Eval epoch: 51
|
338 |
+
[ Tue Sep 13 22:11:17 2022 ] Mean test loss of 930 batches: 3.8305201530456543.
|
339 |
+
[ Tue Sep 13 22:11:18 2022 ] Top1: 43.83%
|
340 |
+
[ Tue Sep 13 22:11:18 2022 ] Top5: 73.38%
|
341 |
+
[ Tue Sep 13 22:11:19 2022 ] Training epoch: 52
|
342 |
+
[ Tue Sep 13 22:11:41 2022 ] Batch(37/162) done. Loss: 0.2311 lr:0.100000
|
343 |
+
[ Tue Sep 13 22:12:35 2022 ] Batch(137/162) done. Loss: 0.3498 lr:0.100000
|
344 |
+
[ Tue Sep 13 22:12:48 2022 ] Eval epoch: 52
|
345 |
+
[ Tue Sep 13 22:15:43 2022 ] Mean test loss of 930 batches: 3.149076461791992.
|
346 |
+
[ Tue Sep 13 22:15:44 2022 ] Top1: 46.20%
|
347 |
+
[ Tue Sep 13 22:15:44 2022 ] Top5: 76.61%
|
348 |
+
[ Tue Sep 13 22:15:45 2022 ] Training epoch: 53
|
349 |
+
[ Tue Sep 13 22:16:28 2022 ] Batch(75/162) done. Loss: 0.2594 lr:0.100000
|
350 |
+
[ Tue Sep 13 22:17:14 2022 ] Eval epoch: 53
|
351 |
+
[ Tue Sep 13 22:20:10 2022 ] Mean test loss of 930 batches: 3.767601728439331.
|
352 |
+
[ Tue Sep 13 22:20:10 2022 ] Top1: 46.00%
|
353 |
+
[ Tue Sep 13 22:20:11 2022 ] Top5: 75.40%
|
354 |
+
[ Tue Sep 13 22:20:11 2022 ] Training epoch: 54
|
355 |
+
[ Tue Sep 13 22:20:21 2022 ] Batch(13/162) done. Loss: 0.2445 lr:0.100000
|
356 |
+
[ Tue Sep 13 22:21:15 2022 ] Batch(113/162) done. Loss: 0.1727 lr:0.100000
|
357 |
+
[ Tue Sep 13 22:21:40 2022 ] Eval epoch: 54
|
358 |
+
[ Tue Sep 13 22:24:36 2022 ] Mean test loss of 930 batches: 3.2365827560424805.
|
359 |
+
[ Tue Sep 13 22:24:36 2022 ] Top1: 47.60%
|
360 |
+
[ Tue Sep 13 22:24:37 2022 ] Top5: 77.30%
|
361 |
+
[ Tue Sep 13 22:24:37 2022 ] Training epoch: 55
|
362 |
+
[ Tue Sep 13 22:25:07 2022 ] Batch(51/162) done. Loss: 0.2887 lr:0.100000
[ Tue Sep 13 22:26:01 2022 ] Batch(151/162) done. Loss: 0.3823 lr:0.100000
[ Tue Sep 13 22:26:06 2022 ] Eval epoch: 55
[ Tue Sep 13 22:29:01 2022 ] Mean test loss of 930 batches: 3.87300968170166.
[ Tue Sep 13 22:29:02 2022 ] Top1: 43.79%
[ Tue Sep 13 22:29:02 2022 ] Top5: 70.42%
[ Tue Sep 13 22:29:03 2022 ] Training epoch: 56
[ Tue Sep 13 22:29:53 2022 ] Batch(89/162) done. Loss: 0.4036 lr:0.100000
[ Tue Sep 13 22:30:32 2022 ] Eval epoch: 56
[ Tue Sep 13 22:33:27 2022 ] Mean test loss of 930 batches: 3.2829749584198.
[ Tue Sep 13 22:33:28 2022 ] Top1: 47.72%
[ Tue Sep 13 22:33:28 2022 ] Top5: 78.42%
[ Tue Sep 13 22:33:29 2022 ] Training epoch: 57
[ Tue Sep 13 22:33:46 2022 ] Batch(27/162) done. Loss: 0.1076 lr:0.100000
[ Tue Sep 13 22:34:39 2022 ] Batch(127/162) done. Loss: 0.2584 lr:0.100000
[ Tue Sep 13 22:34:58 2022 ] Eval epoch: 57
[ Tue Sep 13 22:37:53 2022 ] Mean test loss of 930 batches: 3.3539950847625732.
[ Tue Sep 13 22:37:54 2022 ] Top1: 47.14%
[ Tue Sep 13 22:37:54 2022 ] Top5: 76.89%
[ Tue Sep 13 22:37:54 2022 ] Training epoch: 58
[ Tue Sep 13 22:38:32 2022 ] Batch(65/162) done. Loss: 0.6299 lr:0.100000
[ Tue Sep 13 22:39:24 2022 ] Eval epoch: 58
[ Tue Sep 13 22:42:19 2022 ] Mean test loss of 930 batches: 93.60208129882812.
[ Tue Sep 13 22:42:19 2022 ] Top1: 1.13%
[ Tue Sep 13 22:42:20 2022 ] Top5: 6.35%
[ Tue Sep 13 22:42:20 2022 ] Training epoch: 59
[ Tue Sep 13 22:42:25 2022 ] Batch(3/162) done. Loss: 0.4038 lr:0.100000
[ Tue Sep 13 22:43:18 2022 ] Batch(103/162) done. Loss: 0.1457 lr:0.100000
[ Tue Sep 13 22:43:49 2022 ] Eval epoch: 59
[ Tue Sep 13 22:46:44 2022 ] Mean test loss of 930 batches: 4.4294657707214355.
[ Tue Sep 13 22:46:45 2022 ] Top1: 39.32%
[ Tue Sep 13 22:46:45 2022 ] Top5: 68.19%
[ Tue Sep 13 22:46:46 2022 ] Training epoch: 60
[ Tue Sep 13 22:47:11 2022 ] Batch(41/162) done. Loss: 0.2347 lr:0.100000
[ Tue Sep 13 22:48:04 2022 ] Batch(141/162) done. Loss: 0.2860 lr:0.100000
[ Tue Sep 13 22:48:15 2022 ] Eval epoch: 60
[ Tue Sep 13 22:51:10 2022 ] Mean test loss of 930 batches: 3.385327100753784.
[ Tue Sep 13 22:51:11 2022 ] Top1: 46.98%
[ Tue Sep 13 22:51:11 2022 ] Top5: 74.47%
[ Tue Sep 13 22:51:11 2022 ] Training epoch: 61
[ Tue Sep 13 22:51:56 2022 ] Batch(79/162) done. Loss: 0.1330 lr:0.010000
[ Tue Sep 13 22:52:40 2022 ] Eval epoch: 61
[ Tue Sep 13 22:55:37 2022 ] Mean test loss of 930 batches: 3.0223019123077393.
[ Tue Sep 13 22:55:37 2022 ] Top1: 54.31%
[ Tue Sep 13 22:55:37 2022 ] Top5: 81.27%
[ Tue Sep 13 22:55:38 2022 ] Training epoch: 62
[ Tue Sep 13 22:55:50 2022 ] Batch(17/162) done. Loss: 0.0683 lr:0.010000
[ Tue Sep 13 22:56:43 2022 ] Batch(117/162) done. Loss: 0.1300 lr:0.010000
[ Tue Sep 13 22:57:07 2022 ] Eval epoch: 62
[ Tue Sep 13 23:00:03 2022 ] Mean test loss of 930 batches: 2.9251832962036133.
[ Tue Sep 13 23:00:03 2022 ] Top1: 54.89%
[ Tue Sep 13 23:00:04 2022 ] Top5: 81.63%
[ Tue Sep 13 23:00:04 2022 ] Training epoch: 63
[ Tue Sep 13 23:00:36 2022 ] Batch(55/162) done. Loss: 0.0774 lr:0.010000
[ Tue Sep 13 23:01:30 2022 ] Batch(155/162) done. Loss: 0.0291 lr:0.010000
[ Tue Sep 13 23:01:33 2022 ] Eval epoch: 63
[ Tue Sep 13 23:04:29 2022 ] Mean test loss of 930 batches: 2.900144100189209.
[ Tue Sep 13 23:04:30 2022 ] Top1: 55.64%
[ Tue Sep 13 23:04:30 2022 ] Top5: 82.05%
[ Tue Sep 13 23:04:30 2022 ] Training epoch: 64
[ Tue Sep 13 23:05:23 2022 ] Batch(93/162) done. Loss: 0.0377 lr:0.010000
[ Tue Sep 13 23:06:00 2022 ] Eval epoch: 64
[ Tue Sep 13 23:08:55 2022 ] Mean test loss of 930 batches: 3.0037639141082764.
[ Tue Sep 13 23:08:56 2022 ] Top1: 55.98%
[ Tue Sep 13 23:08:56 2022 ] Top5: 82.32%
[ Tue Sep 13 23:08:56 2022 ] Training epoch: 65
[ Tue Sep 13 23:09:16 2022 ] Batch(31/162) done. Loss: 0.0785 lr:0.010000
[ Tue Sep 13 23:10:09 2022 ] Batch(131/162) done. Loss: 0.0441 lr:0.010000
[ Tue Sep 13 23:10:25 2022 ] Eval epoch: 65
[ Tue Sep 13 23:13:21 2022 ] Mean test loss of 930 batches: 3.0986738204956055.
[ Tue Sep 13 23:13:21 2022 ] Top1: 54.00%
[ Tue Sep 13 23:13:22 2022 ] Top5: 81.36%
[ Tue Sep 13 23:13:22 2022 ] Training epoch: 66
[ Tue Sep 13 23:14:02 2022 ] Batch(69/162) done. Loss: 0.0315 lr:0.010000
[ Tue Sep 13 23:14:51 2022 ] Eval epoch: 66
[ Tue Sep 13 23:17:47 2022 ] Mean test loss of 930 batches: 2.9420394897460938.
[ Tue Sep 13 23:17:47 2022 ] Top1: 55.64%
[ Tue Sep 13 23:17:47 2022 ] Top5: 82.06%
[ Tue Sep 13 23:17:48 2022 ] Training epoch: 67
[ Tue Sep 13 23:17:55 2022 ] Batch(7/162) done. Loss: 0.0441 lr:0.010000
[ Tue Sep 13 23:18:48 2022 ] Batch(107/162) done. Loss: 0.1258 lr:0.010000
[ Tue Sep 13 23:19:17 2022 ] Eval epoch: 67
[ Tue Sep 13 23:22:12 2022 ] Mean test loss of 930 batches: 3.000858783721924.
[ Tue Sep 13 23:22:12 2022 ] Top1: 53.96%
[ Tue Sep 13 23:22:13 2022 ] Top5: 81.23%
[ Tue Sep 13 23:22:13 2022 ] Training epoch: 68
[ Tue Sep 13 23:22:40 2022 ] Batch(45/162) done. Loss: 0.0435 lr:0.010000
[ Tue Sep 13 23:23:34 2022 ] Batch(145/162) done. Loss: 0.0925 lr:0.010000
[ Tue Sep 13 23:23:42 2022 ] Eval epoch: 68
[ Tue Sep 13 23:26:38 2022 ] Mean test loss of 930 batches: 3.1119837760925293.
[ Tue Sep 13 23:26:38 2022 ] Top1: 54.25%
[ Tue Sep 13 23:26:39 2022 ] Top5: 81.33%
[ Tue Sep 13 23:26:39 2022 ] Training epoch: 69
[ Tue Sep 13 23:27:26 2022 ] Batch(83/162) done. Loss: 0.0659 lr:0.010000
[ Tue Sep 13 23:28:08 2022 ] Eval epoch: 69
[ Tue Sep 13 23:31:04 2022 ] Mean test loss of 930 batches: 3.1158344745635986.
[ Tue Sep 13 23:31:04 2022 ] Top1: 55.40%
[ Tue Sep 13 23:31:05 2022 ] Top5: 81.91%
[ Tue Sep 13 23:31:05 2022 ] Training epoch: 70
[ Tue Sep 13 23:31:19 2022 ] Batch(21/162) done. Loss: 0.1093 lr:0.010000
[ Tue Sep 13 23:32:13 2022 ] Batch(121/162) done. Loss: 0.0139 lr:0.010000
[ Tue Sep 13 23:32:34 2022 ] Eval epoch: 70
[ Tue Sep 13 23:35:30 2022 ] Mean test loss of 930 batches: 3.0413055419921875.
[ Tue Sep 13 23:35:30 2022 ] Top1: 55.38%
[ Tue Sep 13 23:35:31 2022 ] Top5: 82.09%
[ Tue Sep 13 23:35:31 2022 ] Training epoch: 71
[ Tue Sep 13 23:36:05 2022 ] Batch(59/162) done. Loss: 0.0630 lr:0.010000
[ Tue Sep 13 23:36:59 2022 ] Batch(159/162) done. Loss: 0.0268 lr:0.010000
[ Tue Sep 13 23:37:00 2022 ] Eval epoch: 71
[ Tue Sep 13 23:39:56 2022 ] Mean test loss of 930 batches: 3.042304515838623.
[ Tue Sep 13 23:39:56 2022 ] Top1: 55.49%
[ Tue Sep 13 23:39:56 2022 ] Top5: 81.92%
[ Tue Sep 13 23:39:57 2022 ] Training epoch: 72
[ Tue Sep 13 23:40:52 2022 ] Batch(97/162) done. Loss: 0.0265 lr:0.010000
[ Tue Sep 13 23:41:26 2022 ] Eval epoch: 72
[ Tue Sep 13 23:44:22 2022 ] Mean test loss of 930 batches: 2.9594500064849854.
[ Tue Sep 13 23:44:22 2022 ] Top1: 56.20%
[ Tue Sep 13 23:44:23 2022 ] Top5: 82.35%
[ Tue Sep 13 23:44:23 2022 ] Training epoch: 73
[ Tue Sep 13 23:44:45 2022 ] Batch(35/162) done. Loss: 0.0143 lr:0.010000
[ Tue Sep 13 23:45:38 2022 ] Batch(135/162) done. Loss: 0.0321 lr:0.010000
[ Tue Sep 13 23:45:53 2022 ] Eval epoch: 73
[ Tue Sep 13 23:48:48 2022 ] Mean test loss of 930 batches: 3.0188536643981934.
[ Tue Sep 13 23:48:49 2022 ] Top1: 55.75%
[ Tue Sep 13 23:48:49 2022 ] Top5: 82.11%
[ Tue Sep 13 23:48:49 2022 ] Training epoch: 74
[ Tue Sep 13 23:49:31 2022 ] Batch(73/162) done. Loss: 0.0452 lr:0.010000
[ Tue Sep 13 23:50:19 2022 ] Eval epoch: 74
[ Tue Sep 13 23:53:14 2022 ] Mean test loss of 930 batches: 3.0516719818115234.
[ Tue Sep 13 23:53:15 2022 ] Top1: 55.80%
[ Tue Sep 13 23:53:15 2022 ] Top5: 82.16%
[ Tue Sep 13 23:53:15 2022 ] Training epoch: 75
[ Tue Sep 13 23:53:24 2022 ] Batch(11/162) done. Loss: 0.0553 lr:0.010000
[ Tue Sep 13 23:54:18 2022 ] Batch(111/162) done. Loss: 0.0420 lr:0.010000
[ Tue Sep 13 23:54:45 2022 ] Eval epoch: 75
[ Tue Sep 13 23:57:41 2022 ] Mean test loss of 930 batches: 3.168367624282837.
[ Tue Sep 13 23:57:42 2022 ] Top1: 53.77%
[ Tue Sep 13 23:57:42 2022 ] Top5: 80.80%
[ Tue Sep 13 23:57:42 2022 ] Training epoch: 76
[ Tue Sep 13 23:58:12 2022 ] Batch(49/162) done. Loss: 0.0311 lr:0.010000
[ Tue Sep 13 23:59:05 2022 ] Batch(149/162) done. Loss: 0.0690 lr:0.010000
[ Tue Sep 13 23:59:12 2022 ] Eval epoch: 76
[ Wed Sep 14 00:02:07 2022 ] Mean test loss of 930 batches: 3.2538864612579346.
[ Wed Sep 14 00:02:08 2022 ] Top1: 55.50%
[ Wed Sep 14 00:02:08 2022 ] Top5: 81.82%
[ Wed Sep 14 00:02:08 2022 ] Training epoch: 77
[ Wed Sep 14 00:02:58 2022 ] Batch(87/162) done. Loss: 0.0741 lr:0.010000
[ Wed Sep 14 00:03:38 2022 ] Eval epoch: 77
[ Wed Sep 14 00:06:33 2022 ] Mean test loss of 930 batches: 3.240590810775757.
[ Wed Sep 14 00:06:33 2022 ] Top1: 55.54%
[ Wed Sep 14 00:06:34 2022 ] Top5: 82.00%
[ Wed Sep 14 00:06:34 2022 ] Training epoch: 78
[ Wed Sep 14 00:06:51 2022 ] Batch(25/162) done. Loss: 0.0154 lr:0.010000
[ Wed Sep 14 00:07:44 2022 ] Batch(125/162) done. Loss: 0.0165 lr:0.010000
[ Wed Sep 14 00:08:04 2022 ] Eval epoch: 78
[ Wed Sep 14 00:10:59 2022 ] Mean test loss of 930 batches: 3.1219122409820557.
[ Wed Sep 14 00:10:59 2022 ] Top1: 55.73%
[ Wed Sep 14 00:11:00 2022 ] Top5: 82.06%
[ Wed Sep 14 00:11:00 2022 ] Training epoch: 79
[ Wed Sep 14 00:11:37 2022 ] Batch(63/162) done. Loss: 0.0136 lr:0.010000
[ Wed Sep 14 00:12:29 2022 ] Eval epoch: 79
[ Wed Sep 14 00:15:25 2022 ] Mean test loss of 930 batches: 3.2684264183044434.
[ Wed Sep 14 00:15:25 2022 ] Top1: 54.38%
[ Wed Sep 14 00:15:25 2022 ] Top5: 81.33%
[ Wed Sep 14 00:15:26 2022 ] Training epoch: 80
[ Wed Sep 14 00:15:30 2022 ] Batch(1/162) done. Loss: 0.0127 lr:0.010000
[ Wed Sep 14 00:16:23 2022 ] Batch(101/162) done. Loss: 0.0567 lr:0.010000
[ Wed Sep 14 00:16:55 2022 ] Eval epoch: 80
[ Wed Sep 14 00:19:50 2022 ] Mean test loss of 930 batches: 3.172088623046875.
[ Wed Sep 14 00:19:50 2022 ] Top1: 55.38%
[ Wed Sep 14 00:19:51 2022 ] Top5: 81.78%
[ Wed Sep 14 00:19:51 2022 ] Training epoch: 81
[ Wed Sep 14 00:20:15 2022 ] Batch(39/162) done. Loss: 0.0603 lr:0.001000
[ Wed Sep 14 00:21:08 2022 ] Batch(139/162) done. Loss: 0.0486 lr:0.001000
[ Wed Sep 14 00:21:20 2022 ] Eval epoch: 81
[ Wed Sep 14 00:24:16 2022 ] Mean test loss of 930 batches: 3.256845474243164.
[ Wed Sep 14 00:24:16 2022 ] Top1: 55.29%
[ Wed Sep 14 00:24:17 2022 ] Top5: 81.75%
[ Wed Sep 14 00:24:17 2022 ] Training epoch: 82
[ Wed Sep 14 00:25:01 2022 ] Batch(77/162) done. Loss: 0.0205 lr:0.001000
[ Wed Sep 14 00:25:47 2022 ] Eval epoch: 82
[ Wed Sep 14 00:28:41 2022 ] Mean test loss of 930 batches: 3.354170322418213.
[ Wed Sep 14 00:28:42 2022 ] Top1: 54.68%
[ Wed Sep 14 00:28:42 2022 ] Top5: 81.45%
[ Wed Sep 14 00:28:42 2022 ] Training epoch: 83
[ Wed Sep 14 00:28:54 2022 ] Batch(15/162) done. Loss: 0.0281 lr:0.001000
[ Wed Sep 14 00:29:47 2022 ] Batch(115/162) done. Loss: 0.0510 lr:0.001000
[ Wed Sep 14 00:30:12 2022 ] Eval epoch: 83
[ Wed Sep 14 00:33:07 2022 ] Mean test loss of 930 batches: 3.1280291080474854.
[ Wed Sep 14 00:33:08 2022 ] Top1: 55.75%
[ Wed Sep 14 00:33:08 2022 ] Top5: 82.10%
[ Wed Sep 14 00:33:08 2022 ] Training epoch: 84
[ Wed Sep 14 00:33:40 2022 ] Batch(53/162) done. Loss: 0.0216 lr:0.001000
[ Wed Sep 14 00:34:33 2022 ] Batch(153/162) done. Loss: 0.0449 lr:0.001000
[ Wed Sep 14 00:34:38 2022 ] Eval epoch: 84
[ Wed Sep 14 00:37:33 2022 ] Mean test loss of 930 batches: 3.2356135845184326.
[ Wed Sep 14 00:37:33 2022 ] Top1: 55.73%
[ Wed Sep 14 00:37:34 2022 ] Top5: 82.04%
[ Wed Sep 14 00:37:34 2022 ] Training epoch: 85
[ Wed Sep 14 00:38:26 2022 ] Batch(91/162) done. Loss: 0.0204 lr:0.001000
[ Wed Sep 14 00:39:03 2022 ] Eval epoch: 85
[ Wed Sep 14 00:41:59 2022 ] Mean test loss of 930 batches: 3.166569471359253.
[ Wed Sep 14 00:42:00 2022 ] Top1: 55.83%
[ Wed Sep 14 00:42:00 2022 ] Top5: 82.28%
[ Wed Sep 14 00:42:01 2022 ] Training epoch: 86
[ Wed Sep 14 00:42:19 2022 ] Batch(29/162) done. Loss: 0.1386 lr:0.001000
[ Wed Sep 14 00:43:13 2022 ] Batch(129/162) done. Loss: 0.0539 lr:0.001000
[ Wed Sep 14 00:43:30 2022 ] Eval epoch: 86
[ Wed Sep 14 00:46:25 2022 ] Mean test loss of 930 batches: 3.0878560543060303.
[ Wed Sep 14 00:46:26 2022 ] Top1: 56.25%
[ Wed Sep 14 00:46:26 2022 ] Top5: 82.38%
[ Wed Sep 14 00:46:26 2022 ] Training epoch: 87
[ Wed Sep 14 00:47:05 2022 ] Batch(67/162) done. Loss: 0.0155 lr:0.001000
[ Wed Sep 14 00:47:56 2022 ] Eval epoch: 87
[ Wed Sep 14 00:50:51 2022 ] Mean test loss of 930 batches: 3.159611940383911.
[ Wed Sep 14 00:50:51 2022 ] Top1: 55.64%
[ Wed Sep 14 00:50:52 2022 ] Top5: 82.13%
[ Wed Sep 14 00:50:52 2022 ] Training epoch: 88
[ Wed Sep 14 00:50:58 2022 ] Batch(5/162) done. Loss: 0.0725 lr:0.001000
[ Wed Sep 14 00:51:51 2022 ] Batch(105/162) done. Loss: 0.0445 lr:0.001000
[ Wed Sep 14 00:52:21 2022 ] Eval epoch: 88
[ Wed Sep 14 00:55:17 2022 ] Mean test loss of 930 batches: 3.1161746978759766.
[ Wed Sep 14 00:55:18 2022 ] Top1: 56.49%
[ Wed Sep 14 00:55:18 2022 ] Top5: 82.62%
[ Wed Sep 14 00:55:18 2022 ] Training epoch: 89
[ Wed Sep 14 00:55:44 2022 ] Batch(43/162) done. Loss: 0.0231 lr:0.001000
[ Wed Sep 14 00:56:38 2022 ] Batch(143/162) done. Loss: 0.0668 lr:0.001000
[ Wed Sep 14 00:56:48 2022 ] Eval epoch: 89
[ Wed Sep 14 00:59:43 2022 ] Mean test loss of 930 batches: 3.1481363773345947.
[ Wed Sep 14 00:59:43 2022 ] Top1: 55.74%
[ Wed Sep 14 00:59:44 2022 ] Top5: 82.12%
[ Wed Sep 14 00:59:44 2022 ] Training epoch: 90
[ Wed Sep 14 01:00:31 2022 ] Batch(81/162) done. Loss: 0.0568 lr:0.001000
[ Wed Sep 14 01:01:14 2022 ] Eval epoch: 90
[ Wed Sep 14 01:04:09 2022 ] Mean test loss of 930 batches: 3.179015874862671.
[ Wed Sep 14 01:04:09 2022 ] Top1: 55.97%
[ Wed Sep 14 01:04:10 2022 ] Top5: 82.25%
[ Wed Sep 14 01:04:10 2022 ] Training epoch: 91
[ Wed Sep 14 01:04:23 2022 ] Batch(19/162) done. Loss: 0.0575 lr:0.001000
[ Wed Sep 14 01:05:17 2022 ] Batch(119/162) done. Loss: 0.0452 lr:0.001000
[ Wed Sep 14 01:05:40 2022 ] Eval epoch: 91
[ Wed Sep 14 01:08:35 2022 ] Mean test loss of 930 batches: 3.1520650386810303.
[ Wed Sep 14 01:08:36 2022 ] Top1: 55.99%
[ Wed Sep 14 01:08:36 2022 ] Top5: 82.21%
[ Wed Sep 14 01:08:36 2022 ] Training epoch: 92
[ Wed Sep 14 01:09:10 2022 ] Batch(57/162) done. Loss: 0.0443 lr:0.001000
[ Wed Sep 14 01:10:03 2022 ] Batch(157/162) done. Loss: 0.0655 lr:0.001000
[ Wed Sep 14 01:10:06 2022 ] Eval epoch: 92
[ Wed Sep 14 01:13:01 2022 ] Mean test loss of 930 batches: 3.122722864151001.
[ Wed Sep 14 01:13:01 2022 ] Top1: 55.44%
[ Wed Sep 14 01:13:02 2022 ] Top5: 81.86%
[ Wed Sep 14 01:13:02 2022 ] Training epoch: 93
[ Wed Sep 14 01:13:56 2022 ] Batch(95/162) done. Loss: 0.0390 lr:0.001000
[ Wed Sep 14 01:14:31 2022 ] Eval epoch: 93
[ Wed Sep 14 01:17:27 2022 ] Mean test loss of 930 batches: 3.3127288818359375.
[ Wed Sep 14 01:17:27 2022 ] Top1: 53.86%
[ Wed Sep 14 01:17:28 2022 ] Top5: 81.14%
[ Wed Sep 14 01:17:28 2022 ] Training epoch: 94
[ Wed Sep 14 01:17:49 2022 ] Batch(33/162) done. Loss: 0.0923 lr:0.001000
[ Wed Sep 14 01:18:42 2022 ] Batch(133/162) done. Loss: 0.0343 lr:0.001000
[ Wed Sep 14 01:18:58 2022 ] Eval epoch: 94
[ Wed Sep 14 01:21:53 2022 ] Mean test loss of 930 batches: 3.1064746379852295.
[ Wed Sep 14 01:21:53 2022 ] Top1: 56.02%
[ Wed Sep 14 01:21:54 2022 ] Top5: 82.27%
[ Wed Sep 14 01:21:54 2022 ] Training epoch: 95
[ Wed Sep 14 01:22:35 2022 ] Batch(71/162) done. Loss: 0.0166 lr:0.001000
[ Wed Sep 14 01:23:24 2022 ] Eval epoch: 95
[ Wed Sep 14 01:26:19 2022 ] Mean test loss of 930 batches: 3.1581296920776367.
[ Wed Sep 14 01:26:19 2022 ] Top1: 53.96%
[ Wed Sep 14 01:26:20 2022 ] Top5: 81.01%
[ Wed Sep 14 01:26:20 2022 ] Training epoch: 96
[ Wed Sep 14 01:26:28 2022 ] Batch(9/162) done. Loss: 0.0111 lr:0.001000
[ Wed Sep 14 01:27:22 2022 ] Batch(109/162) done. Loss: 0.0800 lr:0.001000
[ Wed Sep 14 01:27:50 2022 ] Eval epoch: 96
[ Wed Sep 14 01:30:45 2022 ] Mean test loss of 930 batches: 3.182723045349121.
[ Wed Sep 14 01:30:45 2022 ] Top1: 55.66%
[ Wed Sep 14 01:30:46 2022 ] Top5: 82.06%
[ Wed Sep 14 01:30:46 2022 ] Training epoch: 97
[ Wed Sep 14 01:31:14 2022 ] Batch(47/162) done. Loss: 0.0364 lr:0.001000
[ Wed Sep 14 01:32:08 2022 ] Batch(147/162) done. Loss: 0.0555 lr:0.001000
[ Wed Sep 14 01:32:16 2022 ] Eval epoch: 97
[ Wed Sep 14 01:35:11 2022 ] Mean test loss of 930 batches: 3.2519757747650146.
[ Wed Sep 14 01:35:12 2022 ] Top1: 55.02%
[ Wed Sep 14 01:35:12 2022 ] Top5: 81.75%
[ Wed Sep 14 01:35:12 2022 ] Training epoch: 98
[ Wed Sep 14 01:36:01 2022 ] Batch(85/162) done. Loss: 0.0209 lr:0.001000
[ Wed Sep 14 01:36:42 2022 ] Eval epoch: 98
[ Wed Sep 14 01:39:37 2022 ] Mean test loss of 930 batches: 3.2562928199768066.
[ Wed Sep 14 01:39:37 2022 ] Top1: 55.94%
[ Wed Sep 14 01:39:38 2022 ] Top5: 82.22%
[ Wed Sep 14 01:39:38 2022 ] Training epoch: 99
[ Wed Sep 14 01:39:54 2022 ] Batch(23/162) done. Loss: 0.1453 lr:0.001000
[ Wed Sep 14 01:40:47 2022 ] Batch(123/162) done. Loss: 0.0869 lr:0.001000
[ Wed Sep 14 01:41:08 2022 ] Eval epoch: 99
[ Wed Sep 14 01:44:03 2022 ] Mean test loss of 930 batches: 3.168196201324463.
[ Wed Sep 14 01:44:03 2022 ] Top1: 55.50%
[ Wed Sep 14 01:44:04 2022 ] Top5: 81.94%
[ Wed Sep 14 01:44:04 2022 ] Training epoch: 100
[ Wed Sep 14 01:44:40 2022 ] Batch(61/162) done. Loss: 0.0889 lr:0.001000
[ Wed Sep 14 01:45:33 2022 ] Batch(161/162) done. Loss: 0.0700 lr:0.001000
[ Wed Sep 14 01:45:34 2022 ] Eval epoch: 100
[ Wed Sep 14 01:48:28 2022 ] Mean test loss of 930 batches: 3.1338253021240234.
[ Wed Sep 14 01:48:29 2022 ] Top1: 55.56%
[ Wed Sep 14 01:48:29 2022 ] Top5: 82.05%
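The Top1/Top5 figures logged after each evaluation are top-k accuracies: the percentage of test samples whose ground-truth class is among the model's k highest-scoring predictions. A minimal sketch of that computation (not the repo's own evaluation code; it assumes `logits` of shape (N, num_class) and integer class `labels`):

import torch

def topk_accuracy(logits, labels, k):
    # True where the ground-truth label appears among the k top-scoring classes.
    topk = logits.topk(k, dim=1).indices             # (N, k)
    hits = (topk == labels.unsqueeze(1)).any(dim=1)  # (N,)
    return 100.0 * hits.float().mean().item()

# e.g. topk_accuracy(logits, labels, 1) and topk_accuracy(logits, labels, 5)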
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_xset/config.yaml
ADDED
@@ -0,0 +1,59 @@
Experiment_name: ntu120_joint_xset
base_lr: 0.1
batch_size: 64
config: ./config/ntu120_xset/train_joint.yaml
device:
- 4
- 5
eval_interval: 5
feeder: feeders.feeder.Feeder
groups: 8
ignore_weights: []
keep_rate: 0.9
log_interval: 100
model: model.decouple_gcn.Model
model_args:
  block_size: 41
  graph: graph.ntu_rgb_d.Graph
  graph_args:
    labeling_mode: spatial
  groups: 16
  num_class: 120
  num_person: 2
  num_point: 25
model_saved_name: ./save_models/ntu120_joint_xset
nesterov: true
num_epoch: 100
num_worker: 32
only_train_epoch: 1
only_train_part: true
optimizer: SGD
phase: train
print_log: true
save_interval: 2
save_score: false
seed: 1
show_topk:
- 1
- 5
start_epoch: 0
step:
- 60
- 80
test_batch_size: 64
test_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint.npy
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
train_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint.npy
  debug: false
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
  normalization: false
  random_choose: false
  random_move: false
  random_shift: false
  window_size: -1
warm_up_epoch: 0
weight_decay: 0.0001
weights: null
work_dir: ./work_dir/ntu120_joint_xset
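With base_lr: 0.1 and step: [60, 80], the run uses a step-decay schedule: the learning rate drops by a factor of 10 at each milestone, which matches the lr values in the logs (0.100000 through epoch 60, 0.010000 from epoch 61, 0.001000 from epoch 81). A minimal sketch of that schedule, assuming the decay factor of 0.1 implied by the log:

def step_lr(epoch, base_lr=0.1, steps=(60, 80), gamma=0.1):
    # Divide the base rate by 10 for every milestone the epoch has passed.
    return base_lr * gamma ** sum(epoch > s for s in steps)

# step_lr(60) -> 0.1, step_lr(61) -> 0.01, step_lr(81) -> 0.001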
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_xset/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import math
from model.dropSke import DropBlock_Ske
from model.dropT import DropBlockT_1d


def import_class(name):
    # Resolve a dotted path like 'graph.ntu_rgb_d.Graph' to the class object.
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod


def conv_branch_init(conv):
    # In-place initializers (the deprecated non-underscore aliases replaced
    # with nn.init.normal_/constant_/kaiming_normal_).
    weight = conv.weight
    n = weight.size(0)
    k1 = weight.size(1)
    k2 = weight.size(2)
    nn.init.normal_(weight, 0, math.sqrt(2. / (n * k1 * k2)))
    nn.init.constant_(conv.bias, 0)


def conv_init(conv):
    nn.init.kaiming_normal_(conv.weight, mode='fan_out')
    nn.init.constant_(conv.bias, 0)


def bn_init(bn, scale):
    nn.init.constant_(bn.weight, scale)
    nn.init.constant_(bn.bias, 0)


class unit_tcn(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
        super(unit_tcn, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

        self.dropS = DropBlock_Ske(num_point=num_point)
        self.dropT = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob, A):
        x = self.bn(self.conv(x))
        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
        return x


class unit_tcn_skip(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
        super(unit_tcn_skip, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

    def forward(self, x):
        x = self.bn(self.conv(x))
        return x


class unit_gcn(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
        super(unit_gcn, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_point = num_point
        self.groups = groups
        self.num_subset = num_subset
        # Decoupled adjacency: one trainable copy of the 3-subset spatial graph
        # per channel group.
        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)

        if in_channels != out_channels:
            self.down = nn.Sequential(
                nn.Conv2d(in_channels, out_channels, 1),
                nn.BatchNorm2d(out_channels)
            )
        else:
            self.down = lambda x: x

        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                bn_init(m, 1)
        bn_init(self.bn, 1e-6)

        self.Linear_weight = nn.Parameter(torch.zeros(
            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
            0.5 / (out_channels * num_subset)))

        self.Linear_bias = nn.Parameter(torch.zeros(
            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.constant_(self.Linear_bias, 1e-6)

        eye_array = []
        for i in range(out_channels):
            eye_array.append(torch.eye(num_point))
        self.eyes = nn.Parameter(torch.stack(
            eye_array).to('cuda'), requires_grad=False)  # [c,25,25]

    def norm(self, A):
        # Per-channel inverse-degree normalization: A <- A @ D^{-1}.
        b, c, h, w = A.size()
        A = A.view(c, self.num_point, self.num_point)
        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
        D_list_12 = (D_list + 0.001)**(-1)
        D_12 = self.eyes * D_list_12
        A = torch.bmm(A, D_12).view(b, c, h, w)
        return A

    def forward(self, x0):
        learn_A = self.DecoupleA.repeat(
            1, self.out_channels // self.groups, 1, 1)
        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)

        # Pointwise linear map expanding channels to out_channels * num_subset.
        x = torch.einsum(
            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
        x = x + self.Linear_bias
        x = self.bn0(x)

        # Aggregate over joints with the normalized decoupled adjacency,
        # one adjacency per subset and per channel.
        n, kc, t, v = x.size()
        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))

        x = self.bn(x)
        x += self.down(x0)
        x = self.relu(x)
        return x


class TCN_GCN_unit(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
        super(TCN_GCN_unit, self).__init__()
        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
        self.tcn1 = unit_tcn(out_channels, out_channels,
                             stride=stride, num_point=num_point)
        self.relu = nn.ReLU()

        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)

        if not residual:
            self.residual = lambda x: 0

        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x

        else:
            self.residual = unit_tcn_skip(
                in_channels, out_channels, kernel_size=1, stride=stride)
        self.dropSke = DropBlock_Ske(num_point=num_point)
        self.dropT_skip = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob):
        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
        return self.relu(x)


class Model(nn.Module):
    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
        super(Model, self).__init__()

        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)

        A = self.graph.A
        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)

        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
                               block_size, residual=False)
        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l5 = TCN_GCN_unit(
            64, 128, A, groups, num_point, block_size, stride=2)
        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l8 = TCN_GCN_unit(128, 256, A, groups,
                               num_point, block_size, stride=2)
        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)

        self.fc = nn.Linear(256, num_class)
        nn.init.normal_(self.fc.weight, 0, math.sqrt(2. / num_class))
        bn_init(self.data_bn, 1)

    def forward(self, x, keep_prob=0.9):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(
            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        # Early layers run with keep_prob 1.0 (no DropBlock); the regularizer
        # is only applied from l7 onward.
        x = self.l1(x, 1.0)
        x = self.l2(x, 1.0)
        x = self.l3(x, 1.0)
        x = self.l4(x, 1.0)
        x = self.l5(x, 1.0)
        x = self.l6(x, 1.0)
        x = self.l7(x, keep_prob)
        x = self.l8(x, keep_prob)
        x = self.l9(x, keep_prob)
        x = self.l10(x, keep_prob)

        # N*M,C,T,V
        c_new = x.size(1)
        x = x.reshape(N, M, c_new, -1)
        x = x.mean(3).mean(1)

        return self.fc(x)
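A minimal usage sketch for the Model above, using the model_args from this checkpoint's config.yaml. It assumes the repo's graph and model packages are on the import path and that a CUDA device is available (several parameters above are created directly on 'cuda'):

import torch
from model.decouple_gcn import Model

model = Model(num_class=120, num_point=25, num_person=2, groups=16,
              block_size=41, graph='graph.ntu_rgb_d.Graph',
              graph_args={'labeling_mode': 'spatial'}).cuda()

# Input layout is (N, C, T, V, M): batch, channels, frames, joints, persons.
x = torch.randn(2, 3, 64, 25, 2).cuda()
logits = model(x, keep_prob=0.9)  # -> shape (2, 120)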
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_xset/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:00b889d584186751762a5588f6ce7eb9e76bf9cee0dde92ea70cd0316a4f3d37
size 34946665
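This best_acc.pkl is stored as a Git LFS pointer: the three lines above record only the spec version, the SHA-256 of the real file, and its size (about 35 MB), and `git lfs pull` replaces the pointer with the actual pickle. A small sketch, with a hypothetical local path, that verifies a downloaded copy against the pointer's oid:

import hashlib

def sha256_of(path, chunk=1 << 20):
    # Stream the file so large checkpoints don't have to fit in memory.
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(chunk), b''):
            h.update(block)
    return h.hexdigest()

# Hypothetical path to the fetched file.
assert sha256_of('eval_results/best_acc.pkl') == (
    '00b889d584186751762a5588f6ce7eb9e76bf9cee0dde92ea70cd0316a4f3d37')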
ckpt/Others/DC-GCN+ADG/ntu120_xset/ntu120_joint_xset/log.txt
ADDED
@@ -0,0 +1,665 @@
[ Tue Sep 13 18:24:44 2022 ] Parameters:
{'work_dir': './work_dir/ntu120_joint_xset', 'model_saved_name': './save_models/ntu120_joint_xset', 'Experiment_name': 'ntu120_joint_xset', 'config': './config/ntu120_xset/train_joint.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [4, 5], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}

[ Tue Sep 13 18:24:44 2022 ] Training epoch: 1
[ Tue Sep 13 18:25:39 2022 ] Batch(99/162) done. Loss: 3.8180 lr:0.100000
[ Tue Sep 13 18:26:07 2022 ] Eval epoch: 1
[ Tue Sep 13 18:29:00 2022 ] Mean test loss of 930 batches: 5.057918071746826.
[ Tue Sep 13 18:29:00 2022 ] Top1: 5.31%
[ Tue Sep 13 18:29:01 2022 ] Top5: 19.98%
[ Tue Sep 13 18:29:01 2022 ] Training epoch: 2
[ Tue Sep 13 18:29:24 2022 ] Batch(37/162) done. Loss: 3.4480 lr:0.100000
[ Tue Sep 13 18:30:17 2022 ] Batch(137/162) done. Loss: 3.0256 lr:0.100000
[ Tue Sep 13 18:30:29 2022 ] Eval epoch: 2
[ Tue Sep 13 18:33:23 2022 ] Mean test loss of 930 batches: 4.520117282867432.
[ Tue Sep 13 18:33:23 2022 ] Top1: 12.39%
[ Tue Sep 13 18:33:24 2022 ] Top5: 32.08%
[ Tue Sep 13 18:33:24 2022 ] Training epoch: 3
[ Tue Sep 13 18:34:07 2022 ] Batch(75/162) done. Loss: 2.6454 lr:0.100000
[ Tue Sep 13 18:34:53 2022 ] Eval epoch: 3
[ Tue Sep 13 18:37:45 2022 ] Mean test loss of 930 batches: 4.802359580993652.
[ Tue Sep 13 18:37:46 2022 ] Top1: 12.20%
[ Tue Sep 13 18:37:46 2022 ] Top5: 35.43%
[ Tue Sep 13 18:37:46 2022 ] Training epoch: 4
[ Tue Sep 13 18:37:57 2022 ] Batch(13/162) done. Loss: 2.7732 lr:0.100000
[ Tue Sep 13 18:38:49 2022 ] Batch(113/162) done. Loss: 2.5822 lr:0.100000
[ Tue Sep 13 18:39:15 2022 ] Eval epoch: 4
[ Tue Sep 13 18:42:07 2022 ] Mean test loss of 930 batches: 4.510881423950195.
[ Tue Sep 13 18:42:08 2022 ] Top1: 15.24%
[ Tue Sep 13 18:42:08 2022 ] Top5: 40.40%
[ Tue Sep 13 18:42:08 2022 ] Training epoch: 5
[ Tue Sep 13 18:42:39 2022 ] Batch(51/162) done. Loss: 1.9599 lr:0.100000
[ Tue Sep 13 18:43:31 2022 ] Batch(151/162) done. Loss: 2.2079 lr:0.100000
[ Tue Sep 13 18:43:37 2022 ] Eval epoch: 5
[ Tue Sep 13 18:46:30 2022 ] Mean test loss of 930 batches: 3.514641523361206.
[ Tue Sep 13 18:46:30 2022 ] Top1: 21.95%
[ Tue Sep 13 18:46:30 2022 ] Top5: 45.98%
[ Tue Sep 13 18:46:31 2022 ] Training epoch: 6
[ Tue Sep 13 18:47:21 2022 ] Batch(89/162) done. Loss: 1.8766 lr:0.100000
[ Tue Sep 13 18:47:59 2022 ] Eval epoch: 6
[ Tue Sep 13 18:50:53 2022 ] Mean test loss of 930 batches: 3.3838963508605957.
[ Tue Sep 13 18:50:54 2022 ] Top1: 24.29%
[ Tue Sep 13 18:50:54 2022 ] Top5: 51.41%
[ Tue Sep 13 18:50:54 2022 ] Training epoch: 7
[ Tue Sep 13 18:51:12 2022 ] Batch(27/162) done. Loss: 1.7406 lr:0.100000
[ Tue Sep 13 18:52:05 2022 ] Batch(127/162) done. Loss: 1.5248 lr:0.100000
[ Tue Sep 13 18:52:23 2022 ] Eval epoch: 7
[ Tue Sep 13 18:55:16 2022 ] Mean test loss of 930 batches: 3.594515562057495.
[ Tue Sep 13 18:55:16 2022 ] Top1: 22.54%
[ Tue Sep 13 18:55:17 2022 ] Top5: 49.68%
[ Tue Sep 13 18:55:17 2022 ] Training epoch: 8
[ Tue Sep 13 18:55:55 2022 ] Batch(65/162) done. Loss: 1.9312 lr:0.100000
[ Tue Sep 13 18:56:46 2022 ] Eval epoch: 8
[ Tue Sep 13 18:59:39 2022 ] Mean test loss of 930 batches: 3.6537744998931885.
[ Tue Sep 13 18:59:40 2022 ] Top1: 23.86%
[ Tue Sep 13 18:59:40 2022 ] Top5: 51.98%
[ Tue Sep 13 18:59:41 2022 ] Training epoch: 9
[ Tue Sep 13 18:59:46 2022 ] Batch(3/162) done. Loss: 1.4093 lr:0.100000
[ Tue Sep 13 19:00:38 2022 ] Batch(103/162) done. Loss: 2.0028 lr:0.100000
[ Tue Sep 13 19:01:09 2022 ] Eval epoch: 9
[ Tue Sep 13 19:04:03 2022 ] Mean test loss of 930 batches: 3.2135202884674072.
[ Tue Sep 13 19:04:03 2022 ] Top1: 27.69%
[ Tue Sep 13 19:04:04 2022 ] Top5: 57.09%
[ Tue Sep 13 19:04:04 2022 ] Training epoch: 10
[ Tue Sep 13 19:04:29 2022 ] Batch(41/162) done. Loss: 1.3819 lr:0.100000
[ Tue Sep 13 19:05:22 2022 ] Batch(141/162) done. Loss: 1.3964 lr:0.100000
[ Tue Sep 13 19:05:32 2022 ] Eval epoch: 10
[ Tue Sep 13 19:08:26 2022 ] Mean test loss of 930 batches: 3.041006088256836.
[ Tue Sep 13 19:08:26 2022 ] Top1: 32.01%
[ Tue Sep 13 19:08:27 2022 ] Top5: 60.68%
[ Tue Sep 13 19:08:27 2022 ] Training epoch: 11
[ Tue Sep 13 19:09:12 2022 ] Batch(79/162) done. Loss: 1.6535 lr:0.100000
[ Tue Sep 13 19:09:55 2022 ] Eval epoch: 11
[ Tue Sep 13 19:12:48 2022 ] Mean test loss of 930 batches: 2.9078786373138428.
[ Tue Sep 13 19:12:48 2022 ] Top1: 31.72%
[ Tue Sep 13 19:12:49 2022 ] Top5: 63.92%
[ Tue Sep 13 19:12:49 2022 ] Training epoch: 12
[ Tue Sep 13 19:13:01 2022 ] Batch(17/162) done. Loss: 1.0866 lr:0.100000
[ Tue Sep 13 19:13:54 2022 ] Batch(117/162) done. Loss: 1.6844 lr:0.100000
[ Tue Sep 13 19:14:17 2022 ] Eval epoch: 12
[ Tue Sep 13 19:17:10 2022 ] Mean test loss of 930 batches: 2.8232834339141846.
[ Tue Sep 13 19:17:11 2022 ] Top1: 34.05%
[ Tue Sep 13 19:17:11 2022 ] Top5: 65.62%
[ Tue Sep 13 19:17:12 2022 ] Training epoch: 13
[ Tue Sep 13 19:17:44 2022 ] Batch(55/162) done. Loss: 1.5754 lr:0.100000
[ Tue Sep 13 19:18:36 2022 ] Batch(155/162) done. Loss: 1.3233 lr:0.100000
[ Tue Sep 13 19:18:40 2022 ] Eval epoch: 13
[ Tue Sep 13 19:21:33 2022 ] Mean test loss of 930 batches: 2.576730966567993.
[ Tue Sep 13 19:21:34 2022 ] Top1: 37.25%
[ Tue Sep 13 19:21:34 2022 ] Top5: 67.63%
[ Tue Sep 13 19:21:34 2022 ] Training epoch: 14
[ Tue Sep 13 19:22:27 2022 ] Batch(93/162) done. Loss: 0.9363 lr:0.100000
[ Tue Sep 13 19:23:03 2022 ] Eval epoch: 14
[ Tue Sep 13 19:25:56 2022 ] Mean test loss of 930 batches: 2.4898667335510254.
[ Tue Sep 13 19:25:57 2022 ] Top1: 39.04%
[ Tue Sep 13 19:25:57 2022 ] Top5: 71.56%
[ Tue Sep 13 19:25:57 2022 ] Training epoch: 15
[ Tue Sep 13 19:26:17 2022 ] Batch(31/162) done. Loss: 1.2431 lr:0.100000
[ Tue Sep 13 19:27:10 2022 ] Batch(131/162) done. Loss: 1.2638 lr:0.100000
[ Tue Sep 13 19:27:26 2022 ] Eval epoch: 15
[ Tue Sep 13 19:30:19 2022 ] Mean test loss of 930 batches: 8.205449104309082.
[ Tue Sep 13 19:30:19 2022 ] Top1: 15.85%
[ Tue Sep 13 19:30:20 2022 ] Top5: 45.73%
[ Tue Sep 13 19:30:20 2022 ] Training epoch: 16
[ Tue Sep 13 19:31:00 2022 ] Batch(69/162) done. Loss: 0.6821 lr:0.100000
[ Tue Sep 13 19:31:48 2022 ] Eval epoch: 16
[ Tue Sep 13 19:34:42 2022 ] Mean test loss of 930 batches: 2.558546781539917.
[ Tue Sep 13 19:34:43 2022 ] Top1: 37.41%
[ Tue Sep 13 19:34:43 2022 ] Top5: 69.58%
[ Tue Sep 13 19:34:44 2022 ] Training epoch: 17
[ Tue Sep 13 19:34:51 2022 ] Batch(7/162) done. Loss: 0.5426 lr:0.100000
[ Tue Sep 13 19:35:43 2022 ] Batch(107/162) done. Loss: 0.9380 lr:0.100000
[ Tue Sep 13 19:36:12 2022 ] Eval epoch: 17
[ Tue Sep 13 19:39:05 2022 ] Mean test loss of 930 batches: 2.8448023796081543.
[ Tue Sep 13 19:39:05 2022 ] Top1: 38.49%
[ Tue Sep 13 19:39:06 2022 ] Top5: 71.85%
[ Tue Sep 13 19:39:06 2022 ] Training epoch: 18
[ Tue Sep 13 19:39:33 2022 ] Batch(45/162) done. Loss: 0.8881 lr:0.100000
[ Tue Sep 13 19:40:26 2022 ] Batch(145/162) done. Loss: 1.2085 lr:0.100000
[ Tue Sep 13 19:40:35 2022 ] Eval epoch: 18
[ Tue Sep 13 19:43:28 2022 ] Mean test loss of 930 batches: 2.3035261631011963.
[ Tue Sep 13 19:43:29 2022 ] Top1: 44.94%
[ Tue Sep 13 19:43:30 2022 ] Top5: 76.12%
[ Tue Sep 13 19:43:30 2022 ] Training epoch: 19
[ Tue Sep 13 19:44:17 2022 ] Batch(83/162) done. Loss: 1.0136 lr:0.100000
[ Tue Sep 13 19:44:58 2022 ] Eval epoch: 19
[ Tue Sep 13 19:47:51 2022 ] Mean test loss of 930 batches: 2.334582805633545.
[ Tue Sep 13 19:47:51 2022 ] Top1: 43.35%
[ Tue Sep 13 19:47:52 2022 ] Top5: 75.37%
[ Tue Sep 13 19:47:52 2022 ] Training epoch: 20
[ Tue Sep 13 19:48:07 2022 ] Batch(21/162) done. Loss: 0.6658 lr:0.100000
[ Tue Sep 13 19:48:59 2022 ] Batch(121/162) done. Loss: 1.1169 lr:0.100000
[ Tue Sep 13 19:49:21 2022 ] Eval epoch: 20
[ Tue Sep 13 19:52:14 2022 ] Mean test loss of 930 batches: 2.371380567550659.
[ Tue Sep 13 19:52:15 2022 ] Top1: 43.80%
[ Tue Sep 13 19:52:16 2022 ] Top5: 74.84%
[ Tue Sep 13 19:52:16 2022 ] Training epoch: 21
[ Tue Sep 13 19:52:51 2022 ] Batch(59/162) done. Loss: 0.8850 lr:0.100000
[ Tue Sep 13 19:53:44 2022 ] Batch(159/162) done. Loss: 1.1289 lr:0.100000
[ Tue Sep 13 19:53:45 2022 ] Eval epoch: 21
[ Tue Sep 13 19:56:39 2022 ] Mean test loss of 930 batches: 4.497220039367676.
[ Tue Sep 13 19:56:39 2022 ] Top1: 32.04%
[ Tue Sep 13 19:56:40 2022 ] Top5: 64.57%
[ Tue Sep 13 19:56:40 2022 ] Training epoch: 22
[ Tue Sep 13 19:57:35 2022 ] Batch(97/162) done. Loss: 1.0195 lr:0.100000
[ Tue Sep 13 19:58:09 2022 ] Eval epoch: 22
[ Tue Sep 13 20:01:02 2022 ] Mean test loss of 930 batches: 2.406083345413208.
[ Tue Sep 13 20:01:03 2022 ] Top1: 45.50%
[ Tue Sep 13 20:01:04 2022 ] Top5: 76.21%
[ Tue Sep 13 20:01:04 2022 ] Training epoch: 23
[ Tue Sep 13 20:01:26 2022 ] Batch(35/162) done. Loss: 0.7738 lr:0.100000
[ Tue Sep 13 20:02:19 2022 ] Batch(135/162) done. Loss: 0.8067 lr:0.100000
[ Tue Sep 13 20:02:33 2022 ] Eval epoch: 23
[ Tue Sep 13 20:05:27 2022 ] Mean test loss of 930 batches: 3.3997855186462402.
[ Tue Sep 13 20:05:28 2022 ] Top1: 40.54%
[ Tue Sep 13 20:05:28 2022 ] Top5: 71.48%
[ Tue Sep 13 20:05:29 2022 ] Training epoch: 24
[ Tue Sep 13 20:06:10 2022 ] Batch(73/162) done. Loss: 0.7124 lr:0.100000
[ Tue Sep 13 20:06:57 2022 ] Eval epoch: 24
[ Tue Sep 13 20:09:51 2022 ] Mean test loss of 930 batches: 2.1338369846343994.
[ Tue Sep 13 20:09:51 2022 ] Top1: 48.53%
[ Tue Sep 13 20:09:52 2022 ] Top5: 78.32%
[ Tue Sep 13 20:09:52 2022 ] Training epoch: 25
[ Tue Sep 13 20:10:02 2022 ] Batch(11/162) done. Loss: 0.7499 lr:0.100000
[ Tue Sep 13 20:10:54 2022 ] Batch(111/162) done. Loss: 0.6216 lr:0.100000
[ Tue Sep 13 20:11:21 2022 ] Eval epoch: 25
[ Tue Sep 13 20:14:14 2022 ] Mean test loss of 930 batches: 3.167142152786255.
[ Tue Sep 13 20:14:15 2022 ] Top1: 41.00%
[ Tue Sep 13 20:14:16 2022 ] Top5: 73.00%
[ Tue Sep 13 20:14:16 2022 ] Training epoch: 26
[ Tue Sep 13 20:14:45 2022 ] Batch(49/162) done. Loss: 0.5860 lr:0.100000
[ Tue Sep 13 20:15:38 2022 ] Batch(149/162) done. Loss: 0.5114 lr:0.100000
[ Tue Sep 13 20:15:44 2022 ] Eval epoch: 26
[ Tue Sep 13 20:18:38 2022 ] Mean test loss of 930 batches: 2.810288667678833.
[ Tue Sep 13 20:18:39 2022 ] Top1: 44.65%
[ Tue Sep 13 20:18:39 2022 ] Top5: 75.26%
[ Tue Sep 13 20:18:40 2022 ] Training epoch: 27
[ Tue Sep 13 20:19:29 2022 ] Batch(87/162) done. Loss: 1.0034 lr:0.100000
[ Tue Sep 13 20:20:08 2022 ] Eval epoch: 27
[ Tue Sep 13 20:23:01 2022 ] Mean test loss of 930 batches: 2.8310000896453857.
[ Tue Sep 13 20:23:02 2022 ] Top1: 44.29%
[ Tue Sep 13 20:23:03 2022 ] Top5: 75.71%
[ Tue Sep 13 20:23:03 2022 ] Training epoch: 28
[ Tue Sep 13 20:23:19 2022 ] Batch(25/162) done. Loss: 0.5400 lr:0.100000
[ Tue Sep 13 20:24:12 2022 ] Batch(125/162) done. Loss: 0.6086 lr:0.100000
[ Tue Sep 13 20:24:31 2022 ] Eval epoch: 28
[ Tue Sep 13 20:27:25 2022 ] Mean test loss of 930 batches: 2.7002975940704346.
[ Tue Sep 13 20:27:26 2022 ] Top1: 43.56%
[ Tue Sep 13 20:27:26 2022 ] Top5: 75.30%
[ Tue Sep 13 20:27:26 2022 ] Training epoch: 29
[ Tue Sep 13 20:28:03 2022 ] Batch(63/162) done. Loss: 0.5889 lr:0.100000
[ Tue Sep 13 20:28:55 2022 ] Eval epoch: 29
[ Tue Sep 13 20:31:49 2022 ] Mean test loss of 930 batches: 2.3055806159973145.
[ Tue Sep 13 20:31:50 2022 ] Top1: 49.78%
[ Tue Sep 13 20:31:50 2022 ] Top5: 79.30%
[ Tue Sep 13 20:31:50 2022 ] Training epoch: 30
[ Tue Sep 13 20:31:54 2022 ] Batch(1/162) done. Loss: 0.5570 lr:0.100000
[ Tue Sep 13 20:32:47 2022 ] Batch(101/162) done. Loss: 0.4686 lr:0.100000
[ Tue Sep 13 20:33:19 2022 ] Eval epoch: 30
[ Tue Sep 13 20:36:11 2022 ] Mean test loss of 930 batches: 2.272409200668335.
[ Tue Sep 13 20:36:12 2022 ] Top1: 49.19%
[ Tue Sep 13 20:36:12 2022 ] Top5: 79.82%
[ Tue Sep 13 20:36:12 2022 ] Training epoch: 31
[ Tue Sep 13 20:36:36 2022 ] Batch(39/162) done. Loss: 0.4463 lr:0.100000
[ Tue Sep 13 20:37:29 2022 ] Batch(139/162) done. Loss: 0.4721 lr:0.100000
[ Tue Sep 13 20:37:41 2022 ] Eval epoch: 31
[ Tue Sep 13 20:40:34 2022 ] Mean test loss of 930 batches: 2.351616144180298.
[ Tue Sep 13 20:40:35 2022 ] Top1: 48.35%
[ Tue Sep 13 20:40:35 2022 ] Top5: 79.29%
[ Tue Sep 13 20:40:35 2022 ] Training epoch: 32
[ Tue Sep 13 20:41:20 2022 ] Batch(77/162) done. Loss: 0.4002 lr:0.100000
[ Tue Sep 13 20:42:04 2022 ] Eval epoch: 32
[ Tue Sep 13 20:44:57 2022 ] Mean test loss of 930 batches: 2.726222276687622.
[ Tue Sep 13 20:44:58 2022 ] Top1: 46.53%
[ Tue Sep 13 20:44:58 2022 ] Top5: 78.03%
[ Tue Sep 13 20:44:59 2022 ] Training epoch: 33
[ Tue Sep 13 20:45:10 2022 ] Batch(15/162) done. Loss: 0.4803 lr:0.100000
[ Tue Sep 13 20:46:03 2022 ] Batch(115/162) done. Loss: 0.8060 lr:0.100000
[ Tue Sep 13 20:46:27 2022 ] Eval epoch: 33
[ Tue Sep 13 20:49:21 2022 ] Mean test loss of 930 batches: 2.3054656982421875.
[ Tue Sep 13 20:49:21 2022 ] Top1: 48.85%
[ Tue Sep 13 20:49:22 2022 ] Top5: 79.23%
[ Tue Sep 13 20:49:22 2022 ] Training epoch: 34
[ Tue Sep 13 20:49:54 2022 ] Batch(53/162) done. Loss: 0.3195 lr:0.100000
[ Tue Sep 13 20:50:46 2022 ] Batch(153/162) done. Loss: 0.5020 lr:0.100000
[ Tue Sep 13 20:50:51 2022 ] Eval epoch: 34
[ Tue Sep 13 20:53:45 2022 ] Mean test loss of 930 batches: 2.202313184738159.
[ Tue Sep 13 20:53:45 2022 ] Top1: 51.52%
[ Tue Sep 13 20:53:46 2022 ] Top5: 81.74%
[ Tue Sep 13 20:53:46 2022 ] Training epoch: 35
[ Tue Sep 13 20:54:38 2022 ] Batch(91/162) done. Loss: 0.3751 lr:0.100000
[ Tue Sep 13 20:55:15 2022 ] Eval epoch: 35
[ Tue Sep 13 20:58:08 2022 ] Mean test loss of 930 batches: 2.8869104385375977.
[ Tue Sep 13 20:58:08 2022 ] Top1: 41.64%
[ Tue Sep 13 20:58:09 2022 ] Top5: 73.56%
[ Tue Sep 13 20:58:09 2022 ] Training epoch: 36
[ Tue Sep 13 20:58:28 2022 ] Batch(29/162) done. Loss: 0.4301 lr:0.100000
[ Tue Sep 13 20:59:20 2022 ] Batch(129/162) done. Loss: 0.5671 lr:0.100000
[ Tue Sep 13 20:59:37 2022 ] Eval epoch: 36
[ Tue Sep 13 21:02:31 2022 ] Mean test loss of 930 batches: 2.3880438804626465.
[ Tue Sep 13 21:02:31 2022 ] Top1: 49.24%
[ Tue Sep 13 21:02:32 2022 ] Top5: 79.65%
[ Tue Sep 13 21:02:32 2022 ] Training epoch: 37
[ Tue Sep 13 21:03:11 2022 ] Batch(67/162) done. Loss: 0.5165 lr:0.100000
[ Tue Sep 13 21:04:00 2022 ] Eval epoch: 37
[ Tue Sep 13 21:06:54 2022 ] Mean test loss of 930 batches: 3.3565597534179688.
[ Tue Sep 13 21:06:54 2022 ] Top1: 41.14%
[ Tue Sep 13 21:06:55 2022 ] Top5: 73.15%
[ Tue Sep 13 21:06:55 2022 ] Training epoch: 38
[ Tue Sep 13 21:07:01 2022 ] Batch(5/162) done. Loss: 0.1957 lr:0.100000
[ Tue Sep 13 21:07:54 2022 ] Batch(105/162) done. Loss: 0.4742 lr:0.100000
[ Tue Sep 13 21:08:24 2022 ] Eval epoch: 38
[ Tue Sep 13 21:11:17 2022 ] Mean test loss of 930 batches: 2.8878660202026367.
[ Tue Sep 13 21:11:18 2022 ] Top1: 46.58%
[ Tue Sep 13 21:11:19 2022 ] Top5: 77.30%
[ Tue Sep 13 21:11:19 2022 ] Training epoch: 39
[ Tue Sep 13 21:11:45 2022 ] Batch(43/162) done. Loss: 0.4849 lr:0.100000
[ Tue Sep 13 21:12:37 2022 ] Batch(143/162) done. Loss: 0.3489 lr:0.100000
[ Tue Sep 13 21:12:47 2022 ] Eval epoch: 39
[ Tue Sep 13 21:15:41 2022 ] Mean test loss of 930 batches: 2.192498207092285.
[ Tue Sep 13 21:15:41 2022 ] Top1: 52.32%
[ Tue Sep 13 21:15:42 2022 ] Top5: 81.30%
[ Tue Sep 13 21:15:42 2022 ] Training epoch: 40
[ Tue Sep 13 21:16:28 2022 ] Batch(81/162) done. Loss: 0.3842 lr:0.100000
[ Tue Sep 13 21:17:11 2022 ] Eval epoch: 40
[ Tue Sep 13 21:20:04 2022 ] Mean test loss of 930 batches: 2.615729331970215.
[ Tue Sep 13 21:20:04 2022 ] Top1: 46.91%
[ Tue Sep 13 21:20:05 2022 ] Top5: 78.14%
[ Tue Sep 13 21:20:05 2022 ] Training epoch: 41
[ Tue Sep 13 21:20:19 2022 ] Batch(19/162) done. Loss: 0.1957 lr:0.100000
[ Tue Sep 13 21:21:12 2022 ] Batch(119/162) done. Loss: 0.4244 lr:0.100000
[ Tue Sep 13 21:21:34 2022 ] Eval epoch: 41
[ Tue Sep 13 21:24:28 2022 ] Mean test loss of 930 batches: 2.267261505126953.
[ Tue Sep 13 21:24:28 2022 ] Top1: 52.96%
[ Tue Sep 13 21:24:29 2022 ] Top5: 81.85%
[ Tue Sep 13 21:24:29 2022 ] Training epoch: 42
[ Tue Sep 13 21:25:03 2022 ] Batch(57/162) done. Loss: 0.2303 lr:0.100000
[ Tue Sep 13 21:25:56 2022 ] Batch(157/162) done. Loss: 0.4598 lr:0.100000
[ Tue Sep 13 21:25:58 2022 ] Eval epoch: 42
[ Tue Sep 13 21:28:51 2022 ] Mean test loss of 930 batches: 2.647237777709961.
[ Tue Sep 13 21:28:51 2022 ] Top1: 47.21%
[ Tue Sep 13 21:28:52 2022 ] Top5: 77.54%
[ Tue Sep 13 21:28:53 2022 ] Training epoch: 43
[ Tue Sep 13 21:29:46 2022 ] Batch(95/162) done. Loss: 0.7358 lr:0.100000
[ Tue Sep 13 21:30:21 2022 ] Eval epoch: 43
[ Tue Sep 13 21:33:14 2022 ] Mean test loss of 930 batches: 2.5410149097442627.
[ Tue Sep 13 21:33:14 2022 ] Top1: 51.14%
[ Tue Sep 13 21:33:15 2022 ] Top5: 81.06%
[ Tue Sep 13 21:33:15 2022 ] Training epoch: 44
[ Tue Sep 13 21:33:36 2022 ] Batch(33/162) done. Loss: 0.3927 lr:0.100000
[ Tue Sep 13 21:34:28 2022 ] Batch(133/162) done. Loss: 0.4016 lr:0.100000
[ Tue Sep 13 21:34:43 2022 ] Eval epoch: 44
[ Tue Sep 13 21:37:36 2022 ] Mean test loss of 930 batches: 2.4503626823425293.
[ Tue Sep 13 21:37:37 2022 ] Top1: 51.08%
[ Tue Sep 13 21:37:37 2022 ] Top5: 80.13%
[ Tue Sep 13 21:37:37 2022 ] Training epoch: 45
[ Tue Sep 13 21:38:18 2022 ] Batch(71/162) done. Loss: 0.5829 lr:0.100000
[ Tue Sep 13 21:39:06 2022 ] Eval epoch: 45
[ Tue Sep 13 21:41:59 2022 ] Mean test loss of 930 batches: 2.5172011852264404.
[ Tue Sep 13 21:41:59 2022 ] Top1: 51.35%
[ Tue Sep 13 21:42:00 2022 ] Top5: 81.10%
[ Tue Sep 13 21:42:00 2022 ] Training epoch: 46
[ Tue Sep 13 21:42:08 2022 ] Batch(9/162) done. Loss: 0.3420 lr:0.100000
[ Tue Sep 13 21:43:01 2022 ] Batch(109/162) done. Loss: 0.4147 lr:0.100000
[ Tue Sep 13 21:43:29 2022 ] Eval epoch: 46
[ Tue Sep 13 21:46:21 2022 ] Mean test loss of 930 batches: 2.650909900665283.
[ Tue Sep 13 21:46:22 2022 ] Top1: 50.69%
[ Tue Sep 13 21:46:23 2022 ] Top5: 79.20%
[ Tue Sep 13 21:46:23 2022 ] Training epoch: 47
[ Tue Sep 13 21:46:51 2022 ] Batch(47/162) done. Loss: 0.5927 lr:0.100000
[ Tue Sep 13 21:47:44 2022 ] Batch(147/162) done. Loss: 0.5132 lr:0.100000
[ Tue Sep 13 21:47:51 2022 ] Eval epoch: 47
[ Tue Sep 13 21:50:45 2022 ] Mean test loss of 930 batches: 2.458667039871216.
[ Tue Sep 13 21:50:46 2022 ] Top1: 53.88%
[ Tue Sep 13 21:50:47 2022 ] Top5: 81.86%
[ Tue Sep 13 21:50:47 2022 ] Training epoch: 48
[ Tue Sep 13 21:51:35 2022 ] Batch(85/162) done. Loss: 0.3732 lr:0.100000
[ Tue Sep 13 21:52:15 2022 ] Eval epoch: 48
[ Tue Sep 13 21:55:09 2022 ] Mean test loss of 930 batches: 2.575839042663574.
[ Tue Sep 13 21:55:10 2022 ] Top1: 52.06%
[ Tue Sep 13 21:55:10 2022 ] Top5: 79.94%
[ Tue Sep 13 21:55:10 2022 ] Training epoch: 49
[ Tue Sep 13 21:55:26 2022 ] Batch(23/162) done. Loss: 0.4156 lr:0.100000
[ Tue Sep 13 21:56:19 2022 ] Batch(123/162) done. Loss: 0.4479 lr:0.100000
[ Tue Sep 13 21:56:39 2022 ] Eval epoch: 49
[ Tue Sep 13 21:59:32 2022 ] Mean test loss of 930 batches: 2.6112258434295654.
[ Tue Sep 13 21:59:32 2022 ] Top1: 50.85%
[ Tue Sep 13 21:59:33 2022 ] Top5: 78.85%
[ Tue Sep 13 21:59:33 2022 ] Training epoch: 50
[ Tue Sep 13 22:00:09 2022 ] Batch(61/162) done. Loss: 0.3971 lr:0.100000
[ Tue Sep 13 22:01:01 2022 ] Batch(161/162) done. Loss: 0.3934 lr:0.100000
[ Tue Sep 13 22:01:02 2022 ] Eval epoch: 50
[ Tue Sep 13 22:03:55 2022 ] Mean test loss of 930 batches: 2.7453091144561768.
[ Tue Sep 13 22:03:56 2022 ] Top1: 48.41%
[ Tue Sep 13 22:03:56 2022 ] Top5: 77.79%
[ Tue Sep 13 22:03:57 2022 ] Training epoch: 51
[ Tue Sep 13 22:04:52 2022 ] Batch(99/162) done. Loss: 0.4055 lr:0.100000
[ Tue Sep 13 22:05:25 2022 ] Eval epoch: 51
[ Tue Sep 13 22:08:18 2022 ] Mean test loss of 930 batches: 2.782780408859253.
[ Tue Sep 13 22:08:18 2022 ] Top1: 47.93%
[ Tue Sep 13 22:08:19 2022 ] Top5: 78.11%
[ Tue Sep 13 22:08:19 2022 ] Training epoch: 52
[ Tue Sep 13 22:08:42 2022 ] Batch(37/162) done. Loss: 0.2562 lr:0.100000
[ Tue Sep 13 22:09:35 2022 ] Batch(137/162) done. Loss: 0.2431 lr:0.100000
[ Tue Sep 13 22:09:48 2022 ] Eval epoch: 52
[ Tue Sep 13 22:12:41 2022 ] Mean test loss of 930 batches: 4.7305097579956055.
[ Tue Sep 13 22:12:41 2022 ] Top1: 37.37%
[ Tue Sep 13 22:12:42 2022 ] Top5: 64.51%
[ Tue Sep 13 22:12:42 2022 ] Training epoch: 53
[ Tue Sep 13 22:13:25 2022 ] Batch(75/162) done. Loss: 0.2918 lr:0.100000
[ Tue Sep 13 22:14:10 2022 ] Eval epoch: 53
[ Tue Sep 13 22:17:03 2022 ] Mean test loss of 930 batches: 2.7816007137298584.
[ Tue Sep 13 22:17:04 2022 ] Top1: 51.75%
[ Tue Sep 13 22:17:05 2022 ] Top5: 79.28%
[ Tue Sep 13 22:17:05 2022 ] Training epoch: 54
[ Tue Sep 13 22:17:15 2022 ] Batch(13/162) done. Loss: 0.1914 lr:0.100000
[ Tue Sep 13 22:18:08 2022 ] Batch(113/162) done. Loss: 0.4339 lr:0.100000
[ Tue Sep 13 22:18:33 2022 ] Eval epoch: 54
[ Tue Sep 13 22:21:26 2022 ] Mean test loss of 930 batches: 2.4547810554504395.
[ Tue Sep 13 22:21:26 2022 ] Top1: 53.56%
[ Tue Sep 13 22:21:27 2022 ] Top5: 80.69%
[ Tue Sep 13 22:21:27 2022 ] Training epoch: 55
[ Tue Sep 13 22:21:57 2022 ] Batch(51/162) done. Loss: 0.4438 lr:0.100000
[ Tue Sep 13 22:22:49 2022 ] Batch(151/162) done. Loss: 0.1667 lr:0.100000
[ Tue Sep 13 22:22:55 2022 ] Eval epoch: 55
[ Tue Sep 13 22:25:48 2022 ] Mean test loss of 930 batches: 2.4871363639831543.
[ Tue Sep 13 22:25:48 2022 ] Top1: 52.28%
[ Tue Sep 13 22:25:49 2022 ] Top5: 80.72%
[ Tue Sep 13 22:25:49 2022 ] Training epoch: 56
[ Tue Sep 13 22:26:39 2022 ] Batch(89/162) done. Loss: 0.4151 lr:0.100000
[ Tue Sep 13 22:27:18 2022 ] Eval epoch: 56
[ Tue Sep 13 22:30:11 2022 ] Mean test loss of 930 batches: 2.8810834884643555.
[ Tue Sep 13 22:30:11 2022 ] Top1: 51.17%
[ Tue Sep 13 22:30:12 2022 ] Top5: 79.63%
[ Tue Sep 13 22:30:12 2022 ] Training epoch: 57
[ Tue Sep 13 22:30:30 2022 ] Batch(27/162) done. Loss: 0.1794 lr:0.100000
[ Tue Sep 13 22:31:22 2022 ] Batch(127/162) done. Loss: 0.7188 lr:0.100000
[ Tue Sep 13 22:31:40 2022 ] Eval epoch: 57
[ Tue Sep 13 22:34:33 2022 ] Mean test loss of 930 batches: 3.043966293334961.
[ Tue Sep 13 22:34:34 2022 ] Top1: 46.67%
[ Tue Sep 13 22:34:34 2022 ] Top5: 76.08%
[ Tue Sep 13 22:34:34 2022 ] Training epoch: 58
[ Tue Sep 13 22:35:12 2022 ] Batch(65/162) done. Loss: 0.2419 lr:0.100000
[ Tue Sep 13 22:36:03 2022 ] Eval epoch: 58
[ Tue Sep 13 22:38:56 2022 ] Mean test loss of 930 batches: 2.534623861312866.
[ Tue Sep 13 22:38:56 2022 ] Top1: 51.77%
[ Tue Sep 13 22:38:57 2022 ] Top5: 80.38%
[ Tue Sep 13 22:38:57 2022 ] Training epoch: 59
[ Tue Sep 13 22:39:02 2022 ] Batch(3/162) done. Loss: 0.2717 lr:0.100000
[ Tue Sep 13 22:39:55 2022 ] Batch(103/162) done. Loss: 0.4790 lr:0.100000
[ Tue Sep 13 22:40:25 2022 ] Eval epoch: 59
[ Tue Sep 13 22:43:18 2022 ] Mean test loss of 930 batches: 2.920837163925171.
[ Tue Sep 13 22:43:18 2022 ] Top1: 53.35%
[ Tue Sep 13 22:43:19 2022 ] Top5: 81.02%
[ Tue Sep 13 22:43:19 2022 ] Training epoch: 60
[ Tue Sep 13 22:43:44 2022 ] Batch(41/162) done. Loss: 0.1751 lr:0.100000
[ Tue Sep 13 22:44:37 2022 ] Batch(141/162) done. Loss: 0.4344 lr:0.100000
[ Tue Sep 13 22:44:48 2022 ] Eval epoch: 60
[ Tue Sep 13 22:47:40 2022 ] Mean test loss of 930 batches: 2.9989264011383057.
[ Tue Sep 13 22:47:41 2022 ] Top1: 48.94%
[ Tue Sep 13 22:47:41 2022 ] Top5: 76.92%
[ Tue Sep 13 22:47:41 2022 ] Training epoch: 61
[ Tue Sep 13 22:48:26 2022 ] Batch(79/162) done. Loss: 0.2523 lr:0.010000
[ Tue Sep 13 22:49:10 2022 ] Eval epoch: 61
[ Tue Sep 13 22:52:03 2022 ] Mean test loss of 930 batches: 2.2626051902770996.
[ Tue Sep 13 22:52:04 2022 ] Top1: 58.74%
[ Tue Sep 13 22:52:04 2022 ] Top5: 84.23%
[ Tue Sep 13 22:52:05 2022 ] Training epoch: 62
|
408 |
+
[ Tue Sep 13 22:52:17 2022 ] Batch(17/162) done. Loss: 0.0895 lr:0.010000
|
409 |
+
[ Tue Sep 13 22:53:10 2022 ] Batch(117/162) done. Loss: 0.1551 lr:0.010000
|
410 |
+
[ Tue Sep 13 22:53:33 2022 ] Eval epoch: 62
|
411 |
+
[ Tue Sep 13 22:56:27 2022 ] Mean test loss of 930 batches: 2.2402141094207764.
|
412 |
+
[ Tue Sep 13 22:56:27 2022 ] Top1: 59.54%
|
413 |
+
[ Tue Sep 13 22:56:28 2022 ] Top5: 84.73%
|
414 |
+
[ Tue Sep 13 22:56:28 2022 ] Training epoch: 63
|
415 |
+
[ Tue Sep 13 22:57:00 2022 ] Batch(55/162) done. Loss: 0.0543 lr:0.010000
|
416 |
+
[ Tue Sep 13 22:57:53 2022 ] Batch(155/162) done. Loss: 0.0686 lr:0.010000
|
417 |
+
[ Tue Sep 13 22:57:56 2022 ] Eval epoch: 63
|
418 |
+
[ Tue Sep 13 23:00:50 2022 ] Mean test loss of 930 batches: 2.3638341426849365.
|
419 |
+
[ Tue Sep 13 23:00:50 2022 ] Top1: 58.94%
|
420 |
+
[ Tue Sep 13 23:00:50 2022 ] Top5: 84.32%
|
421 |
+
[ Tue Sep 13 23:00:51 2022 ] Training epoch: 64
|
422 |
+
[ Tue Sep 13 23:01:43 2022 ] Batch(93/162) done. Loss: 0.0921 lr:0.010000
|
423 |
+
[ Tue Sep 13 23:02:19 2022 ] Eval epoch: 64
|
424 |
+
[ Tue Sep 13 23:05:13 2022 ] Mean test loss of 930 batches: 2.352214813232422.
|
425 |
+
[ Tue Sep 13 23:05:14 2022 ] Top1: 59.51%
|
426 |
+
[ Tue Sep 13 23:05:14 2022 ] Top5: 84.59%
|
427 |
+
[ Tue Sep 13 23:05:15 2022 ] Training epoch: 65
|
428 |
+
[ Tue Sep 13 23:05:34 2022 ] Batch(31/162) done. Loss: 0.0716 lr:0.010000
|
429 |
+
[ Tue Sep 13 23:06:27 2022 ] Batch(131/162) done. Loss: 0.0759 lr:0.010000
|
430 |
+
[ Tue Sep 13 23:06:43 2022 ] Eval epoch: 65
|
431 |
+
[ Tue Sep 13 23:09:36 2022 ] Mean test loss of 930 batches: 2.398852586746216.
|
432 |
+
[ Tue Sep 13 23:09:37 2022 ] Top1: 59.31%
|
433 |
+
[ Tue Sep 13 23:09:37 2022 ] Top5: 84.54%
|
434 |
+
[ Tue Sep 13 23:09:38 2022 ] Training epoch: 66
|
435 |
+
[ Tue Sep 13 23:10:17 2022 ] Batch(69/162) done. Loss: 0.0377 lr:0.010000
|
436 |
+
[ Tue Sep 13 23:11:06 2022 ] Eval epoch: 66
|
437 |
+
[ Tue Sep 13 23:13:59 2022 ] Mean test loss of 930 batches: 2.3643901348114014.
|
438 |
+
[ Tue Sep 13 23:13:59 2022 ] Top1: 59.43%
|
439 |
+
[ Tue Sep 13 23:14:00 2022 ] Top5: 84.64%
|
440 |
+
[ Tue Sep 13 23:14:00 2022 ] Training epoch: 67
|
441 |
+
[ Tue Sep 13 23:14:07 2022 ] Batch(7/162) done. Loss: 0.0666 lr:0.010000
|
442 |
+
[ Tue Sep 13 23:15:00 2022 ] Batch(107/162) done. Loss: 0.1195 lr:0.010000
|
443 |
+
[ Tue Sep 13 23:15:28 2022 ] Eval epoch: 67
|
444 |
+
[ Tue Sep 13 23:18:21 2022 ] Mean test loss of 930 batches: 2.4129855632781982.
|
445 |
+
[ Tue Sep 13 23:18:22 2022 ] Top1: 59.50%
|
446 |
+
[ Tue Sep 13 23:18:23 2022 ] Top5: 84.60%
|
447 |
+
[ Tue Sep 13 23:18:23 2022 ] Training epoch: 68
|
448 |
+
[ Tue Sep 13 23:18:50 2022 ] Batch(45/162) done. Loss: 0.0946 lr:0.010000
|
449 |
+
[ Tue Sep 13 23:19:43 2022 ] Batch(145/162) done. Loss: 0.0837 lr:0.010000
|
450 |
+
[ Tue Sep 13 23:19:52 2022 ] Eval epoch: 68
|
451 |
+
[ Tue Sep 13 23:22:45 2022 ] Mean test loss of 930 batches: 2.399461269378662.
|
452 |
+
[ Tue Sep 13 23:22:46 2022 ] Top1: 59.54%
|
453 |
+
[ Tue Sep 13 23:22:46 2022 ] Top5: 84.72%
|
454 |
+
[ Tue Sep 13 23:22:46 2022 ] Training epoch: 69
|
455 |
+
[ Tue Sep 13 23:23:34 2022 ] Batch(83/162) done. Loss: 0.0429 lr:0.010000
|
456 |
+
[ Tue Sep 13 23:24:15 2022 ] Eval epoch: 69
|
457 |
+
[ Tue Sep 13 23:27:09 2022 ] Mean test loss of 930 batches: 2.4148502349853516.
|
458 |
+
[ Tue Sep 13 23:27:10 2022 ] Top1: 59.44%
|
459 |
+
[ Tue Sep 13 23:27:10 2022 ] Top5: 84.70%
|
460 |
+
[ Tue Sep 13 23:27:10 2022 ] Training epoch: 70
|
461 |
+
[ Tue Sep 13 23:27:25 2022 ] Batch(21/162) done. Loss: 0.1547 lr:0.010000
|
462 |
+
[ Tue Sep 13 23:28:18 2022 ] Batch(121/162) done. Loss: 0.0320 lr:0.010000
|
463 |
+
[ Tue Sep 13 23:28:39 2022 ] Eval epoch: 70
|
464 |
+
[ Tue Sep 13 23:31:32 2022 ] Mean test loss of 930 batches: 2.3825840950012207.
|
465 |
+
[ Tue Sep 13 23:31:33 2022 ] Top1: 59.81%
|
466 |
+
[ Tue Sep 13 23:31:33 2022 ] Top5: 84.77%
|
467 |
+
[ Tue Sep 13 23:31:34 2022 ] Training epoch: 71
|
468 |
+
[ Tue Sep 13 23:32:08 2022 ] Batch(59/162) done. Loss: 0.1290 lr:0.010000
|
469 |
+
[ Tue Sep 13 23:33:01 2022 ] Batch(159/162) done. Loss: 0.0549 lr:0.010000
|
470 |
+
[ Tue Sep 13 23:33:02 2022 ] Eval epoch: 71
|
471 |
+
[ Tue Sep 13 23:35:56 2022 ] Mean test loss of 930 batches: 2.3638386726379395.
|
472 |
+
[ Tue Sep 13 23:35:56 2022 ] Top1: 59.85%
|
473 |
+
[ Tue Sep 13 23:35:57 2022 ] Top5: 85.02%
|
474 |
+
[ Tue Sep 13 23:35:57 2022 ] Training epoch: 72
|
475 |
+
[ Tue Sep 13 23:36:52 2022 ] Batch(97/162) done. Loss: 0.0311 lr:0.010000
|
476 |
+
[ Tue Sep 13 23:37:26 2022 ] Eval epoch: 72
|
477 |
+
[ Tue Sep 13 23:40:19 2022 ] Mean test loss of 930 batches: 2.419126510620117.
|
478 |
+
[ Tue Sep 13 23:40:19 2022 ] Top1: 59.65%
|
479 |
+
[ Tue Sep 13 23:40:20 2022 ] Top5: 84.89%
|
480 |
+
[ Tue Sep 13 23:40:20 2022 ] Training epoch: 73
|
481 |
+
[ Tue Sep 13 23:40:42 2022 ] Batch(35/162) done. Loss: 0.0438 lr:0.010000
|
482 |
+
[ Tue Sep 13 23:41:35 2022 ] Batch(135/162) done. Loss: 0.0685 lr:0.010000
|
483 |
+
[ Tue Sep 13 23:41:49 2022 ] Eval epoch: 73
|
484 |
+
[ Tue Sep 13 23:44:42 2022 ] Mean test loss of 930 batches: 2.403249979019165.
|
485 |
+
[ Tue Sep 13 23:44:42 2022 ] Top1: 59.70%
|
486 |
+
[ Tue Sep 13 23:44:43 2022 ] Top5: 84.84%
|
487 |
+
[ Tue Sep 13 23:44:43 2022 ] Training epoch: 74
|
488 |
+
[ Tue Sep 13 23:45:25 2022 ] Batch(73/162) done. Loss: 0.0443 lr:0.010000
|
489 |
+
[ Tue Sep 13 23:46:12 2022 ] Eval epoch: 74
|
490 |
+
[ Tue Sep 13 23:49:06 2022 ] Mean test loss of 930 batches: 2.4638261795043945.
|
491 |
+
[ Tue Sep 13 23:49:06 2022 ] Top1: 59.42%
|
492 |
+
[ Tue Sep 13 23:49:07 2022 ] Top5: 84.60%
|
493 |
+
[ Tue Sep 13 23:49:07 2022 ] Training epoch: 75
|
494 |
+
[ Tue Sep 13 23:49:16 2022 ] Batch(11/162) done. Loss: 0.0248 lr:0.010000
|
495 |
+
[ Tue Sep 13 23:50:09 2022 ] Batch(111/162) done. Loss: 0.0414 lr:0.010000
|
496 |
+
[ Tue Sep 13 23:50:35 2022 ] Eval epoch: 75
|
497 |
+
[ Tue Sep 13 23:53:28 2022 ] Mean test loss of 930 batches: 2.473863363265991.
|
498 |
+
[ Tue Sep 13 23:53:29 2022 ] Top1: 59.64%
|
499 |
+
[ Tue Sep 13 23:53:29 2022 ] Top5: 84.75%
|
500 |
+
[ Tue Sep 13 23:53:29 2022 ] Training epoch: 76
|
501 |
+
[ Tue Sep 13 23:53:58 2022 ] Batch(49/162) done. Loss: 0.0623 lr:0.010000
|
502 |
+
[ Tue Sep 13 23:54:51 2022 ] Batch(149/162) done. Loss: 0.1217 lr:0.010000
|
503 |
+
[ Tue Sep 13 23:54:58 2022 ] Eval epoch: 76
|
504 |
+
[ Tue Sep 13 23:57:51 2022 ] Mean test loss of 930 batches: 2.529571294784546.
|
505 |
+
[ Tue Sep 13 23:57:52 2022 ] Top1: 59.31%
|
506 |
+
[ Tue Sep 13 23:57:52 2022 ] Top5: 84.50%
|
507 |
+
[ Tue Sep 13 23:57:52 2022 ] Training epoch: 77
|
508 |
+
[ Tue Sep 13 23:58:41 2022 ] Batch(87/162) done. Loss: 0.1024 lr:0.010000
|
509 |
+
[ Tue Sep 13 23:59:21 2022 ] Eval epoch: 77
|
510 |
+
[ Wed Sep 14 00:02:14 2022 ] Mean test loss of 930 batches: 2.4722189903259277.
|
511 |
+
[ Wed Sep 14 00:02:14 2022 ] Top1: 59.78%
|
512 |
+
[ Wed Sep 14 00:02:15 2022 ] Top5: 84.88%
|
513 |
+
[ Wed Sep 14 00:02:15 2022 ] Training epoch: 78
|
514 |
+
[ Wed Sep 14 00:02:32 2022 ] Batch(25/162) done. Loss: 0.0177 lr:0.010000
|
515 |
+
[ Wed Sep 14 00:03:24 2022 ] Batch(125/162) done. Loss: 0.0622 lr:0.010000
|
516 |
+
[ Wed Sep 14 00:03:44 2022 ] Eval epoch: 78
|
517 |
+
[ Wed Sep 14 00:06:37 2022 ] Mean test loss of 930 batches: 2.524034261703491.
|
518 |
+
[ Wed Sep 14 00:06:38 2022 ] Top1: 59.45%
|
519 |
+
[ Wed Sep 14 00:06:39 2022 ] Top5: 84.39%
|
520 |
+
[ Wed Sep 14 00:06:39 2022 ] Training epoch: 79
|
521 |
+
[ Wed Sep 14 00:07:16 2022 ] Batch(63/162) done. Loss: 0.0366 lr:0.010000
|
522 |
+
[ Wed Sep 14 00:08:08 2022 ] Eval epoch: 79
|
523 |
+
[ Wed Sep 14 00:11:01 2022 ] Mean test loss of 930 batches: 2.5322084426879883.
|
524 |
+
[ Wed Sep 14 00:11:02 2022 ] Top1: 59.45%
|
525 |
+
[ Wed Sep 14 00:11:02 2022 ] Top5: 84.63%
|
526 |
+
[ Wed Sep 14 00:11:03 2022 ] Training epoch: 80
|
527 |
+
[ Wed Sep 14 00:11:07 2022 ] Batch(1/162) done. Loss: 0.0272 lr:0.010000
|
528 |
+
[ Wed Sep 14 00:11:59 2022 ] Batch(101/162) done. Loss: 0.0790 lr:0.010000
|
529 |
+
[ Wed Sep 14 00:12:31 2022 ] Eval epoch: 80
|
530 |
+
[ Wed Sep 14 00:15:25 2022 ] Mean test loss of 930 batches: 2.545362949371338.
|
531 |
+
[ Wed Sep 14 00:15:25 2022 ] Top1: 59.38%
|
532 |
+
[ Wed Sep 14 00:15:26 2022 ] Top5: 84.49%
|
533 |
+
[ Wed Sep 14 00:15:26 2022 ] Training epoch: 81
|
534 |
+
[ Wed Sep 14 00:15:50 2022 ] Batch(39/162) done. Loss: 0.0313 lr:0.001000
|
535 |
+
[ Wed Sep 14 00:16:43 2022 ] Batch(139/162) done. Loss: 0.0836 lr:0.001000
|
536 |
+
[ Wed Sep 14 00:16:55 2022 ] Eval epoch: 81
|
537 |
+
[ Wed Sep 14 00:19:48 2022 ] Mean test loss of 930 batches: 2.5255229473114014.
|
538 |
+
[ Wed Sep 14 00:19:49 2022 ] Top1: 59.54%
|
539 |
+
[ Wed Sep 14 00:19:49 2022 ] Top5: 84.60%
|
540 |
+
[ Wed Sep 14 00:19:50 2022 ] Training epoch: 82
|
541 |
+
[ Wed Sep 14 00:20:34 2022 ] Batch(77/162) done. Loss: 0.0274 lr:0.001000
|
542 |
+
[ Wed Sep 14 00:21:18 2022 ] Eval epoch: 82
|
543 |
+
[ Wed Sep 14 00:24:11 2022 ] Mean test loss of 930 batches: 2.5230913162231445.
|
544 |
+
[ Wed Sep 14 00:24:12 2022 ] Top1: 59.72%
|
545 |
+
[ Wed Sep 14 00:24:12 2022 ] Top5: 84.71%
|
546 |
+
[ Wed Sep 14 00:24:13 2022 ] Training epoch: 83
|
547 |
+
[ Wed Sep 14 00:24:24 2022 ] Batch(15/162) done. Loss: 0.0518 lr:0.001000
|
548 |
+
[ Wed Sep 14 00:25:17 2022 ] Batch(115/162) done. Loss: 0.0615 lr:0.001000
|
549 |
+
[ Wed Sep 14 00:25:41 2022 ] Eval epoch: 83
|
550 |
+
[ Wed Sep 14 00:28:34 2022 ] Mean test loss of 930 batches: 2.510268449783325.
|
551 |
+
[ Wed Sep 14 00:28:35 2022 ] Top1: 59.71%
|
552 |
+
[ Wed Sep 14 00:28:35 2022 ] Top5: 84.64%
|
553 |
+
[ Wed Sep 14 00:28:36 2022 ] Training epoch: 84
|
554 |
+
[ Wed Sep 14 00:29:07 2022 ] Batch(53/162) done. Loss: 0.0111 lr:0.001000
|
555 |
+
[ Wed Sep 14 00:30:00 2022 ] Batch(153/162) done. Loss: 0.0526 lr:0.001000
|
556 |
+
[ Wed Sep 14 00:30:04 2022 ] Eval epoch: 84
|
557 |
+
[ Wed Sep 14 00:32:58 2022 ] Mean test loss of 930 batches: 2.5461337566375732.
|
558 |
+
[ Wed Sep 14 00:32:58 2022 ] Top1: 59.51%
|
559 |
+
[ Wed Sep 14 00:32:59 2022 ] Top5: 84.61%
|
560 |
+
[ Wed Sep 14 00:32:59 2022 ] Training epoch: 85
|
561 |
+
[ Wed Sep 14 00:33:51 2022 ] Batch(91/162) done. Loss: 0.0426 lr:0.001000
|
562 |
+
[ Wed Sep 14 00:34:28 2022 ] Eval epoch: 85
|
563 |
+
[ Wed Sep 14 00:37:21 2022 ] Mean test loss of 930 batches: 2.5131518840789795.
|
564 |
+
[ Wed Sep 14 00:37:22 2022 ] Top1: 59.86%
|
565 |
+
[ Wed Sep 14 00:37:22 2022 ] Top5: 84.93%
|
566 |
+
[ Wed Sep 14 00:37:22 2022 ] Training epoch: 86
|
567 |
+
[ Wed Sep 14 00:37:41 2022 ] Batch(29/162) done. Loss: 0.1134 lr:0.001000
|
568 |
+
[ Wed Sep 14 00:38:34 2022 ] Batch(129/162) done. Loss: 0.0646 lr:0.001000
|
569 |
+
[ Wed Sep 14 00:38:51 2022 ] Eval epoch: 86
|
570 |
+
[ Wed Sep 14 00:41:44 2022 ] Mean test loss of 930 batches: 2.4873545169830322.
|
571 |
+
[ Wed Sep 14 00:41:45 2022 ] Top1: 59.94%
|
572 |
+
[ Wed Sep 14 00:41:46 2022 ] Top5: 84.92%
|
573 |
+
[ Wed Sep 14 00:41:46 2022 ] Training epoch: 87
|
574 |
+
[ Wed Sep 14 00:42:25 2022 ] Batch(67/162) done. Loss: 0.0480 lr:0.001000
|
575 |
+
[ Wed Sep 14 00:43:14 2022 ] Eval epoch: 87
|
576 |
+
[ Wed Sep 14 00:46:08 2022 ] Mean test loss of 930 batches: 2.504473924636841.
|
577 |
+
[ Wed Sep 14 00:46:08 2022 ] Top1: 59.91%
|
578 |
+
[ Wed Sep 14 00:46:09 2022 ] Top5: 84.94%
|
579 |
+
[ Wed Sep 14 00:46:09 2022 ] Training epoch: 88
|
580 |
+
[ Wed Sep 14 00:46:15 2022 ] Batch(5/162) done. Loss: 0.0890 lr:0.001000
|
581 |
+
[ Wed Sep 14 00:47:08 2022 ] Batch(105/162) done. Loss: 0.1213 lr:0.001000
|
582 |
+
[ Wed Sep 14 00:47:38 2022 ] Eval epoch: 88
|
583 |
+
[ Wed Sep 14 00:50:31 2022 ] Mean test loss of 930 batches: 2.4991095066070557.
|
584 |
+
[ Wed Sep 14 00:50:32 2022 ] Top1: 60.00%
|
585 |
+
[ Wed Sep 14 00:50:32 2022 ] Top5: 84.83%
|
586 |
+
[ Wed Sep 14 00:50:33 2022 ] Training epoch: 89
|
587 |
+
[ Wed Sep 14 00:50:59 2022 ] Batch(43/162) done. Loss: 0.0259 lr:0.001000
|
588 |
+
[ Wed Sep 14 00:51:52 2022 ] Batch(143/162) done. Loss: 0.1193 lr:0.001000
|
589 |
+
[ Wed Sep 14 00:52:01 2022 ] Eval epoch: 89
|
590 |
+
[ Wed Sep 14 00:54:55 2022 ] Mean test loss of 930 batches: 2.496248483657837.
|
591 |
+
[ Wed Sep 14 00:54:55 2022 ] Top1: 59.66%
|
592 |
+
[ Wed Sep 14 00:54:56 2022 ] Top5: 84.75%
|
593 |
+
[ Wed Sep 14 00:54:56 2022 ] Training epoch: 90
|
594 |
+
[ Wed Sep 14 00:55:42 2022 ] Batch(81/162) done. Loss: 0.0691 lr:0.001000
|
595 |
+
[ Wed Sep 14 00:56:25 2022 ] Eval epoch: 90
|
596 |
+
[ Wed Sep 14 00:59:19 2022 ] Mean test loss of 930 batches: 2.4718194007873535.
|
597 |
+
[ Wed Sep 14 00:59:19 2022 ] Top1: 60.11%
|
598 |
+
[ Wed Sep 14 00:59:20 2022 ] Top5: 84.91%
|
599 |
+
[ Wed Sep 14 00:59:20 2022 ] Training epoch: 91
|
600 |
+
[ Wed Sep 14 00:59:33 2022 ] Batch(19/162) done. Loss: 0.0215 lr:0.001000
|
601 |
+
[ Wed Sep 14 01:00:26 2022 ] Batch(119/162) done. Loss: 0.0698 lr:0.001000
|
602 |
+
[ Wed Sep 14 01:00:49 2022 ] Eval epoch: 91
|
603 |
+
[ Wed Sep 14 01:03:41 2022 ] Mean test loss of 930 batches: 2.5102038383483887.
|
604 |
+
[ Wed Sep 14 01:03:42 2022 ] Top1: 60.02%
|
605 |
+
[ Wed Sep 14 01:03:42 2022 ] Top5: 84.80%
|
606 |
+
[ Wed Sep 14 01:03:43 2022 ] Training epoch: 92
|
607 |
+
[ Wed Sep 14 01:04:16 2022 ] Batch(57/162) done. Loss: 0.0306 lr:0.001000
|
608 |
+
[ Wed Sep 14 01:05:09 2022 ] Batch(157/162) done. Loss: 0.1090 lr:0.001000
|
609 |
+
[ Wed Sep 14 01:05:11 2022 ] Eval epoch: 92
|
610 |
+
[ Wed Sep 14 01:08:05 2022 ] Mean test loss of 930 batches: 2.5122852325439453.
|
611 |
+
[ Wed Sep 14 01:08:05 2022 ] Top1: 59.70%
|
612 |
+
[ Wed Sep 14 01:08:06 2022 ] Top5: 84.54%
|
613 |
+
[ Wed Sep 14 01:08:06 2022 ] Training epoch: 93
|
614 |
+
[ Wed Sep 14 01:08:59 2022 ] Batch(95/162) done. Loss: 0.0734 lr:0.001000
|
615 |
+
[ Wed Sep 14 01:09:34 2022 ] Eval epoch: 93
|
616 |
+
[ Wed Sep 14 01:12:28 2022 ] Mean test loss of 930 batches: 2.5456082820892334.
|
617 |
+
[ Wed Sep 14 01:12:29 2022 ] Top1: 59.85%
|
618 |
+
[ Wed Sep 14 01:12:29 2022 ] Top5: 84.79%
|
619 |
+
[ Wed Sep 14 01:12:29 2022 ] Training epoch: 94
|
620 |
+
[ Wed Sep 14 01:12:50 2022 ] Batch(33/162) done. Loss: 0.1057 lr:0.001000
|
621 |
+
[ Wed Sep 14 01:13:43 2022 ] Batch(133/162) done. Loss: 0.0681 lr:0.001000
|
622 |
+
[ Wed Sep 14 01:13:58 2022 ] Eval epoch: 94
|
623 |
+
[ Wed Sep 14 01:16:51 2022 ] Mean test loss of 930 batches: 2.493978500366211.
|
624 |
+
[ Wed Sep 14 01:16:52 2022 ] Top1: 60.04%
|
625 |
+
[ Wed Sep 14 01:16:52 2022 ] Top5: 84.90%
|
626 |
+
[ Wed Sep 14 01:16:52 2022 ] Training epoch: 95
|
627 |
+
[ Wed Sep 14 01:17:33 2022 ] Batch(71/162) done. Loss: 0.0440 lr:0.001000
|
628 |
+
[ Wed Sep 14 01:18:21 2022 ] Eval epoch: 95
|
629 |
+
[ Wed Sep 14 01:21:15 2022 ] Mean test loss of 930 batches: 2.4783060550689697.
|
630 |
+
[ Wed Sep 14 01:21:16 2022 ] Top1: 60.17%
|
631 |
+
[ Wed Sep 14 01:21:16 2022 ] Top5: 85.04%
|
632 |
+
[ Wed Sep 14 01:21:17 2022 ] Training epoch: 96
|
633 |
+
[ Wed Sep 14 01:21:25 2022 ] Batch(9/162) done. Loss: 0.0118 lr:0.001000
|
634 |
+
[ Wed Sep 14 01:22:18 2022 ] Batch(109/162) done. Loss: 0.1139 lr:0.001000
|
635 |
+
[ Wed Sep 14 01:22:45 2022 ] Eval epoch: 96
|
636 |
+
[ Wed Sep 14 01:25:38 2022 ] Mean test loss of 930 batches: 2.4734930992126465.
|
637 |
+
[ Wed Sep 14 01:25:39 2022 ] Top1: 59.92%
|
638 |
+
[ Wed Sep 14 01:25:39 2022 ] Top5: 84.71%
|
639 |
+
[ Wed Sep 14 01:25:40 2022 ] Training epoch: 97
|
640 |
+
[ Wed Sep 14 01:26:08 2022 ] Batch(47/162) done. Loss: 0.0519 lr:0.001000
|
641 |
+
[ Wed Sep 14 01:27:00 2022 ] Batch(147/162) done. Loss: 0.0832 lr:0.001000
|
642 |
+
[ Wed Sep 14 01:27:08 2022 ] Eval epoch: 97
|
643 |
+
[ Wed Sep 14 01:30:02 2022 ] Mean test loss of 930 batches: 2.523946762084961.
|
644 |
+
[ Wed Sep 14 01:30:02 2022 ] Top1: 59.79%
|
645 |
+
[ Wed Sep 14 01:30:03 2022 ] Top5: 84.84%
|
646 |
+
[ Wed Sep 14 01:30:03 2022 ] Training epoch: 98
|
647 |
+
[ Wed Sep 14 01:30:52 2022 ] Batch(85/162) done. Loss: 0.0325 lr:0.001000
|
648 |
+
[ Wed Sep 14 01:31:32 2022 ] Eval epoch: 98
|
649 |
+
[ Wed Sep 14 01:34:25 2022 ] Mean test loss of 930 batches: 2.539496421813965.
|
650 |
+
[ Wed Sep 14 01:34:26 2022 ] Top1: 59.73%
|
651 |
+
[ Wed Sep 14 01:34:27 2022 ] Top5: 84.72%
|
652 |
+
[ Wed Sep 14 01:34:27 2022 ] Training epoch: 99
|
653 |
+
[ Wed Sep 14 01:34:42 2022 ] Batch(23/162) done. Loss: 0.0896 lr:0.001000
|
654 |
+
[ Wed Sep 14 01:35:35 2022 ] Batch(123/162) done. Loss: 0.1063 lr:0.001000
|
655 |
+
[ Wed Sep 14 01:35:55 2022 ] Eval epoch: 99
|
656 |
+
[ Wed Sep 14 01:38:49 2022 ] Mean test loss of 930 batches: 2.492906332015991.
|
657 |
+
[ Wed Sep 14 01:38:50 2022 ] Top1: 59.87%
|
658 |
+
[ Wed Sep 14 01:38:50 2022 ] Top5: 84.83%
|
659 |
+
[ Wed Sep 14 01:38:51 2022 ] Training epoch: 100
|
660 |
+
[ Wed Sep 14 01:39:26 2022 ] Batch(61/162) done. Loss: 0.0529 lr:0.001000
|
661 |
+
[ Wed Sep 14 01:40:19 2022 ] Batch(161/162) done. Loss: 0.0567 lr:0.001000
|
662 |
+
[ Wed Sep 14 01:40:19 2022 ] Eval epoch: 100
|
663 |
+
[ Wed Sep 14 01:43:12 2022 ] Mean test loss of 930 batches: 2.551042079925537.
|
664 |
+
[ Wed Sep 14 01:43:12 2022 ] Top1: 59.58%
|
665 |
+
[ Wed Sep 14 01:43:13 2022 ] Top5: 84.58%
|
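Each evaluation in the log above follows a fixed pattern ("Eval epoch: N", then the mean test loss, Top1, and Top5 lines), so the best checkpoint can be recovered from the log alone. Below is a minimal parsing sketch, not part of the repository; the helper name best_eval and the local path 'log.txt' are assumptions:

import re

def best_eval(path='log.txt'):
    # Scan a training log like the one above and return the best (Top1, epoch).
    epoch_re = re.compile(r'Eval epoch: (\d+)')
    top1_re = re.compile(r'Top1: ([\d.]+)%')
    results, epoch = [], None
    with open(path) as f:
        for line in f:
            m = epoch_re.search(line)
            if m:
                epoch = int(m.group(1))
                continue
            m = top1_re.search(line)
            if m and epoch is not None:
                results.append((float(m.group(1)), epoch))
                epoch = None
    return max(results)  # e.g. (60.17, 95) for the epochs shown above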
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_motion_xsub/config.yaml
ADDED
@@ -0,0 +1,59 @@
Experiment_name: ntu120_bone_motion_xsub
base_lr: 0.1
batch_size: 64
config: ./config/ntu120_xsub/train_bone_motion.yaml
device:
- 2
- 3
eval_interval: 5
feeder: feeders.feeder.Feeder
groups: 8
ignore_weights: []
keep_rate: 0.9
log_interval: 100
model: model.decouple_gcn.Model
model_args:
  block_size: 41
  graph: graph.ntu_rgb_d.Graph
  graph_args:
    labeling_mode: spatial
  groups: 16
  num_class: 120
  num_person: 2
  num_point: 25
model_saved_name: ./save_models/ntu120_bone_motion_xsub
nesterov: true
num_epoch: 100
num_worker: 32
only_train_epoch: 1
only_train_part: true
optimizer: SGD
phase: train
print_log: true
save_interval: 2
save_score: false
seed: 1
show_topk:
- 1
- 5
start_epoch: 0
step:
- 60
- 80
test_batch_size: 64
test_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone_motion.npy
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
train_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone_motion.npy
  debug: false
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
  normalization: false
  random_choose: false
  random_move: false
  random_shift: false
  window_size: -1
warm_up_epoch: 0
weight_decay: 0.0001
weights: null
work_dir: ./work_dir/ntu120_bone_motion_xsub
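This config mirrors the Parameters dict recorded at the top of the matching log.txt. A minimal sketch of reading it back, assuming PyYAML is installed and the script runs from the repository root:

import yaml

# Load the archived training config and pull out the model settings.
with open('ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_motion_xsub/config.yaml') as f:
    cfg = yaml.safe_load(f)

print(cfg['model'])       # model.decouple_gcn.Model
print(cfg['model_args'])  # {'block_size': 41, 'graph': 'graph.ntu_rgb_d.Graph', ...}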
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_motion_xsub/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import math
from model.dropSke import DropBlock_Ske
from model.dropT import DropBlockT_1d


def import_class(name):
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod


def conv_branch_init(conv):
    weight = conv.weight
    n = weight.size(0)
    k1 = weight.size(1)
    k2 = weight.size(2)
    nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
    nn.init.constant(conv.bias, 0)


def conv_init(conv):
    nn.init.kaiming_normal(conv.weight, mode='fan_out')
    nn.init.constant(conv.bias, 0)


def bn_init(bn, scale):
    nn.init.constant(bn.weight, scale)
    nn.init.constant(bn.bias, 0)


class unit_tcn(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
        super(unit_tcn, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

        self.dropS = DropBlock_Ske(num_point=num_point)
        self.dropT = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob, A):
        x = self.bn(self.conv(x))
        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
        return x


class unit_tcn_skip(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
        super(unit_tcn_skip, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

    def forward(self, x):
        x = self.bn(self.conv(x))
        return x


class unit_gcn(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
        super(unit_gcn, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_point = num_point
        self.groups = groups
        self.num_subset = num_subset
        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)

        if in_channels != out_channels:
            self.down = nn.Sequential(
                nn.Conv2d(in_channels, out_channels, 1),
                nn.BatchNorm2d(out_channels)
            )
        else:
            self.down = lambda x: x

        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                bn_init(m, 1)
        bn_init(self.bn, 1e-6)

        self.Linear_weight = nn.Parameter(torch.zeros(
            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
            0.5 / (out_channels * num_subset)))

        self.Linear_bias = nn.Parameter(torch.zeros(
            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.constant(self.Linear_bias, 1e-6)

        eye_array = []
        for i in range(out_channels):
            eye_array.append(torch.eye(num_point))
        self.eyes = nn.Parameter(torch.tensor(torch.stack(
            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]

    def norm(self, A):
        b, c, h, w = A.size()
        A = A.view(c, self.num_point, self.num_point)
        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
        D_list_12 = (D_list + 0.001)**(-1)
        D_12 = self.eyes * D_list_12
        A = torch.bmm(A, D_12).view(b, c, h, w)
        return A

    def forward(self, x0):
        learn_A = self.DecoupleA.repeat(
            1, self.out_channels // self.groups, 1, 1)
        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)

        x = torch.einsum(
            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
        x = x + self.Linear_bias
        x = self.bn0(x)

        n, kc, t, v = x.size()
        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))

        x = self.bn(x)
        x += self.down(x0)
        x = self.relu(x)
        return x


class TCN_GCN_unit(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
        super(TCN_GCN_unit, self).__init__()
        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
        self.tcn1 = unit_tcn(out_channels, out_channels,
                             stride=stride, num_point=num_point)
        self.relu = nn.ReLU()

        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)

        if not residual:
            self.residual = lambda x: 0

        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x

        else:
            self.residual = unit_tcn_skip(
                in_channels, out_channels, kernel_size=1, stride=stride)
        self.dropSke = DropBlock_Ske(num_point=num_point)
        self.dropT_skip = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob):
        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
        return self.relu(x)


class Model(nn.Module):
    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
        super(Model, self).__init__()

        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)

        A = self.graph.A
        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)

        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
                               block_size, residual=False)
        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l5 = TCN_GCN_unit(
            64, 128, A, groups, num_point, block_size, stride=2)
        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l8 = TCN_GCN_unit(128, 256, A, groups,
                               num_point, block_size, stride=2)
        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)

        self.fc = nn.Linear(256, num_class)
        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
        bn_init(self.data_bn, 1)

    def forward(self, x, keep_prob=0.9):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(
            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        x = self.l1(x, 1.0)
        x = self.l2(x, 1.0)
        x = self.l3(x, 1.0)
        x = self.l4(x, 1.0)
        x = self.l5(x, 1.0)
        x = self.l6(x, 1.0)
        x = self.l7(x, keep_prob)
        x = self.l8(x, keep_prob)
        x = self.l9(x, keep_prob)
        x = self.l10(x, keep_prob)

        # N*M,C,T,V
        c_new = x.size(1)
        x = x.reshape(N, M, c_new, -1)
        x = x.mean(3).mean(1)

        return self.fc(x)
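A minimal usage sketch for the model above, assuming the repository's graph.ntu_rgb_d and model.dropSke/model.dropT modules are importable and a CUDA device is available (several parameters in unit_gcn and TCN_GCN_unit are allocated with device='cuda', so a pure-CPU forward pass will fail):

import torch
from model.decouple_gcn import Model

# Keyword arguments taken from model_args in the config.yaml above.
model = Model(num_class=120, num_point=25, num_person=2, groups=16,
              block_size=41, graph='graph.ntu_rgb_d.Graph',
              graph_args={'labeling_mode': 'spatial'}).cuda()

# Skeleton input layout: (batch N, channels C, frames T, joints V, persons M).
x = torch.randn(2, 3, 64, 25, 2, device='cuda')
logits = model(x, keep_prob=0.9)
print(logits.shape)  # torch.Size([2, 120])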
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_motion_xsub/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3d319d34cbeed08e8f709b4a01a345ff4a510879b95bf04b23ba714830db2d8e
size 29946137
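The three lines above are a Git LFS pointer, not the pickle itself: oid is the SHA-256 of the real file and size is its byte length. A short sketch for checking a downloaded best_acc.pkl against this pointer; the local path is an assumption:

import hashlib
import os

path = 'eval_results/best_acc.pkl'  # assumed local path of the fetched LFS object
h = hashlib.sha256()
with open(path, 'rb') as f:
    for chunk in iter(lambda: f.read(1 << 20), b''):
        h.update(chunk)
assert os.path.getsize(path) == 29946137
assert h.hexdigest() == '3d319d34cbeed08e8f709b4a01a345ff4a510879b95bf04b23ba714830db2d8e'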
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_motion_xsub/log.txt
ADDED
@@ -0,0 +1,746 @@
1 |
+
[ Mon Sep 12 17:08:02 2022 ] Parameters:
|
2 |
+
{'work_dir': './work_dir/ntu120_bone_motion_xsub', 'model_saved_name': './save_models/ntu120_bone_motion_xsub', 'Experiment_name': 'ntu120_bone_motion_xsub', 'config': './config/ntu120_xsub/train_bone_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [2, 3], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
|
3 |
+
|
4 |
+
[ Mon Sep 12 17:08:02 2022 ] Training epoch: 1
|
5 |
+
[ Mon Sep 12 17:08:53 2022 ] Batch(99/243) done. Loss: 3.9996 lr:0.100000
|
6 |
+
[ Mon Sep 12 17:09:37 2022 ] Batch(199/243) done. Loss: 3.4128 lr:0.100000
|
7 |
+
[ Mon Sep 12 17:09:56 2022 ] Eval epoch: 1
|
8 |
+
[ Mon Sep 12 17:12:26 2022 ] Mean test loss of 796 batches: 5.401659965515137.
|
9 |
+
[ Mon Sep 12 17:12:27 2022 ] Top1: 4.08%
|
10 |
+
[ Mon Sep 12 17:12:27 2022 ] Top5: 14.92%
|
11 |
+
[ Mon Sep 12 17:12:27 2022 ] Training epoch: 2
|
12 |
+
[ Mon Sep 12 17:13:01 2022 ] Batch(56/243) done. Loss: 3.3313 lr:0.100000
|
13 |
+
[ Mon Sep 12 17:13:54 2022 ] Batch(156/243) done. Loss: 2.7961 lr:0.100000
|
14 |
+
[ Mon Sep 12 17:14:39 2022 ] Eval epoch: 2
|
15 |
+
[ Mon Sep 12 17:17:10 2022 ] Mean test loss of 796 batches: 5.036680698394775.
|
16 |
+
[ Mon Sep 12 17:17:10 2022 ] Top1: 5.15%
|
17 |
+
[ Mon Sep 12 17:17:10 2022 ] Top5: 17.48%
|
18 |
+
[ Mon Sep 12 17:17:11 2022 ] Training epoch: 3
|
19 |
+
[ Mon Sep 12 17:17:22 2022 ] Batch(13/243) done. Loss: 2.6744 lr:0.100000
|
20 |
+
[ Mon Sep 12 17:18:15 2022 ] Batch(113/243) done. Loss: 2.3809 lr:0.100000
|
21 |
+
[ Mon Sep 12 17:19:08 2022 ] Batch(213/243) done. Loss: 2.1960 lr:0.100000
|
22 |
+
[ Mon Sep 12 17:19:23 2022 ] Eval epoch: 3
|
23 |
+
[ Mon Sep 12 17:21:54 2022 ] Mean test loss of 796 batches: 3.8832848072052.
|
24 |
+
[ Mon Sep 12 17:21:54 2022 ] Top1: 13.53%
|
25 |
+
[ Mon Sep 12 17:21:54 2022 ] Top5: 34.60%
|
26 |
+
[ Mon Sep 12 17:21:55 2022 ] Training epoch: 4
|
27 |
+
[ Mon Sep 12 17:22:36 2022 ] Batch(70/243) done. Loss: 1.9707 lr:0.100000
|
28 |
+
[ Mon Sep 12 17:23:29 2022 ] Batch(170/243) done. Loss: 1.7194 lr:0.100000
|
29 |
+
[ Mon Sep 12 17:24:08 2022 ] Eval epoch: 4
|
30 |
+
[ Mon Sep 12 17:26:38 2022 ] Mean test loss of 796 batches: 5.208371162414551.
|
31 |
+
[ Mon Sep 12 17:26:39 2022 ] Top1: 9.02%
|
32 |
+
[ Mon Sep 12 17:26:39 2022 ] Top5: 27.73%
|
33 |
+
[ Mon Sep 12 17:26:39 2022 ] Training epoch: 5
|
34 |
+
[ Mon Sep 12 17:26:58 2022 ] Batch(27/243) done. Loss: 1.9087 lr:0.100000
|
35 |
+
[ Mon Sep 12 17:27:51 2022 ] Batch(127/243) done. Loss: 1.8244 lr:0.100000
|
36 |
+
[ Mon Sep 12 17:28:44 2022 ] Batch(227/243) done. Loss: 1.6479 lr:0.100000
|
37 |
+
[ Mon Sep 12 17:28:52 2022 ] Eval epoch: 5
|
38 |
+
[ Mon Sep 12 17:31:22 2022 ] Mean test loss of 796 batches: 3.828791856765747.
|
39 |
+
[ Mon Sep 12 17:31:23 2022 ] Top1: 17.41%
|
40 |
+
[ Mon Sep 12 17:31:23 2022 ] Top5: 45.41%
|
41 |
+
[ Mon Sep 12 17:31:23 2022 ] Training epoch: 6
|
42 |
+
[ Mon Sep 12 17:32:11 2022 ] Batch(84/243) done. Loss: 1.4387 lr:0.100000
|
43 |
+
[ Mon Sep 12 17:33:04 2022 ] Batch(184/243) done. Loss: 1.5802 lr:0.100000
|
44 |
+
[ Mon Sep 12 17:33:35 2022 ] Eval epoch: 6
|
45 |
+
[ Mon Sep 12 17:36:05 2022 ] Mean test loss of 796 batches: 2.96073055267334.
|
46 |
+
[ Mon Sep 12 17:36:05 2022 ] Top1: 24.78%
|
47 |
+
[ Mon Sep 12 17:36:06 2022 ] Top5: 55.59%
|
48 |
+
[ Mon Sep 12 17:36:06 2022 ] Training epoch: 7
|
49 |
+
[ Mon Sep 12 17:36:31 2022 ] Batch(41/243) done. Loss: 1.4074 lr:0.100000
|
50 |
+
[ Mon Sep 12 17:37:24 2022 ] Batch(141/243) done. Loss: 1.2927 lr:0.100000
|
51 |
+
[ Mon Sep 12 17:38:17 2022 ] Batch(241/243) done. Loss: 1.3877 lr:0.100000
|
52 |
+
[ Mon Sep 12 17:38:17 2022 ] Eval epoch: 7
|
53 |
+
[ Mon Sep 12 17:40:47 2022 ] Mean test loss of 796 batches: 3.244659662246704.
|
54 |
+
[ Mon Sep 12 17:40:48 2022 ] Top1: 24.42%
|
55 |
+
[ Mon Sep 12 17:40:48 2022 ] Top5: 55.99%
|
56 |
+
[ Mon Sep 12 17:40:48 2022 ] Training epoch: 8
|
57 |
+
[ Mon Sep 12 17:41:44 2022 ] Batch(98/243) done. Loss: 1.2548 lr:0.100000
|
58 |
+
[ Mon Sep 12 17:42:37 2022 ] Batch(198/243) done. Loss: 1.1425 lr:0.100000
|
59 |
+
[ Mon Sep 12 17:43:00 2022 ] Eval epoch: 8
|
60 |
+
[ Mon Sep 12 17:45:29 2022 ] Mean test loss of 796 batches: 3.560640811920166.
|
61 |
+
[ Mon Sep 12 17:45:30 2022 ] Top1: 23.09%
|
62 |
+
[ Mon Sep 12 17:45:30 2022 ] Top5: 54.99%
|
63 |
+
[ Mon Sep 12 17:45:31 2022 ] Training epoch: 9
|
64 |
+
[ Mon Sep 12 17:46:03 2022 ] Batch(55/243) done. Loss: 1.1511 lr:0.100000
|
65 |
+
[ Mon Sep 12 17:46:56 2022 ] Batch(155/243) done. Loss: 1.0868 lr:0.100000
|
66 |
+
[ Mon Sep 12 17:47:42 2022 ] Eval epoch: 9
|
67 |
+
[ Mon Sep 12 17:50:13 2022 ] Mean test loss of 796 batches: 3.479330539703369.
|
68 |
+
[ Mon Sep 12 17:50:13 2022 ] Top1: 28.47%
|
69 |
+
[ Mon Sep 12 17:50:14 2022 ] Top5: 62.28%
|
70 |
+
[ Mon Sep 12 17:50:14 2022 ] Training epoch: 10
|
71 |
+
[ Mon Sep 12 17:50:24 2022 ] Batch(12/243) done. Loss: 1.1992 lr:0.100000
|
72 |
+
[ Mon Sep 12 17:51:17 2022 ] Batch(112/243) done. Loss: 0.9321 lr:0.100000
|
73 |
+
[ Mon Sep 12 17:52:10 2022 ] Batch(212/243) done. Loss: 1.1290 lr:0.100000
|
74 |
+
[ Mon Sep 12 17:52:26 2022 ] Eval epoch: 10
|
75 |
+
[ Mon Sep 12 17:54:55 2022 ] Mean test loss of 796 batches: 3.548513650894165.
|
76 |
+
[ Mon Sep 12 17:54:56 2022 ] Top1: 23.18%
|
77 |
+
[ Mon Sep 12 17:54:56 2022 ] Top5: 57.00%
|
78 |
+
[ Mon Sep 12 17:54:56 2022 ] Training epoch: 11
|
79 |
+
[ Mon Sep 12 17:55:36 2022 ] Batch(69/243) done. Loss: 0.7892 lr:0.100000
|
80 |
+
[ Mon Sep 12 17:56:29 2022 ] Batch(169/243) done. Loss: 0.8913 lr:0.100000
|
81 |
+
[ Mon Sep 12 17:57:08 2022 ] Eval epoch: 11
|
82 |
+
[ Mon Sep 12 17:59:38 2022 ] Mean test loss of 796 batches: 3.6393656730651855.
|
83 |
+
[ Mon Sep 12 17:59:38 2022 ] Top1: 29.19%
|
84 |
+
[ Mon Sep 12 17:59:39 2022 ] Top5: 64.37%
|
85 |
+
[ Mon Sep 12 17:59:39 2022 ] Training epoch: 12
|
86 |
+
[ Mon Sep 12 17:59:56 2022 ] Batch(26/243) done. Loss: 1.1150 lr:0.100000
|
87 |
+
[ Mon Sep 12 18:00:49 2022 ] Batch(126/243) done. Loss: 1.1215 lr:0.100000
|
88 |
+
[ Mon Sep 12 18:01:42 2022 ] Batch(226/243) done. Loss: 0.8452 lr:0.100000
|
89 |
+
[ Mon Sep 12 18:01:51 2022 ] Eval epoch: 12
|
90 |
+
[ Mon Sep 12 18:04:20 2022 ] Mean test loss of 796 batches: 3.349566698074341.
|
91 |
+
[ Mon Sep 12 18:04:21 2022 ] Top1: 27.39%
|
92 |
+
[ Mon Sep 12 18:04:21 2022 ] Top5: 61.31%
|
93 |
+
[ Mon Sep 12 18:04:21 2022 ] Training epoch: 13
|
94 |
+
[ Mon Sep 12 18:05:09 2022 ] Batch(83/243) done. Loss: 0.7315 lr:0.100000
|
95 |
+
[ Mon Sep 12 18:06:02 2022 ] Batch(183/243) done. Loss: 0.9912 lr:0.100000
|
96 |
+
[ Mon Sep 12 18:06:33 2022 ] Eval epoch: 13
|
97 |
+
[ Mon Sep 12 18:09:03 2022 ] Mean test loss of 796 batches: 3.827038526535034.
|
98 |
+
[ Mon Sep 12 18:09:03 2022 ] Top1: 24.15%
|
99 |
+
[ Mon Sep 12 18:09:03 2022 ] Top5: 54.18%
|
100 |
+
[ Mon Sep 12 18:09:04 2022 ] Training epoch: 14
|
101 |
+
[ Mon Sep 12 18:09:28 2022 ] Batch(40/243) done. Loss: 0.7388 lr:0.100000
|
102 |
+
[ Mon Sep 12 18:10:21 2022 ] Batch(140/243) done. Loss: 0.9804 lr:0.100000
|
103 |
+
[ Mon Sep 12 18:11:14 2022 ] Batch(240/243) done. Loss: 1.0120 lr:0.100000
|
104 |
+
[ Mon Sep 12 18:11:15 2022 ] Eval epoch: 14
|
105 |
+
[ Mon Sep 12 18:13:45 2022 ] Mean test loss of 796 batches: 2.664210081100464.
|
106 |
+
[ Mon Sep 12 18:13:46 2022 ] Top1: 38.95%
|
107 |
+
[ Mon Sep 12 18:13:46 2022 ] Top5: 74.01%
|
108 |
+
[ Mon Sep 12 18:13:46 2022 ] Training epoch: 15
|
109 |
+
[ Mon Sep 12 18:14:41 2022 ] Batch(97/243) done. Loss: 0.6941 lr:0.100000
|
110 |
+
[ Mon Sep 12 18:15:34 2022 ] Batch(197/243) done. Loss: 0.9211 lr:0.100000
|
111 |
+
[ Mon Sep 12 18:15:58 2022 ] Eval epoch: 15
|
112 |
+
[ Mon Sep 12 18:18:28 2022 ] Mean test loss of 796 batches: 3.5780069828033447.
|
113 |
+
[ Mon Sep 12 18:18:28 2022 ] Top1: 25.70%
|
114 |
+
[ Mon Sep 12 18:18:28 2022 ] Top5: 56.70%
|
115 |
+
[ Mon Sep 12 18:18:29 2022 ] Training epoch: 16
|
116 |
+
[ Mon Sep 12 18:19:01 2022 ] Batch(54/243) done. Loss: 0.9974 lr:0.100000
|
117 |
+
[ Mon Sep 12 18:19:54 2022 ] Batch(154/243) done. Loss: 0.7709 lr:0.100000
|
118 |
+
[ Mon Sep 12 18:20:41 2022 ] Eval epoch: 16
|
119 |
+
[ Mon Sep 12 18:23:10 2022 ] Mean test loss of 796 batches: 3.6213204860687256.
|
120 |
+
[ Mon Sep 12 18:23:10 2022 ] Top1: 29.02%
|
121 |
+
[ Mon Sep 12 18:23:11 2022 ] Top5: 64.59%
|
122 |
+
[ Mon Sep 12 18:23:11 2022 ] Training epoch: 17
|
123 |
+
[ Mon Sep 12 18:23:20 2022 ] Batch(11/243) done. Loss: 0.6434 lr:0.100000
|
124 |
+
[ Mon Sep 12 18:24:13 2022 ] Batch(111/243) done. Loss: 0.8069 lr:0.100000
|
125 |
+
[ Mon Sep 12 18:25:06 2022 ] Batch(211/243) done. Loss: 0.7576 lr:0.100000
|
126 |
+
[ Mon Sep 12 18:25:23 2022 ] Eval epoch: 17
|
127 |
+
[ Mon Sep 12 18:27:52 2022 ] Mean test loss of 796 batches: 7.608621120452881.
|
128 |
+
[ Mon Sep 12 18:27:52 2022 ] Top1: 14.36%
|
129 |
+
[ Mon Sep 12 18:27:52 2022 ] Top5: 36.52%
|
130 |
+
[ Mon Sep 12 18:27:53 2022 ] Training epoch: 18
|
131 |
+
[ Mon Sep 12 18:28:32 2022 ] Batch(68/243) done. Loss: 0.5996 lr:0.100000
|
132 |
+
[ Mon Sep 12 18:29:25 2022 ] Batch(168/243) done. Loss: 0.7372 lr:0.100000
|
133 |
+
[ Mon Sep 12 18:30:04 2022 ] Eval epoch: 18
|
134 |
+
[ Mon Sep 12 18:32:34 2022 ] Mean test loss of 796 batches: 3.6257283687591553.
|
135 |
+
[ Mon Sep 12 18:32:34 2022 ] Top1: 32.63%
|
136 |
+
[ Mon Sep 12 18:32:35 2022 ] Top5: 68.10%
|
137 |
+
[ Mon Sep 12 18:32:35 2022 ] Training epoch: 19
|
138 |
+
[ Mon Sep 12 18:32:52 2022 ] Batch(25/243) done. Loss: 0.7385 lr:0.100000
|
139 |
+
[ Mon Sep 12 18:33:45 2022 ] Batch(125/243) done. Loss: 0.5734 lr:0.100000
|
140 |
+
[ Mon Sep 12 18:34:37 2022 ] Batch(225/243) done. Loss: 0.9544 lr:0.100000
|
141 |
+
[ Mon Sep 12 18:34:47 2022 ] Eval epoch: 19
|
142 |
+
[ Mon Sep 12 18:37:15 2022 ] Mean test loss of 796 batches: 5.148682117462158.
|
143 |
+
[ Mon Sep 12 18:37:16 2022 ] Top1: 26.56%
|
144 |
+
[ Mon Sep 12 18:37:16 2022 ] Top5: 59.36%
|
145 |
+
[ Mon Sep 12 18:37:17 2022 ] Training epoch: 20
|
146 |
+
[ Mon Sep 12 18:38:03 2022 ] Batch(82/243) done. Loss: 0.6392 lr:0.100000
|
147 |
+
[ Mon Sep 12 18:38:56 2022 ] Batch(182/243) done. Loss: 0.6307 lr:0.100000
|
148 |
+
[ Mon Sep 12 18:39:28 2022 ] Eval epoch: 20
|
149 |
+
[ Mon Sep 12 18:41:57 2022 ] Mean test loss of 796 batches: 4.334875583648682.
|
150 |
+
[ Mon Sep 12 18:41:58 2022 ] Top1: 25.49%
|
151 |
+
[ Mon Sep 12 18:41:58 2022 ] Top5: 58.52%
|
152 |
+
[ Mon Sep 12 18:41:58 2022 ] Training epoch: 21
|
153 |
+
[ Mon Sep 12 18:42:23 2022 ] Batch(39/243) done. Loss: 0.4598 lr:0.100000
|
154 |
+
[ Mon Sep 12 18:43:15 2022 ] Batch(139/243) done. Loss: 0.6031 lr:0.100000
|
155 |
+
[ Mon Sep 12 18:44:09 2022 ] Batch(239/243) done. Loss: 0.5268 lr:0.100000
|
156 |
+
[ Mon Sep 12 18:44:10 2022 ] Eval epoch: 21
|
157 |
+
[ Mon Sep 12 18:46:39 2022 ] Mean test loss of 796 batches: 3.004617929458618.
|
158 |
+
[ Mon Sep 12 18:46:40 2022 ] Top1: 37.08%
|
159 |
+
[ Mon Sep 12 18:46:40 2022 ] Top5: 71.58%
|
160 |
+
[ Mon Sep 12 18:46:40 2022 ] Training epoch: 22
|
161 |
+
[ Mon Sep 12 18:47:35 2022 ] Batch(96/243) done. Loss: 0.5180 lr:0.100000
|
162 |
+
[ Mon Sep 12 18:48:28 2022 ] Batch(196/243) done. Loss: 0.6307 lr:0.100000
|
163 |
+
[ Mon Sep 12 18:48:53 2022 ] Eval epoch: 22
|
164 |
+
[ Mon Sep 12 18:51:22 2022 ] Mean test loss of 796 batches: 4.371796607971191.
|
165 |
+
[ Mon Sep 12 18:51:22 2022 ] Top1: 27.89%
|
166 |
+
[ Mon Sep 12 18:51:23 2022 ] Top5: 64.40%
|
167 |
+
[ Mon Sep 12 18:51:23 2022 ] Training epoch: 23
|
168 |
+
[ Mon Sep 12 18:51:54 2022 ] Batch(53/243) done. Loss: 0.5186 lr:0.100000
|
169 |
+
[ Mon Sep 12 18:52:47 2022 ] Batch(153/243) done. Loss: 0.6799 lr:0.100000
|
170 |
+
[ Mon Sep 12 18:53:34 2022 ] Eval epoch: 23
|
171 |
+
[ Mon Sep 12 18:56:04 2022 ] Mean test loss of 796 batches: 2.7760326862335205.
|
172 |
+
[ Mon Sep 12 18:56:04 2022 ] Top1: 43.91%
|
173 |
+
[ Mon Sep 12 18:56:04 2022 ] Top5: 77.80%
|
174 |
+
[ Mon Sep 12 18:56:05 2022 ] Training epoch: 24
|
175 |
+
[ Mon Sep 12 18:56:14 2022 ] Batch(10/243) done. Loss: 0.3992 lr:0.100000
|
176 |
+
[ Mon Sep 12 18:57:06 2022 ] Batch(110/243) done. Loss: 0.5360 lr:0.100000
|
177 |
+
[ Mon Sep 12 18:57:59 2022 ] Batch(210/243) done. Loss: 0.6967 lr:0.100000
|
178 |
+
[ Mon Sep 12 18:58:16 2022 ] Eval epoch: 24
|
179 |
+
[ Mon Sep 12 19:00:46 2022 ] Mean test loss of 796 batches: 3.683417797088623.
|
180 |
+
[ Mon Sep 12 19:00:46 2022 ] Top1: 32.41%
|
181 |
+
[ Mon Sep 12 19:00:47 2022 ] Top5: 69.33%
|
182 |
+
[ Mon Sep 12 19:00:47 2022 ] Training epoch: 25
|
183 |
+
[ Mon Sep 12 19:01:26 2022 ] Batch(67/243) done. Loss: 0.3724 lr:0.100000
|
184 |
+
[ Mon Sep 12 19:02:19 2022 ] Batch(167/243) done. Loss: 0.5667 lr:0.100000
|
185 |
+
[ Mon Sep 12 19:02:59 2022 ] Eval epoch: 25
|
186 |
+
[ Mon Sep 12 19:05:28 2022 ] Mean test loss of 796 batches: 3.7700467109680176.
|
187 |
+
[ Mon Sep 12 19:05:29 2022 ] Top1: 35.64%
|
188 |
+
[ Mon Sep 12 19:05:29 2022 ] Top5: 71.80%
|
189 |
+
[ Mon Sep 12 19:05:29 2022 ] Training epoch: 26
|
190 |
+
[ Mon Sep 12 19:05:45 2022 ] Batch(24/243) done. Loss: 0.5268 lr:0.100000
|
191 |
+
[ Mon Sep 12 19:06:38 2022 ] Batch(124/243) done. Loss: 0.5311 lr:0.100000
|
192 |
+
[ Mon Sep 12 19:07:31 2022 ] Batch(224/243) done. Loss: 0.4188 lr:0.100000
|
193 |
+
[ Mon Sep 12 19:07:41 2022 ] Eval epoch: 26
|
194 |
+
[ Mon Sep 12 19:10:10 2022 ] Mean test loss of 796 batches: 3.242473602294922.
|
195 |
+
[ Mon Sep 12 19:10:10 2022 ] Top1: 36.40%
|
196 |
+
[ Mon Sep 12 19:10:11 2022 ] Top5: 71.52%
|
197 |
+
[ Mon Sep 12 19:10:11 2022 ] Training epoch: 27
|
198 |
+
[ Mon Sep 12 19:10:58 2022 ] Batch(81/243) done. Loss: 0.5183 lr:0.100000
|
199 |
+
[ Mon Sep 12 19:11:50 2022 ] Batch(181/243) done. Loss: 0.3025 lr:0.100000
|
200 |
+
[ Mon Sep 12 19:12:23 2022 ] Eval epoch: 27
|
201 |
+
[ Mon Sep 12 19:14:53 2022 ] Mean test loss of 796 batches: 2.796579599380493.
|
202 |
+
[ Mon Sep 12 19:14:53 2022 ] Top1: 39.60%
|
203 |
+
[ Mon Sep 12 19:14:54 2022 ] Top5: 76.10%
|
204 |
+
[ Mon Sep 12 19:14:54 2022 ] Training epoch: 28
|
205 |
+
[ Mon Sep 12 19:15:18 2022 ] Batch(38/243) done. Loss: 0.3922 lr:0.100000
|
206 |
+
[ Mon Sep 12 19:16:11 2022 ] Batch(138/243) done. Loss: 0.3928 lr:0.100000
|
207 |
+
[ Mon Sep 12 19:17:04 2022 ] Batch(238/243) done. Loss: 0.4935 lr:0.100000
|
208 |
+
[ Mon Sep 12 19:17:06 2022 ] Eval epoch: 28
|
209 |
+
[ Mon Sep 12 19:19:35 2022 ] Mean test loss of 796 batches: 3.149681568145752.
|
210 |
+
[ Mon Sep 12 19:19:36 2022 ] Top1: 39.86%
|
211 |
+
[ Mon Sep 12 19:19:36 2022 ] Top5: 71.70%
|
212 |
+
[ Mon Sep 12 19:19:36 2022 ] Training epoch: 29
|
213 |
+
[ Mon Sep 12 19:20:30 2022 ] Batch(95/243) done. Loss: 0.4636 lr:0.100000
|
214 |
+
[ Mon Sep 12 19:21:23 2022 ] Batch(195/243) done. Loss: 0.4148 lr:0.100000
|
215 |
+
[ Mon Sep 12 19:21:48 2022 ] Eval epoch: 29
|
216 |
+
[ Mon Sep 12 19:24:18 2022 ] Mean test loss of 796 batches: 2.6393115520477295.
|
217 |
+
[ Mon Sep 12 19:24:18 2022 ] Top1: 44.02%
|
218 |
+
[ Mon Sep 12 19:24:19 2022 ] Top5: 76.26%
|
219 |
+
[ Mon Sep 12 19:24:19 2022 ] Training epoch: 30
|
220 |
+
[ Mon Sep 12 19:24:50 2022 ] Batch(52/243) done. Loss: 0.2787 lr:0.100000
|
221 |
+
[ Mon Sep 12 19:25:43 2022 ] Batch(152/243) done. Loss: 0.2371 lr:0.100000
|
222 |
+
[ Mon Sep 12 19:26:31 2022 ] Eval epoch: 30
|
223 |
+
[ Mon Sep 12 19:29:00 2022 ] Mean test loss of 796 batches: 4.372233867645264.
|
224 |
+
[ Mon Sep 12 19:29:01 2022 ] Top1: 32.23%
|
225 |
+
[ Mon Sep 12 19:29:01 2022 ] Top5: 66.81%
[ Mon Sep 12 19:29:01 2022 ] Training epoch: 31
[ Mon Sep 12 19:29:09 2022 ] Batch(9/243) done. Loss: 0.3249 lr:0.100000
[ Mon Sep 12 19:30:02 2022 ] Batch(109/243) done. Loss: 0.3918 lr:0.100000
[ Mon Sep 12 19:30:55 2022 ] Batch(209/243) done. Loss: 0.4728 lr:0.100000
[ Mon Sep 12 19:31:13 2022 ] Eval epoch: 31
[ Mon Sep 12 19:33:42 2022 ] Mean test loss of 796 batches: 3.062802314758301.
[ Mon Sep 12 19:33:43 2022 ] Top1: 37.70%
[ Mon Sep 12 19:33:43 2022 ] Top5: 70.54%
[ Mon Sep 12 19:33:43 2022 ] Training epoch: 32
[ Mon Sep 12 19:34:22 2022 ] Batch(66/243) done. Loss: 0.2712 lr:0.100000
[ Mon Sep 12 19:35:14 2022 ] Batch(166/243) done. Loss: 0.3010 lr:0.100000
[ Mon Sep 12 19:35:55 2022 ] Eval epoch: 32
[ Mon Sep 12 19:38:24 2022 ] Mean test loss of 796 batches: 3.370488405227661.
[ Mon Sep 12 19:38:25 2022 ] Top1: 34.81%
[ Mon Sep 12 19:38:25 2022 ] Top5: 65.00%
[ Mon Sep 12 19:38:25 2022 ] Training epoch: 33
[ Mon Sep 12 19:38:42 2022 ] Batch(23/243) done. Loss: 0.2757 lr:0.100000
[ Mon Sep 12 19:39:35 2022 ] Batch(123/243) done. Loss: 0.4600 lr:0.100000
[ Mon Sep 12 19:40:28 2022 ] Batch(223/243) done. Loss: 0.5677 lr:0.100000
[ Mon Sep 12 19:40:38 2022 ] Eval epoch: 33
[ Mon Sep 12 19:43:08 2022 ] Mean test loss of 796 batches: 16.024497985839844.
[ Mon Sep 12 19:43:08 2022 ] Top1: 6.50%
[ Mon Sep 12 19:43:09 2022 ] Top5: 26.47%
[ Mon Sep 12 19:43:09 2022 ] Training epoch: 34
[ Mon Sep 12 19:43:56 2022 ] Batch(80/243) done. Loss: 0.6097 lr:0.100000
[ Mon Sep 12 19:44:49 2022 ] Batch(180/243) done. Loss: 0.3325 lr:0.100000
[ Mon Sep 12 19:45:22 2022 ] Eval epoch: 34
[ Mon Sep 12 19:47:52 2022 ] Mean test loss of 796 batches: 3.6056463718414307.
[ Mon Sep 12 19:47:52 2022 ] Top1: 36.16%
[ Mon Sep 12 19:47:53 2022 ] Top5: 68.21%
[ Mon Sep 12 19:47:53 2022 ] Training epoch: 35
[ Mon Sep 12 19:48:17 2022 ] Batch(37/243) done. Loss: 0.2264 lr:0.100000
[ Mon Sep 12 19:49:10 2022 ] Batch(137/243) done. Loss: 0.4211 lr:0.100000
[ Mon Sep 12 19:50:03 2022 ] Batch(237/243) done. Loss: 0.2841 lr:0.100000
[ Mon Sep 12 19:50:06 2022 ] Eval epoch: 35
[ Mon Sep 12 19:52:36 2022 ] Mean test loss of 796 batches: 2.8799333572387695.
[ Mon Sep 12 19:52:36 2022 ] Top1: 39.62%
[ Mon Sep 12 19:52:36 2022 ] Top5: 72.87%
[ Mon Sep 12 19:52:37 2022 ] Training epoch: 36
[ Mon Sep 12 19:53:31 2022 ] Batch(94/243) done. Loss: 0.6247 lr:0.100000
[ Mon Sep 12 19:54:24 2022 ] Batch(194/243) done. Loss: 0.4778 lr:0.100000
[ Mon Sep 12 19:54:50 2022 ] Eval epoch: 36
[ Mon Sep 12 19:57:19 2022 ] Mean test loss of 796 batches: 7.829808235168457.
[ Mon Sep 12 19:57:20 2022 ] Top1: 17.71%
[ Mon Sep 12 19:57:20 2022 ] Top5: 46.19%
[ Mon Sep 12 19:57:20 2022 ] Training epoch: 37
[ Mon Sep 12 19:57:52 2022 ] Batch(51/243) done. Loss: 0.3482 lr:0.100000
[ Mon Sep 12 19:58:45 2022 ] Batch(151/243) done. Loss: 0.4126 lr:0.100000
[ Mon Sep 12 19:59:33 2022 ] Eval epoch: 37
[ Mon Sep 12 20:02:03 2022 ] Mean test loss of 796 batches: 3.139714002609253.
[ Mon Sep 12 20:02:03 2022 ] Top1: 40.64%
[ Mon Sep 12 20:02:04 2022 ] Top5: 75.03%
[ Mon Sep 12 20:02:04 2022 ] Training epoch: 38
[ Mon Sep 12 20:02:13 2022 ] Batch(8/243) done. Loss: 0.3178 lr:0.100000
[ Mon Sep 12 20:03:06 2022 ] Batch(108/243) done. Loss: 0.4396 lr:0.100000
[ Mon Sep 12 20:03:59 2022 ] Batch(208/243) done. Loss: 0.5314 lr:0.100000
[ Mon Sep 12 20:04:17 2022 ] Eval epoch: 38
[ Mon Sep 12 20:06:47 2022 ] Mean test loss of 796 batches: 3.982567310333252.
[ Mon Sep 12 20:06:47 2022 ] Top1: 34.04%
[ Mon Sep 12 20:06:47 2022 ] Top5: 65.26%
[ Mon Sep 12 20:06:48 2022 ] Training epoch: 39
[ Mon Sep 12 20:07:26 2022 ] Batch(65/243) done. Loss: 0.4634 lr:0.100000
[ Mon Sep 12 20:08:19 2022 ] Batch(165/243) done. Loss: 0.3818 lr:0.100000
[ Mon Sep 12 20:09:00 2022 ] Eval epoch: 39
[ Mon Sep 12 20:11:30 2022 ] Mean test loss of 796 batches: 16.22344970703125.
[ Mon Sep 12 20:11:30 2022 ] Top1: 8.39%
[ Mon Sep 12 20:11:30 2022 ] Top5: 24.31%
[ Mon Sep 12 20:11:31 2022 ] Training epoch: 40
[ Mon Sep 12 20:11:47 2022 ] Batch(22/243) done. Loss: 0.1639 lr:0.100000
[ Mon Sep 12 20:12:39 2022 ] Batch(122/243) done. Loss: 0.2482 lr:0.100000
[ Mon Sep 12 20:13:32 2022 ] Batch(222/243) done. Loss: 0.4236 lr:0.100000
[ Mon Sep 12 20:13:43 2022 ] Eval epoch: 40
[ Mon Sep 12 20:16:13 2022 ] Mean test loss of 796 batches: 4.428019046783447.
[ Mon Sep 12 20:16:14 2022 ] Top1: 35.63%
[ Mon Sep 12 20:16:14 2022 ] Top5: 68.37%
[ Mon Sep 12 20:16:14 2022 ] Training epoch: 41
[ Mon Sep 12 20:17:00 2022 ] Batch(79/243) done. Loss: 0.2300 lr:0.100000
[ Mon Sep 12 20:17:53 2022 ] Batch(179/243) done. Loss: 0.2221 lr:0.100000
[ Mon Sep 12 20:18:27 2022 ] Eval epoch: 41
[ Mon Sep 12 20:20:57 2022 ] Mean test loss of 796 batches: 3.410799503326416.
[ Mon Sep 12 20:20:57 2022 ] Top1: 38.42%
[ Mon Sep 12 20:20:57 2022 ] Top5: 68.94%
[ Mon Sep 12 20:20:58 2022 ] Training epoch: 42
[ Mon Sep 12 20:21:21 2022 ] Batch(36/243) done. Loss: 0.3772 lr:0.100000
[ Mon Sep 12 20:22:14 2022 ] Batch(136/243) done. Loss: 0.4248 lr:0.100000
[ Mon Sep 12 20:23:06 2022 ] Batch(236/243) done. Loss: 0.5014 lr:0.100000
[ Mon Sep 12 20:23:10 2022 ] Eval epoch: 42
[ Mon Sep 12 20:25:40 2022 ] Mean test loss of 796 batches: 5.514777660369873.
[ Mon Sep 12 20:25:40 2022 ] Top1: 26.86%
[ Mon Sep 12 20:25:40 2022 ] Top5: 56.38%
[ Mon Sep 12 20:25:41 2022 ] Training epoch: 43
[ Mon Sep 12 20:26:34 2022 ] Batch(93/243) done. Loss: 0.3111 lr:0.100000
[ Mon Sep 12 20:27:27 2022 ] Batch(193/243) done. Loss: 0.3367 lr:0.100000
[ Mon Sep 12 20:27:53 2022 ] Eval epoch: 43
[ Mon Sep 12 20:30:22 2022 ] Mean test loss of 796 batches: 4.449807643890381.
[ Mon Sep 12 20:30:23 2022 ] Top1: 31.61%
[ Mon Sep 12 20:30:23 2022 ] Top5: 61.38%
[ Mon Sep 12 20:30:23 2022 ] Training epoch: 44
[ Mon Sep 12 20:30:54 2022 ] Batch(50/243) done. Loss: 0.2808 lr:0.100000
[ Mon Sep 12 20:31:47 2022 ] Batch(150/243) done. Loss: 0.4795 lr:0.100000
[ Mon Sep 12 20:32:35 2022 ] Eval epoch: 44
[ Mon Sep 12 20:35:05 2022 ] Mean test loss of 796 batches: 3.5568363666534424.
[ Mon Sep 12 20:35:05 2022 ] Top1: 41.52%
[ Mon Sep 12 20:35:06 2022 ] Top5: 73.82%
[ Mon Sep 12 20:35:06 2022 ] Training epoch: 45
[ Mon Sep 12 20:35:14 2022 ] Batch(7/243) done. Loss: 0.3210 lr:0.100000
[ Mon Sep 12 20:36:07 2022 ] Batch(107/243) done. Loss: 0.4242 lr:0.100000
[ Mon Sep 12 20:37:00 2022 ] Batch(207/243) done. Loss: 0.3617 lr:0.100000
[ Mon Sep 12 20:37:18 2022 ] Eval epoch: 45
[ Mon Sep 12 20:39:48 2022 ] Mean test loss of 796 batches: 4.839462757110596.
[ Mon Sep 12 20:39:49 2022 ] Top1: 34.52%
[ Mon Sep 12 20:39:49 2022 ] Top5: 65.73%
[ Mon Sep 12 20:39:49 2022 ] Training epoch: 46
[ Mon Sep 12 20:40:27 2022 ] Batch(64/243) done. Loss: 0.2956 lr:0.100000
[ Mon Sep 12 20:41:20 2022 ] Batch(164/243) done. Loss: 0.3429 lr:0.100000
[ Mon Sep 12 20:42:02 2022 ] Eval epoch: 46
[ Mon Sep 12 20:44:31 2022 ] Mean test loss of 796 batches: 3.5616369247436523.
[ Mon Sep 12 20:44:32 2022 ] Top1: 41.18%
[ Mon Sep 12 20:44:32 2022 ] Top5: 72.87%
[ Mon Sep 12 20:44:32 2022 ] Training epoch: 47
[ Mon Sep 12 20:44:48 2022 ] Batch(21/243) done. Loss: 0.1968 lr:0.100000
[ Mon Sep 12 20:45:41 2022 ] Batch(121/243) done. Loss: 0.1955 lr:0.100000
[ Mon Sep 12 20:46:33 2022 ] Batch(221/243) done. Loss: 0.3667 lr:0.100000
[ Mon Sep 12 20:46:45 2022 ] Eval epoch: 47
[ Mon Sep 12 20:49:14 2022 ] Mean test loss of 796 batches: 3.485515594482422.
[ Mon Sep 12 20:49:15 2022 ] Top1: 38.04%
[ Mon Sep 12 20:49:15 2022 ] Top5: 70.00%
[ Mon Sep 12 20:49:15 2022 ] Training epoch: 48
[ Mon Sep 12 20:50:01 2022 ] Batch(78/243) done. Loss: 0.3435 lr:0.100000
[ Mon Sep 12 20:50:54 2022 ] Batch(178/243) done. Loss: 0.1893 lr:0.100000
[ Mon Sep 12 20:51:28 2022 ] Eval epoch: 48
[ Mon Sep 12 20:53:57 2022 ] Mean test loss of 796 batches: 4.158741474151611.
[ Mon Sep 12 20:53:58 2022 ] Top1: 38.58%
[ Mon Sep 12 20:53:58 2022 ] Top5: 71.56%
[ Mon Sep 12 20:53:58 2022 ] Training epoch: 49
[ Mon Sep 12 20:54:21 2022 ] Batch(35/243) done. Loss: 0.3753 lr:0.100000
[ Mon Sep 12 20:55:14 2022 ] Batch(135/243) done. Loss: 0.3850 lr:0.100000
[ Mon Sep 12 20:56:07 2022 ] Batch(235/243) done. Loss: 0.3775 lr:0.100000
[ Mon Sep 12 20:56:11 2022 ] Eval epoch: 49
[ Mon Sep 12 20:58:41 2022 ] Mean test loss of 796 batches: 6.0918073654174805.
[ Mon Sep 12 20:58:41 2022 ] Top1: 24.65%
[ Mon Sep 12 20:58:41 2022 ] Top5: 52.45%
[ Mon Sep 12 20:58:42 2022 ] Training epoch: 50
[ Mon Sep 12 20:59:35 2022 ] Batch(92/243) done. Loss: 0.3753 lr:0.100000
[ Mon Sep 12 21:00:28 2022 ] Batch(192/243) done. Loss: 0.2491 lr:0.100000
[ Mon Sep 12 21:00:54 2022 ] Eval epoch: 50
[ Mon Sep 12 21:03:24 2022 ] Mean test loss of 796 batches: 4.761442184448242.
[ Mon Sep 12 21:03:24 2022 ] Top1: 29.23%
[ Mon Sep 12 21:03:24 2022 ] Top5: 61.77%
[ Mon Sep 12 21:03:25 2022 ] Training epoch: 51
[ Mon Sep 12 21:03:55 2022 ] Batch(49/243) done. Loss: 0.2470 lr:0.100000
[ Mon Sep 12 21:04:47 2022 ] Batch(149/243) done. Loss: 0.1682 lr:0.100000
[ Mon Sep 12 21:05:37 2022 ] Eval epoch: 51
[ Mon Sep 12 21:08:07 2022 ] Mean test loss of 796 batches: 4.20699405670166.
[ Mon Sep 12 21:08:07 2022 ] Top1: 39.81%
[ Mon Sep 12 21:08:07 2022 ] Top5: 68.85%
[ Mon Sep 12 21:08:08 2022 ] Training epoch: 52
[ Mon Sep 12 21:08:15 2022 ] Batch(6/243) done. Loss: 0.3685 lr:0.100000
[ Mon Sep 12 21:09:08 2022 ] Batch(106/243) done. Loss: 0.2466 lr:0.100000
[ Mon Sep 12 21:10:01 2022 ] Batch(206/243) done. Loss: 0.3860 lr:0.100000
[ Mon Sep 12 21:10:20 2022 ] Eval epoch: 52
[ Mon Sep 12 21:12:49 2022 ] Mean test loss of 796 batches: 3.3168282508850098.
[ Mon Sep 12 21:12:50 2022 ] Top1: 42.73%
[ Mon Sep 12 21:12:50 2022 ] Top5: 76.45%
[ Mon Sep 12 21:12:50 2022 ] Training epoch: 53
[ Mon Sep 12 21:13:28 2022 ] Batch(63/243) done. Loss: 0.4394 lr:0.100000
[ Mon Sep 12 21:14:21 2022 ] Batch(163/243) done. Loss: 0.3355 lr:0.100000
[ Mon Sep 12 21:15:03 2022 ] Eval epoch: 53
[ Mon Sep 12 21:17:32 2022 ] Mean test loss of 796 batches: 3.2490127086639404.
[ Mon Sep 12 21:17:33 2022 ] Top1: 42.81%
[ Mon Sep 12 21:17:33 2022 ] Top5: 74.85%
[ Mon Sep 12 21:17:33 2022 ] Training epoch: 54
[ Mon Sep 12 21:17:48 2022 ] Batch(20/243) done. Loss: 0.4547 lr:0.100000
[ Mon Sep 12 21:18:41 2022 ] Batch(120/243) done. Loss: 0.2926 lr:0.100000
[ Mon Sep 12 21:19:34 2022 ] Batch(220/243) done. Loss: 0.6257 lr:0.100000
[ Mon Sep 12 21:19:46 2022 ] Eval epoch: 54
[ Mon Sep 12 21:22:15 2022 ] Mean test loss of 796 batches: 4.4578704833984375.
[ Mon Sep 12 21:22:15 2022 ] Top1: 38.24%
[ Mon Sep 12 21:22:16 2022 ] Top5: 70.75%
[ Mon Sep 12 21:22:16 2022 ] Training epoch: 55
[ Mon Sep 12 21:23:01 2022 ] Batch(77/243) done. Loss: 0.5628 lr:0.100000
[ Mon Sep 12 21:23:54 2022 ] Batch(177/243) done. Loss: 0.6820 lr:0.100000
[ Mon Sep 12 21:24:29 2022 ] Eval epoch: 55
[ Mon Sep 12 21:26:59 2022 ] Mean test loss of 796 batches: 4.338413715362549.
[ Mon Sep 12 21:26:59 2022 ] Top1: 39.27%
[ Mon Sep 12 21:26:59 2022 ] Top5: 70.55%
[ Mon Sep 12 21:27:00 2022 ] Training epoch: 56
[ Mon Sep 12 21:27:22 2022 ] Batch(34/243) done. Loss: 0.3782 lr:0.100000
[ Mon Sep 12 21:28:15 2022 ] Batch(134/243) done. Loss: 0.1938 lr:0.100000
[ Mon Sep 12 21:29:07 2022 ] Batch(234/243) done. Loss: 0.2396 lr:0.100000
[ Mon Sep 12 21:29:12 2022 ] Eval epoch: 56
[ Mon Sep 12 21:31:41 2022 ] Mean test loss of 796 batches: 4.485368251800537.
[ Mon Sep 12 21:31:42 2022 ] Top1: 35.11%
[ Mon Sep 12 21:31:42 2022 ] Top5: 67.80%
[ Mon Sep 12 21:31:42 2022 ] Training epoch: 57
[ Mon Sep 12 21:32:35 2022 ] Batch(91/243) done. Loss: 0.3136 lr:0.100000
[ Mon Sep 12 21:33:27 2022 ] Batch(191/243) done. Loss: 0.3970 lr:0.100000
[ Mon Sep 12 21:33:55 2022 ] Eval epoch: 57
[ Mon Sep 12 21:36:24 2022 ] Mean test loss of 796 batches: 4.196974754333496.
[ Mon Sep 12 21:36:24 2022 ] Top1: 35.71%
[ Mon Sep 12 21:36:25 2022 ] Top5: 69.00%
[ Mon Sep 12 21:36:25 2022 ] Training epoch: 58
[ Mon Sep 12 21:36:55 2022 ] Batch(48/243) done. Loss: 0.2219 lr:0.100000
[ Mon Sep 12 21:37:48 2022 ] Batch(148/243) done. Loss: 0.3556 lr:0.100000
[ Mon Sep 12 21:38:38 2022 ] Eval epoch: 58
[ Mon Sep 12 21:41:07 2022 ] Mean test loss of 796 batches: 6.65744686126709.
[ Mon Sep 12 21:41:07 2022 ] Top1: 20.99%
[ Mon Sep 12 21:41:08 2022 ] Top5: 47.98%
[ Mon Sep 12 21:41:08 2022 ] Training epoch: 59
[ Mon Sep 12 21:41:15 2022 ] Batch(5/243) done. Loss: 0.1090 lr:0.100000
[ Mon Sep 12 21:42:08 2022 ] Batch(105/243) done. Loss: 0.2668 lr:0.100000
[ Mon Sep 12 21:43:01 2022 ] Batch(205/243) done. Loss: 0.6769 lr:0.100000
[ Mon Sep 12 21:43:20 2022 ] Eval epoch: 59
[ Mon Sep 12 21:45:50 2022 ] Mean test loss of 796 batches: 4.866194248199463.
[ Mon Sep 12 21:45:50 2022 ] Top1: 36.23%
[ Mon Sep 12 21:45:50 2022 ] Top5: 67.78%
[ Mon Sep 12 21:45:51 2022 ] Training epoch: 60
[ Mon Sep 12 21:46:28 2022 ] Batch(62/243) done. Loss: 0.1987 lr:0.100000
[ Mon Sep 12 21:47:20 2022 ] Batch(162/243) done. Loss: 0.3460 lr:0.100000
[ Mon Sep 12 21:48:03 2022 ] Eval epoch: 60
[ Mon Sep 12 21:50:33 2022 ] Mean test loss of 796 batches: 3.260272979736328.
[ Mon Sep 12 21:50:33 2022 ] Top1: 41.98%
[ Mon Sep 12 21:50:33 2022 ] Top5: 74.38%
[ Mon Sep 12 21:50:34 2022 ] Training epoch: 61
[ Mon Sep 12 21:50:48 2022 ] Batch(19/243) done. Loss: 0.1512 lr:0.010000
[ Mon Sep 12 21:51:41 2022 ] Batch(119/243) done. Loss: 0.1251 lr:0.010000
[ Mon Sep 12 21:52:34 2022 ] Batch(219/243) done. Loss: 0.1873 lr:0.010000
[ Mon Sep 12 21:52:46 2022 ] Eval epoch: 61
[ Mon Sep 12 21:55:16 2022 ] Mean test loss of 796 batches: 2.871124029159546.
[ Mon Sep 12 21:55:16 2022 ] Top1: 49.90%
[ Mon Sep 12 21:55:17 2022 ] Top5: 80.83%
[ Mon Sep 12 21:55:17 2022 ] Training epoch: 62
[ Mon Sep 12 21:56:01 2022 ] Batch(76/243) done. Loss: 0.0721 lr:0.010000
[ Mon Sep 12 21:56:54 2022 ] Batch(176/243) done. Loss: 0.0414 lr:0.010000
[ Mon Sep 12 21:57:29 2022 ] Eval epoch: 62
[ Mon Sep 12 21:59:59 2022 ] Mean test loss of 796 batches: 2.853978157043457.
[ Mon Sep 12 21:59:59 2022 ] Top1: 50.46%
[ Mon Sep 12 22:00:00 2022 ] Top5: 81.38%
[ Mon Sep 12 22:00:00 2022 ] Training epoch: 63
[ Mon Sep 12 22:00:22 2022 ] Batch(33/243) done. Loss: 0.0897 lr:0.010000
[ Mon Sep 12 22:01:14 2022 ] Batch(133/243) done. Loss: 0.0934 lr:0.010000
[ Mon Sep 12 22:02:07 2022 ] Batch(233/243) done. Loss: 0.1227 lr:0.010000
[ Mon Sep 12 22:02:12 2022 ] Eval epoch: 63
[ Mon Sep 12 22:04:42 2022 ] Mean test loss of 796 batches: 2.8253042697906494.
[ Mon Sep 12 22:04:42 2022 ] Top1: 51.65%
[ Mon Sep 12 22:04:43 2022 ] Top5: 82.35%
[ Mon Sep 12 22:04:43 2022 ] Training epoch: 64
[ Mon Sep 12 22:05:35 2022 ] Batch(90/243) done. Loss: 0.0743 lr:0.010000
[ Mon Sep 12 22:06:28 2022 ] Batch(190/243) done. Loss: 0.0875 lr:0.010000
[ Mon Sep 12 22:06:55 2022 ] Eval epoch: 64
[ Mon Sep 12 22:09:25 2022 ] Mean test loss of 796 batches: 2.9669079780578613.
[ Mon Sep 12 22:09:25 2022 ] Top1: 50.87%
[ Mon Sep 12 22:09:26 2022 ] Top5: 82.07%
[ Mon Sep 12 22:09:26 2022 ] Training epoch: 65
[ Mon Sep 12 22:09:55 2022 ] Batch(47/243) done. Loss: 0.0185 lr:0.010000
[ Mon Sep 12 22:10:48 2022 ] Batch(147/243) done. Loss: 0.0595 lr:0.010000
[ Mon Sep 12 22:11:38 2022 ] Eval epoch: 65
[ Mon Sep 12 22:14:08 2022 ] Mean test loss of 796 batches: 2.926304578781128.
[ Mon Sep 12 22:14:08 2022 ] Top1: 51.17%
[ Mon Sep 12 22:14:09 2022 ] Top5: 81.96%
[ Mon Sep 12 22:14:09 2022 ] Training epoch: 66
[ Mon Sep 12 22:14:15 2022 ] Batch(4/243) done. Loss: 0.0893 lr:0.010000
[ Mon Sep 12 22:15:08 2022 ] Batch(104/243) done. Loss: 0.0399 lr:0.010000
[ Mon Sep 12 22:16:01 2022 ] Batch(204/243) done. Loss: 0.0083 lr:0.010000
[ Mon Sep 12 22:16:21 2022 ] Eval epoch: 66
[ Mon Sep 12 22:18:51 2022 ] Mean test loss of 796 batches: 3.071596622467041.
[ Mon Sep 12 22:18:52 2022 ] Top1: 51.42%
[ Mon Sep 12 22:18:52 2022 ] Top5: 82.05%
[ Mon Sep 12 22:18:52 2022 ] Training epoch: 67
[ Mon Sep 12 22:19:29 2022 ] Batch(61/243) done. Loss: 0.0227 lr:0.010000
[ Mon Sep 12 22:20:22 2022 ] Batch(161/243) done. Loss: 0.0420 lr:0.010000
[ Mon Sep 12 22:21:05 2022 ] Eval epoch: 67
[ Mon Sep 12 22:23:34 2022 ] Mean test loss of 796 batches: 3.0440354347229004.
[ Mon Sep 12 22:23:35 2022 ] Top1: 51.06%
[ Mon Sep 12 22:23:35 2022 ] Top5: 81.90%
[ Mon Sep 12 22:23:35 2022 ] Training epoch: 68
[ Mon Sep 12 22:23:49 2022 ] Batch(18/243) done. Loss: 0.0644 lr:0.010000
[ Mon Sep 12 22:24:42 2022 ] Batch(118/243) done. Loss: 0.0124 lr:0.010000
[ Mon Sep 12 22:25:35 2022 ] Batch(218/243) done. Loss: 0.0345 lr:0.010000
[ Mon Sep 12 22:25:48 2022 ] Eval epoch: 68
[ Mon Sep 12 22:28:18 2022 ] Mean test loss of 796 batches: 3.002916097640991.
[ Mon Sep 12 22:28:18 2022 ] Top1: 51.48%
[ Mon Sep 12 22:28:19 2022 ] Top5: 82.18%
[ Mon Sep 12 22:28:19 2022 ] Training epoch: 69
[ Mon Sep 12 22:29:03 2022 ] Batch(75/243) done. Loss: 0.0655 lr:0.010000
[ Mon Sep 12 22:29:56 2022 ] Batch(175/243) done. Loss: 0.1207 lr:0.010000
[ Mon Sep 12 22:30:31 2022 ] Eval epoch: 69
[ Mon Sep 12 22:33:01 2022 ] Mean test loss of 796 batches: 3.0260205268859863.
[ Mon Sep 12 22:33:02 2022 ] Top1: 51.93%
[ Mon Sep 12 22:33:02 2022 ] Top5: 82.39%
[ Mon Sep 12 22:33:02 2022 ] Training epoch: 70
[ Mon Sep 12 22:33:23 2022 ] Batch(32/243) done. Loss: 0.0986 lr:0.010000
[ Mon Sep 12 22:34:16 2022 ] Batch(132/243) done. Loss: 0.0726 lr:0.010000
[ Mon Sep 12 22:35:09 2022 ] Batch(232/243) done. Loss: 0.0555 lr:0.010000
[ Mon Sep 12 22:35:15 2022 ] Eval epoch: 70
[ Mon Sep 12 22:37:45 2022 ] Mean test loss of 796 batches: 2.9354891777038574.
[ Mon Sep 12 22:37:45 2022 ] Top1: 52.34%
[ Mon Sep 12 22:37:46 2022 ] Top5: 82.85%
[ Mon Sep 12 22:37:46 2022 ] Training epoch: 71
[ Mon Sep 12 22:38:37 2022 ] Batch(89/243) done. Loss: 0.0333 lr:0.010000
[ Mon Sep 12 22:39:30 2022 ] Batch(189/243) done. Loss: 0.0335 lr:0.010000
[ Mon Sep 12 22:39:58 2022 ] Eval epoch: 71
[ Mon Sep 12 22:42:29 2022 ] Mean test loss of 796 batches: 3.0900039672851562.
[ Mon Sep 12 22:42:29 2022 ] Top1: 51.37%
[ Mon Sep 12 22:42:29 2022 ] Top5: 82.26%
[ Mon Sep 12 22:42:30 2022 ] Training epoch: 72
[ Mon Sep 12 22:42:58 2022 ] Batch(46/243) done. Loss: 0.0120 lr:0.010000
[ Mon Sep 12 22:43:51 2022 ] Batch(146/243) done. Loss: 0.0276 lr:0.010000
[ Mon Sep 12 22:44:42 2022 ] Eval epoch: 72
[ Mon Sep 12 22:47:12 2022 ] Mean test loss of 796 batches: 3.181964874267578.
[ Mon Sep 12 22:47:12 2022 ] Top1: 50.43%
[ Mon Sep 12 22:47:13 2022 ] Top5: 81.50%
[ Mon Sep 12 22:47:13 2022 ] Training epoch: 73
[ Mon Sep 12 22:47:19 2022 ] Batch(3/243) done. Loss: 0.0571 lr:0.010000
[ Mon Sep 12 22:48:12 2022 ] Batch(103/243) done. Loss: 0.0637 lr:0.010000
[ Mon Sep 12 22:49:04 2022 ] Batch(203/243) done. Loss: 0.0149 lr:0.010000
[ Mon Sep 12 22:49:25 2022 ] Eval epoch: 73
[ Mon Sep 12 22:51:55 2022 ] Mean test loss of 796 batches: 3.086237668991089.
[ Mon Sep 12 22:51:56 2022 ] Top1: 51.54%
[ Mon Sep 12 22:51:56 2022 ] Top5: 82.37%
[ Mon Sep 12 22:51:56 2022 ] Training epoch: 74
[ Mon Sep 12 22:52:32 2022 ] Batch(60/243) done. Loss: 0.0192 lr:0.010000
[ Mon Sep 12 22:53:25 2022 ] Batch(160/243) done. Loss: 0.0189 lr:0.010000
[ Mon Sep 12 22:54:09 2022 ] Eval epoch: 74
[ Mon Sep 12 22:56:39 2022 ] Mean test loss of 796 batches: 3.1431281566619873.
[ Mon Sep 12 22:56:39 2022 ] Top1: 51.25%
[ Mon Sep 12 22:56:39 2022 ] Top5: 82.38%
[ Mon Sep 12 22:56:40 2022 ] Training epoch: 75
[ Mon Sep 12 22:56:52 2022 ] Batch(17/243) done. Loss: 0.0360 lr:0.010000
[ Mon Sep 12 22:57:45 2022 ] Batch(117/243) done. Loss: 0.0251 lr:0.010000
[ Mon Sep 12 22:58:38 2022 ] Batch(217/243) done. Loss: 0.0442 lr:0.010000
[ Mon Sep 12 22:58:52 2022 ] Eval epoch: 75
[ Mon Sep 12 23:01:21 2022 ] Mean test loss of 796 batches: 3.1644086837768555.
[ Mon Sep 12 23:01:22 2022 ] Top1: 51.29%
[ Mon Sep 12 23:01:22 2022 ] Top5: 82.37%
[ Mon Sep 12 23:01:22 2022 ] Training epoch: 76
[ Mon Sep 12 23:02:06 2022 ] Batch(74/243) done. Loss: 0.1041 lr:0.010000
[ Mon Sep 12 23:02:59 2022 ] Batch(174/243) done. Loss: 0.0044 lr:0.010000
[ Mon Sep 12 23:03:35 2022 ] Eval epoch: 76
[ Mon Sep 12 23:06:05 2022 ] Mean test loss of 796 batches: 3.1715903282165527.
[ Mon Sep 12 23:06:05 2022 ] Top1: 50.93%
[ Mon Sep 12 23:06:05 2022 ] Top5: 82.09%
[ Mon Sep 12 23:06:06 2022 ] Training epoch: 77
[ Mon Sep 12 23:06:27 2022 ] Batch(31/243) done. Loss: 0.0590 lr:0.010000
[ Mon Sep 12 23:07:19 2022 ] Batch(131/243) done. Loss: 0.0440 lr:0.010000
[ Mon Sep 12 23:08:12 2022 ] Batch(231/243) done. Loss: 0.0496 lr:0.010000
[ Mon Sep 12 23:08:18 2022 ] Eval epoch: 77
[ Mon Sep 12 23:10:48 2022 ] Mean test loss of 796 batches: 3.0519044399261475.
[ Mon Sep 12 23:10:48 2022 ] Top1: 52.19%
[ Mon Sep 12 23:10:49 2022 ] Top5: 82.47%
[ Mon Sep 12 23:10:49 2022 ] Training epoch: 78
[ Mon Sep 12 23:11:40 2022 ] Batch(88/243) done. Loss: 0.0440 lr:0.010000
[ Mon Sep 12 23:12:33 2022 ] Batch(188/243) done. Loss: 0.0604 lr:0.010000
[ Mon Sep 12 23:13:01 2022 ] Eval epoch: 78
[ Mon Sep 12 23:15:31 2022 ] Mean test loss of 796 batches: 3.2088100910186768.
[ Mon Sep 12 23:15:31 2022 ] Top1: 51.09%
[ Mon Sep 12 23:15:32 2022 ] Top5: 81.82%
[ Mon Sep 12 23:15:32 2022 ] Training epoch: 79
[ Mon Sep 12 23:16:00 2022 ] Batch(45/243) done. Loss: 0.0630 lr:0.010000
[ Mon Sep 12 23:16:53 2022 ] Batch(145/243) done. Loss: 0.0380 lr:0.010000
[ Mon Sep 12 23:17:44 2022 ] Eval epoch: 79
[ Mon Sep 12 23:20:14 2022 ] Mean test loss of 796 batches: 3.236017942428589.
[ Mon Sep 12 23:20:14 2022 ] Top1: 51.50%
[ Mon Sep 12 23:20:14 2022 ] Top5: 82.01%
[ Mon Sep 12 23:20:15 2022 ] Training epoch: 80
[ Mon Sep 12 23:20:20 2022 ] Batch(2/243) done. Loss: 0.0371 lr:0.010000
[ Mon Sep 12 23:21:13 2022 ] Batch(102/243) done. Loss: 0.0451 lr:0.010000
[ Mon Sep 12 23:22:06 2022 ] Batch(202/243) done. Loss: 0.0111 lr:0.010000
[ Mon Sep 12 23:22:27 2022 ] Eval epoch: 80
[ Mon Sep 12 23:24:57 2022 ] Mean test loss of 796 batches: 3.183028221130371.
[ Mon Sep 12 23:24:57 2022 ] Top1: 51.42%
[ Mon Sep 12 23:24:57 2022 ] Top5: 82.27%
[ Mon Sep 12 23:24:58 2022 ] Training epoch: 81
[ Mon Sep 12 23:25:33 2022 ] Batch(59/243) done. Loss: 0.0484 lr:0.001000
[ Mon Sep 12 23:26:26 2022 ] Batch(159/243) done. Loss: 0.0440 lr:0.001000
[ Mon Sep 12 23:27:10 2022 ] Eval epoch: 81
[ Mon Sep 12 23:29:40 2022 ] Mean test loss of 796 batches: 3.242577075958252.
[ Mon Sep 12 23:29:40 2022 ] Top1: 50.64%
[ Mon Sep 12 23:29:41 2022 ] Top5: 81.85%
[ Mon Sep 12 23:29:41 2022 ] Training epoch: 82
[ Mon Sep 12 23:29:53 2022 ] Batch(16/243) done. Loss: 0.0190 lr:0.001000
[ Mon Sep 12 23:30:46 2022 ] Batch(116/243) done. Loss: 0.0148 lr:0.001000
[ Mon Sep 12 23:31:39 2022 ] Batch(216/243) done. Loss: 0.0409 lr:0.001000
[ Mon Sep 12 23:31:53 2022 ] Eval epoch: 82
[ Mon Sep 12 23:34:23 2022 ] Mean test loss of 796 batches: 3.20725154876709.
[ Mon Sep 12 23:34:23 2022 ] Top1: 51.45%
[ Mon Sep 12 23:34:24 2022 ] Top5: 82.32%
[ Mon Sep 12 23:34:24 2022 ] Training epoch: 83
[ Mon Sep 12 23:35:07 2022 ] Batch(73/243) done. Loss: 0.0874 lr:0.001000
[ Mon Sep 12 23:35:59 2022 ] Batch(173/243) done. Loss: 0.0335 lr:0.001000
[ Mon Sep 12 23:36:36 2022 ] Eval epoch: 83
[ Mon Sep 12 23:39:06 2022 ] Mean test loss of 796 batches: 3.1513242721557617.
[ Mon Sep 12 23:39:06 2022 ] Top1: 51.75%
[ Mon Sep 12 23:39:07 2022 ] Top5: 82.37%
[ Mon Sep 12 23:39:07 2022 ] Training epoch: 84
[ Mon Sep 12 23:39:27 2022 ] Batch(30/243) done. Loss: 0.0181 lr:0.001000
[ Mon Sep 12 23:40:20 2022 ] Batch(130/243) done. Loss: 0.0753 lr:0.001000
[ Mon Sep 12 23:41:13 2022 ] Batch(230/243) done. Loss: 0.0129 lr:0.001000
[ Mon Sep 12 23:41:20 2022 ] Eval epoch: 84
[ Mon Sep 12 23:43:49 2022 ] Mean test loss of 796 batches: 3.1976230144500732.
[ Mon Sep 12 23:43:50 2022 ] Top1: 51.62%
[ Mon Sep 12 23:43:50 2022 ] Top5: 82.35%
[ Mon Sep 12 23:43:50 2022 ] Training epoch: 85
[ Mon Sep 12 23:44:41 2022 ] Batch(87/243) done. Loss: 0.0158 lr:0.001000
[ Mon Sep 12 23:45:34 2022 ] Batch(187/243) done. Loss: 0.0364 lr:0.001000
[ Mon Sep 12 23:46:03 2022 ] Eval epoch: 85
[ Mon Sep 12 23:48:32 2022 ] Mean test loss of 796 batches: 3.187116861343384.
[ Mon Sep 12 23:48:33 2022 ] Top1: 51.59%
[ Mon Sep 12 23:48:33 2022 ] Top5: 82.31%
[ Mon Sep 12 23:48:33 2022 ] Training epoch: 86
[ Mon Sep 12 23:49:01 2022 ] Batch(44/243) done. Loss: 0.0126 lr:0.001000
[ Mon Sep 12 23:49:54 2022 ] Batch(144/243) done. Loss: 0.0161 lr:0.001000
[ Mon Sep 12 23:50:46 2022 ] Eval epoch: 86
[ Mon Sep 12 23:53:15 2022 ] Mean test loss of 796 batches: 3.2308666706085205.
[ Mon Sep 12 23:53:16 2022 ] Top1: 51.43%
[ Mon Sep 12 23:53:16 2022 ] Top5: 82.06%
[ Mon Sep 12 23:53:16 2022 ] Training epoch: 87
[ Mon Sep 12 23:53:21 2022 ] Batch(1/243) done. Loss: 0.0844 lr:0.001000
[ Mon Sep 12 23:54:14 2022 ] Batch(101/243) done. Loss: 0.0450 lr:0.001000
[ Mon Sep 12 23:55:07 2022 ] Batch(201/243) done. Loss: 0.0782 lr:0.001000
[ Mon Sep 12 23:55:29 2022 ] Eval epoch: 87
[ Mon Sep 12 23:57:59 2022 ] Mean test loss of 796 batches: 3.226652145385742.
[ Mon Sep 12 23:57:59 2022 ] Top1: 51.32%
[ Mon Sep 12 23:57:59 2022 ] Top5: 82.01%
[ Mon Sep 12 23:58:00 2022 ] Training epoch: 88
[ Mon Sep 12 23:58:35 2022 ] Batch(58/243) done. Loss: 0.0215 lr:0.001000
[ Mon Sep 12 23:59:28 2022 ] Batch(158/243) done. Loss: 0.0571 lr:0.001000
[ Tue Sep 13 00:00:12 2022 ] Eval epoch: 88
[ Tue Sep 13 00:02:41 2022 ] Mean test loss of 796 batches: 3.2637722492218018.
[ Tue Sep 13 00:02:42 2022 ] Top1: 50.40%
[ Tue Sep 13 00:02:42 2022 ] Top5: 81.56%
[ Tue Sep 13 00:02:42 2022 ] Training epoch: 89
[ Tue Sep 13 00:02:55 2022 ] Batch(15/243) done. Loss: 0.0201 lr:0.001000
[ Tue Sep 13 00:03:48 2022 ] Batch(115/243) done. Loss: 0.0867 lr:0.001000
[ Tue Sep 13 00:04:40 2022 ] Batch(215/243) done. Loss: 0.0216 lr:0.001000
[ Tue Sep 13 00:04:55 2022 ] Eval epoch: 89
[ Tue Sep 13 00:07:24 2022 ] Mean test loss of 796 batches: 3.2266507148742676.
[ Tue Sep 13 00:07:24 2022 ] Top1: 50.92%
[ Tue Sep 13 00:07:25 2022 ] Top5: 81.87%
[ Tue Sep 13 00:07:25 2022 ] Training epoch: 90
[ Tue Sep 13 00:08:08 2022 ] Batch(72/243) done. Loss: 0.0154 lr:0.001000
[ Tue Sep 13 00:09:01 2022 ] Batch(172/243) done. Loss: 0.0784 lr:0.001000
[ Tue Sep 13 00:09:38 2022 ] Eval epoch: 90
[ Tue Sep 13 00:12:07 2022 ] Mean test loss of 796 batches: 3.225825071334839.
[ Tue Sep 13 00:12:08 2022 ] Top1: 51.16%
[ Tue Sep 13 00:12:08 2022 ] Top5: 81.81%
[ Tue Sep 13 00:12:08 2022 ] Training epoch: 91
[ Tue Sep 13 00:12:28 2022 ] Batch(29/243) done. Loss: 0.0316 lr:0.001000
[ Tue Sep 13 00:13:21 2022 ] Batch(129/243) done. Loss: 0.0128 lr:0.001000
[ Tue Sep 13 00:14:14 2022 ] Batch(229/243) done. Loss: 0.0234 lr:0.001000
[ Tue Sep 13 00:14:21 2022 ] Eval epoch: 91
[ Tue Sep 13 00:16:51 2022 ] Mean test loss of 796 batches: 3.2642405033111572.
[ Tue Sep 13 00:16:51 2022 ] Top1: 50.66%
[ Tue Sep 13 00:16:52 2022 ] Top5: 81.73%
[ Tue Sep 13 00:16:52 2022 ] Training epoch: 92
[ Tue Sep 13 00:17:42 2022 ] Batch(86/243) done. Loss: 0.0553 lr:0.001000
[ Tue Sep 13 00:18:34 2022 ] Batch(186/243) done. Loss: 0.0227 lr:0.001000
[ Tue Sep 13 00:19:04 2022 ] Eval epoch: 92
[ Tue Sep 13 00:21:34 2022 ] Mean test loss of 796 batches: 3.32381010055542.
[ Tue Sep 13 00:21:34 2022 ] Top1: 49.58%
[ Tue Sep 13 00:21:35 2022 ] Top5: 81.17%
[ Tue Sep 13 00:21:35 2022 ] Training epoch: 93
[ Tue Sep 13 00:22:02 2022 ] Batch(43/243) done. Loss: 0.0260 lr:0.001000
[ Tue Sep 13 00:22:55 2022 ] Batch(143/243) done. Loss: 0.0413 lr:0.001000
[ Tue Sep 13 00:23:47 2022 ] Eval epoch: 93
[ Tue Sep 13 00:26:17 2022 ] Mean test loss of 796 batches: 3.227717638015747.
[ Tue Sep 13 00:26:17 2022 ] Top1: 51.69%
[ Tue Sep 13 00:26:18 2022 ] Top5: 82.23%
[ Tue Sep 13 00:26:18 2022 ] Training epoch: 94
[ Tue Sep 13 00:26:22 2022 ] Batch(0/243) done. Loss: 0.0988 lr:0.001000
[ Tue Sep 13 00:27:15 2022 ] Batch(100/243) done. Loss: 0.0238 lr:0.001000
[ Tue Sep 13 00:28:08 2022 ] Batch(200/243) done. Loss: 0.0388 lr:0.001000
[ Tue Sep 13 00:28:31 2022 ] Eval epoch: 94
[ Tue Sep 13 00:31:00 2022 ] Mean test loss of 796 batches: 3.270937204360962.
[ Tue Sep 13 00:31:01 2022 ] Top1: 51.21%
[ Tue Sep 13 00:31:01 2022 ] Top5: 81.94%
[ Tue Sep 13 00:31:01 2022 ] Training epoch: 95
[ Tue Sep 13 00:31:35 2022 ] Batch(57/243) done. Loss: 0.2001 lr:0.001000
[ Tue Sep 13 00:32:28 2022 ] Batch(157/243) done. Loss: 0.0202 lr:0.001000
[ Tue Sep 13 00:33:13 2022 ] Eval epoch: 95
[ Tue Sep 13 00:35:43 2022 ] Mean test loss of 796 batches: 3.2287380695343018.
[ Tue Sep 13 00:35:44 2022 ] Top1: 50.79%
[ Tue Sep 13 00:35:44 2022 ] Top5: 81.82%
[ Tue Sep 13 00:35:44 2022 ] Training epoch: 96
[ Tue Sep 13 00:35:56 2022 ] Batch(14/243) done. Loss: 0.0420 lr:0.001000
[ Tue Sep 13 00:36:49 2022 ] Batch(114/243) done. Loss: 0.0295 lr:0.001000
[ Tue Sep 13 00:37:42 2022 ] Batch(214/243) done. Loss: 0.0117 lr:0.001000
[ Tue Sep 13 00:37:57 2022 ] Eval epoch: 96
[ Tue Sep 13 00:40:26 2022 ] Mean test loss of 796 batches: 3.366297721862793.
[ Tue Sep 13 00:40:27 2022 ] Top1: 49.64%
[ Tue Sep 13 00:40:27 2022 ] Top5: 81.07%
[ Tue Sep 13 00:40:27 2022 ] Training epoch: 97
[ Tue Sep 13 00:41:09 2022 ] Batch(71/243) done. Loss: 0.0625 lr:0.001000
[ Tue Sep 13 00:42:02 2022 ] Batch(171/243) done. Loss: 0.0306 lr:0.001000
[ Tue Sep 13 00:42:40 2022 ] Eval epoch: 97
[ Tue Sep 13 00:45:10 2022 ] Mean test loss of 796 batches: 3.252760887145996.
[ Tue Sep 13 00:45:11 2022 ] Top1: 50.98%
[ Tue Sep 13 00:45:11 2022 ] Top5: 82.04%
[ Tue Sep 13 00:45:11 2022 ] Training epoch: 98
[ Tue Sep 13 00:45:30 2022 ] Batch(28/243) done. Loss: 0.0788 lr:0.001000
[ Tue Sep 13 00:46:23 2022 ] Batch(128/243) done. Loss: 0.0487 lr:0.001000
[ Tue Sep 13 00:47:16 2022 ] Batch(228/243) done. Loss: 0.0122 lr:0.001000
[ Tue Sep 13 00:47:24 2022 ] Eval epoch: 98
[ Tue Sep 13 00:49:54 2022 ] Mean test loss of 796 batches: 3.223783016204834.
[ Tue Sep 13 00:49:54 2022 ] Top1: 51.54%
[ Tue Sep 13 00:49:54 2022 ] Top5: 82.20%
[ Tue Sep 13 00:49:55 2022 ] Training epoch: 99
[ Tue Sep 13 00:50:44 2022 ] Batch(85/243) done. Loss: 0.1317 lr:0.001000
[ Tue Sep 13 00:51:37 2022 ] Batch(185/243) done. Loss: 0.1158 lr:0.001000
[ Tue Sep 13 00:52:07 2022 ] Eval epoch: 99
[ Tue Sep 13 00:54:37 2022 ] Mean test loss of 796 batches: 3.285264015197754.
[ Tue Sep 13 00:54:38 2022 ] Top1: 50.84%
[ Tue Sep 13 00:54:38 2022 ] Top5: 81.73%
[ Tue Sep 13 00:54:38 2022 ] Training epoch: 100
[ Tue Sep 13 00:55:05 2022 ] Batch(42/243) done. Loss: 0.0879 lr:0.001000
[ Tue Sep 13 00:55:57 2022 ] Batch(142/243) done. Loss: 0.0337 lr:0.001000
[ Tue Sep 13 00:56:50 2022 ] Batch(242/243) done. Loss: 0.0754 lr:0.001000
[ Tue Sep 13 00:56:51 2022 ] Eval epoch: 100
[ Tue Sep 13 00:59:21 2022 ] Mean test loss of 796 batches: 3.213009834289551.
[ Tue Sep 13 00:59:21 2022 ] Top1: 51.19%
[ Tue Sep 13 00:59:22 2022 ] Top5: 82.15%
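A note on the log above: the lr column follows the step schedule in the config that comes next (base_lr: 0.1, step: [60, 80]), dropping to 0.01 at epoch 61 and 0.001 at epoch 81, which coincides with Top1 jumping from the low 40s to roughly 50%. A minimal sketch of that step decay in plain Python (the function name is ours, not from this repo):

def stepped_lr(epoch, base_lr=0.1, steps=(60, 80), gamma=0.1):
    # lr is divided by 10 after each milestone in `steps`
    return base_lr * gamma ** sum(epoch > s for s in steps)

for epoch, expected in [(60, 0.1), (61, 0.01), (80, 0.01), (81, 0.001)]:
    assert round(stepped_lr(epoch), 6) == expected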
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_xsub/config.yaml
ADDED
@@ -0,0 +1,61 @@
Experiment_name: ntu120_bone_xsub
base_lr: 0.1
batch_size: 64
config: ./config/ntu120_xsub/train_bone.yaml
device:
- 0
- 1
- 2
- 3
eval_interval: 5
feeder: feeders.feeder.Feeder
groups: 8
ignore_weights: []
keep_rate: 0.9
log_interval: 100
model: model.decouple_gcn.Model
model_args:
  block_size: 41
  graph: graph.ntu_rgb_d.Graph
  graph_args:
    labeling_mode: spatial
  groups: 16
  num_class: 120
  num_person: 2
  num_point: 25
model_saved_name: ./save_models/ntu120_bone_xsub
nesterov: true
num_epoch: 100
num_worker: 32
only_train_epoch: 1
only_train_part: true
optimizer: SGD
phase: train
print_log: true
save_interval: 2
save_score: false
seed: 1
show_topk:
- 1
- 5
start_epoch: 0
step:
- 60
- 80
test_batch_size: 64
test_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone.npy
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
train_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone.npy
  debug: false
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
  normalization: false
  random_choose: false
  random_move: false
  random_shift: false
  window_size: -1
warm_up_epoch: 0
weight_decay: 0.0001
weights: null
work_dir: ./work_dir/ntu120_bone_xsub
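The config above is plain YAML; a minimal loading sketch assuming PyYAML is available (the local path is illustrative):

import yaml

with open('config.yaml') as f:  # illustrative path
    cfg = yaml.safe_load(f)

print(cfg['model'])                    # model.decouple_gcn.Model
print(cfg['model_args']['num_class'])  # 120
print(cfg['step'])                     # [60, 80]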
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_xsub/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import math
from model.dropSke import DropBlock_Ske
from model.dropT import DropBlockT_1d


def import_class(name):
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod


def conv_branch_init(conv):
    weight = conv.weight
    n = weight.size(0)
    k1 = weight.size(1)
    k2 = weight.size(2)
    nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
    nn.init.constant(conv.bias, 0)


def conv_init(conv):
    nn.init.kaiming_normal(conv.weight, mode='fan_out')
    nn.init.constant(conv.bias, 0)


def bn_init(bn, scale):
    nn.init.constant(bn.weight, scale)
    nn.init.constant(bn.bias, 0)


class unit_tcn(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
        super(unit_tcn, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

        self.dropS = DropBlock_Ske(num_point=num_point)
        self.dropT = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob, A):
        x = self.bn(self.conv(x))
        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
        return x


class unit_tcn_skip(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
        super(unit_tcn_skip, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

    def forward(self, x):
        x = self.bn(self.conv(x))
        return x


class unit_gcn(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
        super(unit_gcn, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_point = num_point
        self.groups = groups
        self.num_subset = num_subset
        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)

        if in_channels != out_channels:
            self.down = nn.Sequential(
                nn.Conv2d(in_channels, out_channels, 1),
                nn.BatchNorm2d(out_channels)
            )
        else:
            self.down = lambda x: x

        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                bn_init(m, 1)
        bn_init(self.bn, 1e-6)

        self.Linear_weight = nn.Parameter(torch.zeros(
            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
            0.5 / (out_channels * num_subset)))

        self.Linear_bias = nn.Parameter(torch.zeros(
            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.constant(self.Linear_bias, 1e-6)

        eye_array = []
        for i in range(out_channels):
            eye_array.append(torch.eye(num_point))
        self.eyes = nn.Parameter(torch.tensor(torch.stack(
            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]

    def norm(self, A):
        b, c, h, w = A.size()
        A = A.view(c, self.num_point, self.num_point)
        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
        D_list_12 = (D_list + 0.001)**(-1)
        D_12 = self.eyes * D_list_12
        A = torch.bmm(A, D_12).view(b, c, h, w)
        return A

    def forward(self, x0):
        learn_A = self.DecoupleA.repeat(
            1, self.out_channels // self.groups, 1, 1)
        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)

        x = torch.einsum(
            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
        x = x + self.Linear_bias
        x = self.bn0(x)

        n, kc, t, v = x.size()
        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))

        x = self.bn(x)
        x += self.down(x0)
        x = self.relu(x)
        return x


class TCN_GCN_unit(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
        super(TCN_GCN_unit, self).__init__()
        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
        self.tcn1 = unit_tcn(out_channels, out_channels,
                             stride=stride, num_point=num_point)
        self.relu = nn.ReLU()

        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)

        if not residual:
            self.residual = lambda x: 0

        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x

        else:
            self.residual = unit_tcn_skip(
                in_channels, out_channels, kernel_size=1, stride=stride)
        self.dropSke = DropBlock_Ske(num_point=num_point)
        self.dropT_skip = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob):
        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
        return self.relu(x)


class Model(nn.Module):
    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
        super(Model, self).__init__()

        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)

        A = self.graph.A
        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)

        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
                               block_size, residual=False)
        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l5 = TCN_GCN_unit(
            64, 128, A, groups, num_point, block_size, stride=2)
        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l8 = TCN_GCN_unit(128, 256, A, groups,
                               num_point, block_size, stride=2)
        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)

        self.fc = nn.Linear(256, num_class)
        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
        bn_init(self.data_bn, 1)

    def forward(self, x, keep_prob=0.9):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(
            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        x = self.l1(x, 1.0)
        x = self.l2(x, 1.0)
        x = self.l3(x, 1.0)
        x = self.l4(x, 1.0)
        x = self.l5(x, 1.0)
        x = self.l6(x, 1.0)
        x = self.l7(x, keep_prob)
        x = self.l8(x, keep_prob)
        x = self.l9(x, keep_prob)
        x = self.l10(x, keep_prob)

        # N*M,C,T,V
        c_new = x.size(1)
        x = x.reshape(N, M, c_new, -1)
        x = x.mean(3).mean(1)

        return self.fc(x)
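The two einsum calls in unit_gcn.forward above do the actual graph convolution: the first is a pointwise (1x1) channel projection shared across all frames and joints, the second contracts each output channel with its own decoupled adjacency matrix. A self-contained sketch of the same two contractions on random tensors (shapes follow the code; the variable names are ours; only torch is required):

import torch

n, c_in, t, v = 2, 3, 16, 25      # batch, in-channels, frames, joints
k, c_out = 3, 64                  # num_subset, out-channels

x0 = torch.randn(n, c_in, t, v)
W = torch.randn(c_in, c_out * k)  # plays the role of Linear_weight
A = torch.randn(k, c_out, v, v)   # plays the role of norm_learn_A

x = torch.einsum('nctw,cd->ndtw', x0, W)    # pointwise channel mix
x = x.view(n, k, c_out, t, v)
y = torch.einsum('nkctv,kcvw->nctw', x, A)  # per-channel graph aggregation
print(y.shape)                              # torch.Size([2, 64, 16, 25])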
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_xsub/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:403481d5eb96956925c0876602005017491fd110130aac32242ef00938ab8bb9
size 29946137
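The .pkl above is stored as a Git LFS pointer: the repo keeps only the object's SHA-256 (oid) and byte size, and `git lfs pull` fetches the actual blob. A standard-library sketch for checking a fetched file against this pointer (the local path is illustrative):

import hashlib, os

def verify_lfs(path, oid, size):
    # True iff the file's byte size and sha256 digest match the LFS pointer
    if os.path.getsize(path) != size:
        return False
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            h.update(chunk)
    return h.hexdigest() == oid

print(verify_lfs('best_acc.pkl',
                 '403481d5eb96956925c0876602005017491fd110130aac32242ef00938ab8bb9',
                 29946137))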
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_bone_xsub/log.txt
ADDED
@@ -0,0 +1,757 @@
[ Mon Sep 12 17:07:56 2022 ] Parameters:
{'work_dir': './work_dir/ntu120_bone_xsub', 'model_saved_name': './save_models/ntu120_bone_xsub', 'Experiment_name': 'ntu120_bone_xsub', 'config': './config/ntu120_xsub/train_bone.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [0, 1], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}

[ Mon Sep 12 17:07:56 2022 ] Training epoch: 1
[ Mon Sep 12 17:08:46 2022 ] Batch(99/243) done. Loss: 3.9044 lr:0.100000
[ Mon Sep 12 17:09:31 2022 ] Batch(199/243) done. Loss: 3.1255 lr:0.100000
[ Mon Sep 12 17:09:50 2022 ] Eval epoch: 1
[ Mon Sep 12 17:12:22 2022 ] Mean test loss of 796 batches: 5.347392559051514.
[ Mon Sep 12 17:12:22 2022 ] Top1: 3.80%
[ Mon Sep 12 17:12:23 2022 ] Top5: 15.10%
[ Mon Sep 12 17:12:23 2022 ] Training epoch: 2
[ Tue Sep 13 10:03:02 2022 ] Parameters:
{'work_dir': './work_dir/ntu120_bone_xsub', 'model_saved_name': './save_models/ntu120_bone_xsub', 'Experiment_name': 'ntu120_bone_xsub', 'config': './config/ntu120_xsub/train_bone.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [0, 1, 2, 3], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}

[ Tue Sep 13 10:03:02 2022 ] Training epoch: 1
[ Tue Sep 13 10:03:36 2022 ] Batch(99/243) done. Loss: 3.9577 lr:0.100000
[ Tue Sep 13 10:04:01 2022 ] Batch(199/243) done. Loss: 3.2154 lr:0.100000
[ Tue Sep 13 10:04:11 2022 ] Eval epoch: 1
[ Tue Sep 13 10:05:41 2022 ] Mean test loss of 796 batches: 5.444550514221191.
[ Tue Sep 13 10:05:42 2022 ] Top1: 4.22%
[ Tue Sep 13 10:05:42 2022 ] Top5: 15.97%
[ Tue Sep 13 10:05:42 2022 ] Training epoch: 2
[ Tue Sep 13 10:06:02 2022 ] Batch(56/243) done. Loss: 3.1889 lr:0.100000
[ Tue Sep 13 10:06:32 2022 ] Batch(156/243) done. Loss: 2.5144 lr:0.100000
[ Tue Sep 13 10:06:58 2022 ] Eval epoch: 2
[ Tue Sep 13 10:08:29 2022 ] Mean test loss of 796 batches: 5.415019989013672.
[ Tue Sep 13 10:08:30 2022 ] Top1: 7.58%
[ Tue Sep 13 10:08:30 2022 ] Top5: 23.15%
[ Tue Sep 13 10:08:30 2022 ] Training epoch: 3
[ Tue Sep 13 10:08:37 2022 ] Batch(13/243) done. Loss: 2.4849 lr:0.100000
[ Tue Sep 13 10:09:07 2022 ] Batch(113/243) done. Loss: 2.2850 lr:0.100000
[ Tue Sep 13 10:09:37 2022 ] Batch(213/243) done. Loss: 2.3303 lr:0.100000
[ Tue Sep 13 10:09:46 2022 ] Eval epoch: 3
[ Tue Sep 13 10:11:17 2022 ] Mean test loss of 796 batches: 3.962733030319214.
[ Tue Sep 13 10:11:17 2022 ] Top1: 11.96%
[ Tue Sep 13 10:11:17 2022 ] Top5: 33.16%
[ Tue Sep 13 10:11:18 2022 ] Training epoch: 4
[ Tue Sep 13 10:11:42 2022 ] Batch(70/243) done. Loss: 1.9193 lr:0.100000
[ Tue Sep 13 10:12:12 2022 ] Batch(170/243) done. Loss: 2.0948 lr:0.100000
[ Tue Sep 13 10:12:33 2022 ] Eval epoch: 4
[ Tue Sep 13 10:14:04 2022 ] Mean test loss of 796 batches: 3.8267576694488525.
[ Tue Sep 13 10:14:04 2022 ] Top1: 17.78%
[ Tue Sep 13 10:14:04 2022 ] Top5: 40.34%
[ Tue Sep 13 10:14:05 2022 ] Training epoch: 5
[ Tue Sep 13 10:14:16 2022 ] Batch(27/243) done. Loss: 1.8219 lr:0.100000
[ Tue Sep 13 10:14:46 2022 ] Batch(127/243) done. Loss: 2.0562 lr:0.100000
[ Tue Sep 13 10:15:16 2022 ] Batch(227/243) done. Loss: 1.7730 lr:0.100000
[ Tue Sep 13 10:15:21 2022 ] Eval epoch: 5
[ Tue Sep 13 10:16:50 2022 ] Mean test loss of 796 batches: 3.385392427444458.
[ Tue Sep 13 10:16:51 2022 ] Top1: 19.85%
[ Tue Sep 13 10:16:51 2022 ] Top5: 45.67%
[ Tue Sep 13 10:16:51 2022 ] Training epoch: 6
[ Tue Sep 13 10:17:20 2022 ] Batch(84/243) done. Loss: 1.7124 lr:0.100000
[ Tue Sep 13 10:17:50 2022 ] Batch(184/243) done. Loss: 1.7753 lr:0.100000
[ Tue Sep 13 10:18:07 2022 ] Eval epoch: 6
[ Tue Sep 13 10:19:37 2022 ] Mean test loss of 796 batches: 3.1966803073883057.
[ Tue Sep 13 10:19:38 2022 ] Top1: 22.52%
[ Tue Sep 13 10:19:38 2022 ] Top5: 51.96%
[ Tue Sep 13 10:19:38 2022 ] Training epoch: 7
[ Tue Sep 13 10:19:54 2022 ] Batch(41/243) done. Loss: 1.6634 lr:0.100000
[ Tue Sep 13 10:20:24 2022 ] Batch(141/243) done. Loss: 1.2232 lr:0.100000
[ Tue Sep 13 10:20:54 2022 ] Batch(241/243) done. Loss: 1.4194 lr:0.100000
[ Tue Sep 13 10:20:55 2022 ] Eval epoch: 7
[ Tue Sep 13 10:22:25 2022 ] Mean test loss of 796 batches: 3.186509132385254.
[ Tue Sep 13 10:22:25 2022 ] Top1: 25.69%
[ Tue Sep 13 10:22:26 2022 ] Top5: 58.77%
[ Tue Sep 13 10:22:26 2022 ] Training epoch: 8
[ Tue Sep 13 10:22:59 2022 ] Batch(98/243) done. Loss: 1.3116 lr:0.100000
[ Tue Sep 13 10:23:29 2022 ] Batch(198/243) done. Loss: 1.3068 lr:0.100000
[ Tue Sep 13 10:23:43 2022 ] Eval epoch: 8
[ Tue Sep 13 10:25:13 2022 ] Mean test loss of 796 batches: 2.9324722290039062.
[ Tue Sep 13 10:25:13 2022 ] Top1: 27.67%
[ Tue Sep 13 10:25:14 2022 ] Top5: 59.88%
[ Tue Sep 13 10:25:14 2022 ] Training epoch: 9
[ Tue Sep 13 10:25:34 2022 ] Batch(55/243) done. Loss: 1.2367 lr:0.100000
[ Tue Sep 13 10:26:05 2022 ] Batch(155/243) done. Loss: 1.4505 lr:0.100000
[ Tue Sep 13 10:26:31 2022 ] Eval epoch: 9
[ Tue Sep 13 10:28:01 2022 ] Mean test loss of 796 batches: 2.934443712234497.
[ Tue Sep 13 10:28:02 2022 ] Top1: 30.04%
[ Tue Sep 13 10:28:02 2022 ] Top5: 61.77%
[ Tue Sep 13 10:28:02 2022 ] Training epoch: 10
[ Tue Sep 13 10:28:09 2022 ] Batch(12/243) done. Loss: 1.4612 lr:0.100000
[ Tue Sep 13 10:28:39 2022 ] Batch(112/243) done. Loss: 0.9946 lr:0.100000
[ Tue Sep 13 10:29:09 2022 ] Batch(212/243) done. Loss: 1.2065 lr:0.100000
[ Tue Sep 13 10:29:18 2022 ] Eval epoch: 10
[ Tue Sep 13 10:30:49 2022 ] Mean test loss of 796 batches: 2.912278652191162.
[ Tue Sep 13 10:30:49 2022 ] Top1: 29.90%
[ Tue Sep 13 10:30:49 2022 ] Top5: 61.19%
[ Tue Sep 13 10:30:50 2022 ] Training epoch: 11
[ Tue Sep 13 10:31:14 2022 ] Batch(69/243) done. Loss: 0.7080 lr:0.100000
[ Tue Sep 13 10:31:44 2022 ] Batch(169/243) done. Loss: 1.0251 lr:0.100000
[ Tue Sep 13 10:32:06 2022 ] Eval epoch: 11
[ Tue Sep 13 10:33:37 2022 ] Mean test loss of 796 batches: 3.0440070629119873.
[ Tue Sep 13 10:33:37 2022 ] Top1: 29.37%
[ Tue Sep 13 10:33:38 2022 ] Top5: 62.99%
[ Tue Sep 13 10:33:38 2022 ] Training epoch: 12
[ Tue Sep 13 10:33:49 2022 ] Batch(26/243) done. Loss: 0.8521 lr:0.100000
[ Tue Sep 13 10:34:19 2022 ] Batch(126/243) done. Loss: 1.3272 lr:0.100000
[ Tue Sep 13 10:34:49 2022 ] Batch(226/243) done. Loss: 1.0632 lr:0.100000
[ Tue Sep 13 10:34:54 2022 ] Eval epoch: 12
[ Tue Sep 13 10:36:24 2022 ] Mean test loss of 796 batches: 3.03654146194458.
[ Tue Sep 13 10:36:25 2022 ] Top1: 32.36%
[ Tue Sep 13 10:36:25 2022 ] Top5: 65.46%
[ Tue Sep 13 10:36:25 2022 ] Training epoch: 13
[ Tue Sep 13 10:36:53 2022 ] Batch(83/243) done. Loss: 1.0905 lr:0.100000
[ Tue Sep 13 10:37:23 2022 ] Batch(183/243) done. Loss: 1.0714 lr:0.100000
[ Tue Sep 13 10:37:41 2022 ] Eval epoch: 13
[ Tue Sep 13 10:39:11 2022 ] Mean test loss of 796 batches: 2.899965524673462.
[ Tue Sep 13 10:39:12 2022 ] Top1: 33.87%
[ Tue Sep 13 10:39:12 2022 ] Top5: 68.04%
[ Tue Sep 13 10:39:12 2022 ] Training epoch: 14
[ Tue Sep 13 10:39:27 2022 ] Batch(40/243) done. Loss: 1.0002 lr:0.100000
[ Tue Sep 13 10:39:57 2022 ] Batch(140/243) done. Loss: 0.8327 lr:0.100000
[ Tue Sep 13 10:40:27 2022 ] Batch(240/243) done. Loss: 1.0224 lr:0.100000
[ Tue Sep 13 10:40:27 2022 ] Eval epoch: 14
[ Tue Sep 13 10:41:58 2022 ] Mean test loss of 796 batches: 2.6744863986968994.
[ Tue Sep 13 10:41:58 2022 ] Top1: 36.46%
[ Tue Sep 13 10:41:58 2022 ] Top5: 71.09%
[ Tue Sep 13 10:41:59 2022 ] Training epoch: 15
[ Tue Sep 13 10:42:31 2022 ] Batch(97/243) done. Loss: 0.7399 lr:0.100000
[ Tue Sep 13 10:43:01 2022 ] Batch(197/243) done. Loss: 1.1364 lr:0.100000
[ Tue Sep 13 10:43:15 2022 ] Eval epoch: 15
[ Tue Sep 13 10:44:45 2022 ] Mean test loss of 796 batches: 2.4184412956237793.
[ Tue Sep 13 10:44:45 2022 ] Top1: 39.46%
[ Tue Sep 13 10:44:46 2022 ] Top5: 74.47%
[ Tue Sep 13 10:44:46 2022 ] Training epoch: 16
[ Tue Sep 13 10:45:05 2022 ] Batch(54/243) done. Loss: 1.0675 lr:0.100000
[ Tue Sep 13 10:45:35 2022 ] Batch(154/243) done. Loss: 0.7498 lr:0.100000
[ Tue Sep 13 10:46:02 2022 ] Eval epoch: 16
[ Tue Sep 13 10:47:32 2022 ] Mean test loss of 796 batches: 2.5964293479919434.
[ Tue Sep 13 10:47:32 2022 ] Top1: 38.40%
[ Tue Sep 13 10:47:33 2022 ] Top5: 72.80%
[ Tue Sep 13 10:47:33 2022 ] Training epoch: 17
[ Tue Sep 13 10:47:40 2022 ] Batch(11/243) done. Loss: 0.7426 lr:0.100000
[ Tue Sep 13 10:48:10 2022 ] Batch(111/243) done. Loss: 0.9214 lr:0.100000
[ Tue Sep 13 10:48:40 2022 ] Batch(211/243) done. Loss: 0.9561 lr:0.100000
[ Tue Sep 13 10:48:49 2022 ] Eval epoch: 17
[ Tue Sep 13 10:50:20 2022 ] Mean test loss of 796 batches: 2.5357229709625244.
[ Tue Sep 13 10:50:20 2022 ] Top1: 40.98%
[ Tue Sep 13 10:50:21 2022 ] Top5: 75.79%
[ Tue Sep 13 10:50:21 2022 ] Training epoch: 18
[ Tue Sep 13 10:50:45 2022 ] Batch(68/243) done. Loss: 0.6866 lr:0.100000
[ Tue Sep 13 10:51:15 2022 ] Batch(168/243) done. Loss: 0.7612 lr:0.100000
|
144 |
+
[ Tue Sep 13 10:51:37 2022 ] Eval epoch: 18
|
145 |
+
[ Tue Sep 13 10:53:07 2022 ] Mean test loss of 796 batches: 2.3321893215179443.
|
146 |
+
[ Tue Sep 13 10:53:08 2022 ] Top1: 39.66%
|
147 |
+
[ Tue Sep 13 10:53:08 2022 ] Top5: 76.27%
|
148 |
+
[ Tue Sep 13 10:53:08 2022 ] Training epoch: 19
|
149 |
+
[ Tue Sep 13 10:53:19 2022 ] Batch(25/243) done. Loss: 0.7873 lr:0.100000
|
150 |
+
[ Tue Sep 13 10:53:49 2022 ] Batch(125/243) done. Loss: 0.6658 lr:0.100000
|
151 |
+
[ Tue Sep 13 10:54:19 2022 ] Batch(225/243) done. Loss: 0.8716 lr:0.100000
|
152 |
+
[ Tue Sep 13 10:54:25 2022 ] Eval epoch: 19
|
153 |
+
[ Tue Sep 13 10:55:54 2022 ] Mean test loss of 796 batches: 2.633082628250122.
|
154 |
+
[ Tue Sep 13 10:55:55 2022 ] Top1: 38.99%
|
155 |
+
[ Tue Sep 13 10:55:55 2022 ] Top5: 74.64%
|
156 |
+
[ Tue Sep 13 10:55:55 2022 ] Training epoch: 20
|
157 |
+
[ Tue Sep 13 10:56:24 2022 ] Batch(82/243) done. Loss: 0.7991 lr:0.100000
|
158 |
+
[ Tue Sep 13 10:56:54 2022 ] Batch(182/243) done. Loss: 0.7026 lr:0.100000
|
159 |
+
[ Tue Sep 13 10:57:12 2022 ] Eval epoch: 20
|
160 |
+
[ Tue Sep 13 10:58:42 2022 ] Mean test loss of 796 batches: 2.5945448875427246.
|
161 |
+
[ Tue Sep 13 10:58:42 2022 ] Top1: 41.06%
|
162 |
+
[ Tue Sep 13 10:58:43 2022 ] Top5: 77.05%
|
163 |
+
[ Tue Sep 13 10:58:43 2022 ] Training epoch: 21
|
164 |
+
[ Tue Sep 13 10:58:58 2022 ] Batch(39/243) done. Loss: 0.6007 lr:0.100000
|
165 |
+
[ Tue Sep 13 10:59:28 2022 ] Batch(139/243) done. Loss: 0.8379 lr:0.100000
|
166 |
+
[ Tue Sep 13 10:59:58 2022 ] Batch(239/243) done. Loss: 0.6651 lr:0.100000
|
167 |
+
[ Tue Sep 13 10:59:59 2022 ] Eval epoch: 21
|
168 |
+
[ Tue Sep 13 11:01:29 2022 ] Mean test loss of 796 batches: 2.3643059730529785.
|
169 |
+
[ Tue Sep 13 11:01:30 2022 ] Top1: 44.31%
|
170 |
+
[ Tue Sep 13 11:01:30 2022 ] Top5: 77.77%
|
171 |
+
[ Tue Sep 13 11:01:30 2022 ] Training epoch: 22
|
172 |
+
[ Tue Sep 13 11:02:03 2022 ] Batch(96/243) done. Loss: 0.7236 lr:0.100000
|
173 |
+
[ Tue Sep 13 11:02:33 2022 ] Batch(196/243) done. Loss: 0.6861 lr:0.100000
|
174 |
+
[ Tue Sep 13 11:02:47 2022 ] Eval epoch: 22
|
175 |
+
[ Tue Sep 13 11:04:17 2022 ] Mean test loss of 796 batches: 2.5485894680023193.
|
176 |
+
[ Tue Sep 13 11:04:17 2022 ] Top1: 41.21%
|
177 |
+
[ Tue Sep 13 11:04:18 2022 ] Top5: 76.45%
|
178 |
+
[ Tue Sep 13 11:04:18 2022 ] Training epoch: 23
|
179 |
+
[ Tue Sep 13 11:04:37 2022 ] Batch(53/243) done. Loss: 0.4884 lr:0.100000
|
180 |
+
[ Tue Sep 13 11:05:07 2022 ] Batch(153/243) done. Loss: 0.6766 lr:0.100000
|
181 |
+
[ Tue Sep 13 11:05:34 2022 ] Eval epoch: 23
|
182 |
+
[ Tue Sep 13 11:07:04 2022 ] Mean test loss of 796 batches: 2.6978600025177.
|
183 |
+
[ Tue Sep 13 11:07:04 2022 ] Top1: 41.24%
|
184 |
+
[ Tue Sep 13 11:07:05 2022 ] Top5: 77.83%
|
185 |
+
[ Tue Sep 13 11:07:05 2022 ] Training epoch: 24
|
186 |
+
[ Tue Sep 13 11:07:11 2022 ] Batch(10/243) done. Loss: 0.4772 lr:0.100000
|
187 |
+
[ Tue Sep 13 11:07:41 2022 ] Batch(110/243) done. Loss: 0.4487 lr:0.100000
|
188 |
+
[ Tue Sep 13 11:08:11 2022 ] Batch(210/243) done. Loss: 0.7073 lr:0.100000
|
189 |
+
[ Tue Sep 13 11:08:21 2022 ] Eval epoch: 24
|
190 |
+
[ Tue Sep 13 11:09:51 2022 ] Mean test loss of 796 batches: 9.196643829345703.
|
191 |
+
[ Tue Sep 13 11:09:51 2022 ] Top1: 9.55%
|
192 |
+
[ Tue Sep 13 11:09:51 2022 ] Top5: 31.08%
|
193 |
+
[ Tue Sep 13 11:09:52 2022 ] Training epoch: 25
|
194 |
+
[ Tue Sep 13 11:10:15 2022 ] Batch(67/243) done. Loss: 0.5691 lr:0.100000
|
195 |
+
[ Tue Sep 13 11:10:45 2022 ] Batch(167/243) done. Loss: 0.7838 lr:0.100000
|
196 |
+
[ Tue Sep 13 11:11:08 2022 ] Eval epoch: 25
|
197 |
+
[ Tue Sep 13 11:12:38 2022 ] Mean test loss of 796 batches: 2.3803958892822266.
|
198 |
+
[ Tue Sep 13 11:12:39 2022 ] Top1: 44.11%
|
199 |
+
[ Tue Sep 13 11:12:39 2022 ] Top5: 79.53%
|
200 |
+
[ Tue Sep 13 11:12:39 2022 ] Training epoch: 26
|
201 |
+
[ Tue Sep 13 11:12:50 2022 ] Batch(24/243) done. Loss: 0.6260 lr:0.100000
|
202 |
+
[ Tue Sep 13 11:13:20 2022 ] Batch(124/243) done. Loss: 0.5755 lr:0.100000
|
203 |
+
[ Tue Sep 13 11:13:50 2022 ] Batch(224/243) done. Loss: 0.5419 lr:0.100000
|
204 |
+
[ Tue Sep 13 11:13:55 2022 ] Eval epoch: 26
|
205 |
+
[ Tue Sep 13 11:15:25 2022 ] Mean test loss of 796 batches: 2.658560037612915.
|
206 |
+
[ Tue Sep 13 11:15:25 2022 ] Top1: 42.29%
|
207 |
+
[ Tue Sep 13 11:15:26 2022 ] Top5: 75.35%
|
208 |
+
[ Tue Sep 13 11:15:26 2022 ] Training epoch: 27
|
209 |
+
[ Tue Sep 13 11:15:53 2022 ] Batch(81/243) done. Loss: 0.6493 lr:0.100000
|
210 |
+
[ Tue Sep 13 11:16:23 2022 ] Batch(181/243) done. Loss: 0.4724 lr:0.100000
|
211 |
+
[ Tue Sep 13 11:16:42 2022 ] Eval epoch: 27
|
212 |
+
[ Tue Sep 13 11:18:11 2022 ] Mean test loss of 796 batches: 2.3870038986206055.
|
213 |
+
[ Tue Sep 13 11:18:12 2022 ] Top1: 45.12%
|
214 |
+
[ Tue Sep 13 11:18:12 2022 ] Top5: 79.03%
|
215 |
+
[ Tue Sep 13 11:18:12 2022 ] Training epoch: 28
|
216 |
+
[ Tue Sep 13 11:18:28 2022 ] Batch(38/243) done. Loss: 0.5939 lr:0.100000
|
217 |
+
[ Tue Sep 13 11:18:57 2022 ] Batch(138/243) done. Loss: 0.4462 lr:0.100000
|
218 |
+
[ Tue Sep 13 11:19:27 2022 ] Batch(238/243) done. Loss: 0.4096 lr:0.100000
|
219 |
+
[ Tue Sep 13 11:19:29 2022 ] Eval epoch: 28
|
220 |
+
[ Tue Sep 13 11:20:59 2022 ] Mean test loss of 796 batches: 2.431807279586792.
|
221 |
+
[ Tue Sep 13 11:21:00 2022 ] Top1: 45.66%
|
222 |
+
[ Tue Sep 13 11:21:00 2022 ] Top5: 78.43%
|
223 |
+
[ Tue Sep 13 11:21:00 2022 ] Training epoch: 29
|
224 |
+
[ Tue Sep 13 11:21:32 2022 ] Batch(95/243) done. Loss: 0.5137 lr:0.100000
|
225 |
+
[ Tue Sep 13 11:22:02 2022 ] Batch(195/243) done. Loss: 0.4970 lr:0.100000
|
226 |
+
[ Tue Sep 13 11:22:16 2022 ] Eval epoch: 29
|
227 |
+
[ Tue Sep 13 11:23:46 2022 ] Mean test loss of 796 batches: 2.3383309841156006.
|
228 |
+
[ Tue Sep 13 11:23:47 2022 ] Top1: 45.98%
|
229 |
+
[ Tue Sep 13 11:23:47 2022 ] Top5: 78.55%
|
230 |
+
[ Tue Sep 13 11:23:47 2022 ] Training epoch: 30
|
231 |
+
[ Tue Sep 13 11:24:06 2022 ] Batch(52/243) done. Loss: 0.5574 lr:0.100000
|
232 |
+
[ Tue Sep 13 11:24:36 2022 ] Batch(152/243) done. Loss: 0.3848 lr:0.100000
|
233 |
+
[ Tue Sep 13 11:25:03 2022 ] Eval epoch: 30
|
234 |
+
[ Tue Sep 13 11:26:32 2022 ] Mean test loss of 796 batches: 2.407961368560791.
|
235 |
+
[ Tue Sep 13 11:26:33 2022 ] Top1: 45.24%
|
236 |
+
[ Tue Sep 13 11:26:33 2022 ] Top5: 79.51%
|
237 |
+
[ Tue Sep 13 11:26:33 2022 ] Training epoch: 31
|
238 |
+
[ Tue Sep 13 11:26:39 2022 ] Batch(9/243) done. Loss: 0.5381 lr:0.100000
|
239 |
+
[ Tue Sep 13 11:27:09 2022 ] Batch(109/243) done. Loss: 0.7146 lr:0.100000
|
240 |
+
[ Tue Sep 13 11:27:39 2022 ] Batch(209/243) done. Loss: 0.7419 lr:0.100000
|
241 |
+
[ Tue Sep 13 11:27:49 2022 ] Eval epoch: 31
|
242 |
+
[ Tue Sep 13 11:29:19 2022 ] Mean test loss of 796 batches: 2.7233266830444336.
|
243 |
+
[ Tue Sep 13 11:29:19 2022 ] Top1: 42.41%
|
244 |
+
[ Tue Sep 13 11:29:20 2022 ] Top5: 77.12%
|
245 |
+
[ Tue Sep 13 11:29:20 2022 ] Training epoch: 32
|
246 |
+
[ Tue Sep 13 11:29:43 2022 ] Batch(66/243) done. Loss: 0.3441 lr:0.100000
|
247 |
+
[ Tue Sep 13 11:30:13 2022 ] Batch(166/243) done. Loss: 0.3795 lr:0.100000
|
248 |
+
[ Tue Sep 13 11:30:36 2022 ] Eval epoch: 32
|
249 |
+
[ Tue Sep 13 11:32:05 2022 ] Mean test loss of 796 batches: 2.897885322570801.
|
250 |
+
[ Tue Sep 13 11:32:06 2022 ] Top1: 42.48%
|
251 |
+
[ Tue Sep 13 11:32:06 2022 ] Top5: 78.75%
|
252 |
+
[ Tue Sep 13 11:32:06 2022 ] Training epoch: 33
|
253 |
+
[ Tue Sep 13 11:32:17 2022 ] Batch(23/243) done. Loss: 0.3501 lr:0.100000
|
254 |
+
[ Tue Sep 13 11:32:47 2022 ] Batch(123/243) done. Loss: 0.6153 lr:0.100000
|
255 |
+
[ Tue Sep 13 11:33:17 2022 ] Batch(223/243) done. Loss: 0.6015 lr:0.100000
|
256 |
+
[ Tue Sep 13 11:33:23 2022 ] Eval epoch: 33
|
257 |
+
[ Tue Sep 13 11:34:52 2022 ] Mean test loss of 796 batches: 2.5278916358947754.
|
258 |
+
[ Tue Sep 13 11:34:53 2022 ] Top1: 45.33%
|
259 |
+
[ Tue Sep 13 11:34:53 2022 ] Top5: 79.27%
|
260 |
+
[ Tue Sep 13 11:34:53 2022 ] Training epoch: 34
|
261 |
+
[ Tue Sep 13 11:35:21 2022 ] Batch(80/243) done. Loss: 0.7118 lr:0.100000
|
262 |
+
[ Tue Sep 13 11:35:51 2022 ] Batch(180/243) done. Loss: 0.3857 lr:0.100000
|
263 |
+
[ Tue Sep 13 11:36:10 2022 ] Eval epoch: 34
|
264 |
+
[ Tue Sep 13 11:37:39 2022 ] Mean test loss of 796 batches: 2.578415870666504.
|
265 |
+
[ Tue Sep 13 11:37:40 2022 ] Top1: 44.19%
|
266 |
+
[ Tue Sep 13 11:37:40 2022 ] Top5: 77.54%
|
267 |
+
[ Tue Sep 13 11:37:40 2022 ] Training epoch: 35
|
268 |
+
[ Tue Sep 13 11:37:55 2022 ] Batch(37/243) done. Loss: 0.3720 lr:0.100000
|
269 |
+
[ Tue Sep 13 11:38:25 2022 ] Batch(137/243) done. Loss: 0.4876 lr:0.100000
|
270 |
+
[ Tue Sep 13 11:38:55 2022 ] Batch(237/243) done. Loss: 0.5665 lr:0.100000
|
271 |
+
[ Tue Sep 13 11:38:56 2022 ] Eval epoch: 35
|
272 |
+
[ Tue Sep 13 11:40:27 2022 ] Mean test loss of 796 batches: 2.3181099891662598.
|
273 |
+
[ Tue Sep 13 11:40:27 2022 ] Top1: 47.44%
|
274 |
+
[ Tue Sep 13 11:40:28 2022 ] Top5: 80.46%
|
275 |
+
[ Tue Sep 13 11:40:28 2022 ] Training epoch: 36
|
276 |
+
[ Tue Sep 13 11:41:00 2022 ] Batch(94/243) done. Loss: 0.7410 lr:0.100000
|
277 |
+
[ Tue Sep 13 11:41:30 2022 ] Batch(194/243) done. Loss: 0.6214 lr:0.100000
|
278 |
+
[ Tue Sep 13 11:41:44 2022 ] Eval epoch: 36
|
279 |
+
[ Tue Sep 13 11:43:15 2022 ] Mean test loss of 796 batches: 3.2811484336853027.
|
280 |
+
[ Tue Sep 13 11:43:15 2022 ] Top1: 41.43%
|
281 |
+
[ Tue Sep 13 11:43:15 2022 ] Top5: 76.30%
|
282 |
+
[ Tue Sep 13 11:43:16 2022 ] Training epoch: 37
|
283 |
+
[ Tue Sep 13 11:43:35 2022 ] Batch(51/243) done. Loss: 0.5524 lr:0.100000
|
284 |
+
[ Tue Sep 13 11:44:05 2022 ] Batch(151/243) done. Loss: 0.6357 lr:0.100000
|
285 |
+
[ Tue Sep 13 11:44:32 2022 ] Eval epoch: 37
|
286 |
+
[ Tue Sep 13 11:46:02 2022 ] Mean test loss of 796 batches: 2.4665732383728027.
|
287 |
+
[ Tue Sep 13 11:46:03 2022 ] Top1: 47.75%
|
288 |
+
[ Tue Sep 13 11:46:03 2022 ] Top5: 79.85%
|
289 |
+
[ Tue Sep 13 11:46:03 2022 ] Training epoch: 38
|
290 |
+
[ Tue Sep 13 11:46:09 2022 ] Batch(8/243) done. Loss: 0.5600 lr:0.100000
|
291 |
+
[ Tue Sep 13 11:46:39 2022 ] Batch(108/243) done. Loss: 0.4920 lr:0.100000
|
292 |
+
[ Tue Sep 13 11:47:09 2022 ] Batch(208/243) done. Loss: 0.5169 lr:0.100000
|
293 |
+
[ Tue Sep 13 11:47:19 2022 ] Eval epoch: 38
|
294 |
+
[ Tue Sep 13 11:48:49 2022 ] Mean test loss of 796 batches: 2.9676082134246826.
|
295 |
+
[ Tue Sep 13 11:48:50 2022 ] Top1: 43.14%
|
296 |
+
[ Tue Sep 13 11:48:50 2022 ] Top5: 78.24%
|
297 |
+
[ Tue Sep 13 11:48:50 2022 ] Training epoch: 39
|
298 |
+
[ Tue Sep 13 11:49:13 2022 ] Batch(65/243) done. Loss: 0.4457 lr:0.100000
|
299 |
+
[ Tue Sep 13 11:49:43 2022 ] Batch(165/243) done. Loss: 0.4225 lr:0.100000
|
300 |
+
[ Tue Sep 13 11:50:06 2022 ] Eval epoch: 39
|
301 |
+
[ Tue Sep 13 11:51:36 2022 ] Mean test loss of 796 batches: 2.3777058124542236.
|
302 |
+
[ Tue Sep 13 11:51:36 2022 ] Top1: 48.60%
|
303 |
+
[ Tue Sep 13 11:51:37 2022 ] Top5: 81.73%
|
304 |
+
[ Tue Sep 13 11:51:37 2022 ] Training epoch: 40
|
305 |
+
[ Tue Sep 13 11:51:47 2022 ] Batch(22/243) done. Loss: 0.2437 lr:0.100000
|
306 |
+
[ Tue Sep 13 11:52:17 2022 ] Batch(122/243) done. Loss: 0.6209 lr:0.100000
|
307 |
+
[ Tue Sep 13 11:52:46 2022 ] Batch(222/243) done. Loss: 0.4602 lr:0.100000
|
308 |
+
[ Tue Sep 13 11:52:53 2022 ] Eval epoch: 40
|
309 |
+
[ Tue Sep 13 11:54:23 2022 ] Mean test loss of 796 batches: 2.2834651470184326.
|
310 |
+
[ Tue Sep 13 11:54:23 2022 ] Top1: 48.18%
|
311 |
+
[ Tue Sep 13 11:54:23 2022 ] Top5: 81.19%
|
312 |
+
[ Tue Sep 13 11:54:24 2022 ] Training epoch: 41
|
313 |
+
[ Tue Sep 13 11:54:51 2022 ] Batch(79/243) done. Loss: 0.3084 lr:0.100000
|
314 |
+
[ Tue Sep 13 11:55:21 2022 ] Batch(179/243) done. Loss: 0.4477 lr:0.100000
|
315 |
+
[ Tue Sep 13 11:55:40 2022 ] Eval epoch: 41
|
316 |
+
[ Tue Sep 13 11:57:11 2022 ] Mean test loss of 796 batches: 2.2875711917877197.
|
317 |
+
[ Tue Sep 13 11:57:12 2022 ] Top1: 47.92%
|
318 |
+
[ Tue Sep 13 11:57:12 2022 ] Top5: 81.64%
|
319 |
+
[ Tue Sep 13 11:57:12 2022 ] Training epoch: 42
|
320 |
+
[ Tue Sep 13 11:57:27 2022 ] Batch(36/243) done. Loss: 0.5149 lr:0.100000
|
321 |
+
[ Tue Sep 13 11:57:56 2022 ] Batch(136/243) done. Loss: 0.3281 lr:0.100000
|
322 |
+
[ Tue Sep 13 11:58:26 2022 ] Batch(236/243) done. Loss: 0.5555 lr:0.100000
|
323 |
+
[ Tue Sep 13 11:58:28 2022 ] Eval epoch: 42
|
324 |
+
[ Tue Sep 13 11:59:59 2022 ] Mean test loss of 796 batches: 2.9600913524627686.
|
325 |
+
[ Tue Sep 13 11:59:59 2022 ] Top1: 42.34%
|
326 |
+
[ Tue Sep 13 11:59:59 2022 ] Top5: 76.59%
|
327 |
+
[ Tue Sep 13 12:00:00 2022 ] Training epoch: 43
|
328 |
+
[ Tue Sep 13 12:00:31 2022 ] Batch(93/243) done. Loss: 0.5678 lr:0.100000
|
329 |
+
[ Tue Sep 13 12:01:01 2022 ] Batch(193/243) done. Loss: 0.2838 lr:0.100000
|
330 |
+
[ Tue Sep 13 12:01:16 2022 ] Eval epoch: 43
|
331 |
+
[ Tue Sep 13 12:02:46 2022 ] Mean test loss of 796 batches: 2.416943311691284.
|
332 |
+
[ Tue Sep 13 12:02:47 2022 ] Top1: 48.39%
|
333 |
+
[ Tue Sep 13 12:02:47 2022 ] Top5: 80.53%
|
334 |
+
[ Tue Sep 13 12:02:47 2022 ] Training epoch: 44
|
335 |
+
[ Tue Sep 13 12:03:06 2022 ] Batch(50/243) done. Loss: 0.4872 lr:0.100000
|
336 |
+
[ Tue Sep 13 12:03:36 2022 ] Batch(150/243) done. Loss: 0.5165 lr:0.100000
|
337 |
+
[ Tue Sep 13 12:04:03 2022 ] Eval epoch: 44
|
338 |
+
[ Tue Sep 13 12:05:34 2022 ] Mean test loss of 796 batches: 2.670891284942627.
|
339 |
+
[ Tue Sep 13 12:05:34 2022 ] Top1: 46.07%
|
340 |
+
[ Tue Sep 13 12:05:34 2022 ] Top5: 79.14%
|
341 |
+
[ Tue Sep 13 12:05:35 2022 ] Training epoch: 45
|
342 |
+
[ Tue Sep 13 12:05:41 2022 ] Batch(7/243) done. Loss: 0.4505 lr:0.100000
|
343 |
+
[ Tue Sep 13 12:06:11 2022 ] Batch(107/243) done. Loss: 0.5073 lr:0.100000
|
344 |
+
[ Tue Sep 13 12:06:41 2022 ] Batch(207/243) done. Loss: 0.3428 lr:0.100000
|
345 |
+
[ Tue Sep 13 12:06:51 2022 ] Eval epoch: 45
|
346 |
+
[ Tue Sep 13 12:08:21 2022 ] Mean test loss of 796 batches: 2.291255474090576.
|
347 |
+
[ Tue Sep 13 12:08:22 2022 ] Top1: 47.96%
|
348 |
+
[ Tue Sep 13 12:08:22 2022 ] Top5: 81.30%
|
349 |
+
[ Tue Sep 13 12:08:22 2022 ] Training epoch: 46
|
350 |
+
[ Tue Sep 13 12:08:45 2022 ] Batch(64/243) done. Loss: 0.3668 lr:0.100000
|
351 |
+
[ Tue Sep 13 12:09:15 2022 ] Batch(164/243) done. Loss: 0.3639 lr:0.100000
|
352 |
+
[ Tue Sep 13 12:09:38 2022 ] Eval epoch: 46
|
353 |
+
[ Tue Sep 13 12:11:09 2022 ] Mean test loss of 796 batches: 3.0566868782043457.
|
354 |
+
[ Tue Sep 13 12:11:09 2022 ] Top1: 43.69%
|
355 |
+
[ Tue Sep 13 12:11:09 2022 ] Top5: 76.28%
|
356 |
+
[ Tue Sep 13 12:11:10 2022 ] Training epoch: 47
|
357 |
+
[ Tue Sep 13 12:11:19 2022 ] Batch(21/243) done. Loss: 0.3062 lr:0.100000
|
358 |
+
[ Tue Sep 13 12:11:49 2022 ] Batch(121/243) done. Loss: 0.4540 lr:0.100000
|
359 |
+
[ Tue Sep 13 12:12:19 2022 ] Batch(221/243) done. Loss: 0.5952 lr:0.100000
|
360 |
+
[ Tue Sep 13 12:12:26 2022 ] Eval epoch: 47
|
361 |
+
[ Tue Sep 13 12:13:55 2022 ] Mean test loss of 796 batches: 14.264094352722168.
|
362 |
+
[ Tue Sep 13 12:13:56 2022 ] Top1: 5.31%
|
363 |
+
[ Tue Sep 13 12:13:56 2022 ] Top5: 16.96%
|
364 |
+
[ Tue Sep 13 12:13:56 2022 ] Training epoch: 48
|
365 |
+
[ Tue Sep 13 12:14:23 2022 ] Batch(78/243) done. Loss: 0.3861 lr:0.100000
|
366 |
+
[ Tue Sep 13 12:14:53 2022 ] Batch(178/243) done. Loss: 0.5330 lr:0.100000
|
367 |
+
[ Tue Sep 13 12:15:12 2022 ] Eval epoch: 48
|
368 |
+
[ Tue Sep 13 12:16:42 2022 ] Mean test loss of 796 batches: 2.734037399291992.
|
369 |
+
[ Tue Sep 13 12:16:42 2022 ] Top1: 42.76%
|
370 |
+
[ Tue Sep 13 12:16:43 2022 ] Top5: 77.40%
|
371 |
+
[ Tue Sep 13 12:16:43 2022 ] Training epoch: 49
|
372 |
+
[ Tue Sep 13 12:16:57 2022 ] Batch(35/243) done. Loss: 0.3180 lr:0.100000
|
373 |
+
[ Tue Sep 13 12:17:27 2022 ] Batch(135/243) done. Loss: 0.5142 lr:0.100000
|
374 |
+
[ Tue Sep 13 12:17:58 2022 ] Batch(235/243) done. Loss: 0.4537 lr:0.100000
|
375 |
+
[ Tue Sep 13 12:18:00 2022 ] Eval epoch: 49
|
376 |
+
[ Tue Sep 13 12:19:30 2022 ] Mean test loss of 796 batches: 2.4061684608459473.
|
377 |
+
[ Tue Sep 13 12:19:30 2022 ] Top1: 47.99%
|
378 |
+
[ Tue Sep 13 12:19:31 2022 ] Top5: 81.06%
|
379 |
+
[ Tue Sep 13 12:19:31 2022 ] Training epoch: 50
|
380 |
+
[ Tue Sep 13 12:20:02 2022 ] Batch(92/243) done. Loss: 0.5954 lr:0.100000
|
381 |
+
[ Tue Sep 13 12:20:32 2022 ] Batch(192/243) done. Loss: 0.6014 lr:0.100000
|
382 |
+
[ Tue Sep 13 12:20:47 2022 ] Eval epoch: 50
|
383 |
+
[ Tue Sep 13 12:22:17 2022 ] Mean test loss of 796 batches: 2.785815954208374.
|
384 |
+
[ Tue Sep 13 12:22:18 2022 ] Top1: 45.54%
|
385 |
+
[ Tue Sep 13 12:22:18 2022 ] Top5: 78.75%
|
386 |
+
[ Tue Sep 13 12:22:18 2022 ] Training epoch: 51
|
387 |
+
[ Tue Sep 13 12:22:36 2022 ] Batch(49/243) done. Loss: 0.4449 lr:0.100000
|
388 |
+
[ Tue Sep 13 12:23:06 2022 ] Batch(149/243) done. Loss: 0.2752 lr:0.100000
|
389 |
+
[ Tue Sep 13 12:23:34 2022 ] Eval epoch: 51
|
390 |
+
[ Tue Sep 13 12:25:05 2022 ] Mean test loss of 796 batches: 2.936903476715088.
|
391 |
+
[ Tue Sep 13 12:25:05 2022 ] Top1: 43.72%
|
392 |
+
[ Tue Sep 13 12:25:05 2022 ] Top5: 76.89%
|
393 |
+
[ Tue Sep 13 12:25:06 2022 ] Training epoch: 52
|
394 |
+
[ Tue Sep 13 12:25:11 2022 ] Batch(6/243) done. Loss: 0.1977 lr:0.100000
|
395 |
+
[ Tue Sep 13 12:25:41 2022 ] Batch(106/243) done. Loss: 0.3755 lr:0.100000
|
396 |
+
[ Tue Sep 13 12:26:10 2022 ] Batch(206/243) done. Loss: 0.4178 lr:0.100000
|
397 |
+
[ Tue Sep 13 12:26:21 2022 ] Eval epoch: 52
|
398 |
+
[ Tue Sep 13 12:27:52 2022 ] Mean test loss of 796 batches: 2.6056711673736572.
|
399 |
+
[ Tue Sep 13 12:27:53 2022 ] Top1: 47.38%
|
400 |
+
[ Tue Sep 13 12:27:53 2022 ] Top5: 80.59%
|
401 |
+
[ Tue Sep 13 12:27:53 2022 ] Training epoch: 53
|
402 |
+
[ Tue Sep 13 12:28:15 2022 ] Batch(63/243) done. Loss: 0.5516 lr:0.100000
|
403 |
+
[ Tue Sep 13 12:28:46 2022 ] Batch(163/243) done. Loss: 0.4919 lr:0.100000
|
404 |
+
[ Tue Sep 13 12:29:10 2022 ] Eval epoch: 53
|
405 |
+
[ Tue Sep 13 12:30:40 2022 ] Mean test loss of 796 batches: 2.7067835330963135.
|
406 |
+
[ Tue Sep 13 12:30:40 2022 ] Top1: 46.74%
|
407 |
+
[ Tue Sep 13 12:30:40 2022 ] Top5: 80.81%
|
408 |
+
[ Tue Sep 13 12:30:41 2022 ] Training epoch: 54
|
409 |
+
[ Tue Sep 13 12:30:50 2022 ] Batch(20/243) done. Loss: 0.2022 lr:0.100000
|
410 |
+
[ Tue Sep 13 12:31:20 2022 ] Batch(120/243) done. Loss: 0.3183 lr:0.100000
|
411 |
+
[ Tue Sep 13 12:31:50 2022 ] Batch(220/243) done. Loss: 0.4903 lr:0.100000
|
412 |
+
[ Tue Sep 13 12:31:57 2022 ] Eval epoch: 54
|
413 |
+
[ Tue Sep 13 12:33:27 2022 ] Mean test loss of 796 batches: 2.815014362335205.
|
414 |
+
[ Tue Sep 13 12:33:27 2022 ] Top1: 45.23%
|
415 |
+
[ Tue Sep 13 12:33:27 2022 ] Top5: 78.11%
|
416 |
+
[ Tue Sep 13 12:33:28 2022 ] Training epoch: 55
|
417 |
+
[ Tue Sep 13 12:33:54 2022 ] Batch(77/243) done. Loss: 0.4042 lr:0.100000
|
418 |
+
[ Tue Sep 13 12:34:24 2022 ] Batch(177/243) done. Loss: 0.5307 lr:0.100000
|
419 |
+
[ Tue Sep 13 12:34:44 2022 ] Eval epoch: 55
|
420 |
+
[ Tue Sep 13 12:36:13 2022 ] Mean test loss of 796 batches: 2.895005941390991.
|
421 |
+
[ Tue Sep 13 12:36:13 2022 ] Top1: 45.16%
|
422 |
+
[ Tue Sep 13 12:36:14 2022 ] Top5: 77.35%
|
423 |
+
[ Tue Sep 13 12:36:14 2022 ] Training epoch: 56
|
424 |
+
[ Tue Sep 13 12:36:28 2022 ] Batch(34/243) done. Loss: 0.6129 lr:0.100000
|
425 |
+
[ Tue Sep 13 12:36:58 2022 ] Batch(134/243) done. Loss: 0.3129 lr:0.100000
|
426 |
+
[ Tue Sep 13 12:37:28 2022 ] Batch(234/243) done. Loss: 0.5452 lr:0.100000
|
427 |
+
[ Tue Sep 13 12:37:30 2022 ] Eval epoch: 56
|
428 |
+
[ Tue Sep 13 12:39:00 2022 ] Mean test loss of 796 batches: 3.1033451557159424.
|
429 |
+
[ Tue Sep 13 12:39:01 2022 ] Top1: 44.08%
|
430 |
+
[ Tue Sep 13 12:39:01 2022 ] Top5: 78.70%
|
431 |
+
[ Tue Sep 13 12:39:01 2022 ] Training epoch: 57
|
432 |
+
[ Tue Sep 13 12:39:32 2022 ] Batch(91/243) done. Loss: 0.2330 lr:0.100000
|
433 |
+
[ Tue Sep 13 12:40:02 2022 ] Batch(191/243) done. Loss: 0.5911 lr:0.100000
|
434 |
+
[ Tue Sep 13 12:40:17 2022 ] Eval epoch: 57
|
435 |
+
[ Tue Sep 13 12:41:47 2022 ] Mean test loss of 796 batches: 28.345247268676758.
|
436 |
+
[ Tue Sep 13 12:41:47 2022 ] Top1: 5.01%
|
437 |
+
[ Tue Sep 13 12:41:47 2022 ] Top5: 13.06%
|
438 |
+
[ Tue Sep 13 12:41:48 2022 ] Training epoch: 58
|
439 |
+
[ Tue Sep 13 12:42:05 2022 ] Batch(48/243) done. Loss: 0.3507 lr:0.100000
|
440 |
+
[ Tue Sep 13 12:42:35 2022 ] Batch(148/243) done. Loss: 0.3725 lr:0.100000
|
441 |
+
[ Tue Sep 13 12:43:03 2022 ] Eval epoch: 58
|
442 |
+
[ Tue Sep 13 12:44:34 2022 ] Mean test loss of 796 batches: 2.8092286586761475.
|
443 |
+
[ Tue Sep 13 12:44:34 2022 ] Top1: 46.88%
|
444 |
+
[ Tue Sep 13 12:44:35 2022 ] Top5: 79.48%
|
445 |
+
[ Tue Sep 13 12:44:35 2022 ] Training epoch: 59
|
446 |
+
[ Tue Sep 13 12:44:40 2022 ] Batch(5/243) done. Loss: 0.3029 lr:0.100000
|
447 |
+
[ Tue Sep 13 12:45:10 2022 ] Batch(105/243) done. Loss: 0.2342 lr:0.100000
|
448 |
+
[ Tue Sep 13 12:45:40 2022 ] Batch(205/243) done. Loss: 0.6002 lr:0.100000
|
449 |
+
[ Tue Sep 13 12:45:51 2022 ] Eval epoch: 59
|
450 |
+
[ Tue Sep 13 12:47:21 2022 ] Mean test loss of 796 batches: 2.6384665966033936.
|
451 |
+
[ Tue Sep 13 12:47:21 2022 ] Top1: 46.77%
|
452 |
+
[ Tue Sep 13 12:47:22 2022 ] Top5: 79.31%
|
453 |
+
[ Tue Sep 13 12:47:22 2022 ] Training epoch: 60
|
454 |
+
[ Tue Sep 13 12:47:44 2022 ] Batch(62/243) done. Loss: 0.2674 lr:0.100000
|
455 |
+
[ Tue Sep 13 12:48:14 2022 ] Batch(162/243) done. Loss: 0.3390 lr:0.100000
|
456 |
+
[ Tue Sep 13 12:48:38 2022 ] Eval epoch: 60
|
457 |
+
[ Tue Sep 13 12:50:07 2022 ] Mean test loss of 796 batches: 3.1235268115997314.
|
458 |
+
[ Tue Sep 13 12:50:08 2022 ] Top1: 43.82%
|
459 |
+
[ Tue Sep 13 12:50:08 2022 ] Top5: 77.60%
|
460 |
+
[ Tue Sep 13 12:50:08 2022 ] Training epoch: 61
|
461 |
+
[ Tue Sep 13 12:50:17 2022 ] Batch(19/243) done. Loss: 0.2343 lr:0.010000
|
462 |
+
[ Tue Sep 13 12:50:47 2022 ] Batch(119/243) done. Loss: 0.2469 lr:0.010000
|
463 |
+
[ Tue Sep 13 12:51:17 2022 ] Batch(219/243) done. Loss: 0.1518 lr:0.010000
|
464 |
+
[ Tue Sep 13 12:51:25 2022 ] Eval epoch: 61
|
465 |
+
[ Tue Sep 13 12:52:54 2022 ] Mean test loss of 796 batches: 2.237048864364624.
|
466 |
+
[ Tue Sep 13 12:52:55 2022 ] Top1: 55.25%
|
467 |
+
[ Tue Sep 13 12:52:55 2022 ] Top5: 85.63%
|
468 |
+
[ Tue Sep 13 12:52:55 2022 ] Training epoch: 62
|
469 |
+
[ Tue Sep 13 12:53:21 2022 ] Batch(76/243) done. Loss: 0.1355 lr:0.010000
|
470 |
+
[ Tue Sep 13 12:53:51 2022 ] Batch(176/243) done. Loss: 0.0464 lr:0.010000
|
471 |
+
[ Tue Sep 13 12:54:11 2022 ] Eval epoch: 62
|
472 |
+
[ Tue Sep 13 12:55:41 2022 ] Mean test loss of 796 batches: 2.2395992279052734.
|
473 |
+
[ Tue Sep 13 12:55:42 2022 ] Top1: 55.93%
|
474 |
+
[ Tue Sep 13 12:55:42 2022 ] Top5: 85.76%
|
475 |
+
[ Tue Sep 13 12:55:42 2022 ] Training epoch: 63
|
476 |
+
[ Tue Sep 13 12:55:56 2022 ] Batch(33/243) done. Loss: 0.0812 lr:0.010000
|
477 |
+
[ Tue Sep 13 12:56:26 2022 ] Batch(133/243) done. Loss: 0.0868 lr:0.010000
|
478 |
+
[ Tue Sep 13 12:56:56 2022 ] Batch(233/243) done. Loss: 0.1094 lr:0.010000
|
479 |
+
[ Tue Sep 13 12:56:58 2022 ] Eval epoch: 63
|
480 |
+
[ Tue Sep 13 12:58:29 2022 ] Mean test loss of 796 batches: 2.2251181602478027.
|
481 |
+
[ Tue Sep 13 12:58:29 2022 ] Top1: 56.46%
|
482 |
+
[ Tue Sep 13 12:58:30 2022 ] Top5: 86.06%
|
483 |
+
[ Tue Sep 13 12:58:30 2022 ] Training epoch: 64
|
484 |
+
[ Tue Sep 13 12:59:00 2022 ] Batch(90/243) done. Loss: 0.1751 lr:0.010000
|
485 |
+
[ Tue Sep 13 12:59:30 2022 ] Batch(190/243) done. Loss: 0.0929 lr:0.010000
|
486 |
+
[ Tue Sep 13 12:59:46 2022 ] Eval epoch: 64
|
487 |
+
[ Tue Sep 13 13:01:16 2022 ] Mean test loss of 796 batches: 2.337252378463745.
|
488 |
+
[ Tue Sep 13 13:01:17 2022 ] Top1: 55.47%
|
489 |
+
[ Tue Sep 13 13:01:17 2022 ] Top5: 85.48%
|
490 |
+
[ Tue Sep 13 13:01:17 2022 ] Training epoch: 65
|
491 |
+
[ Tue Sep 13 13:01:35 2022 ] Batch(47/243) done. Loss: 0.0395 lr:0.010000
|
492 |
+
[ Tue Sep 13 13:02:05 2022 ] Batch(147/243) done. Loss: 0.1822 lr:0.010000
|
493 |
+
[ Tue Sep 13 13:02:34 2022 ] Eval epoch: 65
|
494 |
+
[ Tue Sep 13 13:04:04 2022 ] Mean test loss of 796 batches: 2.358794927597046.
|
495 |
+
[ Tue Sep 13 13:04:04 2022 ] Top1: 55.68%
|
496 |
+
[ Tue Sep 13 13:04:04 2022 ] Top5: 85.67%
|
497 |
+
[ Tue Sep 13 13:04:05 2022 ] Training epoch: 66
|
498 |
+
[ Tue Sep 13 13:04:10 2022 ] Batch(4/243) done. Loss: 0.1156 lr:0.010000
|
499 |
+
[ Tue Sep 13 13:04:40 2022 ] Batch(104/243) done. Loss: 0.0366 lr:0.010000
|
500 |
+
[ Tue Sep 13 13:05:10 2022 ] Batch(204/243) done. Loss: 0.1222 lr:0.010000
|
501 |
+
[ Tue Sep 13 13:05:22 2022 ] Eval epoch: 66
|
502 |
+
[ Tue Sep 13 13:06:52 2022 ] Mean test loss of 796 batches: 2.4481406211853027.
|
503 |
+
[ Tue Sep 13 13:06:53 2022 ] Top1: 55.74%
|
504 |
+
[ Tue Sep 13 13:06:53 2022 ] Top5: 85.42%
|
505 |
+
[ Tue Sep 13 13:06:53 2022 ] Training epoch: 67
|
506 |
+
[ Tue Sep 13 13:07:16 2022 ] Batch(61/243) done. Loss: 0.0795 lr:0.010000
|
507 |
+
[ Tue Sep 13 13:07:46 2022 ] Batch(161/243) done. Loss: 0.0534 lr:0.010000
|
508 |
+
[ Tue Sep 13 13:08:10 2022 ] Eval epoch: 67
|
509 |
+
[ Tue Sep 13 13:09:40 2022 ] Mean test loss of 796 batches: 2.4075491428375244.
|
510 |
+
[ Tue Sep 13 13:09:40 2022 ] Top1: 55.97%
|
511 |
+
[ Tue Sep 13 13:09:41 2022 ] Top5: 85.72%
|
512 |
+
[ Tue Sep 13 13:09:41 2022 ] Training epoch: 68
|
513 |
+
[ Tue Sep 13 13:09:50 2022 ] Batch(18/243) done. Loss: 0.1113 lr:0.010000
|
514 |
+
[ Tue Sep 13 13:10:20 2022 ] Batch(118/243) done. Loss: 0.0638 lr:0.010000
|
515 |
+
[ Tue Sep 13 13:10:50 2022 ] Batch(218/243) done. Loss: 0.0918 lr:0.010000
|
516 |
+
[ Tue Sep 13 13:10:58 2022 ] Eval epoch: 68
|
517 |
+
[ Tue Sep 13 13:12:27 2022 ] Mean test loss of 796 batches: 2.3931756019592285.
|
518 |
+
[ Tue Sep 13 13:12:28 2022 ] Top1: 56.16%
|
519 |
+
[ Tue Sep 13 13:12:28 2022 ] Top5: 85.73%
|
520 |
+
[ Tue Sep 13 13:12:28 2022 ] Training epoch: 69
|
521 |
+
[ Tue Sep 13 13:12:54 2022 ] Batch(75/243) done. Loss: 0.1031 lr:0.010000
|
522 |
+
[ Tue Sep 13 13:13:24 2022 ] Batch(175/243) done. Loss: 0.0616 lr:0.010000
|
523 |
+
[ Tue Sep 13 13:13:45 2022 ] Eval epoch: 69
|
524 |
+
[ Tue Sep 13 13:15:14 2022 ] Mean test loss of 796 batches: 2.514704942703247.
|
525 |
+
[ Tue Sep 13 13:15:15 2022 ] Top1: 55.21%
|
526 |
+
[ Tue Sep 13 13:15:15 2022 ] Top5: 85.25%
|
527 |
+
[ Tue Sep 13 13:15:15 2022 ] Training epoch: 70
|
528 |
+
[ Tue Sep 13 13:15:29 2022 ] Batch(32/243) done. Loss: 0.0328 lr:0.010000
|
529 |
+
[ Tue Sep 13 13:15:58 2022 ] Batch(132/243) done. Loss: 0.0665 lr:0.010000
|
530 |
+
[ Tue Sep 13 13:16:29 2022 ] Batch(232/243) done. Loss: 0.1057 lr:0.010000
|
531 |
+
[ Tue Sep 13 13:16:32 2022 ] Eval epoch: 70
|
532 |
+
[ Tue Sep 13 13:18:03 2022 ] Mean test loss of 796 batches: 2.4123992919921875.
|
533 |
+
[ Tue Sep 13 13:18:03 2022 ] Top1: 55.97%
|
534 |
+
[ Tue Sep 13 13:18:03 2022 ] Top5: 85.94%
|
535 |
+
[ Tue Sep 13 13:18:04 2022 ] Training epoch: 71
|
536 |
+
[ Tue Sep 13 13:18:34 2022 ] Batch(89/243) done. Loss: 0.0821 lr:0.010000
|
537 |
+
[ Tue Sep 13 13:19:04 2022 ] Batch(189/243) done. Loss: 0.0117 lr:0.010000
|
538 |
+
[ Tue Sep 13 13:19:20 2022 ] Eval epoch: 71
|
539 |
+
[ Tue Sep 13 13:20:50 2022 ] Mean test loss of 796 batches: 2.4410130977630615.
|
540 |
+
[ Tue Sep 13 13:20:50 2022 ] Top1: 56.26%
|
541 |
+
[ Tue Sep 13 13:20:51 2022 ] Top5: 86.02%
|
542 |
+
[ Tue Sep 13 13:20:51 2022 ] Training epoch: 72
|
543 |
+
[ Tue Sep 13 13:21:08 2022 ] Batch(46/243) done. Loss: 0.0303 lr:0.010000
|
544 |
+
[ Tue Sep 13 13:21:39 2022 ] Batch(146/243) done. Loss: 0.0665 lr:0.010000
|
545 |
+
[ Tue Sep 13 13:22:08 2022 ] Eval epoch: 72
|
546 |
+
[ Tue Sep 13 13:23:37 2022 ] Mean test loss of 796 batches: 2.438089370727539.
|
547 |
+
[ Tue Sep 13 13:23:37 2022 ] Top1: 56.12%
|
548 |
+
[ Tue Sep 13 13:23:37 2022 ] Top5: 85.96%
|
549 |
+
[ Tue Sep 13 13:23:38 2022 ] Training epoch: 73
|
550 |
+
[ Tue Sep 13 13:23:42 2022 ] Batch(3/243) done. Loss: 0.0759 lr:0.010000
|
551 |
+
[ Tue Sep 13 13:24:12 2022 ] Batch(103/243) done. Loss: 0.1013 lr:0.010000
|
552 |
+
[ Tue Sep 13 13:24:42 2022 ] Batch(203/243) done. Loss: 0.0546 lr:0.010000
|
553 |
+
[ Tue Sep 13 13:24:54 2022 ] Eval epoch: 73
|
554 |
+
[ Tue Sep 13 13:26:24 2022 ] Mean test loss of 796 batches: 2.5160343647003174.
|
555 |
+
[ Tue Sep 13 13:26:24 2022 ] Top1: 55.85%
|
556 |
+
[ Tue Sep 13 13:26:24 2022 ] Top5: 85.90%
|
557 |
+
[ Tue Sep 13 13:26:25 2022 ] Training epoch: 74
|
558 |
+
[ Tue Sep 13 13:26:46 2022 ] Batch(60/243) done. Loss: 0.0480 lr:0.010000
|
559 |
+
[ Tue Sep 13 13:27:16 2022 ] Batch(160/243) done. Loss: 0.0445 lr:0.010000
|
560 |
+
[ Tue Sep 13 13:27:41 2022 ] Eval epoch: 74
|
561 |
+
[ Tue Sep 13 13:29:11 2022 ] Mean test loss of 796 batches: 2.597693681716919.
|
562 |
+
[ Tue Sep 13 13:29:11 2022 ] Top1: 55.46%
|
563 |
+
[ Tue Sep 13 13:29:11 2022 ] Top5: 85.56%
|
564 |
+
[ Tue Sep 13 13:29:12 2022 ] Training epoch: 75
|
565 |
+
[ Tue Sep 13 13:29:20 2022 ] Batch(17/243) done. Loss: 0.0589 lr:0.010000
|
566 |
+
[ Tue Sep 13 13:29:50 2022 ] Batch(117/243) done. Loss: 0.0458 lr:0.010000
|
567 |
+
[ Tue Sep 13 13:30:20 2022 ] Batch(217/243) done. Loss: 0.0797 lr:0.010000
|
568 |
+
[ Tue Sep 13 13:30:28 2022 ] Eval epoch: 75
|
569 |
+
[ Tue Sep 13 13:31:58 2022 ] Mean test loss of 796 batches: 2.51838755607605.
|
570 |
+
[ Tue Sep 13 13:31:59 2022 ] Top1: 56.59%
|
571 |
+
[ Tue Sep 13 13:31:59 2022 ] Top5: 86.06%
|
572 |
+
[ Tue Sep 13 13:31:59 2022 ] Training epoch: 76
|
573 |
+
[ Tue Sep 13 13:32:25 2022 ] Batch(74/243) done. Loss: 0.0864 lr:0.010000
|
574 |
+
[ Tue Sep 13 13:32:54 2022 ] Batch(174/243) done. Loss: 0.0252 lr:0.010000
|
575 |
+
[ Tue Sep 13 13:33:15 2022 ] Eval epoch: 76
|
576 |
+
[ Tue Sep 13 13:34:46 2022 ] Mean test loss of 796 batches: 2.5579476356506348.
|
577 |
+
[ Tue Sep 13 13:34:46 2022 ] Top1: 55.87%
|
578 |
+
[ Tue Sep 13 13:34:47 2022 ] Top5: 85.64%
|
579 |
+
[ Tue Sep 13 13:34:47 2022 ] Training epoch: 77
|
580 |
+
[ Tue Sep 13 13:35:00 2022 ] Batch(31/243) done. Loss: 0.0412 lr:0.010000
|
581 |
+
[ Tue Sep 13 13:35:30 2022 ] Batch(131/243) done. Loss: 0.0723 lr:0.010000
|
582 |
+
[ Tue Sep 13 13:36:00 2022 ] Batch(231/243) done. Loss: 0.1002 lr:0.010000
|
583 |
+
[ Tue Sep 13 13:36:03 2022 ] Eval epoch: 77
|
584 |
+
[ Tue Sep 13 13:37:34 2022 ] Mean test loss of 796 batches: 2.5376739501953125.
|
585 |
+
[ Tue Sep 13 13:37:35 2022 ] Top1: 56.41%
|
586 |
+
[ Tue Sep 13 13:37:35 2022 ] Top5: 85.96%
|
587 |
+
[ Tue Sep 13 13:37:35 2022 ] Training epoch: 78
|
588 |
+
[ Tue Sep 13 13:38:05 2022 ] Batch(88/243) done. Loss: 0.0484 lr:0.010000
|
589 |
+
[ Tue Sep 13 13:38:35 2022 ] Batch(188/243) done. Loss: 0.0305 lr:0.010000
|
590 |
+
[ Tue Sep 13 13:38:51 2022 ] Eval epoch: 78
|
591 |
+
[ Tue Sep 13 13:40:21 2022 ] Mean test loss of 796 batches: 2.5792503356933594.
|
592 |
+
[ Tue Sep 13 13:40:21 2022 ] Top1: 55.75%
|
593 |
+
[ Tue Sep 13 13:40:22 2022 ] Top5: 85.50%
|
594 |
+
[ Tue Sep 13 13:40:22 2022 ] Training epoch: 79
|
595 |
+
[ Tue Sep 13 13:40:39 2022 ] Batch(45/243) done. Loss: 0.0676 lr:0.010000
|
596 |
+
[ Tue Sep 13 13:41:09 2022 ] Batch(145/243) done. Loss: 0.0301 lr:0.010000
|
597 |
+
[ Tue Sep 13 13:41:38 2022 ] Eval epoch: 79
|
598 |
+
[ Tue Sep 13 13:43:08 2022 ] Mean test loss of 796 batches: 2.5704731941223145.
|
599 |
+
[ Tue Sep 13 13:43:09 2022 ] Top1: 56.07%
|
600 |
+
[ Tue Sep 13 13:43:09 2022 ] Top5: 85.69%
|
601 |
+
[ Tue Sep 13 13:43:09 2022 ] Training epoch: 80
|
602 |
+
[ Tue Sep 13 13:43:14 2022 ] Batch(2/243) done. Loss: 0.0936 lr:0.010000
|
603 |
+
[ Tue Sep 13 13:43:43 2022 ] Batch(102/243) done. Loss: 0.0752 lr:0.010000
|
604 |
+
[ Tue Sep 13 13:44:13 2022 ] Batch(202/243) done. Loss: 0.0291 lr:0.010000
|
605 |
+
[ Tue Sep 13 13:44:26 2022 ] Eval epoch: 80
|
606 |
+
[ Tue Sep 13 13:45:56 2022 ] Mean test loss of 796 batches: 2.6504592895507812.
|
607 |
+
[ Tue Sep 13 13:45:56 2022 ] Top1: 55.93%
|
608 |
+
[ Tue Sep 13 13:45:56 2022 ] Top5: 85.44%
|
609 |
+
[ Tue Sep 13 13:45:57 2022 ] Training epoch: 81
|
610 |
+
[ Tue Sep 13 13:46:18 2022 ] Batch(59/243) done. Loss: 0.0546 lr:0.001000
|
611 |
+
[ Tue Sep 13 13:46:48 2022 ] Batch(159/243) done. Loss: 0.1036 lr:0.001000
|
612 |
+
[ Tue Sep 13 13:47:13 2022 ] Eval epoch: 81
|
613 |
+
[ Tue Sep 13 13:48:42 2022 ] Mean test loss of 796 batches: 2.6293492317199707.
|
614 |
+
[ Tue Sep 13 13:48:42 2022 ] Top1: 55.86%
|
615 |
+
[ Tue Sep 13 13:48:42 2022 ] Top5: 85.53%
|
616 |
+
[ Tue Sep 13 13:48:43 2022 ] Training epoch: 82
|
617 |
+
[ Tue Sep 13 13:48:51 2022 ] Batch(16/243) done. Loss: 0.0634 lr:0.001000
|
618 |
+
[ Tue Sep 13 13:49:21 2022 ] Batch(116/243) done. Loss: 0.0092 lr:0.001000
|
619 |
+
[ Tue Sep 13 13:49:51 2022 ] Batch(216/243) done. Loss: 0.0490 lr:0.001000
|
620 |
+
[ Tue Sep 13 13:49:59 2022 ] Eval epoch: 82
|
621 |
+
[ Tue Sep 13 13:51:30 2022 ] Mean test loss of 796 batches: 2.653411388397217.
|
622 |
+
[ Tue Sep 13 13:51:30 2022 ] Top1: 55.83%
|
623 |
+
[ Tue Sep 13 13:51:31 2022 ] Top5: 85.50%
|
624 |
+
[ Tue Sep 13 13:51:31 2022 ] Training epoch: 83
|
625 |
+
[ Tue Sep 13 13:51:57 2022 ] Batch(73/243) done. Loss: 0.0265 lr:0.001000
|
626 |
+
[ Tue Sep 13 13:52:27 2022 ] Batch(173/243) done. Loss: 0.0194 lr:0.001000
|
627 |
+
[ Tue Sep 13 13:52:48 2022 ] Eval epoch: 83
|
628 |
+
[ Tue Sep 13 13:54:17 2022 ] Mean test loss of 796 batches: 2.6267552375793457.
|
629 |
+
[ Tue Sep 13 13:54:17 2022 ] Top1: 55.94%
|
630 |
+
[ Tue Sep 13 13:54:17 2022 ] Top5: 85.60%
|
631 |
+
[ Tue Sep 13 13:54:18 2022 ] Training epoch: 84
|
632 |
+
[ Tue Sep 13 13:54:30 2022 ] Batch(30/243) done. Loss: 0.0394 lr:0.001000
|
633 |
+
[ Tue Sep 13 13:55:00 2022 ] Batch(130/243) done. Loss: 0.0827 lr:0.001000
|
634 |
+
[ Tue Sep 13 13:55:30 2022 ] Batch(230/243) done. Loss: 0.0686 lr:0.001000
|
635 |
+
[ Tue Sep 13 13:55:34 2022 ] Eval epoch: 84
|
636 |
+
[ Tue Sep 13 13:57:03 2022 ] Mean test loss of 796 batches: 2.5898735523223877.
|
637 |
+
[ Tue Sep 13 13:57:03 2022 ] Top1: 56.22%
|
638 |
+
[ Tue Sep 13 13:57:04 2022 ] Top5: 85.74%
|
639 |
+
[ Tue Sep 13 13:57:04 2022 ] Training epoch: 85
|
640 |
+
[ Tue Sep 13 13:57:33 2022 ] Batch(87/243) done. Loss: 0.0559 lr:0.001000
|
641 |
+
[ Tue Sep 13 13:58:03 2022 ] Batch(187/243) done. Loss: 0.0420 lr:0.001000
|
642 |
+
[ Tue Sep 13 13:58:20 2022 ] Eval epoch: 85
|
643 |
+
[ Tue Sep 13 13:59:49 2022 ] Mean test loss of 796 batches: 2.5916905403137207.
|
644 |
+
[ Tue Sep 13 13:59:50 2022 ] Top1: 56.13%
|
645 |
+
[ Tue Sep 13 13:59:50 2022 ] Top5: 85.68%
|
646 |
+
[ Tue Sep 13 13:59:50 2022 ] Training epoch: 86
|
647 |
+
[ Tue Sep 13 14:00:07 2022 ] Batch(44/243) done. Loss: 0.0396 lr:0.001000
|
648 |
+
[ Tue Sep 13 14:00:37 2022 ] Batch(144/243) done. Loss: 0.0288 lr:0.001000
|
649 |
+
[ Tue Sep 13 14:01:07 2022 ] Eval epoch: 86
|
650 |
+
[ Tue Sep 13 14:02:37 2022 ] Mean test loss of 796 batches: 2.5492138862609863.
|
651 |
+
[ Tue Sep 13 14:02:37 2022 ] Top1: 56.50%
|
652 |
+
[ Tue Sep 13 14:02:38 2022 ] Top5: 85.82%
|
653 |
+
[ Tue Sep 13 14:02:38 2022 ] Training epoch: 87
|
654 |
+
[ Tue Sep 13 14:02:42 2022 ] Batch(1/243) done. Loss: 0.0766 lr:0.001000
|
655 |
+
[ Tue Sep 13 14:03:12 2022 ] Batch(101/243) done. Loss: 0.0827 lr:0.001000
|
656 |
+
[ Tue Sep 13 14:03:41 2022 ] Batch(201/243) done. Loss: 0.0746 lr:0.001000
|
657 |
+
[ Tue Sep 13 14:03:54 2022 ] Eval epoch: 87
|
658 |
+
[ Tue Sep 13 14:05:24 2022 ] Mean test loss of 796 batches: 2.5682311058044434.
|
659 |
+
[ Tue Sep 13 14:05:24 2022 ] Top1: 56.08%
|
660 |
+
[ Tue Sep 13 14:05:24 2022 ] Top5: 85.66%
|
661 |
+
[ Tue Sep 13 14:05:25 2022 ] Training epoch: 88
|
662 |
+
[ Tue Sep 13 14:05:45 2022 ] Batch(58/243) done. Loss: 0.0400 lr:0.001000
|
663 |
+
[ Tue Sep 13 14:06:15 2022 ] Batch(158/243) done. Loss: 0.0518 lr:0.001000
|
664 |
+
[ Tue Sep 13 14:06:41 2022 ] Eval epoch: 88
|
665 |
+
[ Tue Sep 13 14:08:13 2022 ] Mean test loss of 796 batches: 2.5862810611724854.
|
666 |
+
[ Tue Sep 13 14:08:14 2022 ] Top1: 56.15%
|
667 |
+
[ Tue Sep 13 14:08:14 2022 ] Top5: 85.86%
|
668 |
+
[ Tue Sep 13 14:08:14 2022 ] Training epoch: 89
|
669 |
+
[ Tue Sep 13 14:08:22 2022 ] Batch(15/243) done. Loss: 0.0697 lr:0.001000
|
670 |
+
[ Tue Sep 13 14:08:52 2022 ] Batch(115/243) done. Loss: 0.0347 lr:0.001000
|
671 |
+
[ Tue Sep 13 14:09:22 2022 ] Batch(215/243) done. Loss: 0.0376 lr:0.001000
|
672 |
+
[ Tue Sep 13 14:09:30 2022 ] Eval epoch: 89
|
673 |
+
[ Tue Sep 13 14:11:02 2022 ] Mean test loss of 796 batches: 2.6040351390838623.
|
674 |
+
[ Tue Sep 13 14:11:02 2022 ] Top1: 55.94%
|
675 |
+
[ Tue Sep 13 14:11:02 2022 ] Top5: 85.50%
|
676 |
+
[ Tue Sep 13 14:11:03 2022 ] Training epoch: 90
|
677 |
+
[ Tue Sep 13 14:11:30 2022 ] Batch(72/243) done. Loss: 0.0266 lr:0.001000
|
678 |
+
[ Tue Sep 13 14:12:00 2022 ] Batch(172/243) done. Loss: 0.0592 lr:0.001000
|
679 |
+
[ Tue Sep 13 14:12:21 2022 ] Eval epoch: 90
|
680 |
+
[ Tue Sep 13 14:13:52 2022 ] Mean test loss of 796 batches: 2.6085898876190186.
|
681 |
+
[ Tue Sep 13 14:13:53 2022 ] Top1: 55.85%
|
682 |
+
[ Tue Sep 13 14:13:53 2022 ] Top5: 85.52%
|
683 |
+
[ Tue Sep 13 14:13:53 2022 ] Training epoch: 91
|
684 |
+
[ Tue Sep 13 14:14:06 2022 ] Batch(29/243) done. Loss: 0.0219 lr:0.001000
|
685 |
+
[ Tue Sep 13 14:14:36 2022 ] Batch(129/243) done. Loss: 0.0494 lr:0.001000
|
686 |
+
[ Tue Sep 13 14:15:06 2022 ] Batch(229/243) done. Loss: 0.0842 lr:0.001000
|
687 |
+
[ Tue Sep 13 14:15:10 2022 ] Eval epoch: 91
|
688 |
+
[ Tue Sep 13 14:16:41 2022 ] Mean test loss of 796 batches: 2.601207971572876.
|
689 |
+
[ Tue Sep 13 14:16:41 2022 ] Top1: 55.89%
|
690 |
+
[ Tue Sep 13 14:16:42 2022 ] Top5: 85.64%
|
691 |
+
[ Tue Sep 13 14:16:42 2022 ] Training epoch: 92
|
692 |
+
[ Tue Sep 13 14:17:11 2022 ] Batch(86/243) done. Loss: 0.0986 lr:0.001000
|
693 |
+
[ Tue Sep 13 14:17:41 2022 ] Batch(186/243) done. Loss: 0.1233 lr:0.001000
|
694 |
+
[ Tue Sep 13 14:17:58 2022 ] Eval epoch: 92
|
695 |
+
[ Tue Sep 13 14:19:28 2022 ] Mean test loss of 796 batches: 2.6894383430480957.
|
696 |
+
[ Tue Sep 13 14:19:28 2022 ] Top1: 55.42%
|
697 |
+
[ Tue Sep 13 14:19:29 2022 ] Top5: 85.34%
|
698 |
+
[ Tue Sep 13 14:19:29 2022 ] Training epoch: 93
|
699 |
+
[ Tue Sep 13 14:19:46 2022 ] Batch(43/243) done. Loss: 0.0524 lr:0.001000
|
700 |
+
[ Tue Sep 13 14:20:16 2022 ] Batch(143/243) done. Loss: 0.0981 lr:0.001000
|
701 |
+
[ Tue Sep 13 14:20:45 2022 ] Eval epoch: 93
|
702 |
+
[ Tue Sep 13 14:22:16 2022 ] Mean test loss of 796 batches: 2.6224026679992676.
|
703 |
+
[ Tue Sep 13 14:22:16 2022 ] Top1: 56.13%
|
704 |
+
[ Tue Sep 13 14:22:17 2022 ] Top5: 85.60%
|
705 |
+
[ Tue Sep 13 14:22:17 2022 ] Training epoch: 94
|
706 |
+
[ Tue Sep 13 14:22:21 2022 ] Batch(0/243) done. Loss: 0.0663 lr:0.001000
|
707 |
+
[ Tue Sep 13 14:22:51 2022 ] Batch(100/243) done. Loss: 0.0486 lr:0.001000
|
708 |
+
[ Tue Sep 13 14:23:20 2022 ] Batch(200/243) done. Loss: 0.0635 lr:0.001000
|
709 |
+
[ Tue Sep 13 14:23:33 2022 ] Eval epoch: 94
|
710 |
+
[ Tue Sep 13 14:25:03 2022 ] Mean test loss of 796 batches: 2.6107020378112793.
|
711 |
+
[ Tue Sep 13 14:25:04 2022 ] Top1: 56.09%
|
712 |
+
[ Tue Sep 13 14:25:04 2022 ] Top5: 85.62%
|
713 |
+
[ Tue Sep 13 14:25:04 2022 ] Training epoch: 95
|
714 |
+
[ Tue Sep 13 14:25:25 2022 ] Batch(57/243) done. Loss: 0.1862 lr:0.001000
|
715 |
+
[ Tue Sep 13 14:25:55 2022 ] Batch(157/243) done. Loss: 0.0362 lr:0.001000
|
716 |
+
[ Tue Sep 13 14:26:21 2022 ] Eval epoch: 95
|
717 |
+
[ Tue Sep 13 14:27:51 2022 ] Mean test loss of 796 batches: 2.630995035171509.
|
718 |
+
[ Tue Sep 13 14:27:51 2022 ] Top1: 56.15%
|
719 |
+
[ Tue Sep 13 14:27:51 2022 ] Top5: 85.78%
|
720 |
+
[ Tue Sep 13 14:27:52 2022 ] Training epoch: 96
|
721 |
+
[ Tue Sep 13 14:28:00 2022 ] Batch(14/243) done. Loss: 0.0357 lr:0.001000
|
722 |
+
[ Tue Sep 13 14:28:30 2022 ] Batch(114/243) done. Loss: 0.0910 lr:0.001000
|
723 |
+
[ Tue Sep 13 14:28:59 2022 ] Batch(214/243) done. Loss: 0.0290 lr:0.001000
|
724 |
+
[ Tue Sep 13 14:29:08 2022 ] Eval epoch: 96
|
725 |
+
[ Tue Sep 13 14:30:37 2022 ] Mean test loss of 796 batches: 2.5851011276245117.
|
726 |
+
[ Tue Sep 13 14:30:38 2022 ] Top1: 56.15%
|
727 |
+
[ Tue Sep 13 14:30:38 2022 ] Top5: 85.95%
|
728 |
+
[ Tue Sep 13 14:30:38 2022 ] Training epoch: 97
|
729 |
+
[ Tue Sep 13 14:31:03 2022 ] Batch(71/243) done. Loss: 0.0985 lr:0.001000
|
730 |
+
[ Tue Sep 13 14:31:33 2022 ] Batch(171/243) done. Loss: 0.0368 lr:0.001000
|
731 |
+
[ Tue Sep 13 14:31:55 2022 ] Eval epoch: 97
|
732 |
+
[ Tue Sep 13 14:33:25 2022 ] Mean test loss of 796 batches: 2.600297689437866.
|
733 |
+
[ Tue Sep 13 14:33:25 2022 ] Top1: 56.21%
|
734 |
+
[ Tue Sep 13 14:33:25 2022 ] Top5: 85.83%
|
735 |
+
[ Tue Sep 13 14:33:26 2022 ] Training epoch: 98
|
736 |
+
[ Tue Sep 13 14:33:38 2022 ] Batch(28/243) done. Loss: 0.1044 lr:0.001000
|
737 |
+
[ Tue Sep 13 14:34:07 2022 ] Batch(128/243) done. Loss: 0.0899 lr:0.001000
|
738 |
+
[ Tue Sep 13 14:34:37 2022 ] Batch(228/243) done. Loss: 0.0325 lr:0.001000
|
739 |
+
[ Tue Sep 13 14:34:42 2022 ] Eval epoch: 98
|
740 |
+
[ Tue Sep 13 14:36:11 2022 ] Mean test loss of 796 batches: 2.640855073928833.
|
741 |
+
[ Tue Sep 13 14:36:11 2022 ] Top1: 56.13%
|
742 |
+
[ Tue Sep 13 14:36:12 2022 ] Top5: 85.73%
|
743 |
+
[ Tue Sep 13 14:36:12 2022 ] Training epoch: 99
|
744 |
+
[ Tue Sep 13 14:36:41 2022 ] Batch(85/243) done. Loss: 0.0878 lr:0.001000
|
745 |
+
[ Tue Sep 13 14:37:11 2022 ] Batch(185/243) done. Loss: 0.0576 lr:0.001000
|
746 |
+
[ Tue Sep 13 14:37:29 2022 ] Eval epoch: 99
|
747 |
+
[ Tue Sep 13 14:38:58 2022 ] Mean test loss of 796 batches: 2.7020773887634277.
|
748 |
+
[ Tue Sep 13 14:38:58 2022 ] Top1: 55.70%
|
749 |
+
[ Tue Sep 13 14:38:58 2022 ] Top5: 85.29%
|
750 |
+
[ Tue Sep 13 14:38:59 2022 ] Training epoch: 100
|
751 |
+
[ Tue Sep 13 14:39:15 2022 ] Batch(42/243) done. Loss: 0.0629 lr:0.001000
|
752 |
+
[ Tue Sep 13 14:39:45 2022 ] Batch(142/243) done. Loss: 0.0454 lr:0.001000
|
753 |
+
[ Tue Sep 13 14:40:15 2022 ] Batch(242/243) done. Loss: 0.0956 lr:0.001000
|
754 |
+
[ Tue Sep 13 14:40:15 2022 ] Eval epoch: 100
|
755 |
+
[ Tue Sep 13 14:41:44 2022 ] Mean test loss of 796 batches: 2.707759380340576.
|
756 |
+
[ Tue Sep 13 14:41:45 2022 ] Top1: 55.49%
|
757 |
+
[ Tue Sep 13 14:41:45 2022 ] Top5: 85.48%
|
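The log above interleaves per-batch training loss with per-epoch evaluation; the best evaluation in this run is Top1 56.59% at epoch 75. To pull that out mechanically, here is a minimal Python sketch, assuming the log has been saved locally as log.txt (the path and the helper name best_top1 are illustrative, not part of the repo):

import re

def best_top1(log_path="log.txt"):
    """Return (top1_accuracy, epoch) for the best eval epoch in a log like the one above."""
    epoch, best = None, (0.0, None)
    with open(log_path, encoding="utf-8") as f:
        for line in f:
            m = re.search(r"Eval epoch: (\d+)", line)
            if m:
                epoch = int(m.group(1))  # remember which epoch the next Top1 line belongs to
            m = re.search(r"Top1: ([\d.]+)%", line)
            if m and epoch is not None:
                acc = float(m.group(1))
                if acc > best[0]:
                    best = (acc, epoch)
    return best

if __name__ == "__main__":
    print(best_top1())  # (56.59, 75) for the log above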
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_motion_xsub/config.yaml
ADDED
@@ -0,0 +1,59 @@
Experiment_name: ntu120_joint_motion_xsub
base_lr: 0.1
batch_size: 64
config: ./config/ntu120_xsub/train_joint_motion.yaml
device:
- 6
- 7
eval_interval: 5
feeder: feeders.feeder.Feeder
groups: 8
ignore_weights: []
keep_rate: 0.9
log_interval: 100
model: model.decouple_gcn.Model
model_args:
  block_size: 41
  graph: graph.ntu_rgb_d.Graph
  graph_args:
    labeling_mode: spatial
  groups: 16
  num_class: 120
  num_person: 2
  num_point: 25
model_saved_name: ./save_models/ntu120_joint_motion_xsub
nesterov: true
num_epoch: 100
num_worker: 32
only_train_epoch: 1
only_train_part: true
optimizer: SGD
phase: train
print_log: true
save_interval: 2
save_score: false
seed: 1
show_topk:
- 1
- 5
start_epoch: 0
step:
- 60
- 80
test_batch_size: 64
test_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint_motion.npy
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
train_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint_motion.npy
  debug: false
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
  normalization: false
  random_choose: false
  random_move: false
  random_shift: false
  window_size: -1
warm_up_epoch: 0
weight_decay: 0.0001
weights: null
work_dir: ./work_dir/ntu120_joint_motion_xsub
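This YAML drives the training entry point. A minimal sketch of reading it with PyYAML (the local filename config.yaml and the use of PyYAML are assumptions; the repo snapshot does not pin a loader):

import yaml

with open("config.yaml", encoding="utf-8") as f:
    cfg = yaml.safe_load(f)

print(cfg["model"])                    # model.decouple_gcn.Model
print(cfg["model_args"]["num_class"])  # 120
print(cfg["step"])                     # [60, 80]: matches the lr drops at epochs 61 and 81 in the log above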
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_motion_xsub/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
import torch.nn as nn
|
3 |
+
import torch.nn.functional as F
|
4 |
+
from torch.autograd import Variable
|
5 |
+
import numpy as np
|
6 |
+
import math
|
7 |
+
from model.dropSke import DropBlock_Ske
|
8 |
+
from model.dropT import DropBlockT_1d
|
9 |
+
|
10 |
+
|
11 |
+
def import_class(name):
|
12 |
+
components = name.split('.')
|
13 |
+
mod = __import__(components[0])
|
14 |
+
for comp in components[1:]:
|
15 |
+
mod = getattr(mod, comp)
|
16 |
+
return mod
|
17 |
+
|
18 |
+
|
19 |
+
def conv_branch_init(conv):
|
20 |
+
weight = conv.weight
|
21 |
+
n = weight.size(0)
|
22 |
+
k1 = weight.size(1)
|
23 |
+
k2 = weight.size(2)
|
24 |
+
nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
|
25 |
+
nn.init.constant(conv.bias, 0)
|
26 |
+
|
27 |
+
|
28 |
+
def conv_init(conv):
|
29 |
+
nn.init.kaiming_normal(conv.weight, mode='fan_out')
|
30 |
+
nn.init.constant(conv.bias, 0)
|
31 |
+
|
32 |
+
|
33 |
+
def bn_init(bn, scale):
|
34 |
+
nn.init.constant(bn.weight, scale)
|
35 |
+
nn.init.constant(bn.bias, 0)
|
36 |
+
|
37 |
+
|
38 |
+
class unit_tcn(nn.Module):
|
39 |
+
def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
|
40 |
+
super(unit_tcn, self).__init__()
|
41 |
+
pad = int((kernel_size - 1) / 2)
|
42 |
+
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
|
43 |
+
stride=(stride, 1))
|
44 |
+
|
45 |
+
self.bn = nn.BatchNorm2d(out_channels)
|
46 |
+
self.relu = nn.ReLU()
|
47 |
+
conv_init(self.conv)
|
48 |
+
bn_init(self.bn, 1)
|
49 |
+
|
50 |
+
self.dropS = DropBlock_Ske(num_point=num_point)
|
51 |
+
self.dropT = DropBlockT_1d(block_size=block_size)
|
52 |
+
|
53 |
+
def forward(self, x, keep_prob, A):
|
54 |
+
x = self.bn(self.conv(x))
|
55 |
+
x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
|
56 |
+
return x
|
57 |
+
|
58 |
+
|
59 |
+
class unit_tcn_skip(nn.Module):
|
60 |
+
def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
|
61 |
+
super(unit_tcn_skip, self).__init__()
|
62 |
+
pad = int((kernel_size - 1) / 2)
|
63 |
+
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
|
64 |
+
stride=(stride, 1))
|
65 |
+
|
66 |
+
self.bn = nn.BatchNorm2d(out_channels)
|
67 |
+
self.relu = nn.ReLU()
|
68 |
+
conv_init(self.conv)
|
69 |
+
bn_init(self.bn, 1)
|
70 |
+
|
71 |
+
def forward(self, x):
|
72 |
+
x = self.bn(self.conv(x))
|
73 |
+
return x
|
74 |
+
|
75 |
+
|
76 |
+
class unit_gcn(nn.Module):
|
77 |
+
def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
|
78 |
+
super(unit_gcn, self).__init__()
|
79 |
+
self.in_channels = in_channels
|
80 |
+
self.out_channels = out_channels
|
81 |
+
self.num_point = num_point
|
82 |
+
self.groups = groups
|
83 |
+
self.num_subset = num_subset
|
84 |
+
self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
|
85 |
+
3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)
|
86 |
+
|
87 |
+
if in_channels != out_channels:
|
88 |
+
self.down = nn.Sequential(
|
89 |
+
nn.Conv2d(in_channels, out_channels, 1),
|
90 |
+
nn.BatchNorm2d(out_channels)
|
91 |
+
)
|
92 |
+
else:
|
93 |
+
self.down = lambda x: x
|
94 |
+
|
95 |
+
self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
|
96 |
+
self.bn = nn.BatchNorm2d(out_channels)
|
97 |
+
self.relu = nn.ReLU()
|
98 |
+
|
99 |
+
for m in self.modules():
|
100 |
+
if isinstance(m, nn.Conv2d):
|
101 |
+
conv_init(m)
|
102 |
+
elif isinstance(m, nn.BatchNorm2d):
|
103 |
+
bn_init(m, 1)
|
104 |
+
bn_init(self.bn, 1e-6)
|
105 |
+
|
106 |
+
self.Linear_weight = nn.Parameter(torch.zeros(
|
107 |
+
in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
|
108 |
+
nn.init.normal_(self.Linear_weight, 0, math.sqrt(
|
109 |
+
0.5 / (out_channels * num_subset)))
|
110 |
+
|
111 |
+
self.Linear_bias = nn.Parameter(torch.zeros(
|
112 |
+
1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
|
113 |
+
nn.init.constant(self.Linear_bias, 1e-6)
|
114 |
+
|
115 |
+
eye_array = []
|
116 |
+
for i in range(out_channels):
|
117 |
+
eye_array.append(torch.eye(num_point))
|
118 |
+
        self.eyes = nn.Parameter(torch.tensor(torch.stack(
            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]

    def norm(self, A):
        b, c, h, w = A.size()
        A = A.view(c, self.num_point, self.num_point)
        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
        D_list_12 = (D_list + 0.001)**(-1)
        D_12 = self.eyes * D_list_12
        A = torch.bmm(A, D_12).view(b, c, h, w)
        return A

    def forward(self, x0):
        learn_A = self.DecoupleA.repeat(
            1, self.out_channels // self.groups, 1, 1)
        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)

        x = torch.einsum(
            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
        x = x + self.Linear_bias
        x = self.bn0(x)

        n, kc, t, v = x.size()
        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))

        x = self.bn(x)
        x += self.down(x0)
        x = self.relu(x)
        return x


class TCN_GCN_unit(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
        super(TCN_GCN_unit, self).__init__()
        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
        self.tcn1 = unit_tcn(out_channels, out_channels,
                             stride=stride, num_point=num_point)
        self.relu = nn.ReLU()

        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)

        if not residual:
            self.residual = lambda x: 0

        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x

        else:
            self.residual = unit_tcn_skip(
                in_channels, out_channels, kernel_size=1, stride=stride)
        self.dropSke = DropBlock_Ske(num_point=num_point)
        self.dropT_skip = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob):
        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
        return self.relu(x)


class Model(nn.Module):
    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
        super(Model, self).__init__()

        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)

        A = self.graph.A
        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)

        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
                               block_size, residual=False)
        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l5 = TCN_GCN_unit(
            64, 128, A, groups, num_point, block_size, stride=2)
        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l8 = TCN_GCN_unit(128, 256, A, groups,
                               num_point, block_size, stride=2)
        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)

        self.fc = nn.Linear(256, num_class)
        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
        bn_init(self.data_bn, 1)

    def forward(self, x, keep_prob=0.9):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(
            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        x = self.l1(x, 1.0)
        x = self.l2(x, 1.0)
        x = self.l3(x, 1.0)
        x = self.l4(x, 1.0)
        x = self.l5(x, 1.0)
        x = self.l6(x, 1.0)
        x = self.l7(x, keep_prob)
        x = self.l8(x, keep_prob)
        x = self.l9(x, keep_prob)
        x = self.l10(x, keep_prob)

        # N*M,C,T,V
        c_new = x.size(1)
        x = x.reshape(N, M, c_new, -1)
        x = x.mean(3).mean(1)

        return self.fc(x)
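For orientation, decouple_gcn.py builds the DC-GCN stack as unit_gcn -> TCN_GCN_unit -> Model, with the DropGraph keep rate threaded through every forward call. Below is a minimal smoke-test sketch, not part of the checkpoint: it assumes the repo's graph.ntu_rgb_d.Graph is on the PYTHONPATH and that a CUDA device is available, since the adjacency parameters above are created with device='cuda'; the dummy tensor follows the (N, C, T, V, M) convention used in Model.forward.

# Sketch: instantiate the model with the config.yaml hyperparameters and
# push one random batch through it (assumes CUDA and the DC-GCN repo layout).
import torch
from model.decouple_gcn import Model  # module path as given in config.yaml

model = Model(num_class=120, num_point=25, num_person=2, groups=16,
              block_size=41, graph='graph.ntu_rgb_d.Graph',
              graph_args={'labeling_mode': 'spatial'}).cuda()

# (N, C, T, V, M): batch, xyz channels, frames, 25 joints, 2 bodies
x = torch.randn(2, 3, 64, 25, 2, device='cuda')
logits = model(x, keep_prob=0.9)   # keep_rate used during training
print(logits.shape)                # -> torch.Size([2, 120])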
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_motion_xsub/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:947c4edde0efb481f67a7c241ab8cf025f7d42583324823dd917eedc526fe344
size 29946137
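The three lines above are a Git LFS pointer stub, not the pickle itself: a spec version line, the sha256 oid of the real file, and its size in bytes. A sketch of reading such stubs follows; parse_lfs_pointer is a hypothetical helper for illustration, not part of this repo.

# Sketch: parse a Git LFS pointer file (spec v1) into a dict of its fields.
def parse_lfs_pointer(path):
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(' ')
            fields[key] = value
    # e.g. {'version': 'https://...', 'oid': 'sha256:947c...', 'size': '29946137'}
    return fields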
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_motion_xsub/log.txt
ADDED
@@ -0,0 +1,746 @@
1 |
+
[ Mon Sep 12 17:08:18 2022 ] Parameters:
|
2 |
+
{'work_dir': './work_dir/ntu120_joint_motion_xsub', 'model_saved_name': './save_models/ntu120_joint_motion_xsub', 'Experiment_name': 'ntu120_joint_motion_xsub', 'config': './config/ntu120_xsub/train_joint_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [6, 7], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
|
3 |
+
|
4 |
+
[ Mon Sep 12 17:08:18 2022 ] Training epoch: 1
|
5 |
+
[ Mon Sep 12 17:09:08 2022 ] Batch(99/243) done. Loss: 3.9828 lr:0.100000
|
6 |
+
[ Mon Sep 12 17:09:53 2022 ] Batch(199/243) done. Loss: 3.5107 lr:0.100000
|
7 |
+
[ Mon Sep 12 17:10:13 2022 ] Eval epoch: 1
|
8 |
+
[ Mon Sep 12 17:12:45 2022 ] Mean test loss of 796 batches: 5.743283271789551.
|
9 |
+
[ Mon Sep 12 17:12:46 2022 ] Top1: 4.45%
|
10 |
+
[ Mon Sep 12 17:12:46 2022 ] Top5: 12.60%
|
11 |
+
[ Mon Sep 12 17:12:46 2022 ] Training epoch: 2
|
12 |
+
[ Mon Sep 12 17:13:20 2022 ] Batch(56/243) done. Loss: 3.4010 lr:0.100000
|
13 |
+
[ Mon Sep 12 17:14:13 2022 ] Batch(156/243) done. Loss: 2.9352 lr:0.100000
|
14 |
+
[ Mon Sep 12 17:15:00 2022 ] Eval epoch: 2
|
15 |
+
[ Mon Sep 12 17:17:32 2022 ] Mean test loss of 796 batches: 5.4759979248046875.
|
16 |
+
[ Mon Sep 12 17:17:32 2022 ] Top1: 6.80%
|
17 |
+
[ Mon Sep 12 17:17:33 2022 ] Top5: 21.29%
|
18 |
+
[ Mon Sep 12 17:17:33 2022 ] Training epoch: 3
|
19 |
+
[ Mon Sep 12 17:17:43 2022 ] Batch(13/243) done. Loss: 2.8028 lr:0.100000
|
20 |
+
[ Mon Sep 12 17:18:36 2022 ] Batch(113/243) done. Loss: 2.3965 lr:0.100000
|
21 |
+
[ Mon Sep 12 17:19:30 2022 ] Batch(213/243) done. Loss: 2.2430 lr:0.100000
|
22 |
+
[ Mon Sep 12 17:19:46 2022 ] Eval epoch: 3
|
23 |
+
[ Mon Sep 12 17:22:17 2022 ] Mean test loss of 796 batches: 4.726529598236084.
|
24 |
+
[ Mon Sep 12 17:22:17 2022 ] Top1: 9.04%
|
25 |
+
[ Mon Sep 12 17:22:18 2022 ] Top5: 24.75%
|
26 |
+
[ Mon Sep 12 17:22:18 2022 ] Training epoch: 4
|
27 |
+
[ Mon Sep 12 17:22:58 2022 ] Batch(70/243) done. Loss: 2.0684 lr:0.100000
|
28 |
+
[ Mon Sep 12 17:23:52 2022 ] Batch(170/243) done. Loss: 1.6667 lr:0.100000
|
29 |
+
[ Mon Sep 12 17:24:30 2022 ] Eval epoch: 4
|
30 |
+
[ Mon Sep 12 17:27:01 2022 ] Mean test loss of 796 batches: 4.703661918640137.
|
31 |
+
[ Mon Sep 12 17:27:02 2022 ] Top1: 13.90%
|
32 |
+
[ Mon Sep 12 17:27:02 2022 ] Top5: 34.63%
|
33 |
+
[ Mon Sep 12 17:27:02 2022 ] Training epoch: 5
|
34 |
+
[ Mon Sep 12 17:27:20 2022 ] Batch(27/243) done. Loss: 2.0104 lr:0.100000
|
35 |
+
[ Mon Sep 12 17:28:13 2022 ] Batch(127/243) done. Loss: 1.8591 lr:0.100000
|
36 |
+
[ Mon Sep 12 17:29:07 2022 ] Batch(227/243) done. Loss: 1.4994 lr:0.100000
|
37 |
+
[ Mon Sep 12 17:29:15 2022 ] Eval epoch: 5
|
38 |
+
[ Mon Sep 12 17:31:45 2022 ] Mean test loss of 796 batches: 3.6374218463897705.
|
39 |
+
[ Mon Sep 12 17:31:46 2022 ] Top1: 16.08%
|
40 |
+
[ Mon Sep 12 17:31:46 2022 ] Top5: 42.62%
|
41 |
+
[ Mon Sep 12 17:31:46 2022 ] Training epoch: 6
|
42 |
+
[ Mon Sep 12 17:32:34 2022 ] Batch(84/243) done. Loss: 1.6156 lr:0.100000
|
43 |
+
[ Mon Sep 12 17:33:28 2022 ] Batch(184/243) done. Loss: 1.4599 lr:0.100000
|
44 |
+
[ Mon Sep 12 17:33:59 2022 ] Eval epoch: 6
|
45 |
+
[ Mon Sep 12 17:36:30 2022 ] Mean test loss of 796 batches: 4.115126609802246.
|
46 |
+
[ Mon Sep 12 17:36:31 2022 ] Top1: 19.99%
|
47 |
+
[ Mon Sep 12 17:36:31 2022 ] Top5: 49.28%
|
48 |
+
[ Mon Sep 12 17:36:31 2022 ] Training epoch: 7
|
49 |
+
[ Mon Sep 12 17:36:56 2022 ] Batch(41/243) done. Loss: 1.2778 lr:0.100000
|
50 |
+
[ Mon Sep 12 17:37:50 2022 ] Batch(141/243) done. Loss: 1.1354 lr:0.100000
|
51 |
+
[ Mon Sep 12 17:38:43 2022 ] Batch(241/243) done. Loss: 1.4545 lr:0.100000
|
52 |
+
[ Mon Sep 12 17:38:44 2022 ] Eval epoch: 7
|
53 |
+
[ Mon Sep 12 17:41:15 2022 ] Mean test loss of 796 batches: 3.3611857891082764.
|
54 |
+
[ Mon Sep 12 17:41:15 2022 ] Top1: 25.49%
|
55 |
+
[ Mon Sep 12 17:41:15 2022 ] Top5: 55.62%
|
56 |
+
[ Mon Sep 12 17:41:16 2022 ] Training epoch: 8
|
57 |
+
[ Mon Sep 12 17:42:11 2022 ] Batch(98/243) done. Loss: 1.1384 lr:0.100000
|
58 |
+
[ Mon Sep 12 17:43:04 2022 ] Batch(198/243) done. Loss: 1.0976 lr:0.100000
|
59 |
+
[ Mon Sep 12 17:43:28 2022 ] Eval epoch: 8
|
60 |
+
[ Mon Sep 12 17:45:59 2022 ] Mean test loss of 796 batches: 4.76453971862793.
|
61 |
+
[ Mon Sep 12 17:45:59 2022 ] Top1: 20.45%
|
62 |
+
[ Mon Sep 12 17:46:00 2022 ] Top5: 47.84%
|
63 |
+
[ Mon Sep 12 17:46:00 2022 ] Training epoch: 9
|
64 |
+
[ Mon Sep 12 17:46:32 2022 ] Batch(55/243) done. Loss: 0.9948 lr:0.100000
|
65 |
+
[ Mon Sep 12 17:47:26 2022 ] Batch(155/243) done. Loss: 1.2244 lr:0.100000
|
66 |
+
[ Mon Sep 12 17:48:12 2022 ] Eval epoch: 9
|
67 |
+
[ Mon Sep 12 17:50:43 2022 ] Mean test loss of 796 batches: 3.4281227588653564.
|
68 |
+
[ Mon Sep 12 17:50:44 2022 ] Top1: 28.01%
|
69 |
+
[ Mon Sep 12 17:50:44 2022 ] Top5: 58.91%
|
70 |
+
[ Mon Sep 12 17:50:44 2022 ] Training epoch: 10
|
71 |
+
[ Mon Sep 12 17:50:54 2022 ] Batch(12/243) done. Loss: 1.1751 lr:0.100000
|
72 |
+
[ Mon Sep 12 17:51:47 2022 ] Batch(112/243) done. Loss: 0.9554 lr:0.100000
|
73 |
+
[ Mon Sep 12 17:52:41 2022 ] Batch(212/243) done. Loss: 1.1116 lr:0.100000
|
74 |
+
[ Mon Sep 12 17:52:57 2022 ] Eval epoch: 10
|
75 |
+
[ Mon Sep 12 17:55:28 2022 ] Mean test loss of 796 batches: 3.9069652557373047.
|
76 |
+
[ Mon Sep 12 17:55:28 2022 ] Top1: 26.39%
|
77 |
+
[ Mon Sep 12 17:55:29 2022 ] Top5: 59.63%
|
78 |
+
[ Mon Sep 12 17:55:29 2022 ] Training epoch: 11
|
79 |
+
[ Mon Sep 12 17:56:09 2022 ] Batch(69/243) done. Loss: 0.8494 lr:0.100000
|
80 |
+
[ Mon Sep 12 17:57:02 2022 ] Batch(169/243) done. Loss: 1.1055 lr:0.100000
|
81 |
+
[ Mon Sep 12 17:57:41 2022 ] Eval epoch: 11
|
82 |
+
[ Mon Sep 12 18:00:12 2022 ] Mean test loss of 796 batches: 3.2280941009521484.
|
83 |
+
[ Mon Sep 12 18:00:13 2022 ] Top1: 31.25%
|
84 |
+
[ Mon Sep 12 18:00:13 2022 ] Top5: 64.52%
|
85 |
+
[ Mon Sep 12 18:00:13 2022 ] Training epoch: 12
|
86 |
+
[ Mon Sep 12 18:00:30 2022 ] Batch(26/243) done. Loss: 0.9573 lr:0.100000
|
87 |
+
[ Mon Sep 12 18:01:24 2022 ] Batch(126/243) done. Loss: 1.2943 lr:0.100000
|
88 |
+
[ Mon Sep 12 18:02:17 2022 ] Batch(226/243) done. Loss: 0.9637 lr:0.100000
|
89 |
+
[ Mon Sep 12 18:02:26 2022 ] Eval epoch: 12
|
90 |
+
[ Mon Sep 12 18:04:57 2022 ] Mean test loss of 796 batches: 4.624372482299805.
|
91 |
+
[ Mon Sep 12 18:04:58 2022 ] Top1: 19.33%
|
92 |
+
[ Mon Sep 12 18:04:58 2022 ] Top5: 47.97%
|
93 |
+
[ Mon Sep 12 18:04:59 2022 ] Training epoch: 13
|
94 |
+
[ Mon Sep 12 18:05:46 2022 ] Batch(83/243) done. Loss: 1.0534 lr:0.100000
|
95 |
+
[ Mon Sep 12 18:06:39 2022 ] Batch(183/243) done. Loss: 1.0734 lr:0.100000
|
96 |
+
[ Mon Sep 12 18:07:11 2022 ] Eval epoch: 13
|
97 |
+
[ Mon Sep 12 18:09:41 2022 ] Mean test loss of 796 batches: 3.4322330951690674.
|
98 |
+
[ Mon Sep 12 18:09:42 2022 ] Top1: 29.96%
|
99 |
+
[ Mon Sep 12 18:09:42 2022 ] Top5: 65.63%
|
100 |
+
[ Mon Sep 12 18:09:42 2022 ] Training epoch: 14
|
101 |
+
[ Mon Sep 12 18:10:07 2022 ] Batch(40/243) done. Loss: 0.9375 lr:0.100000
|
102 |
+
[ Mon Sep 12 18:11:00 2022 ] Batch(140/243) done. Loss: 0.8313 lr:0.100000
|
103 |
+
[ Mon Sep 12 18:11:54 2022 ] Batch(240/243) done. Loss: 1.1521 lr:0.100000
|
104 |
+
[ Mon Sep 12 18:11:55 2022 ] Eval epoch: 14
|
105 |
+
[ Mon Sep 12 18:14:26 2022 ] Mean test loss of 796 batches: 2.9193553924560547.
|
106 |
+
[ Mon Sep 12 18:14:26 2022 ] Top1: 35.86%
|
107 |
+
[ Mon Sep 12 18:14:27 2022 ] Top5: 72.78%
|
108 |
+
[ Mon Sep 12 18:14:27 2022 ] Training epoch: 15
|
109 |
+
[ Mon Sep 12 18:15:22 2022 ] Batch(97/243) done. Loss: 0.7764 lr:0.100000
|
110 |
+
[ Mon Sep 12 18:16:15 2022 ] Batch(197/243) done. Loss: 0.8692 lr:0.100000
|
111 |
+
[ Mon Sep 12 18:16:39 2022 ] Eval epoch: 15
|
112 |
+
[ Mon Sep 12 18:19:10 2022 ] Mean test loss of 796 batches: 2.9887285232543945.
|
113 |
+
[ Mon Sep 12 18:19:11 2022 ] Top1: 31.90%
|
114 |
+
[ Mon Sep 12 18:19:11 2022 ] Top5: 68.86%
|
115 |
+
[ Mon Sep 12 18:19:11 2022 ] Training epoch: 16
|
116 |
+
[ Mon Sep 12 18:19:43 2022 ] Batch(54/243) done. Loss: 1.1223 lr:0.100000
|
117 |
+
[ Mon Sep 12 18:20:36 2022 ] Batch(154/243) done. Loss: 0.9452 lr:0.100000
|
118 |
+
[ Mon Sep 12 18:21:24 2022 ] Eval epoch: 16
|
119 |
+
[ Mon Sep 12 18:23:55 2022 ] Mean test loss of 796 batches: 3.189964771270752.
|
120 |
+
[ Mon Sep 12 18:23:55 2022 ] Top1: 31.94%
|
121 |
+
[ Mon Sep 12 18:23:56 2022 ] Top5: 66.61%
|
122 |
+
[ Mon Sep 12 18:23:56 2022 ] Training epoch: 17
|
123 |
+
[ Mon Sep 12 18:24:05 2022 ] Batch(11/243) done. Loss: 0.7831 lr:0.100000
|
124 |
+
[ Mon Sep 12 18:24:58 2022 ] Batch(111/243) done. Loss: 0.9609 lr:0.100000
|
125 |
+
[ Mon Sep 12 18:25:52 2022 ] Batch(211/243) done. Loss: 0.8769 lr:0.100000
|
126 |
+
[ Mon Sep 12 18:26:09 2022 ] Eval epoch: 17
|
127 |
+
[ Mon Sep 12 18:28:39 2022 ] Mean test loss of 796 batches: 2.984318494796753.
|
128 |
+
[ Mon Sep 12 18:28:40 2022 ] Top1: 32.78%
|
129 |
+
[ Mon Sep 12 18:28:40 2022 ] Top5: 67.75%
|
130 |
+
[ Mon Sep 12 18:28:40 2022 ] Training epoch: 18
|
131 |
+
[ Mon Sep 12 18:29:20 2022 ] Batch(68/243) done. Loss: 0.6888 lr:0.100000
|
132 |
+
[ Mon Sep 12 18:30:13 2022 ] Batch(168/243) done. Loss: 0.8678 lr:0.100000
|
133 |
+
[ Mon Sep 12 18:30:53 2022 ] Eval epoch: 18
|
134 |
+
[ Mon Sep 12 18:33:24 2022 ] Mean test loss of 796 batches: 3.6525020599365234.
|
135 |
+
[ Mon Sep 12 18:33:24 2022 ] Top1: 26.64%
|
136 |
+
[ Mon Sep 12 18:33:24 2022 ] Top5: 60.06%
|
137 |
+
[ Mon Sep 12 18:33:25 2022 ] Training epoch: 19
|
138 |
+
[ Mon Sep 12 18:33:41 2022 ] Batch(25/243) done. Loss: 0.6830 lr:0.100000
|
139 |
+
[ Mon Sep 12 18:34:35 2022 ] Batch(125/243) done. Loss: 0.5905 lr:0.100000
|
140 |
+
[ Mon Sep 12 18:35:28 2022 ] Batch(225/243) done. Loss: 0.9013 lr:0.100000
|
141 |
+
[ Mon Sep 12 18:35:37 2022 ] Eval epoch: 19
|
142 |
+
[ Mon Sep 12 18:38:09 2022 ] Mean test loss of 796 batches: 4.428297519683838.
|
143 |
+
[ Mon Sep 12 18:38:09 2022 ] Top1: 23.64%
|
144 |
+
[ Mon Sep 12 18:38:09 2022 ] Top5: 57.18%
|
145 |
+
[ Mon Sep 12 18:38:10 2022 ] Training epoch: 20
|
146 |
+
[ Mon Sep 12 18:38:56 2022 ] Batch(82/243) done. Loss: 0.7506 lr:0.100000
|
147 |
+
[ Mon Sep 12 18:39:50 2022 ] Batch(182/243) done. Loss: 0.6540 lr:0.100000
|
148 |
+
[ Mon Sep 12 18:40:22 2022 ] Eval epoch: 20
|
149 |
+
[ Mon Sep 12 18:42:53 2022 ] Mean test loss of 796 batches: 6.080588340759277.
|
150 |
+
[ Mon Sep 12 18:42:54 2022 ] Top1: 10.31%
|
151 |
+
[ Mon Sep 12 18:42:54 2022 ] Top5: 32.39%
|
152 |
+
[ Mon Sep 12 18:42:54 2022 ] Training epoch: 21
|
153 |
+
[ Mon Sep 12 18:43:18 2022 ] Batch(39/243) done. Loss: 0.5286 lr:0.100000
|
154 |
+
[ Mon Sep 12 18:44:11 2022 ] Batch(139/243) done. Loss: 0.5689 lr:0.100000
|
155 |
+
[ Mon Sep 12 18:45:05 2022 ] Batch(239/243) done. Loss: 0.6534 lr:0.100000
|
156 |
+
[ Mon Sep 12 18:45:07 2022 ] Eval epoch: 21
|
157 |
+
[ Mon Sep 12 18:47:38 2022 ] Mean test loss of 796 batches: 3.9268245697021484.
|
158 |
+
[ Mon Sep 12 18:47:38 2022 ] Top1: 25.51%
|
159 |
+
[ Mon Sep 12 18:47:38 2022 ] Top5: 58.81%
|
160 |
+
[ Mon Sep 12 18:47:39 2022 ] Training epoch: 22
|
161 |
+
[ Mon Sep 12 18:48:33 2022 ] Batch(96/243) done. Loss: 0.7457 lr:0.100000
|
162 |
+
[ Mon Sep 12 18:49:26 2022 ] Batch(196/243) done. Loss: 0.8362 lr:0.100000
|
163 |
+
[ Mon Sep 12 18:49:51 2022 ] Eval epoch: 22
|
164 |
+
[ Mon Sep 12 18:52:22 2022 ] Mean test loss of 796 batches: 3.1748151779174805.
|
165 |
+
[ Mon Sep 12 18:52:22 2022 ] Top1: 35.90%
|
166 |
+
[ Mon Sep 12 18:52:23 2022 ] Top5: 69.46%
|
167 |
+
[ Mon Sep 12 18:52:23 2022 ] Training epoch: 23
|
168 |
+
[ Mon Sep 12 18:52:54 2022 ] Batch(53/243) done. Loss: 0.7031 lr:0.100000
|
169 |
+
[ Mon Sep 12 18:53:48 2022 ] Batch(153/243) done. Loss: 0.7082 lr:0.100000
|
170 |
+
[ Mon Sep 12 18:54:36 2022 ] Eval epoch: 23
|
171 |
+
[ Mon Sep 12 18:57:07 2022 ] Mean test loss of 796 batches: 3.1078102588653564.
|
172 |
+
[ Mon Sep 12 18:57:07 2022 ] Top1: 34.99%
|
173 |
+
[ Mon Sep 12 18:57:07 2022 ] Top5: 69.51%
|
174 |
+
[ Mon Sep 12 18:57:08 2022 ] Training epoch: 24
|
175 |
+
[ Mon Sep 12 18:57:16 2022 ] Batch(10/243) done. Loss: 0.4838 lr:0.100000
|
176 |
+
[ Mon Sep 12 18:58:09 2022 ] Batch(110/243) done. Loss: 0.6159 lr:0.100000
|
177 |
+
[ Mon Sep 12 18:59:03 2022 ] Batch(210/243) done. Loss: 0.8235 lr:0.100000
|
178 |
+
[ Mon Sep 12 18:59:20 2022 ] Eval epoch: 24
|
179 |
+
[ Mon Sep 12 19:01:50 2022 ] Mean test loss of 796 batches: 3.1660282611846924.
|
180 |
+
[ Mon Sep 12 19:01:51 2022 ] Top1: 30.58%
|
181 |
+
[ Mon Sep 12 19:01:51 2022 ] Top5: 63.14%
|
182 |
+
[ Mon Sep 12 19:01:51 2022 ] Training epoch: 25
|
183 |
+
[ Mon Sep 12 19:02:30 2022 ] Batch(67/243) done. Loss: 0.5538 lr:0.100000
|
184 |
+
[ Mon Sep 12 19:03:24 2022 ] Batch(167/243) done. Loss: 0.7944 lr:0.100000
|
185 |
+
[ Mon Sep 12 19:04:04 2022 ] Eval epoch: 25
|
186 |
+
[ Mon Sep 12 19:06:35 2022 ] Mean test loss of 796 batches: 3.2890923023223877.
|
187 |
+
[ Mon Sep 12 19:06:35 2022 ] Top1: 35.01%
|
188 |
+
[ Mon Sep 12 19:06:36 2022 ] Top5: 70.55%
|
189 |
+
[ Mon Sep 12 19:06:36 2022 ] Training epoch: 26
|
190 |
+
[ Mon Sep 12 19:06:52 2022 ] Batch(24/243) done. Loss: 0.6708 lr:0.100000
|
191 |
+
[ Mon Sep 12 19:07:45 2022 ] Batch(124/243) done. Loss: 0.5950 lr:0.100000
|
192 |
+
[ Mon Sep 12 19:08:39 2022 ] Batch(224/243) done. Loss: 0.5146 lr:0.100000
|
193 |
+
[ Mon Sep 12 19:08:49 2022 ] Eval epoch: 26
|
194 |
+
[ Mon Sep 12 19:11:20 2022 ] Mean test loss of 796 batches: 3.0431575775146484.
|
195 |
+
[ Mon Sep 12 19:11:20 2022 ] Top1: 39.31%
|
196 |
+
[ Mon Sep 12 19:11:21 2022 ] Top5: 73.00%
|
197 |
+
[ Mon Sep 12 19:11:21 2022 ] Training epoch: 27
|
198 |
+
[ Mon Sep 12 19:12:07 2022 ] Batch(81/243) done. Loss: 0.5579 lr:0.100000
|
199 |
+
[ Mon Sep 12 19:13:01 2022 ] Batch(181/243) done. Loss: 0.4032 lr:0.100000
|
200 |
+
[ Mon Sep 12 19:13:34 2022 ] Eval epoch: 27
|
201 |
+
[ Mon Sep 12 19:16:05 2022 ] Mean test loss of 796 batches: 2.9798786640167236.
|
202 |
+
[ Mon Sep 12 19:16:05 2022 ] Top1: 39.50%
|
203 |
+
[ Mon Sep 12 19:16:06 2022 ] Top5: 72.99%
|
204 |
+
[ Mon Sep 12 19:16:06 2022 ] Training epoch: 28
|
205 |
+
[ Mon Sep 12 19:16:29 2022 ] Batch(38/243) done. Loss: 0.6820 lr:0.100000
|
206 |
+
[ Mon Sep 12 19:17:23 2022 ] Batch(138/243) done. Loss: 0.4468 lr:0.100000
|
207 |
+
[ Mon Sep 12 19:18:16 2022 ] Batch(238/243) done. Loss: 0.5453 lr:0.100000
|
208 |
+
[ Mon Sep 12 19:18:18 2022 ] Eval epoch: 28
|
209 |
+
[ Mon Sep 12 19:20:49 2022 ] Mean test loss of 796 batches: 3.0454938411712646.
|
210 |
+
[ Mon Sep 12 19:20:49 2022 ] Top1: 38.20%
|
211 |
+
[ Mon Sep 12 19:20:50 2022 ] Top5: 71.75%
|
212 |
+
[ Mon Sep 12 19:20:50 2022 ] Training epoch: 29
|
213 |
+
[ Mon Sep 12 19:21:44 2022 ] Batch(95/243) done. Loss: 0.4766 lr:0.100000
|
214 |
+
[ Mon Sep 12 19:22:38 2022 ] Batch(195/243) done. Loss: 0.3668 lr:0.100000
|
215 |
+
[ Mon Sep 12 19:23:03 2022 ] Eval epoch: 29
|
216 |
+
[ Mon Sep 12 19:25:33 2022 ] Mean test loss of 796 batches: 2.752485513687134.
|
217 |
+
[ Mon Sep 12 19:25:34 2022 ] Top1: 43.64%
|
218 |
+
[ Mon Sep 12 19:25:34 2022 ] Top5: 77.60%
|
219 |
+
[ Mon Sep 12 19:25:34 2022 ] Training epoch: 30
|
220 |
+
[ Mon Sep 12 19:26:05 2022 ] Batch(52/243) done. Loss: 0.5540 lr:0.100000
|
221 |
+
[ Mon Sep 12 19:26:59 2022 ] Batch(152/243) done. Loss: 0.3592 lr:0.100000
|
222 |
+
[ Mon Sep 12 19:27:47 2022 ] Eval epoch: 30
|
223 |
+
[ Mon Sep 12 19:30:18 2022 ] Mean test loss of 796 batches: 2.984344720840454.
|
224 |
+
[ Mon Sep 12 19:30:18 2022 ] Top1: 39.46%
|
225 |
+
[ Mon Sep 12 19:30:19 2022 ] Top5: 73.23%
|
226 |
+
[ Mon Sep 12 19:30:19 2022 ] Training epoch: 31
|
227 |
+
[ Mon Sep 12 19:30:27 2022 ] Batch(9/243) done. Loss: 0.4950 lr:0.100000
|
228 |
+
[ Mon Sep 12 19:31:20 2022 ] Batch(109/243) done. Loss: 0.4737 lr:0.100000
|
229 |
+
[ Mon Sep 12 19:32:14 2022 ] Batch(209/243) done. Loss: 0.4834 lr:0.100000
|
230 |
+
[ Mon Sep 12 19:32:32 2022 ] Eval epoch: 31
|
231 |
+
[ Mon Sep 12 19:35:03 2022 ] Mean test loss of 796 batches: 3.027348279953003.
|
232 |
+
[ Mon Sep 12 19:35:03 2022 ] Top1: 37.86%
|
233 |
+
[ Mon Sep 12 19:35:04 2022 ] Top5: 71.76%
|
234 |
+
[ Mon Sep 12 19:35:04 2022 ] Training epoch: 32
|
235 |
+
[ Mon Sep 12 19:35:42 2022 ] Batch(66/243) done. Loss: 0.3802 lr:0.100000
|
236 |
+
[ Mon Sep 12 19:36:36 2022 ] Batch(166/243) done. Loss: 0.2585 lr:0.100000
|
237 |
+
[ Mon Sep 12 19:37:17 2022 ] Eval epoch: 32
|
238 |
+
[ Mon Sep 12 19:39:47 2022 ] Mean test loss of 796 batches: 4.240810871124268.
|
239 |
+
[ Mon Sep 12 19:39:47 2022 ] Top1: 30.77%
|
240 |
+
[ Mon Sep 12 19:39:48 2022 ] Top5: 63.77%
|
241 |
+
[ Mon Sep 12 19:39:48 2022 ] Training epoch: 33
|
242 |
+
[ Mon Sep 12 19:40:03 2022 ] Batch(23/243) done. Loss: 0.3715 lr:0.100000
|
243 |
+
[ Mon Sep 12 19:40:57 2022 ] Batch(123/243) done. Loss: 0.5635 lr:0.100000
|
244 |
+
[ Mon Sep 12 19:41:50 2022 ] Batch(223/243) done. Loss: 0.5962 lr:0.100000
|
245 |
+
[ Mon Sep 12 19:42:01 2022 ] Eval epoch: 33
|
246 |
+
[ Mon Sep 12 19:44:31 2022 ] Mean test loss of 796 batches: 2.9045612812042236.
|
247 |
+
[ Mon Sep 12 19:44:31 2022 ] Top1: 39.17%
|
248 |
+
[ Mon Sep 12 19:44:32 2022 ] Top5: 74.38%
|
249 |
+
[ Mon Sep 12 19:44:32 2022 ] Training epoch: 34
|
250 |
+
[ Mon Sep 12 19:45:18 2022 ] Batch(80/243) done. Loss: 0.5678 lr:0.100000
|
251 |
+
[ Mon Sep 12 19:46:11 2022 ] Batch(180/243) done. Loss: 0.4247 lr:0.100000
|
252 |
+
[ Mon Sep 12 19:46:45 2022 ] Eval epoch: 34
|
253 |
+
[ Mon Sep 12 19:49:15 2022 ] Mean test loss of 796 batches: 3.31986403465271.
|
254 |
+
[ Mon Sep 12 19:49:15 2022 ] Top1: 39.28%
|
255 |
+
[ Mon Sep 12 19:49:16 2022 ] Top5: 70.77%
|
256 |
+
[ Mon Sep 12 19:49:16 2022 ] Training epoch: 35
|
257 |
+
[ Mon Sep 12 19:49:39 2022 ] Batch(37/243) done. Loss: 0.4078 lr:0.100000
|
258 |
+
[ Mon Sep 12 19:50:32 2022 ] Batch(137/243) done. Loss: 0.5305 lr:0.100000
|
259 |
+
[ Mon Sep 12 19:51:26 2022 ] Batch(237/243) done. Loss: 0.4316 lr:0.100000
|
260 |
+
[ Mon Sep 12 19:51:28 2022 ] Eval epoch: 35
|
261 |
+
[ Mon Sep 12 19:54:00 2022 ] Mean test loss of 796 batches: 2.73232102394104.
|
262 |
+
[ Mon Sep 12 19:54:00 2022 ] Top1: 43.68%
|
263 |
+
[ Mon Sep 12 19:54:00 2022 ] Top5: 76.74%
|
264 |
+
[ Mon Sep 12 19:54:01 2022 ] Training epoch: 36
|
265 |
+
[ Mon Sep 12 19:54:54 2022 ] Batch(94/243) done. Loss: 0.6683 lr:0.100000
|
266 |
+
[ Mon Sep 12 19:55:47 2022 ] Batch(194/243) done. Loss: 0.5727 lr:0.100000
|
267 |
+
[ Mon Sep 12 19:56:13 2022 ] Eval epoch: 36
|
268 |
+
[ Mon Sep 12 19:58:44 2022 ] Mean test loss of 796 batches: 3.422870635986328.
|
269 |
+
[ Mon Sep 12 19:58:44 2022 ] Top1: 37.66%
|
270 |
+
[ Mon Sep 12 19:58:44 2022 ] Top5: 70.87%
|
271 |
+
[ Mon Sep 12 19:58:45 2022 ] Training epoch: 37
|
272 |
+
[ Mon Sep 12 19:59:15 2022 ] Batch(51/243) done. Loss: 0.4763 lr:0.100000
|
273 |
+
[ Mon Sep 12 20:00:08 2022 ] Batch(151/243) done. Loss: 0.5658 lr:0.100000
|
274 |
+
[ Mon Sep 12 20:00:57 2022 ] Eval epoch: 37
|
275 |
+
[ Mon Sep 12 20:03:28 2022 ] Mean test loss of 796 batches: 4.982846260070801.
|
276 |
+
[ Mon Sep 12 20:03:28 2022 ] Top1: 26.71%
|
277 |
+
[ Mon Sep 12 20:03:28 2022 ] Top5: 56.68%
|
278 |
+
[ Mon Sep 12 20:03:29 2022 ] Training epoch: 38
|
279 |
+
[ Mon Sep 12 20:03:36 2022 ] Batch(8/243) done. Loss: 0.3946 lr:0.100000
|
280 |
+
[ Mon Sep 12 20:04:29 2022 ] Batch(108/243) done. Loss: 0.5022 lr:0.100000
|
281 |
+
[ Mon Sep 12 20:05:23 2022 ] Batch(208/243) done. Loss: 0.4371 lr:0.100000
|
282 |
+
[ Mon Sep 12 20:05:41 2022 ] Eval epoch: 38
|
283 |
+
[ Mon Sep 12 20:08:12 2022 ] Mean test loss of 796 batches: 4.276895999908447.
|
284 |
+
[ Mon Sep 12 20:08:12 2022 ] Top1: 35.45%
|
285 |
+
[ Mon Sep 12 20:08:13 2022 ] Top5: 66.57%
|
286 |
+
[ Mon Sep 12 20:08:13 2022 ] Training epoch: 39
|
287 |
+
[ Mon Sep 12 20:08:50 2022 ] Batch(65/243) done. Loss: 0.8096 lr:0.100000
|
288 |
+
[ Mon Sep 12 20:09:44 2022 ] Batch(165/243) done. Loss: 0.4515 lr:0.100000
|
289 |
+
[ Mon Sep 12 20:10:25 2022 ] Eval epoch: 39
|
290 |
+
[ Mon Sep 12 20:12:56 2022 ] Mean test loss of 796 batches: 3.5807816982269287.
|
291 |
+
[ Mon Sep 12 20:12:56 2022 ] Top1: 36.28%
|
292 |
+
[ Mon Sep 12 20:12:57 2022 ] Top5: 68.17%
|
293 |
+
[ Mon Sep 12 20:12:57 2022 ] Training epoch: 40
|
294 |
+
[ Mon Sep 12 20:13:12 2022 ] Batch(22/243) done. Loss: 0.2295 lr:0.100000
|
295 |
+
[ Mon Sep 12 20:14:05 2022 ] Batch(122/243) done. Loss: 0.3097 lr:0.100000
|
296 |
+
[ Mon Sep 12 20:14:59 2022 ] Batch(222/243) done. Loss: 0.3564 lr:0.100000
|
297 |
+
[ Mon Sep 12 20:15:10 2022 ] Eval epoch: 40
|
298 |
+
[ Mon Sep 12 20:17:41 2022 ] Mean test loss of 796 batches: 2.955436944961548.
|
299 |
+
[ Mon Sep 12 20:17:41 2022 ] Top1: 44.20%
|
300 |
+
[ Mon Sep 12 20:17:41 2022 ] Top5: 75.99%
|
301 |
+
[ Mon Sep 12 20:17:42 2022 ] Training epoch: 41
|
302 |
+
[ Mon Sep 12 20:18:27 2022 ] Batch(79/243) done. Loss: 0.5303 lr:0.100000
|
303 |
+
[ Mon Sep 12 20:19:20 2022 ] Batch(179/243) done. Loss: 0.3567 lr:0.100000
|
304 |
+
[ Mon Sep 12 20:19:54 2022 ] Eval epoch: 41
|
305 |
+
[ Mon Sep 12 20:22:25 2022 ] Mean test loss of 796 batches: 3.0165064334869385.
|
306 |
+
[ Mon Sep 12 20:22:25 2022 ] Top1: 41.44%
|
307 |
+
[ Mon Sep 12 20:22:26 2022 ] Top5: 74.77%
|
308 |
+
[ Mon Sep 12 20:22:26 2022 ] Training epoch: 42
|
309 |
+
[ Mon Sep 12 20:22:48 2022 ] Batch(36/243) done. Loss: 0.2645 lr:0.100000
|
310 |
+
[ Mon Sep 12 20:23:42 2022 ] Batch(136/243) done. Loss: 0.4502 lr:0.100000
|
311 |
+
[ Mon Sep 12 20:24:35 2022 ] Batch(236/243) done. Loss: 0.5138 lr:0.100000
|
312 |
+
[ Mon Sep 12 20:24:39 2022 ] Eval epoch: 42
|
313 |
+
[ Mon Sep 12 20:27:10 2022 ] Mean test loss of 796 batches: 3.1357388496398926.
|
314 |
+
[ Mon Sep 12 20:27:10 2022 ] Top1: 41.20%
|
315 |
+
[ Mon Sep 12 20:27:11 2022 ] Top5: 74.33%
|
316 |
+
[ Mon Sep 12 20:27:11 2022 ] Training epoch: 43
|
317 |
+
[ Mon Sep 12 20:28:04 2022 ] Batch(93/243) done. Loss: 0.5577 lr:0.100000
|
318 |
+
[ Mon Sep 12 20:28:57 2022 ] Batch(193/243) done. Loss: 0.4311 lr:0.100000
|
319 |
+
[ Mon Sep 12 20:29:24 2022 ] Eval epoch: 43
|
320 |
+
[ Mon Sep 12 20:31:55 2022 ] Mean test loss of 796 batches: 4.484673023223877.
|
321 |
+
[ Mon Sep 12 20:31:55 2022 ] Top1: 34.84%
|
322 |
+
[ Mon Sep 12 20:31:56 2022 ] Top5: 67.95%
|
323 |
+
[ Mon Sep 12 20:31:56 2022 ] Training epoch: 44
|
324 |
+
[ Mon Sep 12 20:32:26 2022 ] Batch(50/243) done. Loss: 0.5115 lr:0.100000
|
325 |
+
[ Mon Sep 12 20:33:19 2022 ] Batch(150/243) done. Loss: 0.6260 lr:0.100000
|
326 |
+
[ Mon Sep 12 20:34:09 2022 ] Eval epoch: 44
|
327 |
+
[ Mon Sep 12 20:36:40 2022 ] Mean test loss of 796 batches: 3.408787488937378.
|
328 |
+
[ Mon Sep 12 20:36:40 2022 ] Top1: 41.38%
|
329 |
+
[ Mon Sep 12 20:36:41 2022 ] Top5: 75.54%
|
330 |
+
[ Mon Sep 12 20:36:41 2022 ] Training epoch: 45
|
331 |
+
[ Mon Sep 12 20:36:48 2022 ] Batch(7/243) done. Loss: 0.4537 lr:0.100000
|
332 |
+
[ Mon Sep 12 20:37:41 2022 ] Batch(107/243) done. Loss: 0.4634 lr:0.100000
|
333 |
+
[ Mon Sep 12 20:38:35 2022 ] Batch(207/243) done. Loss: 0.3688 lr:0.100000
|
334 |
+
[ Mon Sep 12 20:38:54 2022 ] Eval epoch: 45
|
335 |
+
[ Mon Sep 12 20:41:25 2022 ] Mean test loss of 796 batches: 3.4471657276153564.
|
336 |
+
[ Mon Sep 12 20:41:25 2022 ] Top1: 39.65%
|
337 |
+
[ Mon Sep 12 20:41:26 2022 ] Top5: 72.64%
|
338 |
+
[ Mon Sep 12 20:41:26 2022 ] Training epoch: 46
|
339 |
+
[ Mon Sep 12 20:42:03 2022 ] Batch(64/243) done. Loss: 0.2999 lr:0.100000
|
340 |
+
[ Mon Sep 12 20:42:57 2022 ] Batch(164/243) done. Loss: 0.4602 lr:0.100000
|
341 |
+
[ Mon Sep 12 20:43:39 2022 ] Eval epoch: 46
|
342 |
+
[ Mon Sep 12 20:46:10 2022 ] Mean test loss of 796 batches: 3.1922051906585693.
|
343 |
+
[ Mon Sep 12 20:46:10 2022 ] Top1: 38.30%
|
344 |
+
[ Mon Sep 12 20:46:11 2022 ] Top5: 71.94%
|
345 |
+
[ Mon Sep 12 20:46:11 2022 ] Training epoch: 47
|
346 |
+
[ Mon Sep 12 20:46:25 2022 ] Batch(21/243) done. Loss: 0.4091 lr:0.100000
|
347 |
+
[ Mon Sep 12 20:47:18 2022 ] Batch(121/243) done. Loss: 0.2515 lr:0.100000
|
348 |
+
[ Mon Sep 12 20:48:12 2022 ] Batch(221/243) done. Loss: 0.5852 lr:0.100000
|
349 |
+
[ Mon Sep 12 20:48:24 2022 ] Eval epoch: 47
|
350 |
+
[ Mon Sep 12 20:50:54 2022 ] Mean test loss of 796 batches: 3.2509422302246094.
|
351 |
+
[ Mon Sep 12 20:50:55 2022 ] Top1: 39.08%
|
352 |
+
[ Mon Sep 12 20:50:55 2022 ] Top5: 74.01%
|
353 |
+
[ Mon Sep 12 20:50:55 2022 ] Training epoch: 48
|
354 |
+
[ Mon Sep 12 20:51:40 2022 ] Batch(78/243) done. Loss: 0.4491 lr:0.100000
|
355 |
+
[ Mon Sep 12 20:52:33 2022 ] Batch(178/243) done. Loss: 0.4859 lr:0.100000
|
356 |
+
[ Mon Sep 12 20:53:08 2022 ] Eval epoch: 48
|
357 |
+
[ Mon Sep 12 20:55:39 2022 ] Mean test loss of 796 batches: 4.199170112609863.
|
358 |
+
[ Mon Sep 12 20:55:39 2022 ] Top1: 36.34%
|
359 |
+
[ Mon Sep 12 20:55:40 2022 ] Top5: 68.84%
|
360 |
+
[ Mon Sep 12 20:55:40 2022 ] Training epoch: 49
|
361 |
+
[ Mon Sep 12 20:56:02 2022 ] Batch(35/243) done. Loss: 0.5163 lr:0.100000
|
362 |
+
[ Mon Sep 12 20:56:55 2022 ] Batch(135/243) done. Loss: 0.5192 lr:0.100000
|
363 |
+
[ Mon Sep 12 20:57:49 2022 ] Batch(235/243) done. Loss: 0.2901 lr:0.100000
|
364 |
+
[ Mon Sep 12 20:57:53 2022 ] Eval epoch: 49
|
365 |
+
[ Mon Sep 12 21:00:24 2022 ] Mean test loss of 796 batches: 4.176433563232422.
|
366 |
+
[ Mon Sep 12 21:00:24 2022 ] Top1: 39.08%
|
367 |
+
[ Mon Sep 12 21:00:25 2022 ] Top5: 70.88%
|
368 |
+
[ Mon Sep 12 21:00:25 2022 ] Training epoch: 50
|
369 |
+
[ Mon Sep 12 21:01:17 2022 ] Batch(92/243) done. Loss: 0.3617 lr:0.100000
|
370 |
+
[ Mon Sep 12 21:02:11 2022 ] Batch(192/243) done. Loss: 0.3877 lr:0.100000
|
371 |
+
[ Mon Sep 12 21:02:38 2022 ] Eval epoch: 50
|
372 |
+
[ Mon Sep 12 21:05:09 2022 ] Mean test loss of 796 batches: 3.0291857719421387.
|
373 |
+
[ Mon Sep 12 21:05:09 2022 ] Top1: 41.97%
|
374 |
+
[ Mon Sep 12 21:05:09 2022 ] Top5: 76.29%
|
375 |
+
[ Mon Sep 12 21:05:10 2022 ] Training epoch: 51
|
376 |
+
[ Mon Sep 12 21:05:39 2022 ] Batch(49/243) done. Loss: 0.3313 lr:0.100000
|
377 |
+
[ Mon Sep 12 21:06:32 2022 ] Batch(149/243) done. Loss: 0.4774 lr:0.100000
|
378 |
+
[ Mon Sep 12 21:07:23 2022 ] Eval epoch: 51
|
379 |
+
[ Mon Sep 12 21:09:54 2022 ] Mean test loss of 796 batches: 4.730879306793213.
|
380 |
+
[ Mon Sep 12 21:09:54 2022 ] Top1: 29.74%
|
381 |
+
[ Mon Sep 12 21:09:54 2022 ] Top5: 62.17%
|
382 |
+
[ Mon Sep 12 21:09:55 2022 ] Training epoch: 52
|
383 |
+
[ Mon Sep 12 21:10:01 2022 ] Batch(6/243) done. Loss: 0.2274 lr:0.100000
|
384 |
+
[ Mon Sep 12 21:10:54 2022 ] Batch(106/243) done. Loss: 0.1653 lr:0.100000
|
385 |
+
[ Mon Sep 12 21:11:48 2022 ] Batch(206/243) done. Loss: 0.5538 lr:0.100000
|
386 |
+
[ Mon Sep 12 21:12:08 2022 ] Eval epoch: 52
|
387 |
+
[ Mon Sep 12 21:14:39 2022 ] Mean test loss of 796 batches: 3.6701395511627197.
|
388 |
+
[ Mon Sep 12 21:14:39 2022 ] Top1: 40.39%
|
389 |
+
[ Mon Sep 12 21:14:39 2022 ] Top5: 71.54%
|
390 |
+
[ Mon Sep 12 21:14:40 2022 ] Training epoch: 53
|
391 |
+
[ Mon Sep 12 21:15:16 2022 ] Batch(63/243) done. Loss: 0.3948 lr:0.100000
|
392 |
+
[ Mon Sep 12 21:16:10 2022 ] Batch(163/243) done. Loss: 0.4890 lr:0.100000
|
393 |
+
[ Mon Sep 12 21:16:52 2022 ] Eval epoch: 53
|
394 |
+
[ Mon Sep 12 21:19:23 2022 ] Mean test loss of 796 batches: 3.335566759109497.
|
395 |
+
[ Mon Sep 12 21:19:24 2022 ] Top1: 39.80%
|
396 |
+
[ Mon Sep 12 21:19:24 2022 ] Top5: 71.85%
|
397 |
+
[ Mon Sep 12 21:19:24 2022 ] Training epoch: 54
|
398 |
+
[ Mon Sep 12 21:19:38 2022 ] Batch(20/243) done. Loss: 0.3850 lr:0.100000
|
399 |
+
[ Mon Sep 12 21:20:32 2022 ] Batch(120/243) done. Loss: 0.5277 lr:0.100000
|
400 |
+
[ Mon Sep 12 21:21:25 2022 ] Batch(220/243) done. Loss: 0.4992 lr:0.100000
|
401 |
+
[ Mon Sep 12 21:21:37 2022 ] Eval epoch: 54
|
402 |
+
[ Mon Sep 12 21:24:09 2022 ] Mean test loss of 796 batches: 3.5634169578552246.
|
403 |
+
[ Mon Sep 12 21:24:09 2022 ] Top1: 43.06%
|
404 |
+
[ Mon Sep 12 21:24:09 2022 ] Top5: 74.42%
|
405 |
+
[ Mon Sep 12 21:24:10 2022 ] Training epoch: 55
|
406 |
+
[ Mon Sep 12 21:24:54 2022 ] Batch(77/243) done. Loss: 0.4606 lr:0.100000
|
407 |
+
[ Mon Sep 12 21:25:47 2022 ] Batch(177/243) done. Loss: 0.8160 lr:0.100000
|
408 |
+
[ Mon Sep 12 21:26:22 2022 ] Eval epoch: 55
|
409 |
+
[ Mon Sep 12 21:28:53 2022 ] Mean test loss of 796 batches: 3.872100353240967.
|
410 |
+
[ Mon Sep 12 21:28:54 2022 ] Top1: 39.43%
|
411 |
+
[ Mon Sep 12 21:28:54 2022 ] Top5: 73.59%
|
412 |
+
[ Mon Sep 12 21:28:54 2022 ] Training epoch: 56
|
413 |
+
[ Mon Sep 12 21:29:15 2022 ] Batch(34/243) done. Loss: 0.5868 lr:0.100000
|
414 |
+
[ Mon Sep 12 21:30:09 2022 ] Batch(134/243) done. Loss: 0.2790 lr:0.100000
|
415 |
+
[ Mon Sep 12 21:31:02 2022 ] Batch(234/243) done. Loss: 0.3649 lr:0.100000
|
416 |
+
[ Mon Sep 12 21:31:07 2022 ] Eval epoch: 56
|
417 |
+
[ Mon Sep 12 21:33:38 2022 ] Mean test loss of 796 batches: 3.6488142013549805.
|
418 |
+
[ Mon Sep 12 21:33:39 2022 ] Top1: 39.34%
|
419 |
+
[ Mon Sep 12 21:33:39 2022 ] Top5: 71.31%
|
420 |
+
[ Mon Sep 12 21:33:39 2022 ] Training epoch: 57
|
421 |
+
[ Mon Sep 12 21:34:31 2022 ] Batch(91/243) done. Loss: 0.2369 lr:0.100000
|
422 |
+
[ Mon Sep 12 21:35:24 2022 ] Batch(191/243) done. Loss: 0.4638 lr:0.100000
|
423 |
+
[ Mon Sep 12 21:35:52 2022 ] Eval epoch: 57
|
424 |
+
[ Mon Sep 12 21:38:23 2022 ] Mean test loss of 796 batches: 4.045055866241455.
|
425 |
+
[ Mon Sep 12 21:38:23 2022 ] Top1: 39.11%
|
426 |
+
[ Mon Sep 12 21:38:24 2022 ] Top5: 71.53%
|
427 |
+
[ Mon Sep 12 21:38:24 2022 ] Training epoch: 58
|
428 |
+
[ Mon Sep 12 21:38:53 2022 ] Batch(48/243) done. Loss: 0.2509 lr:0.100000
|
429 |
+
[ Mon Sep 12 21:39:46 2022 ] Batch(148/243) done. Loss: 0.4252 lr:0.100000
|
430 |
+
[ Mon Sep 12 21:40:37 2022 ] Eval epoch: 58
|
431 |
+
[ Mon Sep 12 21:43:08 2022 ] Mean test loss of 796 batches: 3.6366567611694336.
|
432 |
+
[ Mon Sep 12 21:43:08 2022 ] Top1: 40.15%
|
433 |
+
[ Mon Sep 12 21:43:09 2022 ] Top5: 73.47%
|
434 |
+
[ Mon Sep 12 21:43:09 2022 ] Training epoch: 59
|
435 |
+
[ Mon Sep 12 21:43:14 2022 ] Batch(5/243) done. Loss: 0.1725 lr:0.100000
|
436 |
+
[ Mon Sep 12 21:44:08 2022 ] Batch(105/243) done. Loss: 0.1798 lr:0.100000
|
437 |
+
[ Mon Sep 12 21:45:01 2022 ] Batch(205/243) done. Loss: 0.5699 lr:0.100000
|
438 |
+
[ Mon Sep 12 21:45:21 2022 ] Eval epoch: 59
|
439 |
+
[ Mon Sep 12 21:47:52 2022 ] Mean test loss of 796 batches: 4.1476850509643555.
|
440 |
+
[ Mon Sep 12 21:47:53 2022 ] Top1: 36.29%
|
441 |
+
[ Mon Sep 12 21:47:53 2022 ] Top5: 68.46%
|
442 |
+
[ Mon Sep 12 21:47:53 2022 ] Training epoch: 60
|
443 |
+
[ Mon Sep 12 21:48:30 2022 ] Batch(62/243) done. Loss: 0.1565 lr:0.100000
|
444 |
+
[ Mon Sep 12 21:49:23 2022 ] Batch(162/243) done. Loss: 0.3137 lr:0.100000
|
445 |
+
[ Mon Sep 12 21:50:06 2022 ] Eval epoch: 60
|
446 |
+
[ Mon Sep 12 21:52:38 2022 ] Mean test loss of 796 batches: 3.4781711101531982.
|
447 |
+
[ Mon Sep 12 21:52:38 2022 ] Top1: 44.30%
|
448 |
+
[ Mon Sep 12 21:52:39 2022 ] Top5: 76.36%
|
449 |
+
[ Mon Sep 12 21:52:39 2022 ] Training epoch: 61
|
450 |
+
[ Mon Sep 12 21:52:52 2022 ] Batch(19/243) done. Loss: 0.2289 lr:0.010000
|
451 |
+
[ Mon Sep 12 21:53:46 2022 ] Batch(119/243) done. Loss: 0.1061 lr:0.010000
|
452 |
+
[ Mon Sep 12 21:54:39 2022 ] Batch(219/243) done. Loss: 0.1921 lr:0.010000
|
453 |
+
[ Mon Sep 12 21:54:52 2022 ] Eval epoch: 61
|
454 |
+
[ Mon Sep 12 21:57:23 2022 ] Mean test loss of 796 batches: 2.852508068084717.
|
455 |
+
[ Mon Sep 12 21:57:23 2022 ] Top1: 49.14%
|
456 |
+
[ Mon Sep 12 21:57:24 2022 ] Top5: 80.56%
|
457 |
+
[ Mon Sep 12 21:57:24 2022 ] Training epoch: 62
|
458 |
+
[ Mon Sep 12 21:58:07 2022 ] Batch(76/243) done. Loss: 0.2023 lr:0.010000
|
459 |
+
[ Mon Sep 12 21:59:01 2022 ] Batch(176/243) done. Loss: 0.0368 lr:0.010000
|
460 |
+
[ Mon Sep 12 21:59:36 2022 ] Eval epoch: 62
|
461 |
+
[ Mon Sep 12 22:02:07 2022 ] Mean test loss of 796 batches: 3.0011351108551025.
|
462 |
+
[ Mon Sep 12 22:02:08 2022 ] Top1: 47.58%
|
463 |
+
[ Mon Sep 12 22:02:08 2022 ] Top5: 79.43%
|
464 |
+
[ Mon Sep 12 22:02:08 2022 ] Training epoch: 63
|
465 |
+
[ Mon Sep 12 22:02:29 2022 ] Batch(33/243) done. Loss: 0.1184 lr:0.010000
|
466 |
+
[ Mon Sep 12 22:03:22 2022 ] Batch(133/243) done. Loss: 0.1296 lr:0.010000
|
467 |
+
[ Mon Sep 12 22:04:16 2022 ] Batch(233/243) done. Loss: 0.2059 lr:0.010000
|
468 |
+
[ Mon Sep 12 22:04:21 2022 ] Eval epoch: 63
|
469 |
+
[ Mon Sep 12 22:06:52 2022 ] Mean test loss of 796 batches: 2.870868682861328.
|
470 |
+
[ Mon Sep 12 22:06:53 2022 ] Top1: 51.09%
|
471 |
+
[ Mon Sep 12 22:06:53 2022 ] Top5: 81.82%
|
472 |
+
[ Mon Sep 12 22:06:53 2022 ] Training epoch: 64
|
473 |
+
[ Mon Sep 12 22:07:44 2022 ] Batch(90/243) done. Loss: 0.1001 lr:0.010000
|
474 |
+
[ Mon Sep 12 22:08:38 2022 ] Batch(190/243) done. Loss: 0.1054 lr:0.010000
|
475 |
+
[ Mon Sep 12 22:09:06 2022 ] Eval epoch: 64
|
476 |
+
[ Mon Sep 12 22:11:38 2022 ] Mean test loss of 796 batches: 2.920163631439209.
|
477 |
+
[ Mon Sep 12 22:11:38 2022 ] Top1: 51.21%
|
478 |
+
[ Mon Sep 12 22:11:39 2022 ] Top5: 81.85%
|
479 |
+
[ Mon Sep 12 22:11:39 2022 ] Training epoch: 65
|
480 |
+
[ Mon Sep 12 22:12:07 2022 ] Batch(47/243) done. Loss: 0.0761 lr:0.010000
|
481 |
+
[ Mon Sep 12 22:13:01 2022 ] Batch(147/243) done. Loss: 0.0371 lr:0.010000
|
482 |
+
[ Mon Sep 12 22:13:52 2022 ] Eval epoch: 65
|
483 |
+
[ Mon Sep 12 22:16:23 2022 ] Mean test loss of 796 batches: 3.0438802242279053.
|
484 |
+
[ Mon Sep 12 22:16:23 2022 ] Top1: 49.43%
|
485 |
+
[ Mon Sep 12 22:16:24 2022 ] Top5: 80.56%
|
486 |
+
[ Mon Sep 12 22:16:24 2022 ] Training epoch: 66
|
487 |
+
[ Mon Sep 12 22:16:29 2022 ] Batch(4/243) done. Loss: 0.0809 lr:0.010000
|
488 |
+
[ Mon Sep 12 22:17:23 2022 ] Batch(104/243) done. Loss: 0.0946 lr:0.010000
|
489 |
+
[ Mon Sep 12 22:18:16 2022 ] Batch(204/243) done. Loss: 0.0191 lr:0.010000
|
490 |
+
[ Mon Sep 12 22:18:37 2022 ] Eval epoch: 66
|
491 |
+
[ Mon Sep 12 22:21:08 2022 ] Mean test loss of 796 batches: 3.0824527740478516.
|
492 |
+
[ Mon Sep 12 22:21:09 2022 ] Top1: 51.53%
|
493 |
+
[ Mon Sep 12 22:21:09 2022 ] Top5: 81.71%
|
494 |
+
[ Mon Sep 12 22:21:09 2022 ] Training epoch: 67
|
495 |
+
[ Mon Sep 12 22:21:45 2022 ] Batch(61/243) done. Loss: 0.0327 lr:0.010000
|
496 |
+
[ Mon Sep 12 22:22:39 2022 ] Batch(161/243) done. Loss: 0.0732 lr:0.010000
|
497 |
+
[ Mon Sep 12 22:23:22 2022 ] Eval epoch: 67
|
498 |
+
[ Mon Sep 12 22:25:53 2022 ] Mean test loss of 796 batches: 3.083371162414551.
|
499 |
+
[ Mon Sep 12 22:25:54 2022 ] Top1: 51.02%
|
500 |
+
[ Mon Sep 12 22:25:54 2022 ] Top5: 81.61%
|
501 |
+
[ Mon Sep 12 22:25:54 2022 ] Training epoch: 68
|
502 |
+
[ Mon Sep 12 22:26:07 2022 ] Batch(18/243) done. Loss: 0.0614 lr:0.010000
|
503 |
+
[ Mon Sep 12 22:27:00 2022 ] Batch(118/243) done. Loss: 0.0235 lr:0.010000
|
504 |
+
[ Mon Sep 12 22:27:54 2022 ] Batch(218/243) done. Loss: 0.0443 lr:0.010000
|
505 |
+
[ Mon Sep 12 22:28:07 2022 ] Eval epoch: 68
|
506 |
+
[ Mon Sep 12 22:30:38 2022 ] Mean test loss of 796 batches: 3.059816360473633.
|
507 |
+
[ Mon Sep 12 22:30:39 2022 ] Top1: 51.32%
|
508 |
+
[ Mon Sep 12 22:30:39 2022 ] Top5: 81.60%
|
509 |
+
[ Mon Sep 12 22:30:39 2022 ] Training epoch: 69
|
510 |
+
[ Mon Sep 12 22:31:22 2022 ] Batch(75/243) done. Loss: 0.0619 lr:0.010000
|
511 |
+
[ Mon Sep 12 22:32:16 2022 ] Batch(175/243) done. Loss: 0.2430 lr:0.010000
|
512 |
+
[ Mon Sep 12 22:32:52 2022 ] Eval epoch: 69
|
513 |
+
[ Mon Sep 12 22:35:24 2022 ] Mean test loss of 796 batches: 3.0981345176696777.
|
514 |
+
[ Mon Sep 12 22:35:24 2022 ] Top1: 51.92%
|
515 |
+
[ Mon Sep 12 22:35:24 2022 ] Top5: 82.02%
|
516 |
+
[ Mon Sep 12 22:35:25 2022 ] Training epoch: 70
|
517 |
+
[ Mon Sep 12 22:35:45 2022 ] Batch(32/243) done. Loss: 0.0729 lr:0.010000
|
518 |
+
[ Mon Sep 12 22:36:38 2022 ] Batch(132/243) done. Loss: 0.0404 lr:0.010000
|
519 |
+
[ Mon Sep 12 22:37:32 2022 ] Batch(232/243) done. Loss: 0.0802 lr:0.010000
|
520 |
+
[ Mon Sep 12 22:37:38 2022 ] Eval epoch: 70
|
521 |
+
[ Mon Sep 12 22:40:08 2022 ] Mean test loss of 796 batches: 3.0480141639709473.
|
522 |
+
[ Mon Sep 12 22:40:09 2022 ] Top1: 51.89%
|
523 |
+
[ Mon Sep 12 22:40:09 2022 ] Top5: 81.99%
|
524 |
+
[ Mon Sep 12 22:40:09 2022 ] Training epoch: 71
|
525 |
+
[ Mon Sep 12 22:41:00 2022 ] Batch(89/243) done. Loss: 0.0413 lr:0.010000
|
526 |
+
[ Mon Sep 12 22:41:54 2022 ] Batch(189/243) done. Loss: 0.0038 lr:0.010000
|
527 |
+
[ Mon Sep 12 22:42:22 2022 ] Eval epoch: 71
|
528 |
+
[ Mon Sep 12 22:44:53 2022 ] Mean test loss of 796 batches: 3.1876227855682373.
|
529 |
+
[ Mon Sep 12 22:44:54 2022 ] Top1: 51.41%
|
530 |
+
[ Mon Sep 12 22:44:54 2022 ] Top5: 81.72%
|
531 |
+
[ Mon Sep 12 22:44:54 2022 ] Training epoch: 72
|
532 |
+
[ Mon Sep 12 22:45:22 2022 ] Batch(46/243) done. Loss: 0.0979 lr:0.010000
|
533 |
+
[ Mon Sep 12 22:46:15 2022 ] Batch(146/243) done. Loss: 0.0448 lr:0.010000
|
534 |
+
[ Mon Sep 12 22:47:07 2022 ] Eval epoch: 72
|
535 |
+
[ Mon Sep 12 22:49:37 2022 ] Mean test loss of 796 batches: 3.284703254699707.
|
536 |
+
[ Mon Sep 12 22:49:38 2022 ] Top1: 49.42%
|
537 |
+
[ Mon Sep 12 22:49:38 2022 ] Top5: 80.76%
|
538 |
+
[ Mon Sep 12 22:49:38 2022 ] Training epoch: 73
|
539 |
+
[ Mon Sep 12 22:49:43 2022 ] Batch(3/243) done. Loss: 0.0261 lr:0.010000
|
540 |
+
[ Mon Sep 12 22:50:36 2022 ] Batch(103/243) done. Loss: 0.0331 lr:0.010000
|
541 |
+
[ Mon Sep 12 22:51:30 2022 ] Batch(203/243) done. Loss: 0.0445 lr:0.010000
|
542 |
+
[ Mon Sep 12 22:51:51 2022 ] Eval epoch: 73
|
543 |
+
[ Mon Sep 12 22:54:22 2022 ] Mean test loss of 796 batches: 3.1807875633239746.
|
544 |
+
[ Mon Sep 12 22:54:23 2022 ] Top1: 51.38%
|
545 |
+
[ Mon Sep 12 22:54:23 2022 ] Top5: 81.59%
|
546 |
+
[ Mon Sep 12 22:54:23 2022 ] Training epoch: 74
|
547 |
+
[ Mon Sep 12 22:54:58 2022 ] Batch(60/243) done. Loss: 0.0416 lr:0.010000
|
548 |
+
[ Mon Sep 12 22:55:52 2022 ] Batch(160/243) done. Loss: 0.0468 lr:0.010000
|
549 |
+
[ Mon Sep 12 22:56:36 2022 ] Eval epoch: 74
|
550 |
+
[ Mon Sep 12 22:59:07 2022 ] Mean test loss of 796 batches: 3.26816725730896.
|
551 |
+
[ Mon Sep 12 22:59:07 2022 ] Top1: 50.86%
|
552 |
+
[ Mon Sep 12 22:59:08 2022 ] Top5: 81.36%
|
553 |
+
[ Mon Sep 12 22:59:08 2022 ] Training epoch: 75
|
554 |
+
[ Mon Sep 12 22:59:20 2022 ] Batch(17/243) done. Loss: 0.0461 lr:0.010000
|
555 |
+
[ Mon Sep 12 23:00:14 2022 ] Batch(117/243) done. Loss: 0.0289 lr:0.010000
|
556 |
+
[ Mon Sep 12 23:01:07 2022 ] Batch(217/243) done. Loss: 0.0427 lr:0.010000
|
557 |
+
[ Mon Sep 12 23:01:21 2022 ] Eval epoch: 75
|
558 |
+
[ Mon Sep 12 23:03:52 2022 ] Mean test loss of 796 batches: 3.1034815311431885.
|
559 |
+
[ Mon Sep 12 23:03:52 2022 ] Top1: 52.44%
|
560 |
+
[ Mon Sep 12 23:03:53 2022 ] Top5: 82.28%
|
561 |
+
[ Mon Sep 12 23:03:53 2022 ] Training epoch: 76
|
562 |
+
[ Mon Sep 12 23:04:36 2022 ] Batch(74/243) done. Loss: 0.0704 lr:0.010000
|
563 |
+
[ Mon Sep 12 23:05:29 2022 ] Batch(174/243) done. Loss: 0.0075 lr:0.010000
|
564 |
+
[ Mon Sep 12 23:06:06 2022 ] Eval epoch: 76
|
565 |
+
[ Mon Sep 12 23:08:37 2022 ] Mean test loss of 796 batches: 3.2890477180480957.
|
566 |
+
[ Mon Sep 12 23:08:37 2022 ] Top1: 49.28%
|
567 |
+
[ Mon Sep 12 23:08:37 2022 ] Top5: 80.70%
|
568 |
+
[ Mon Sep 12 23:08:38 2022 ] Training epoch: 77
|
569 |
+
[ Mon Sep 12 23:08:57 2022 ] Batch(31/243) done. Loss: 0.0824 lr:0.010000
|
570 |
+
[ Mon Sep 12 23:09:51 2022 ] Batch(131/243) done. Loss: 0.0392 lr:0.010000
|
571 |
+
[ Mon Sep 12 23:10:44 2022 ] Batch(231/243) done. Loss: 0.0310 lr:0.010000
|
572 |
+
[ Mon Sep 12 23:10:51 2022 ] Eval epoch: 77
|
573 |
+
[ Mon Sep 12 23:13:21 2022 ] Mean test loss of 796 batches: 3.286661386489868.
|
574 |
+
[ Mon Sep 12 23:13:22 2022 ] Top1: 51.69%
|
575 |
+
[ Mon Sep 12 23:13:22 2022 ] Top5: 82.00%
|
576 |
+
[ Mon Sep 12 23:13:22 2022 ] Training epoch: 78
|
577 |
+
[ Mon Sep 12 23:14:12 2022 ] Batch(88/243) done. Loss: 0.0495 lr:0.010000
|
578 |
+
[ Mon Sep 12 23:15:06 2022 ] Batch(188/243) done. Loss: 0.0499 lr:0.010000
|
579 |
+
[ Mon Sep 12 23:15:35 2022 ] Eval epoch: 78
|
580 |
+
[ Mon Sep 12 23:18:06 2022 ] Mean test loss of 796 batches: 3.410419464111328.
|
581 |
+
[ Mon Sep 12 23:18:06 2022 ] Top1: 50.12%
|
582 |
+
[ Mon Sep 12 23:18:07 2022 ] Top5: 80.87%
|
583 |
+
[ Mon Sep 12 23:18:07 2022 ] Training epoch: 79
|
584 |
+
[ Mon Sep 12 23:18:34 2022 ] Batch(45/243) done. Loss: 0.0719 lr:0.010000
|
585 |
+
[ Mon Sep 12 23:19:28 2022 ] Batch(145/243) done. Loss: 0.0136 lr:0.010000
|
586 |
+
[ Mon Sep 12 23:20:20 2022 ] Eval epoch: 79
|
587 |
+
[ Mon Sep 12 23:22:51 2022 ] Mean test loss of 796 batches: 3.340691566467285.
|
588 |
+
[ Mon Sep 12 23:22:51 2022 ] Top1: 51.51%
|
589 |
+
[ Mon Sep 12 23:22:52 2022 ] Top5: 81.60%
|
590 |
+
[ Mon Sep 12 23:22:52 2022 ] Training epoch: 80
|
591 |
+
[ Mon Sep 12 23:22:56 2022 ] Batch(2/243) done. Loss: 0.0512 lr:0.010000
|
592 |
+
[ Mon Sep 12 23:23:49 2022 ] Batch(102/243) done. Loss: 0.0190 lr:0.010000
|
593 |
+
[ Mon Sep 12 23:24:43 2022 ] Batch(202/243) done. Loss: 0.0227 lr:0.010000
|
594 |
+
[ Mon Sep 12 23:25:05 2022 ] Eval epoch: 80
|
595 |
+
[ Mon Sep 12 23:27:36 2022 ] Mean test loss of 796 batches: 3.4196999073028564.
|
596 |
+
[ Mon Sep 12 23:27:36 2022 ] Top1: 50.91%
|
597 |
+
[ Mon Sep 12 23:27:37 2022 ] Top5: 81.32%
|
598 |
+
[ Mon Sep 12 23:27:37 2022 ] Training epoch: 81
|
599 |
+
[ Mon Sep 12 23:28:12 2022 ] Batch(59/243) done. Loss: 0.1012 lr:0.001000
|
600 |
+
[ Mon Sep 12 23:29:05 2022 ] Batch(159/243) done. Loss: 0.0594 lr:0.001000
|
601 |
+
[ Mon Sep 12 23:29:50 2022 ] Eval epoch: 81
|
602 |
+
[ Mon Sep 12 23:32:21 2022 ] Mean test loss of 796 batches: 3.4129106998443604.
|
603 |
+
[ Mon Sep 12 23:32:21 2022 ] Top1: 49.94%
|
604 |
+
[ Mon Sep 12 23:32:22 2022 ] Top5: 80.92%
|
605 |
+
[ Mon Sep 12 23:32:22 2022 ] Training epoch: 82
|
606 |
+
[ Mon Sep 12 23:32:33 2022 ] Batch(16/243) done. Loss: 0.0366 lr:0.001000
|
607 |
+
[ Mon Sep 12 23:33:27 2022 ] Batch(116/243) done. Loss: 0.0414 lr:0.001000
|
608 |
+
[ Mon Sep 12 23:34:20 2022 ] Batch(216/243) done. Loss: 0.0294 lr:0.001000
|
609 |
+
[ Mon Sep 12 23:34:35 2022 ] Eval epoch: 82
|
610 |
+
[ Mon Sep 12 23:37:05 2022 ] Mean test loss of 796 batches: 3.4732067584991455.
|
611 |
+
[ Mon Sep 12 23:37:06 2022 ] Top1: 50.00%
|
612 |
+
[ Mon Sep 12 23:37:06 2022 ] Top5: 80.94%
|
613 |
+
[ Mon Sep 12 23:37:06 2022 ] Training epoch: 83
|
614 |
+
[ Mon Sep 12 23:37:49 2022 ] Batch(73/243) done. Loss: 0.0921 lr:0.001000
|
615 |
+
[ Mon Sep 12 23:38:42 2022 ] Batch(173/243) done. Loss: 0.0280 lr:0.001000
|
616 |
+
[ Mon Sep 12 23:39:19 2022 ] Eval epoch: 83
|
617 |
+
[ Mon Sep 12 23:41:50 2022 ] Mean test loss of 796 batches: 3.2555062770843506.
|
618 |
+
[ Mon Sep 12 23:41:50 2022 ] Top1: 51.72%
|
619 |
+
[ Mon Sep 12 23:41:51 2022 ] Top5: 82.01%
|
620 |
+
[ Mon Sep 12 23:41:51 2022 ] Training epoch: 84
|
621 |
+
[ Mon Sep 12 23:42:10 2022 ] Batch(30/243) done. Loss: 0.0519 lr:0.001000
|
622 |
+
[ Mon Sep 12 23:43:03 2022 ] Batch(130/243) done. Loss: 0.0613 lr:0.001000
|
623 |
+
[ Mon Sep 12 23:43:57 2022 ] Batch(230/243) done. Loss: 0.0280 lr:0.001000
|
624 |
+
[ Mon Sep 12 23:44:03 2022 ] Eval epoch: 84
|
625 |
+
[ Mon Sep 12 23:46:34 2022 ] Mean test loss of 796 batches: 3.338073968887329.
|
626 |
+
[ Mon Sep 12 23:46:35 2022 ] Top1: 51.04%
|
627 |
+
[ Mon Sep 12 23:46:35 2022 ] Top5: 81.55%
|
628 |
+
[ Mon Sep 12 23:46:35 2022 ] Training epoch: 85
|
629 |
+
[ Mon Sep 12 23:47:25 2022 ] Batch(87/243) done. Loss: 0.0245 lr:0.001000
|
630 |
+
[ Mon Sep 12 23:48:19 2022 ] Batch(187/243) done. Loss: 0.0188 lr:0.001000
|
631 |
+
[ Mon Sep 12 23:48:48 2022 ] Eval epoch: 85
|
632 |
+
[ Mon Sep 12 23:51:19 2022 ] Mean test loss of 796 batches: 3.3033416271209717.
|
633 |
+
[ Mon Sep 12 23:51:19 2022 ] Top1: 51.45%
|
634 |
+
[ Mon Sep 12 23:51:20 2022 ] Top5: 81.72%
|
635 |
+
[ Mon Sep 12 23:51:20 2022 ] Training epoch: 86
|
636 |
+
[ Mon Sep 12 23:51:47 2022 ] Batch(44/243) done. Loss: 0.0434 lr:0.001000
|
637 |
+
[ Mon Sep 12 23:52:40 2022 ] Batch(144/243) done. Loss: 0.0236 lr:0.001000
|
638 |
+
[ Mon Sep 12 23:53:33 2022 ] Eval epoch: 86
|
639 |
+
[ Mon Sep 12 23:56:04 2022 ] Mean test loss of 796 batches: 3.372443199157715.
|
640 |
+
[ Mon Sep 12 23:56:04 2022 ] Top1: 51.01%
|
641 |
+
[ Mon Sep 12 23:56:05 2022 ] Top5: 81.37%
|
642 |
+
[ Mon Sep 12 23:56:05 2022 ] Training epoch: 87
|
643 |
+
[ Mon Sep 12 23:56:08 2022 ] Batch(1/243) done. Loss: 0.0489 lr:0.001000
|
644 |
+
[ Mon Sep 12 23:57:02 2022 ] Batch(101/243) done. Loss: 0.0604 lr:0.001000
|
645 |
+
[ Mon Sep 12 23:57:55 2022 ] Batch(201/243) done. Loss: 0.0703 lr:0.001000
|
646 |
+
[ Mon Sep 12 23:58:17 2022 ] Eval epoch: 87
|
647 |
+
[ Tue Sep 13 00:00:48 2022 ] Mean test loss of 796 batches: 3.3413257598876953.
|
648 |
+
[ Tue Sep 13 00:00:48 2022 ] Top1: 51.24%
|
649 |
+
[ Tue Sep 13 00:00:49 2022 ] Top5: 81.68%
|
650 |
+
[ Tue Sep 13 00:00:49 2022 ] Training epoch: 88
|
651 |
+
[ Tue Sep 13 00:01:23 2022 ] Batch(58/243) done. Loss: 0.0489 lr:0.001000
|
652 |
+
[ Tue Sep 13 00:02:17 2022 ] Batch(158/243) done. Loss: 0.0582 lr:0.001000
|
653 |
+
[ Tue Sep 13 00:03:02 2022 ] Eval epoch: 88
|
654 |
+
[ Tue Sep 13 00:05:33 2022 ] Mean test loss of 796 batches: 3.504627227783203.
|
655 |
+
[ Tue Sep 13 00:05:33 2022 ] Top1: 48.36%
|
656 |
+
[ Tue Sep 13 00:05:33 2022 ] Top5: 79.56%
|
657 |
+
[ Tue Sep 13 00:05:34 2022 ] Training epoch: 89
|
658 |
+
[ Tue Sep 13 00:05:45 2022 ] Batch(15/243) done. Loss: 0.1064 lr:0.001000
|
659 |
+
[ Tue Sep 13 00:06:38 2022 ] Batch(115/243) done. Loss: 0.0455 lr:0.001000
|
660 |
+
[ Tue Sep 13 00:07:32 2022 ] Batch(215/243) done. Loss: 0.0330 lr:0.001000
|
661 |
+
[ Tue Sep 13 00:07:47 2022 ] Eval epoch: 89
|
662 |
+
[ Tue Sep 13 00:10:17 2022 ] Mean test loss of 796 batches: 3.399523973464966.
|
663 |
+
[ Tue Sep 13 00:10:18 2022 ] Top1: 49.74%
|
664 |
+
[ Tue Sep 13 00:10:18 2022 ] Top5: 80.94%
|
665 |
+
[ Tue Sep 13 00:10:18 2022 ] Training epoch: 90
|
666 |
+
[ Tue Sep 13 00:11:00 2022 ] Batch(72/243) done. Loss: 0.0055 lr:0.001000
|
667 |
+
[ Tue Sep 13 00:11:54 2022 ] Batch(172/243) done. Loss: 0.1521 lr:0.001000
|
668 |
+
[ Tue Sep 13 00:12:31 2022 ] Eval epoch: 90
|
669 |
+
[ Tue Sep 13 00:15:02 2022 ] Mean test loss of 796 batches: 3.3444113731384277.
|
670 |
+
[ Tue Sep 13 00:15:02 2022 ] Top1: 51.10%
|
671 |
+
[ Tue Sep 13 00:15:03 2022 ] Top5: 81.42%
|
672 |
+
[ Tue Sep 13 00:15:03 2022 ] Training epoch: 91
|
673 |
+
[ Tue Sep 13 00:15:21 2022 ] Batch(29/243) done. Loss: 0.0395 lr:0.001000
|
674 |
+
[ Tue Sep 13 00:16:15 2022 ] Batch(129/243) done. Loss: 0.0075 lr:0.001000
|
675 |
+
[ Tue Sep 13 00:17:08 2022 ] Batch(229/243) done. Loss: 0.0953 lr:0.001000
|
676 |
+
[ Tue Sep 13 00:17:16 2022 ] Eval epoch: 91
|
677 |
+
[ Tue Sep 13 00:19:47 2022 ] Mean test loss of 796 batches: 3.4517605304718018.
|
678 |
+
[ Tue Sep 13 00:19:47 2022 ] Top1: 49.49%
|
679 |
+
[ Tue Sep 13 00:19:47 2022 ] Top5: 80.58%
|
680 |
+
[ Tue Sep 13 00:19:48 2022 ] Training epoch: 92
|
681 |
+
[ Tue Sep 13 00:20:37 2022 ] Batch(86/243) done. Loss: 0.0396 lr:0.001000
|
682 |
+
[ Tue Sep 13 00:21:30 2022 ] Batch(186/243) done. Loss: 0.0267 lr:0.001000
|
683 |
+
[ Tue Sep 13 00:22:01 2022 ] Eval epoch: 92
|
684 |
+
[ Tue Sep 13 00:24:32 2022 ] Mean test loss of 796 batches: 3.5669608116149902.
|
685 |
+
[ Tue Sep 13 00:24:32 2022 ] Top1: 47.84%
|
686 |
+
[ Tue Sep 13 00:24:33 2022 ] Top5: 79.81%
|
687 |
+
[ Tue Sep 13 00:24:33 2022 ] Training epoch: 93
|
688 |
+
[ Tue Sep 13 00:24:59 2022 ] Batch(43/243) done. Loss: 0.0418 lr:0.001000
|
689 |
+
[ Tue Sep 13 00:25:52 2022 ] Batch(143/243) done. Loss: 0.0722 lr:0.001000
|
690 |
+
+[ Tue Sep 13 00:26:46 2022 ] Eval epoch: 93
+[ Tue Sep 13 00:29:16 2022 ] Mean test loss of 796 batches: 3.3956525325775146.
+[ Tue Sep 13 00:29:17 2022 ] Top1: 51.18%
+[ Tue Sep 13 00:29:17 2022 ] Top5: 81.62%
+[ Tue Sep 13 00:29:17 2022 ] Training epoch: 94
+[ Tue Sep 13 00:29:20 2022 ] Batch(0/243) done. Loss: 0.0832 lr:0.001000
+[ Tue Sep 13 00:30:14 2022 ] Batch(100/243) done. Loss: 0.0261 lr:0.001000
+[ Tue Sep 13 00:31:07 2022 ] Batch(200/243) done. Loss: 0.0256 lr:0.001000
+[ Tue Sep 13 00:31:30 2022 ] Eval epoch: 94
+[ Tue Sep 13 00:34:00 2022 ] Mean test loss of 796 batches: 3.412087917327881.
+[ Tue Sep 13 00:34:01 2022 ] Top1: 50.98%
+[ Tue Sep 13 00:34:01 2022 ] Top5: 81.43%
+[ Tue Sep 13 00:34:01 2022 ] Training epoch: 95
+[ Tue Sep 13 00:34:35 2022 ] Batch(57/243) done. Loss: 0.2316 lr:0.001000
+[ Tue Sep 13 00:35:28 2022 ] Batch(157/243) done. Loss: 0.0360 lr:0.001000
+[ Tue Sep 13 00:36:14 2022 ] Eval epoch: 95
+[ Tue Sep 13 00:38:45 2022 ] Mean test loss of 796 batches: 3.4245078563690186.
+[ Tue Sep 13 00:38:45 2022 ] Top1: 50.40%
+[ Tue Sep 13 00:38:45 2022 ] Top5: 81.34%
+[ Tue Sep 13 00:38:46 2022 ] Training epoch: 96
+[ Tue Sep 13 00:38:56 2022 ] Batch(14/243) done. Loss: 0.0458 lr:0.001000
+[ Tue Sep 13 00:39:50 2022 ] Batch(114/243) done. Loss: 0.0580 lr:0.001000
+[ Tue Sep 13 00:40:43 2022 ] Batch(214/243) done. Loss: 0.0146 lr:0.001000
+[ Tue Sep 13 00:40:58 2022 ] Eval epoch: 96
+[ Tue Sep 13 00:43:29 2022 ] Mean test loss of 796 batches: 3.6255176067352295.
+[ Tue Sep 13 00:43:29 2022 ] Top1: 47.00%
+[ Tue Sep 13 00:43:30 2022 ] Top5: 79.52%
+[ Tue Sep 13 00:43:30 2022 ] Training epoch: 97
+[ Tue Sep 13 00:44:11 2022 ] Batch(71/243) done. Loss: 0.0982 lr:0.001000
+[ Tue Sep 13 00:45:05 2022 ] Batch(171/243) done. Loss: 0.0173 lr:0.001000
+[ Tue Sep 13 00:45:43 2022 ] Eval epoch: 97
+[ Tue Sep 13 00:48:14 2022 ] Mean test loss of 796 batches: 3.4557762145996094.
+[ Tue Sep 13 00:48:14 2022 ] Top1: 50.08%
+[ Tue Sep 13 00:48:14 2022 ] Top5: 80.97%
+[ Tue Sep 13 00:48:15 2022 ] Training epoch: 98
+[ Tue Sep 13 00:48:33 2022 ] Batch(28/243) done. Loss: 0.0425 lr:0.001000
+[ Tue Sep 13 00:49:26 2022 ] Batch(128/243) done. Loss: 0.0405 lr:0.001000
+[ Tue Sep 13 00:50:20 2022 ] Batch(228/243) done. Loss: 0.0122 lr:0.001000
+[ Tue Sep 13 00:50:27 2022 ] Eval epoch: 98
+[ Tue Sep 13 00:52:58 2022 ] Mean test loss of 796 batches: 3.3831961154937744.
+[ Tue Sep 13 00:52:59 2022 ] Top1: 51.53%
+[ Tue Sep 13 00:52:59 2022 ] Top5: 81.86%
+[ Tue Sep 13 00:52:59 2022 ] Training epoch: 99
+[ Tue Sep 13 00:53:48 2022 ] Batch(85/243) done. Loss: 0.0917 lr:0.001000
+[ Tue Sep 13 00:54:41 2022 ] Batch(185/243) done. Loss: 0.1321 lr:0.001000
+[ Tue Sep 13 00:55:12 2022 ] Eval epoch: 99
+[ Tue Sep 13 00:57:43 2022 ] Mean test loss of 796 batches: 3.407810926437378.
+[ Tue Sep 13 00:57:44 2022 ] Top1: 50.55%
+[ Tue Sep 13 00:57:44 2022 ] Top5: 81.20%
+[ Tue Sep 13 00:57:44 2022 ] Training epoch: 100
+[ Tue Sep 13 00:58:09 2022 ] Batch(42/243) done. Loss: 0.0693 lr:0.001000
+[ Tue Sep 13 00:59:03 2022 ] Batch(142/243) done. Loss: 0.0106 lr:0.001000
+[ Tue Sep 13 00:59:56 2022 ] Batch(242/243) done. Loss: 0.0785 lr:0.001000
+[ Tue Sep 13 00:59:57 2022 ] Eval epoch: 100
+[ Tue Sep 13 01:02:27 2022 ] Mean test loss of 796 batches: 3.423464298248291.
+[ Tue Sep 13 01:02:27 2022 ] Top1: 50.07%
+[ Tue Sep 13 01:02:27 2022 ] Top5: 81.12%
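The per-epoch `Top1`/`Top5` lines in these logs are easy to tabulate. Below is a minimal Python sketch (not part of the upload) that scans one such `log.txt` and reports the best Top-1 epoch; it assumes the `[ <timestamp> ] Eval epoch: N` / `Top1: X%` layout shown above, and the file path is a placeholder.

```python
import re

# Hypothetical path; point this at any of the log.txt files in this upload.
eval_re = re.compile(r"\] Eval epoch: (\d+)")
top_re = re.compile(r"\] Top([15]): ([\d.]+)%")

results = {}  # epoch -> {"Top1": ..., "Top5": ...}
epoch = None
with open("log.txt") as f:
    for line in f:
        m = eval_re.search(line)
        if m:
            epoch = int(m.group(1))
            results[epoch] = {}
            continue
        m = top_re.search(line)
        if m and epoch is not None:
            results[epoch]["Top" + m.group(1)] = float(m.group(2))

best_epoch = max(results, key=lambda e: results[e].get("Top1", 0.0))
print(best_epoch, results[best_epoch])
```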
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_xsub/config.yaml
ADDED
@@ -0,0 +1,59 @@
+Experiment_name: ntu120_joint_xsub
+base_lr: 0.1
+batch_size: 64
+config: ./config/ntu120_xsub/train_joint.yaml
+device:
+- 4
+- 5
+eval_interval: 5
+feeder: feeders.feeder.Feeder
+groups: 8
+ignore_weights: []
+keep_rate: 0.9
+log_interval: 100
+model: model.decouple_gcn.Model
+model_args:
+  block_size: 41
+  graph: graph.ntu_rgb_d.Graph
+  graph_args:
+    labeling_mode: spatial
+  groups: 16
+  num_class: 120
+  num_person: 2
+  num_point: 25
+model_saved_name: ./save_models/ntu120_joint_xsub
+nesterov: true
+num_epoch: 100
+num_worker: 32
+only_train_epoch: 1
+only_train_part: true
+optimizer: SGD
+phase: train
+print_log: true
+save_interval: 2
+save_score: false
+seed: 1
+show_topk:
+- 1
+- 5
+start_epoch: 0
+step:
+- 60
+- 80
+test_batch_size: 64
+test_feeder_args:
+  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint.npy
+  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
+train_feeder_args:
+  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint.npy
+  debug: false
+  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
+  normalization: false
+  random_choose: false
+  random_move: false
+  random_shift: false
+  window_size: -1
+warm_up_epoch: 0
+weight_decay: 0.0001
+weights: null
+work_dir: ./work_dir/ntu120_joint_xsub
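For orientation, a short sketch (not part of the upload) of reading such a config with PyYAML and pulling out the keyword arguments the model class is built with; the path is a placeholder and PyYAML itself is an assumption, since any YAML reader works.

```python
import yaml  # PyYAML

# Hypothetical path; point this at the config.yaml above.
with open("config.yaml") as f:
    cfg = yaml.safe_load(f)

print(cfg["model"])       # model.decouple_gcn.Model
print(cfg["model_args"])  # num_class=120, num_point=25, groups=16, ...
```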
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_xsub/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.autograd import Variable
+import numpy as np
+import math
+from model.dropSke import DropBlock_Ske
+from model.dropT import DropBlockT_1d
+
+
+def import_class(name):
+    components = name.split('.')
+    mod = __import__(components[0])
+    for comp in components[1:]:
+        mod = getattr(mod, comp)
+    return mod
+
+
+def conv_branch_init(conv):
+    weight = conv.weight
+    n = weight.size(0)
+    k1 = weight.size(1)
+    k2 = weight.size(2)
+    nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
+    nn.init.constant(conv.bias, 0)
+
+
+def conv_init(conv):
+    nn.init.kaiming_normal(conv.weight, mode='fan_out')
+    nn.init.constant(conv.bias, 0)
+
+
+def bn_init(bn, scale):
+    nn.init.constant(bn.weight, scale)
+    nn.init.constant(bn.bias, 0)
+
+
+class unit_tcn(nn.Module):
+    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
+        super(unit_tcn, self).__init__()
+        pad = int((kernel_size - 1) / 2)
+        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                              stride=(stride, 1))
+
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+        conv_init(self.conv)
+        bn_init(self.bn, 1)
+
+        self.dropS = DropBlock_Ske(num_point=num_point)
+        self.dropT = DropBlockT_1d(block_size=block_size)
+
+    def forward(self, x, keep_prob, A):
+        x = self.bn(self.conv(x))
+        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
+        return x
+
+
+class unit_tcn_skip(nn.Module):
+    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+        super(unit_tcn_skip, self).__init__()
+        pad = int((kernel_size - 1) / 2)
+        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                              stride=(stride, 1))
+
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+        conv_init(self.conv)
+        bn_init(self.bn, 1)
+
+    def forward(self, x):
+        x = self.bn(self.conv(x))
+        return x
+
+
+class unit_gcn(nn.Module):
+    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
+        super(unit_gcn, self).__init__()
+        self.in_channels = in_channels
+        self.out_channels = out_channels
+        self.num_point = num_point
+        self.groups = groups
+        self.num_subset = num_subset
+        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
+            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)
+
+        if in_channels != out_channels:
+            self.down = nn.Sequential(
+                nn.Conv2d(in_channels, out_channels, 1),
+                nn.BatchNorm2d(out_channels)
+            )
+        else:
+            self.down = lambda x: x
+
+        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                conv_init(m)
+            elif isinstance(m, nn.BatchNorm2d):
+                bn_init(m, 1)
+        bn_init(self.bn, 1e-6)
+
+        self.Linear_weight = nn.Parameter(torch.zeros(
+            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
+        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
+            0.5 / (out_channels * num_subset)))
+
+        self.Linear_bias = nn.Parameter(torch.zeros(
+            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
+        nn.init.constant(self.Linear_bias, 1e-6)
+
+        eye_array = []
+        for i in range(out_channels):
+            eye_array.append(torch.eye(num_point))
+        self.eyes = nn.Parameter(torch.tensor(torch.stack(
+            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]
+
+    def norm(self, A):
+        b, c, h, w = A.size()
+        A = A.view(c, self.num_point, self.num_point)
+        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
+        D_list_12 = (D_list + 0.001)**(-1)
+        D_12 = self.eyes * D_list_12
+        A = torch.bmm(A, D_12).view(b, c, h, w)
+        return A
+
+    def forward(self, x0):
+        learn_A = self.DecoupleA.repeat(
+            1, self.out_channels // self.groups, 1, 1)
+        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
+            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)
+
+        x = torch.einsum(
+            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
+        x = x + self.Linear_bias
+        x = self.bn0(x)
+
+        n, kc, t, v = x.size()
+        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
+        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))
+
+        x = self.bn(x)
+        x += self.down(x0)
+        x = self.relu(x)
+        return x
+
+
+class TCN_GCN_unit(nn.Module):
+    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
+        super(TCN_GCN_unit, self).__init__()
+        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
+        self.tcn1 = unit_tcn(out_channels, out_channels,
+                             stride=stride, num_point=num_point)
+        self.relu = nn.ReLU()
+
+        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
+            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)
+
+        if not residual:
+            self.residual = lambda x: 0
+
+        elif (in_channels == out_channels) and (stride == 1):
+            self.residual = lambda x: x
+
+        else:
+            self.residual = unit_tcn_skip(
+                in_channels, out_channels, kernel_size=1, stride=stride)
+        self.dropSke = DropBlock_Ske(num_point=num_point)
+        self.dropT_skip = DropBlockT_1d(block_size=block_size)
+
+    def forward(self, x, keep_prob):
+        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
+            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
+        return self.relu(x)
+
+
+class Model(nn.Module):
+    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
+        super(Model, self).__init__()
+
+        if graph is None:
+            raise ValueError()
+        else:
+            Graph = import_class(graph)
+            self.graph = Graph(**graph_args)
+
+        A = self.graph.A
+        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
+
+        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
+                               block_size, residual=False)
+        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l5 = TCN_GCN_unit(
+            64, 128, A, groups, num_point, block_size, stride=2)
+        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
+        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
+        self.l8 = TCN_GCN_unit(128, 256, A, groups,
+                               num_point, block_size, stride=2)
+        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
+        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
+
+        self.fc = nn.Linear(256, num_class)
+        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
+        bn_init(self.data_bn, 1)
+
+    def forward(self, x, keep_prob=0.9):
+        N, C, T, V, M = x.size()
+
+        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
+        x = self.data_bn(x)
+        x = x.view(N, M, V, C, T).permute(
+            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
+
+        x = self.l1(x, 1.0)
+        x = self.l2(x, 1.0)
+        x = self.l3(x, 1.0)
+        x = self.l4(x, 1.0)
+        x = self.l5(x, 1.0)
+        x = self.l6(x, 1.0)
+        x = self.l7(x, keep_prob)
+        x = self.l8(x, keep_prob)
+        x = self.l9(x, keep_prob)
+        x = self.l10(x, keep_prob)
+
+        # N*M,C,T,V
+        c_new = x.size(1)
+        x = x.reshape(N, M, c_new, -1)
+        x = x.mean(3).mean(1)
+
+        return self.fc(x)
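A hedged usage sketch for the file above: instantiating `Model` with the `model_args` from the accompanying `config.yaml` and running a dummy NTU-style tensor through it. It assumes the surrounding repo layout (`graph.ntu_rgb_d.Graph`, `model.dropSke`, `model.dropT`) is importable and that a CUDA device is present, since several parameters above are allocated with `device='cuda'`.

```python
import torch
from model.decouple_gcn import Model  # assumes the repo layout above

# Build the network as the config's model_args describe it.
model = Model(num_class=120, num_point=25, num_person=2, groups=16,
              block_size=41, graph='graph.ntu_rgb_d.Graph',
              graph_args={'labeling_mode': 'spatial'}).cuda()

# Dummy skeleton batch: (N batch, C=3 coords, T frames, V=25 joints, M=2 persons).
x = torch.randn(2, 3, 64, 25, 2).cuda()
logits = model(x, keep_prob=0.9)  # keep_prob drives the DropGraph layers l7-l10
print(logits.shape)               # torch.Size([2, 120])
```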
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_xsub/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7357c0977e0bbbe458e55546b83a0cb9e5690d38fe02c45612fe69dcb289fe38
+size 29946137
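The three lines above are a Git LFS pointer, not the pickle itself; `git lfs pull` fetches the real ~30 MB payload. A minimal sketch of inspecting it afterwards (what the object actually contains, e.g. per-sample scores, depends on the training script, so print before assuming a structure):

```python
import pickle

path = "ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_xsub/eval_results/best_acc.pkl"
with open(path, "rb") as f:  # run `git lfs pull` first
    obj = pickle.load(f)

print(type(obj))
```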
ckpt/Others/DC-GCN+ADG/ntu120_xsub/ntu120_joint_xsub/log.txt
ADDED
@@ -0,0 +1,746 @@
1 |
+
[ Mon Sep 12 17:08:11 2022 ] Parameters:
|
2 |
+
{'work_dir': './work_dir/ntu120_joint_xsub', 'model_saved_name': './save_models/ntu120_joint_xsub', 'Experiment_name': 'ntu120_joint_xsub', 'config': './config/ntu120_xsub/train_joint.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [4, 5], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
|
3 |
+
|
4 |
+
[ Mon Sep 12 17:08:11 2022 ] Training epoch: 1
|
5 |
+
[ Mon Sep 12 17:09:01 2022 ] Batch(99/243) done. Loss: 3.7190 lr:0.100000
|
6 |
+
[ Mon Sep 12 17:09:46 2022 ] Batch(199/243) done. Loss: 3.1809 lr:0.100000
|
7 |
+
[ Mon Sep 12 17:10:05 2022 ] Eval epoch: 1
|
8 |
+
[ Mon Sep 12 17:12:34 2022 ] Mean test loss of 796 batches: 5.361838340759277.
|
9 |
+
[ Mon Sep 12 17:12:35 2022 ] Top1: 4.77%
|
10 |
+
[ Mon Sep 12 17:12:35 2022 ] Top5: 17.25%
|
11 |
+
[ Mon Sep 12 17:12:35 2022 ] Training epoch: 2
|
12 |
+
[ Mon Sep 12 17:13:08 2022 ] Batch(56/243) done. Loss: 3.0372 lr:0.100000
|
13 |
+
[ Mon Sep 12 17:14:01 2022 ] Batch(156/243) done. Loss: 2.4914 lr:0.100000
|
14 |
+
[ Mon Sep 12 17:14:46 2022 ] Eval epoch: 2
|
15 |
+
[ Mon Sep 12 17:17:15 2022 ] Mean test loss of 796 batches: 4.679137229919434.
|
16 |
+
[ Mon Sep 12 17:17:16 2022 ] Top1: 10.01%
|
17 |
+
[ Mon Sep 12 17:17:16 2022 ] Top5: 25.83%
|
18 |
+
[ Mon Sep 12 17:17:16 2022 ] Training epoch: 3
|
19 |
+
[ Mon Sep 12 17:17:26 2022 ] Batch(13/243) done. Loss: 2.5587 lr:0.100000
|
20 |
+
[ Mon Sep 12 17:18:19 2022 ] Batch(113/243) done. Loss: 2.1768 lr:0.100000
|
21 |
+
[ Mon Sep 12 17:19:12 2022 ] Batch(213/243) done. Loss: 2.3778 lr:0.100000
|
22 |
+
[ Mon Sep 12 17:19:27 2022 ] Eval epoch: 3
|
23 |
+
[ Mon Sep 12 17:21:56 2022 ] Mean test loss of 796 batches: 4.291816234588623.
|
24 |
+
[ Mon Sep 12 17:21:56 2022 ] Top1: 11.05%
|
25 |
+
[ Mon Sep 12 17:21:57 2022 ] Top5: 31.20%
|
26 |
+
[ Mon Sep 12 17:21:57 2022 ] Training epoch: 4
|
27 |
+
[ Mon Sep 12 17:22:37 2022 ] Batch(70/243) done. Loss: 2.0107 lr:0.100000
|
28 |
+
[ Mon Sep 12 17:23:30 2022 ] Batch(170/243) done. Loss: 2.0625 lr:0.100000
|
29 |
+
[ Mon Sep 12 17:24:08 2022 ] Eval epoch: 4
|
30 |
+
[ Mon Sep 12 17:26:37 2022 ] Mean test loss of 796 batches: 4.092510223388672.
|
31 |
+
[ Mon Sep 12 17:26:38 2022 ] Top1: 14.37%
|
32 |
+
[ Mon Sep 12 17:26:38 2022 ] Top5: 36.23%
|
33 |
+
[ Mon Sep 12 17:26:38 2022 ] Training epoch: 5
|
34 |
+
[ Mon Sep 12 17:26:56 2022 ] Batch(27/243) done. Loss: 1.7477 lr:0.100000
|
35 |
+
[ Mon Sep 12 17:27:49 2022 ] Batch(127/243) done. Loss: 1.9247 lr:0.100000
|
36 |
+
[ Mon Sep 12 17:28:41 2022 ] Batch(227/243) done. Loss: 1.6601 lr:0.100000
|
37 |
+
[ Mon Sep 12 17:28:49 2022 ] Eval epoch: 5
|
38 |
+
[ Mon Sep 12 17:31:18 2022 ] Mean test loss of 796 batches: 3.604010581970215.
|
39 |
+
[ Mon Sep 12 17:31:19 2022 ] Top1: 18.38%
|
40 |
+
[ Mon Sep 12 17:31:19 2022 ] Top5: 44.76%
|
41 |
+
[ Mon Sep 12 17:31:19 2022 ] Training epoch: 6
|
42 |
+
[ Mon Sep 12 17:32:07 2022 ] Batch(84/243) done. Loss: 1.8004 lr:0.100000
|
43 |
+
[ Mon Sep 12 17:32:59 2022 ] Batch(184/243) done. Loss: 1.7991 lr:0.100000
|
44 |
+
[ Mon Sep 12 17:33:30 2022 ] Eval epoch: 6
|
45 |
+
[ Mon Sep 12 17:35:59 2022 ] Mean test loss of 796 batches: 3.3129656314849854.
|
46 |
+
[ Mon Sep 12 17:35:59 2022 ] Top1: 21.87%
|
47 |
+
[ Mon Sep 12 17:35:59 2022 ] Top5: 50.46%
|
48 |
+
[ Mon Sep 12 17:35:59 2022 ] Training epoch: 7
|
49 |
+
[ Mon Sep 12 17:36:24 2022 ] Batch(41/243) done. Loss: 1.6051 lr:0.100000
|
50 |
+
[ Mon Sep 12 17:37:17 2022 ] Batch(141/243) done. Loss: 1.1784 lr:0.100000
|
51 |
+
[ Mon Sep 12 17:38:09 2022 ] Batch(241/243) done. Loss: 1.3763 lr:0.100000
|
52 |
+
[ Mon Sep 12 17:38:10 2022 ] Eval epoch: 7
|
53 |
+
[ Mon Sep 12 17:40:38 2022 ] Mean test loss of 796 batches: 2.904848575592041.
|
54 |
+
[ Mon Sep 12 17:40:39 2022 ] Top1: 27.32%
|
55 |
+
[ Mon Sep 12 17:40:39 2022 ] Top5: 61.45%
|
56 |
+
[ Mon Sep 12 17:40:39 2022 ] Training epoch: 8
|
57 |
+
[ Mon Sep 12 17:41:34 2022 ] Batch(98/243) done. Loss: 1.2853 lr:0.100000
|
58 |
+
[ Mon Sep 12 17:42:27 2022 ] Batch(198/243) done. Loss: 1.2021 lr:0.100000
|
59 |
+
[ Mon Sep 12 17:42:50 2022 ] Eval epoch: 8
|
60 |
+
[ Mon Sep 12 17:45:19 2022 ] Mean test loss of 796 batches: 3.138392210006714.
|
61 |
+
[ Mon Sep 12 17:45:19 2022 ] Top1: 26.41%
|
62 |
+
[ Mon Sep 12 17:45:20 2022 ] Top5: 57.90%
|
63 |
+
[ Mon Sep 12 17:45:20 2022 ] Training epoch: 9
|
64 |
+
[ Mon Sep 12 17:45:52 2022 ] Batch(55/243) done. Loss: 1.0583 lr:0.100000
|
65 |
+
[ Mon Sep 12 17:46:45 2022 ] Batch(155/243) done. Loss: 1.0930 lr:0.100000
|
66 |
+
[ Mon Sep 12 17:47:31 2022 ] Eval epoch: 9
|
67 |
+
[ Mon Sep 12 17:50:00 2022 ] Mean test loss of 796 batches: 2.994957685470581.
|
68 |
+
[ Mon Sep 12 17:50:00 2022 ] Top1: 29.95%
|
69 |
+
[ Mon Sep 12 17:50:00 2022 ] Top5: 61.35%
|
70 |
+
[ Mon Sep 12 17:50:01 2022 ] Training epoch: 10
|
71 |
+
[ Mon Sep 12 17:50:10 2022 ] Batch(12/243) done. Loss: 1.3079 lr:0.100000
|
72 |
+
[ Mon Sep 12 17:51:03 2022 ] Batch(112/243) done. Loss: 1.0368 lr:0.100000
|
73 |
+
[ Mon Sep 12 17:51:56 2022 ] Batch(212/243) done. Loss: 1.2674 lr:0.100000
|
74 |
+
[ Mon Sep 12 17:52:12 2022 ] Eval epoch: 10
|
75 |
+
[ Mon Sep 12 17:54:40 2022 ] Mean test loss of 796 batches: 3.029468059539795.
|
76 |
+
[ Mon Sep 12 17:54:41 2022 ] Top1: 28.90%
|
77 |
+
[ Mon Sep 12 17:54:41 2022 ] Top5: 60.87%
|
78 |
+
[ Mon Sep 12 17:54:41 2022 ] Training epoch: 11
|
79 |
+
[ Mon Sep 12 17:55:21 2022 ] Batch(69/243) done. Loss: 0.8488 lr:0.100000
|
80 |
+
[ Mon Sep 12 17:56:14 2022 ] Batch(169/243) done. Loss: 1.1231 lr:0.100000
|
81 |
+
[ Mon Sep 12 17:56:52 2022 ] Eval epoch: 11
|
82 |
+
[ Mon Sep 12 17:59:21 2022 ] Mean test loss of 796 batches: 2.9211504459381104.
|
83 |
+
[ Mon Sep 12 17:59:21 2022 ] Top1: 31.10%
|
84 |
+
[ Mon Sep 12 17:59:22 2022 ] Top5: 65.88%
|
85 |
+
[ Mon Sep 12 17:59:22 2022 ] Training epoch: 12
|
86 |
+
[ Mon Sep 12 17:59:39 2022 ] Batch(26/243) done. Loss: 0.8822 lr:0.100000
|
87 |
+
[ Mon Sep 12 18:00:32 2022 ] Batch(126/243) done. Loss: 1.1657 lr:0.100000
|
88 |
+
[ Mon Sep 12 18:01:24 2022 ] Batch(226/243) done. Loss: 0.9553 lr:0.100000
|
89 |
+
[ Mon Sep 12 18:01:33 2022 ] Eval epoch: 12
|
90 |
+
[ Mon Sep 12 18:04:01 2022 ] Mean test loss of 796 batches: 3.866846799850464.
|
91 |
+
[ Mon Sep 12 18:04:01 2022 ] Top1: 25.76%
|
92 |
+
[ Mon Sep 12 18:04:02 2022 ] Top5: 56.17%
|
93 |
+
[ Mon Sep 12 18:04:02 2022 ] Training epoch: 13
|
94 |
+
[ Mon Sep 12 18:04:48 2022 ] Batch(83/243) done. Loss: 1.1388 lr:0.100000
|
95 |
+
[ Mon Sep 12 18:05:41 2022 ] Batch(183/243) done. Loss: 1.1985 lr:0.100000
|
96 |
+
[ Mon Sep 12 18:06:12 2022 ] Eval epoch: 13
|
97 |
+
[ Mon Sep 12 18:08:41 2022 ] Mean test loss of 796 batches: 2.716705560684204.
|
98 |
+
[ Mon Sep 12 18:08:41 2022 ] Top1: 37.91%
|
99 |
+
[ Mon Sep 12 18:08:42 2022 ] Top5: 74.10%
|
100 |
+
[ Mon Sep 12 18:08:42 2022 ] Training epoch: 14
|
101 |
+
[ Mon Sep 12 18:09:06 2022 ] Batch(40/243) done. Loss: 0.8329 lr:0.100000
|
102 |
+
[ Mon Sep 12 18:09:59 2022 ] Batch(140/243) done. Loss: 0.8772 lr:0.100000
|
103 |
+
[ Mon Sep 12 18:10:52 2022 ] Batch(240/243) done. Loss: 1.2715 lr:0.100000
|
104 |
+
[ Mon Sep 12 18:10:53 2022 ] Eval epoch: 14
|
105 |
+
[ Mon Sep 12 18:13:22 2022 ] Mean test loss of 796 batches: 2.4711339473724365.
|
106 |
+
[ Mon Sep 12 18:13:22 2022 ] Top1: 36.33%
|
107 |
+
[ Mon Sep 12 18:13:22 2022 ] Top5: 72.33%
|
108 |
+
[ Mon Sep 12 18:13:23 2022 ] Training epoch: 15
|
109 |
+
[ Mon Sep 12 18:14:17 2022 ] Batch(97/243) done. Loss: 0.9205 lr:0.100000
|
110 |
+
[ Mon Sep 12 18:15:10 2022 ] Batch(197/243) done. Loss: 1.1311 lr:0.100000
|
111 |
+
[ Mon Sep 12 18:15:34 2022 ] Eval epoch: 15
|
112 |
+
[ Mon Sep 12 18:18:03 2022 ] Mean test loss of 796 batches: 2.4340267181396484.
|
113 |
+
[ Mon Sep 12 18:18:03 2022 ] Top1: 41.34%
|
114 |
+
[ Mon Sep 12 18:18:03 2022 ] Top5: 74.69%
|
115 |
+
[ Mon Sep 12 18:18:03 2022 ] Training epoch: 16
|
116 |
+
[ Mon Sep 12 18:18:35 2022 ] Batch(54/243) done. Loss: 1.0560 lr:0.100000
|
117 |
+
[ Mon Sep 12 18:19:28 2022 ] Batch(154/243) done. Loss: 0.7556 lr:0.100000
|
118 |
+
[ Mon Sep 12 18:20:14 2022 ] Eval epoch: 16
|
119 |
+
[ Mon Sep 12 18:22:42 2022 ] Mean test loss of 796 batches: 2.4173901081085205.
|
120 |
+
[ Mon Sep 12 18:22:42 2022 ] Top1: 38.66%
|
121 |
+
[ Mon Sep 12 18:22:43 2022 ] Top5: 75.33%
|
122 |
+
[ Mon Sep 12 18:22:43 2022 ] Training epoch: 17
|
123 |
+
[ Mon Sep 12 18:22:52 2022 ] Batch(11/243) done. Loss: 0.5570 lr:0.100000
|
124 |
+
[ Mon Sep 12 18:23:45 2022 ] Batch(111/243) done. Loss: 0.7542 lr:0.100000
|
125 |
+
[ Mon Sep 12 18:24:38 2022 ] Batch(211/243) done. Loss: 0.7955 lr:0.100000
|
126 |
+
[ Mon Sep 12 18:24:54 2022 ] Eval epoch: 17
|
127 |
+
[ Mon Sep 12 18:27:23 2022 ] Mean test loss of 796 batches: 2.4517364501953125.
|
128 |
+
[ Mon Sep 12 18:27:23 2022 ] Top1: 40.98%
|
129 |
+
[ Mon Sep 12 18:27:23 2022 ] Top5: 74.23%
|
130 |
+
[ Mon Sep 12 18:27:24 2022 ] Training epoch: 18
|
131 |
+
[ Mon Sep 12 18:28:03 2022 ] Batch(68/243) done. Loss: 0.6977 lr:0.100000
|
132 |
+
[ Mon Sep 12 18:28:55 2022 ] Batch(168/243) done. Loss: 0.7670 lr:0.100000
|
133 |
+
[ Mon Sep 12 18:29:34 2022 ] Eval epoch: 18
|
134 |
+
[ Mon Sep 12 18:32:03 2022 ] Mean test loss of 796 batches: 2.8006997108459473.
|
135 |
+
[ Mon Sep 12 18:32:03 2022 ] Top1: 37.71%
|
136 |
+
[ Mon Sep 12 18:32:03 2022 ] Top5: 74.40%
|
137 |
+
[ Mon Sep 12 18:32:04 2022 ] Training epoch: 19
|
138 |
+
[ Mon Sep 12 18:32:20 2022 ] Batch(25/243) done. Loss: 0.8622 lr:0.100000
|
139 |
+
[ Mon Sep 12 18:33:12 2022 ] Batch(125/243) done. Loss: 0.6451 lr:0.100000
|
140 |
+
[ Mon Sep 12 18:34:05 2022 ] Batch(225/243) done. Loss: 0.7216 lr:0.100000
|
141 |
+
[ Mon Sep 12 18:34:14 2022 ] Eval epoch: 19
|
142 |
+
[ Mon Sep 12 18:36:43 2022 ] Mean test loss of 796 batches: 3.1234707832336426.
|
143 |
+
[ Mon Sep 12 18:36:43 2022 ] Top1: 33.68%
|
144 |
+
[ Mon Sep 12 18:36:44 2022 ] Top5: 67.92%
|
145 |
+
[ Mon Sep 12 18:36:44 2022 ] Training epoch: 20
|
146 |
+
[ Mon Sep 12 18:37:30 2022 ] Batch(82/243) done. Loss: 0.7481 lr:0.100000
|
147 |
+
[ Mon Sep 12 18:38:23 2022 ] Batch(182/243) done. Loss: 0.6089 lr:0.100000
|
148 |
+
[ Mon Sep 12 18:38:55 2022 ] Eval epoch: 20
|
149 |
+
[ Mon Sep 12 18:41:24 2022 ] Mean test loss of 796 batches: 2.6673736572265625.
|
150 |
+
[ Mon Sep 12 18:41:24 2022 ] Top1: 38.96%
|
151 |
+
[ Mon Sep 12 18:41:24 2022 ] Top5: 74.41%
|
152 |
+
[ Mon Sep 12 18:41:24 2022 ] Training epoch: 21
|
153 |
+
[ Mon Sep 12 18:41:48 2022 ] Batch(39/243) done. Loss: 0.4091 lr:0.100000
|
154 |
+
[ Mon Sep 12 18:42:41 2022 ] Batch(139/243) done. Loss: 0.6318 lr:0.100000
|
155 |
+
[ Mon Sep 12 18:43:34 2022 ] Batch(239/243) done. Loss: 0.7306 lr:0.100000
|
156 |
+
[ Mon Sep 12 18:43:36 2022 ] Eval epoch: 21
|
157 |
+
[ Mon Sep 12 18:46:04 2022 ] Mean test loss of 796 batches: 2.3942465782165527.
|
158 |
+
[ Mon Sep 12 18:46:04 2022 ] Top1: 43.69%
|
159 |
+
[ Mon Sep 12 18:46:05 2022 ] Top5: 78.36%
|
160 |
+
[ Mon Sep 12 18:46:05 2022 ] Training epoch: 22
|
161 |
+
[ Mon Sep 12 18:46:59 2022 ] Batch(96/243) done. Loss: 0.6980 lr:0.100000
|
162 |
+
[ Mon Sep 12 18:47:51 2022 ] Batch(196/243) done. Loss: 0.6690 lr:0.100000
|
163 |
+
[ Mon Sep 12 18:48:16 2022 ] Eval epoch: 22
|
164 |
+
[ Mon Sep 12 18:50:44 2022 ] Mean test loss of 796 batches: 2.5864007472991943.
|
165 |
+
[ Mon Sep 12 18:50:44 2022 ] Top1: 39.41%
|
166 |
+
[ Mon Sep 12 18:50:44 2022 ] Top5: 75.04%
|
167 |
+
[ Mon Sep 12 18:50:45 2022 ] Training epoch: 23
|
168 |
+
[ Mon Sep 12 18:51:16 2022 ] Batch(53/243) done. Loss: 0.4845 lr:0.100000
|
169 |
+
[ Mon Sep 12 18:52:08 2022 ] Batch(153/243) done. Loss: 0.7480 lr:0.100000
|
170 |
+
[ Mon Sep 12 18:52:56 2022 ] Eval epoch: 23
|
171 |
+
[ Mon Sep 12 18:55:25 2022 ] Mean test loss of 796 batches: 2.513529062271118.
|
172 |
+
[ Mon Sep 12 18:55:25 2022 ] Top1: 43.67%
|
173 |
+
[ Mon Sep 12 18:55:25 2022 ] Top5: 78.55%
|
174 |
+
[ Mon Sep 12 18:55:26 2022 ] Training epoch: 24
|
175 |
+
[ Mon Sep 12 18:55:34 2022 ] Batch(10/243) done. Loss: 0.4489 lr:0.100000
|
176 |
+
[ Mon Sep 12 18:56:27 2022 ] Batch(110/243) done. Loss: 0.5259 lr:0.100000
|
177 |
+
[ Mon Sep 12 18:57:20 2022 ] Batch(210/243) done. Loss: 0.9508 lr:0.100000
|
178 |
+
[ Mon Sep 12 18:57:37 2022 ] Eval epoch: 24
|
179 |
+
[ Mon Sep 12 19:00:05 2022 ] Mean test loss of 796 batches: 2.2841122150421143.
|
180 |
+
[ Mon Sep 12 19:00:05 2022 ] Top1: 43.42%
|
181 |
+
[ Mon Sep 12 19:00:06 2022 ] Top5: 78.57%
|
182 |
+
[ Mon Sep 12 19:00:06 2022 ] Training epoch: 25
|
183 |
+
[ Mon Sep 12 19:00:44 2022 ] Batch(67/243) done. Loss: 0.5586 lr:0.100000
|
184 |
+
[ Mon Sep 12 19:01:37 2022 ] Batch(167/243) done. Loss: 0.7997 lr:0.100000
|
185 |
+
[ Mon Sep 12 19:02:17 2022 ] Eval epoch: 25
|
186 |
+
[ Mon Sep 12 19:04:45 2022 ] Mean test loss of 796 batches: 2.3876681327819824.
|
187 |
+
[ Mon Sep 12 19:04:46 2022 ] Top1: 44.44%
|
188 |
+
[ Mon Sep 12 19:04:46 2022 ] Top5: 77.63%
|
189 |
+
[ Mon Sep 12 19:04:46 2022 ] Training epoch: 26
|
190 |
+
[ Mon Sep 12 19:05:02 2022 ] Batch(24/243) done. Loss: 0.6328 lr:0.100000
|
191 |
+
[ Mon Sep 12 19:05:55 2022 ] Batch(124/243) done. Loss: 0.8385 lr:0.100000
|
192 |
+
[ Mon Sep 12 19:06:48 2022 ] Batch(224/243) done. Loss: 0.4274 lr:0.100000
|
193 |
+
[ Mon Sep 12 19:06:57 2022 ] Eval epoch: 26
|
194 |
+
[ Mon Sep 12 19:09:26 2022 ] Mean test loss of 796 batches: 2.849940538406372.
|
195 |
+
[ Mon Sep 12 19:09:26 2022 ] Top1: 41.05%
|
196 |
+
[ Mon Sep 12 19:09:27 2022 ] Top5: 75.96%
|
197 |
+
[ Mon Sep 12 19:09:27 2022 ] Training epoch: 27
|
198 |
+
[ Mon Sep 12 19:10:13 2022 ] Batch(81/243) done. Loss: 0.4895 lr:0.100000
|
199 |
+
[ Mon Sep 12 19:11:05 2022 ] Batch(181/243) done. Loss: 0.4380 lr:0.100000
|
200 |
+
[ Mon Sep 12 19:11:38 2022 ] Eval epoch: 27
|
201 |
+
[ Mon Sep 12 19:14:06 2022 ] Mean test loss of 796 batches: 2.4651989936828613.
|
202 |
+
[ Mon Sep 12 19:14:07 2022 ] Top1: 43.31%
|
203 |
+
[ Mon Sep 12 19:14:07 2022 ] Top5: 78.15%
|
204 |
+
[ Mon Sep 12 19:14:07 2022 ] Training epoch: 28
|
205 |
+
[ Mon Sep 12 19:14:31 2022 ] Batch(38/243) done. Loss: 0.5685 lr:0.100000
|
206 |
+
[ Mon Sep 12 19:15:23 2022 ] Batch(138/243) done. Loss: 0.4821 lr:0.100000
|
207 |
+
[ Mon Sep 12 19:16:16 2022 ] Batch(238/243) done. Loss: 0.3740 lr:0.100000
|
208 |
+
[ Mon Sep 12 19:16:18 2022 ] Eval epoch: 28
|
209 |
+
[ Mon Sep 12 19:18:47 2022 ] Mean test loss of 796 batches: 2.620584011077881.
|
210 |
+
[ Mon Sep 12 19:18:47 2022 ] Top1: 42.39%
|
211 |
+
[ Mon Sep 12 19:18:47 2022 ] Top5: 76.08%
|
212 |
+
[ Mon Sep 12 19:18:48 2022 ] Training epoch: 29
|
213 |
+
[ Mon Sep 12 19:19:41 2022 ] Batch(95/243) done. Loss: 0.6795 lr:0.100000
|
214 |
+
[ Mon Sep 12 19:20:34 2022 ] Batch(195/243) done. Loss: 0.5652 lr:0.100000
|
215 |
+
[ Mon Sep 12 19:20:59 2022 ] Eval epoch: 29
|
216 |
+
[ Mon Sep 12 19:23:27 2022 ] Mean test loss of 796 batches: 2.435502767562866.
|
217 |
+
[ Mon Sep 12 19:23:27 2022 ] Top1: 43.64%
|
218 |
+
[ Mon Sep 12 19:23:28 2022 ] Top5: 78.20%
|
219 |
+
[ Mon Sep 12 19:23:28 2022 ] Training epoch: 30
|
220 |
+
[ Mon Sep 12 19:23:58 2022 ] Batch(52/243) done. Loss: 0.3756 lr:0.100000
|
221 |
+
[ Mon Sep 12 19:24:51 2022 ] Batch(152/243) done. Loss: 0.2534 lr:0.100000
|
222 |
+
[ Mon Sep 12 19:25:39 2022 ] Eval epoch: 30
|
223 |
+
[ Mon Sep 12 19:28:07 2022 ] Mean test loss of 796 batches: 2.9882123470306396.
|
224 |
+
[ Mon Sep 12 19:28:07 2022 ] Top1: 40.10%
|
225 |
+
[ Mon Sep 12 19:28:07 2022 ] Top5: 73.79%
|
226 |
+
[ Mon Sep 12 19:28:08 2022 ] Training epoch: 31
|
227 |
+
[ Mon Sep 12 19:28:16 2022 ] Batch(9/243) done. Loss: 0.3546 lr:0.100000
|
228 |
+
[ Mon Sep 12 19:29:09 2022 ] Batch(109/243) done. Loss: 0.4524 lr:0.100000
|
229 |
+
[ Mon Sep 12 19:30:01 2022 ] Batch(209/243) done. Loss: 0.5393 lr:0.100000
|
230 |
+
[ Mon Sep 12 19:30:19 2022 ] Eval epoch: 31
|
231 |
+
[ Mon Sep 12 19:32:47 2022 ] Mean test loss of 796 batches: 2.3977699279785156.
|
232 |
+
[ Mon Sep 12 19:32:48 2022 ] Top1: 44.57%
|
233 |
+
[ Mon Sep 12 19:32:48 2022 ] Top5: 78.84%
|
234 |
+
[ Mon Sep 12 19:32:48 2022 ] Training epoch: 32
|
235 |
+
[ Mon Sep 12 19:33:26 2022 ] Batch(66/243) done. Loss: 0.2371 lr:0.100000
|
236 |
+
[ Mon Sep 12 19:34:19 2022 ] Batch(166/243) done. Loss: 0.3477 lr:0.100000
|
237 |
+
[ Mon Sep 12 19:35:00 2022 ] Eval epoch: 32
|
238 |
+
[ Mon Sep 12 19:37:28 2022 ] Mean test loss of 796 batches: 2.502819061279297.
|
239 |
+
[ Mon Sep 12 19:37:28 2022 ] Top1: 44.61%
|
240 |
+
[ Mon Sep 12 19:37:29 2022 ] Top5: 79.77%
|
241 |
+
[ Mon Sep 12 19:37:29 2022 ] Training epoch: 33
|
242 |
+
[ Mon Sep 12 19:37:44 2022 ] Batch(23/243) done. Loss: 0.3132 lr:0.100000
|
243 |
+
[ Mon Sep 12 19:38:37 2022 ] Batch(123/243) done. Loss: 0.6401 lr:0.100000
|
244 |
+
[ Mon Sep 12 19:39:29 2022 ] Batch(223/243) done. Loss: 0.4580 lr:0.100000
|
245 |
+
[ Mon Sep 12 19:39:39 2022 ] Eval epoch: 33
|
246 |
+
[ Mon Sep 12 19:42:08 2022 ] Mean test loss of 796 batches: 3.035712957382202.
|
247 |
+
[ Mon Sep 12 19:42:09 2022 ] Top1: 41.71%
|
248 |
+
[ Mon Sep 12 19:42:09 2022 ] Top5: 75.48%
|
249 |
+
[ Mon Sep 12 19:42:09 2022 ] Training epoch: 34
|
250 |
+
[ Mon Sep 12 19:42:54 2022 ] Batch(80/243) done. Loss: 0.5582 lr:0.100000
|
251 |
+
[ Mon Sep 12 19:43:47 2022 ] Batch(180/243) done. Loss: 0.3260 lr:0.100000
|
252 |
+
[ Mon Sep 12 19:44:20 2022 ] Eval epoch: 34
|
253 |
+
[ Mon Sep 12 19:46:49 2022 ] Mean test loss of 796 batches: 2.2783524990081787.
|
254 |
+
[ Mon Sep 12 19:46:49 2022 ] Top1: 46.67%
|
255 |
+
[ Mon Sep 12 19:46:50 2022 ] Top5: 79.04%
|
256 |
+
[ Mon Sep 12 19:46:50 2022 ] Training epoch: 35
|
257 |
+
[ Mon Sep 12 19:47:12 2022 ] Batch(37/243) done. Loss: 0.2910 lr:0.100000
|
258 |
+
[ Mon Sep 12 19:48:05 2022 ] Batch(137/243) done. Loss: 0.5316 lr:0.100000
|
259 |
+
[ Mon Sep 12 19:48:58 2022 ] Batch(237/243) done. Loss: 0.3858 lr:0.100000
|
260 |
+
[ Mon Sep 12 19:49:01 2022 ] Eval epoch: 35
|
261 |
+
[ Mon Sep 12 19:51:29 2022 ] Mean test loss of 796 batches: 2.168560028076172.
|
262 |
+
[ Mon Sep 12 19:51:30 2022 ] Top1: 47.69%
|
263 |
+
[ Mon Sep 12 19:51:30 2022 ] Top5: 82.49%
|
264 |
+
[ Mon Sep 12 19:51:30 2022 ] Training epoch: 36
|
265 |
+
[ Mon Sep 12 19:52:23 2022 ] Batch(94/243) done. Loss: 0.7607 lr:0.100000
|
266 |
+
[ Mon Sep 12 19:53:16 2022 ] Batch(194/243) done. Loss: 0.6586 lr:0.100000
|
267 |
+
[ Mon Sep 12 19:53:41 2022 ] Eval epoch: 36
|
268 |
+
[ Mon Sep 12 19:56:10 2022 ] Mean test loss of 796 batches: 2.7959372997283936.
|
269 |
+
[ Mon Sep 12 19:56:10 2022 ] Top1: 43.37%
|
270 |
+
[ Mon Sep 12 19:56:10 2022 ] Top5: 77.52%
|
271 |
+
[ Mon Sep 12 19:56:11 2022 ] Training epoch: 37
|
272 |
+
[ Mon Sep 12 19:56:41 2022 ] Batch(51/243) done. Loss: 0.5353 lr:0.100000
|
273 |
+
[ Mon Sep 12 19:57:34 2022 ] Batch(151/243) done. Loss: 0.4690 lr:0.100000
|
274 |
+
[ Mon Sep 12 19:58:22 2022 ] Eval epoch: 37
|
275 |
+
[ Mon Sep 12 20:00:50 2022 ] Mean test loss of 796 batches: 2.983930826187134.
|
276 |
+
[ Mon Sep 12 20:00:51 2022 ] Top1: 45.45%
|
277 |
+
[ Mon Sep 12 20:00:51 2022 ] Top5: 78.63%
|
278 |
+
[ Mon Sep 12 20:00:51 2022 ] Training epoch: 38
|
279 |
+
[ Mon Sep 12 20:00:59 2022 ] Batch(8/243) done. Loss: 0.4120 lr:0.100000
|
280 |
+
[ Mon Sep 12 20:01:52 2022 ] Batch(108/243) done. Loss: 0.5134 lr:0.100000
|
281 |
+
[ Mon Sep 12 20:02:44 2022 ] Batch(208/243) done. Loss: 0.4337 lr:0.100000
|
282 |
+
[ Mon Sep 12 20:03:02 2022 ] Eval epoch: 38
|
283 |
+
[ Mon Sep 12 20:05:31 2022 ] Mean test loss of 796 batches: 2.54374623298645.
|
284 |
+
[ Mon Sep 12 20:05:31 2022 ] Top1: 46.76%
|
285 |
+
[ Mon Sep 12 20:05:32 2022 ] Top5: 80.62%
|
286 |
+
[ Mon Sep 12 20:05:32 2022 ] Training epoch: 39
|
287 |
+
[ Mon Sep 12 20:06:09 2022 ] Batch(65/243) done. Loss: 0.1754 lr:0.100000
|
288 |
+
[ Mon Sep 12 20:07:02 2022 ] Batch(165/243) done. Loss: 0.4286 lr:0.100000
|
289 |
+
[ Mon Sep 12 20:07:43 2022 ] Eval epoch: 39
|
290 |
+
[ Mon Sep 12 20:10:12 2022 ] Mean test loss of 796 batches: 2.713900327682495.
|
291 |
+
[ Mon Sep 12 20:10:13 2022 ] Top1: 43.11%
|
292 |
+
[ Mon Sep 12 20:10:13 2022 ] Top5: 78.05%
|
293 |
+
[ Mon Sep 12 20:10:13 2022 ] Training epoch: 40
|
294 |
+
[ Mon Sep 12 20:10:28 2022 ] Batch(22/243) done. Loss: 0.2603 lr:0.100000
|
295 |
+
[ Mon Sep 12 20:11:21 2022 ] Batch(122/243) done. Loss: 0.3049 lr:0.100000
|
296 |
+
[ Mon Sep 12 20:12:14 2022 ] Batch(222/243) done. Loss: 0.5437 lr:0.100000
|
297 |
+
[ Mon Sep 12 20:12:24 2022 ] Eval epoch: 40
|
298 |
+
[ Mon Sep 12 20:14:53 2022 ] Mean test loss of 796 batches: 2.497074604034424.
|
299 |
+
[ Mon Sep 12 20:14:53 2022 ] Top1: 48.94%
|
300 |
+
[ Mon Sep 12 20:14:54 2022 ] Top5: 80.40%
|
301 |
+
[ Mon Sep 12 20:14:54 2022 ] Training epoch: 41
|
302 |
+
[ Mon Sep 12 20:15:39 2022 ] Batch(79/243) done. Loss: 0.4706 lr:0.100000
|
303 |
+
[ Mon Sep 12 20:16:32 2022 ] Batch(179/243) done. Loss: 0.5966 lr:0.100000
|
304 |
+
[ Mon Sep 12 20:17:05 2022 ] Eval epoch: 41
|
305 |
+
[ Mon Sep 12 20:19:34 2022 ] Mean test loss of 796 batches: 2.306436777114868.
|
306 |
+
[ Mon Sep 12 20:19:34 2022 ] Top1: 49.38%
|
307 |
+
[ Mon Sep 12 20:19:35 2022 ] Top5: 81.72%
|
308 |
+
[ Mon Sep 12 20:19:35 2022 ] Training epoch: 42
|
309 |
+
[ Mon Sep 12 20:19:58 2022 ] Batch(36/243) done. Loss: 0.4055 lr:0.100000
|
310 |
+
[ Mon Sep 12 20:20:51 2022 ] Batch(136/243) done. Loss: 0.4943 lr:0.100000
|
311 |
+
[ Mon Sep 12 20:21:44 2022 ] Batch(236/243) done. Loss: 0.4343 lr:0.100000
|
312 |
+
[ Mon Sep 12 20:21:47 2022 ] Eval epoch: 42
|
313 |
+
[ Mon Sep 12 20:24:18 2022 ] Mean test loss of 796 batches: 2.6163108348846436.
|
314 |
+
[ Mon Sep 12 20:24:18 2022 ] Top1: 45.79%
|
315 |
+
[ Mon Sep 12 20:24:18 2022 ] Top5: 78.65%
|
316 |
+
[ Mon Sep 12 20:24:19 2022 ] Training epoch: 43
|
317 |
+
[ Mon Sep 12 20:25:12 2022 ] Batch(93/243) done. Loss: 0.4468 lr:0.100000
|
318 |
+
[ Mon Sep 12 20:26:05 2022 ] Batch(193/243) done. Loss: 0.5667 lr:0.100000
|
319 |
+
[ Mon Sep 12 20:26:31 2022 ] Eval epoch: 43
|
320 |
+
[ Mon Sep 12 20:29:01 2022 ] Mean test loss of 796 batches: 2.8471124172210693.
|
321 |
+
[ Mon Sep 12 20:29:01 2022 ] Top1: 44.80%
|
322 |
+
[ Mon Sep 12 20:29:02 2022 ] Top5: 77.51%
|
323 |
+
[ Mon Sep 12 20:29:02 2022 ] Training epoch: 44
|
324 |
+
[ Mon Sep 12 20:29:32 2022 ] Batch(50/243) done. Loss: 0.3060 lr:0.100000
|
325 |
+
[ Mon Sep 12 20:30:25 2022 ] Batch(150/243) done. Loss: 0.5914 lr:0.100000
|
326 |
+
[ Mon Sep 12 20:31:14 2022 ] Eval epoch: 44
|
327 |
+
[ Mon Sep 12 20:33:44 2022 ] Mean test loss of 796 batches: 2.6193630695343018.
|
328 |
+
[ Mon Sep 12 20:33:45 2022 ] Top1: 47.18%
|
329 |
+
[ Mon Sep 12 20:33:45 2022 ] Top5: 80.08%
|
330 |
+
[ Mon Sep 12 20:33:46 2022 ] Training epoch: 45
|
331 |
+
[ Mon Sep 12 20:33:53 2022 ] Batch(7/243) done. Loss: 0.3902 lr:0.100000
|
332 |
+
[ Mon Sep 12 20:34:46 2022 ] Batch(107/243) done. Loss: 0.5657 lr:0.100000
|
333 |
+
[ Mon Sep 12 20:35:39 2022 ] Batch(207/243) done. Loss: 0.2205 lr:0.100000
|
334 |
+
[ Mon Sep 12 20:35:58 2022 ] Eval epoch: 45
|
335 |
+
[ Mon Sep 12 20:38:28 2022 ] Mean test loss of 796 batches: 2.66359806060791.
|
336 |
+
[ Mon Sep 12 20:38:29 2022 ] Top1: 44.93%
|
337 |
+
[ Mon Sep 12 20:38:29 2022 ] Top5: 78.03%
|
338 |
+
[ Mon Sep 12 20:38:29 2022 ] Training epoch: 46
|
339 |
+
[ Mon Sep 12 20:39:07 2022 ] Batch(64/243) done. Loss: 0.1533 lr:0.100000
|
340 |
+
[ Mon Sep 12 20:40:00 2022 ] Batch(164/243) done. Loss: 0.3249 lr:0.100000
|
341 |
+
[ Mon Sep 12 20:40:42 2022 ] Eval epoch: 46
|
342 |
+
[ Mon Sep 12 20:43:12 2022 ] Mean test loss of 796 batches: 2.7212274074554443.
|
343 |
+
[ Mon Sep 12 20:43:12 2022 ] Top1: 43.07%
|
344 |
+
[ Mon Sep 12 20:43:13 2022 ] Top5: 76.16%
|
345 |
+
[ Mon Sep 12 20:43:13 2022 ] Training epoch: 47
|
346 |
+
[ Mon Sep 12 20:43:28 2022 ] Batch(21/243) done. Loss: 0.3087 lr:0.100000
|
347 |
+
[ Mon Sep 12 20:44:21 2022 ] Batch(121/243) done. Loss: 0.4566 lr:0.100000
|
348 |
+
[ Mon Sep 12 20:45:14 2022 ] Batch(221/243) done. Loss: 0.2735 lr:0.100000
|
349 |
+
[ Mon Sep 12 20:45:25 2022 ] Eval epoch: 47
|
350 |
+
[ Mon Sep 12 20:47:56 2022 ] Mean test loss of 796 batches: 2.5096700191497803.
|
351 |
+
[ Mon Sep 12 20:47:56 2022 ] Top1: 48.75%
|
352 |
+
[ Mon Sep 12 20:47:56 2022 ] Top5: 81.11%
|
353 |
+
[ Mon Sep 12 20:47:57 2022 ] Training epoch: 48
|
354 |
+
[ Mon Sep 12 20:48:42 2022 ] Batch(78/243) done. Loss: 0.3789 lr:0.100000
|
355 |
+
[ Mon Sep 12 20:49:35 2022 ] Batch(178/243) done. Loss: 0.3563 lr:0.100000
|
356 |
+
[ Mon Sep 12 20:50:09 2022 ] Eval epoch: 48
|
357 |
+
[ Mon Sep 12 20:52:39 2022 ] Mean test loss of 796 batches: 2.9559221267700195.
|
358 |
+
[ Mon Sep 12 20:52:39 2022 ] Top1: 44.30%
|
359 |
+
[ Mon Sep 12 20:52:40 2022 ] Top5: 76.90%
|
360 |
+
[ Mon Sep 12 20:52:40 2022 ] Training epoch: 49
|
361 |
+
[ Mon Sep 12 20:53:02 2022 ] Batch(35/243) done. Loss: 0.3420 lr:0.100000
|
362 |
+
[ Mon Sep 12 20:53:55 2022 ] Batch(135/243) done. Loss: 0.6071 lr:0.100000
|
363 |
+
[ Mon Sep 12 20:54:48 2022 ] Batch(235/243) done. Loss: 0.2530 lr:0.100000
|
364 |
+
[ Mon Sep 12 20:54:52 2022 ] Eval epoch: 49
|
365 |
+
[ Mon Sep 12 20:57:22 2022 ] Mean test loss of 796 batches: 3.282691478729248.
|
366 |
+
[ Mon Sep 12 20:57:23 2022 ] Top1: 42.52%
|
367 |
+
[ Mon Sep 12 20:57:23 2022 ] Top5: 74.37%
|
368 |
+
[ Mon Sep 12 20:57:23 2022 ] Training epoch: 50
|
369 |
+
[ Mon Sep 12 20:58:16 2022 ] Batch(92/243) done. Loss: 0.4806 lr:0.100000
|
370 |
+
[ Mon Sep 12 20:59:09 2022 ] Batch(192/243) done. Loss: 0.3802 lr:0.100000
|
371 |
+
[ Mon Sep 12 20:59:36 2022 ] Eval epoch: 50
|
372 |
+
[ Mon Sep 12 21:02:06 2022 ] Mean test loss of 796 batches: 3.1574456691741943.
|
373 |
+
[ Mon Sep 12 21:02:06 2022 ] Top1: 43.83%
|
374 |
+
[ Mon Sep 12 21:02:07 2022 ] Top5: 76.17%
|
375 |
+
[ Mon Sep 12 21:02:07 2022 ] Training epoch: 51
|
376 |
+
[ Mon Sep 12 21:02:37 2022 ] Batch(49/243) done. Loss: 0.2924 lr:0.100000
|
377 |
+
[ Mon Sep 12 21:03:30 2022 ] Batch(149/243) done. Loss: 0.3555 lr:0.100000
|
378 |
+
[ Mon Sep 12 21:04:20 2022 ] Eval epoch: 51
|
379 |
+
[ Mon Sep 12 21:06:50 2022 ] Mean test loss of 796 batches: 2.7255911827087402.
|
380 |
+
[ Mon Sep 12 21:06:50 2022 ] Top1: 45.44%
|
381 |
+
[ Mon Sep 12 21:06:51 2022 ] Top5: 80.12%
|
382 |
+
[ Mon Sep 12 21:06:51 2022 ] Training epoch: 52
|
383 |
+
[ Mon Sep 12 21:06:58 2022 ] Batch(6/243) done. Loss: 0.2800 lr:0.100000
|
384 |
+
[ Mon Sep 12 21:07:51 2022 ] Batch(106/243) done. Loss: 0.4368 lr:0.100000
|
385 |
+
[ Mon Sep 12 21:08:44 2022 ] Batch(206/243) done. Loss: 0.3939 lr:0.100000
|
386 |
+
[ Mon Sep 12 21:09:03 2022 ] Eval epoch: 52
|
387 |
+
[ Mon Sep 12 21:11:33 2022 ] Mean test loss of 796 batches: 2.8366401195526123.
|
388 |
+
[ Mon Sep 12 21:11:33 2022 ] Top1: 45.62%
|
389 |
+
[ Mon Sep 12 21:11:34 2022 ] Top5: 78.17%
|
390 |
+
[ Mon Sep 12 21:11:34 2022 ] Training epoch: 53
|
391 |
+
[ Mon Sep 12 21:12:12 2022 ] Batch(63/243) done. Loss: 0.2960 lr:0.100000
|
392 |
+
[ Mon Sep 12 21:13:04 2022 ] Batch(163/243) done. Loss: 0.3067 lr:0.100000
|
393 |
+
[ Mon Sep 12 21:13:46 2022 ] Eval epoch: 53
|
394 |
+
[ Mon Sep 12 21:16:17 2022 ] Mean test loss of 796 batches: 2.702021837234497.
|
395 |
+
[ Mon Sep 12 21:16:17 2022 ] Top1: 48.21%
|
396 |
+
[ Mon Sep 12 21:16:18 2022 ] Top5: 80.06%
|
397 |
+
[ Mon Sep 12 21:16:18 2022 ] Training epoch: 54
|
398 |
+
[ Mon Sep 12 21:16:32 2022 ] Batch(20/243) done. Loss: 0.2548 lr:0.100000
|
399 |
+
[ Mon Sep 12 21:17:25 2022 ] Batch(120/243) done. Loss: 0.3072 lr:0.100000
|
400 |
+
[ Mon Sep 12 21:18:18 2022 ] Batch(220/243) done. Loss: 0.4308 lr:0.100000
|
401 |
+
[ Mon Sep 12 21:18:30 2022 ] Eval epoch: 54
|
402 |
+
[ Mon Sep 12 21:21:00 2022 ] Mean test loss of 796 batches: 2.8809432983398438.
|
403 |
+
[ Mon Sep 12 21:21:01 2022 ] Top1: 44.77%
|
404 |
+
[ Mon Sep 12 21:21:01 2022 ] Top5: 76.19%
|
405 |
+
[ Mon Sep 12 21:21:02 2022 ] Training epoch: 55
|
406 |
+
[ Mon Sep 12 21:21:46 2022 ] Batch(77/243) done. Loss: 0.5762 lr:0.100000
|
407 |
+
[ Mon Sep 12 21:22:39 2022 ] Batch(177/243) done. Loss: 0.5683 lr:0.100000
|
408 |
+
[ Mon Sep 12 21:23:14 2022 ] Eval epoch: 55
|
409 |
+
[ Mon Sep 12 21:25:44 2022 ] Mean test loss of 796 batches: 2.5277981758117676.
|
410 |
+
[ Mon Sep 12 21:25:44 2022 ] Top1: 48.81%
|
411 |
+
[ Mon Sep 12 21:25:44 2022 ] Top5: 80.88%
|
412 |
+
[ Mon Sep 12 21:25:45 2022 ] Training epoch: 56
|
413 |
+
[ Mon Sep 12 21:26:07 2022 ] Batch(34/243) done. Loss: 0.5117 lr:0.100000
|
414 |
+
[ Mon Sep 12 21:27:00 2022 ] Batch(134/243) done. Loss: 0.1882 lr:0.100000
|
415 |
+
[ Mon Sep 12 21:27:53 2022 ] Batch(234/243) done. Loss: 0.3638 lr:0.100000
|
416 |
+
[ Mon Sep 12 21:27:57 2022 ] Eval epoch: 56
|
417 |
+
[ Mon Sep 12 21:30:27 2022 ] Mean test loss of 796 batches: 3.64302396774292.
|
418 |
+
[ Mon Sep 12 21:30:27 2022 ] Top1: 40.97%
|
419 |
+
[ Mon Sep 12 21:30:28 2022 ] Top5: 72.24%
|
420 |
+
[ Mon Sep 12 21:30:28 2022 ] Training epoch: 57
|
421 |
+
[ Mon Sep 12 21:31:20 2022 ] Batch(91/243) done. Loss: 0.3306 lr:0.100000
|
422 |
+
[ Mon Sep 12 21:32:13 2022 ] Batch(191/243) done. Loss: 0.5866 lr:0.100000
|
423 |
+
[ Mon Sep 12 21:32:40 2022 ] Eval epoch: 57
|
424 |
+
[ Mon Sep 12 21:35:10 2022 ] Mean test loss of 796 batches: 2.9249634742736816.
|
425 |
+
[ Mon Sep 12 21:35:10 2022 ] Top1: 45.85%
|
426 |
+
[ Mon Sep 12 21:35:11 2022 ] Top5: 77.70%
|
427 |
+
[ Mon Sep 12 21:35:11 2022 ] Training epoch: 58
|
428 |
+
[ Mon Sep 12 21:35:40 2022 ] Batch(48/243) done. Loss: 0.1647 lr:0.100000
|
429 |
+
[ Mon Sep 12 21:36:33 2022 ] Batch(148/243) done. Loss: 0.5696 lr:0.100000
|
430 |
+
[ Mon Sep 12 21:37:23 2022 ] Eval epoch: 58
|
431 |
+
[ Mon Sep 12 21:39:54 2022 ] Mean test loss of 796 batches: 3.296038866043091.
|
432 |
+
[ Mon Sep 12 21:39:54 2022 ] Top1: 44.77%
|
433 |
+
[ Mon Sep 12 21:39:54 2022 ] Top5: 75.72%
|
434 |
+
[ Mon Sep 12 21:39:55 2022 ] Training epoch: 59
|
435 |
+
[ Mon Sep 12 21:40:01 2022 ] Batch(5/243) done. Loss: 0.1390 lr:0.100000
|
436 |
+
[ Mon Sep 12 21:40:54 2022 ] Batch(105/243) done. Loss: 0.1552 lr:0.100000
|
437 |
+
[ Mon Sep 12 21:41:47 2022 ] Batch(205/243) done. Loss: 0.6132 lr:0.100000
|
438 |
+
[ Mon Sep 12 21:42:07 2022 ] Eval epoch: 59
|
439 |
+
[ Mon Sep 12 21:44:37 2022 ] Mean test loss of 796 batches: 4.113931655883789.
|
440 |
+
[ Mon Sep 12 21:44:37 2022 ] Top1: 31.77%
|
441 |
+
[ Mon Sep 12 21:44:38 2022 ] Top5: 66.28%
|
442 |
+
[ Mon Sep 12 21:44:38 2022 ] Training epoch: 60
|
443 |
+
[ Mon Sep 12 21:45:15 2022 ] Batch(62/243) done. Loss: 0.3674 lr:0.100000
|
444 |
+
[ Mon Sep 12 21:46:08 2022 ] Batch(162/243) done. Loss: 0.5138 lr:0.100000
|
445 |
+
[ Mon Sep 12 21:46:50 2022 ] Eval epoch: 60
|
446 |
+
[ Mon Sep 12 21:49:20 2022 ] Mean test loss of 796 batches: 2.752439498901367.
|
447 |
+
[ Mon Sep 12 21:49:21 2022 ] Top1: 48.44%
|
448 |
+
[ Mon Sep 12 21:49:21 2022 ] Top5: 79.91%
|
449 |
+
[ Mon Sep 12 21:49:21 2022 ] Training epoch: 61
|
450 |
+
[ Mon Sep 12 21:49:35 2022 ] Batch(19/243) done. Loss: 0.1299 lr:0.010000
|
451 |
+
[ Mon Sep 12 21:50:28 2022 ] Batch(119/243) done. Loss: 0.2408 lr:0.010000
|
452 |
+
[ Mon Sep 12 21:51:21 2022 ] Batch(219/243) done. Loss: 0.2369 lr:0.010000
|
453 |
+
[ Mon Sep 12 21:51:34 2022 ] Eval epoch: 61
|
454 |
+
[ Mon Sep 12 21:54:05 2022 ] Mean test loss of 796 batches: 2.21579909324646.
|
455 |
+
[ Mon Sep 12 21:54:05 2022 ] Top1: 55.86%
|
456 |
+
[ Mon Sep 12 21:54:05 2022 ] Top5: 85.32%
|
457 |
+
[ Mon Sep 12 21:54:05 2022 ] Training epoch: 62
|
458 |
+
[ Mon Sep 12 21:54:50 2022 ] Batch(76/243) done. Loss: 0.0834 lr:0.010000
|
459 |
+
[ Mon Sep 12 21:55:43 2022 ] Batch(176/243) done. Loss: 0.0547 lr:0.010000
|
460 |
+
[ Mon Sep 12 21:56:18 2022 ] Eval epoch: 62
|
461 |
+
[ Mon Sep 12 21:58:49 2022 ] Mean test loss of 796 batches: 2.238507032394409.
|
462 |
+
[ Mon Sep 12 21:58:49 2022 ] Top1: 56.63%
|
463 |
+
[ Mon Sep 12 21:58:49 2022 ] Top5: 85.71%
|
464 |
+
[ Mon Sep 12 21:58:50 2022 ] Training epoch: 63
|
465 |
+
[ Mon Sep 12 21:59:11 2022 ] Batch(33/243) done. Loss: 0.0954 lr:0.010000
|
466 |
+
[ Mon Sep 12 22:00:04 2022 ] Batch(133/243) done. Loss: 0.1910 lr:0.010000
|
467 |
+
[ Mon Sep 12 22:00:57 2022 ] Batch(233/243) done. Loss: 0.0895 lr:0.010000
|
468 |
+
[ Mon Sep 12 22:01:02 2022 ] Eval epoch: 63
|
469 |
+
[ Mon Sep 12 22:03:32 2022 ] Mean test loss of 796 batches: 2.265852928161621.
|
470 |
+
[ Mon Sep 12 22:03:32 2022 ] Top1: 56.58%
|
471 |
+
[ Mon Sep 12 22:03:33 2022 ] Top5: 85.87%
|
472 |
+
[ Mon Sep 12 22:03:33 2022 ] Training epoch: 64
|
473 |
+
[ Mon Sep 12 22:04:24 2022 ] Batch(90/243) done. Loss: 0.1179 lr:0.010000
|
474 |
+
[ Mon Sep 12 22:05:17 2022 ] Batch(190/243) done. Loss: 0.1409 lr:0.010000
|
475 |
+
[ Mon Sep 12 22:05:45 2022 ] Eval epoch: 64
|
476 |
+
[ Mon Sep 12 22:08:15 2022 ] Mean test loss of 796 batches: 2.3048434257507324.
|
477 |
+
[ Mon Sep 12 22:08:15 2022 ] Top1: 56.37%
|
478 |
+
[ Mon Sep 12 22:08:16 2022 ] Top5: 85.68%
|
479 |
+
[ Mon Sep 12 22:08:16 2022 ] Training epoch: 65
|
480 |
+
[ Mon Sep 12 22:08:45 2022 ] Batch(47/243) done. Loss: 0.0954 lr:0.010000
|
481 |
+
[ Mon Sep 12 22:09:38 2022 ] Batch(147/243) done. Loss: 0.0779 lr:0.010000
|
482 |
+
[ Mon Sep 12 22:10:28 2022 ] Eval epoch: 65
|
483 |
+
[ Mon Sep 12 22:12:59 2022 ] Mean test loss of 796 batches: 2.3024234771728516.
|
484 |
+
[ Mon Sep 12 22:12:59 2022 ] Top1: 56.74%
|
485 |
+
[ Mon Sep 12 22:12:59 2022 ] Top5: 85.74%
|
486 |
+
[ Mon Sep 12 22:13:00 2022 ] Training epoch: 66
|
487 |
+
[ Mon Sep 12 22:13:06 2022 ] Batch(4/243) done. Loss: 0.1016 lr:0.010000
|
488 |
+
[ Mon Sep 12 22:13:58 2022 ] Batch(104/243) done. Loss: 0.0461 lr:0.010000
|
489 |
+
[ Mon Sep 12 22:14:51 2022 ] Batch(204/243) done. Loss: 0.0116 lr:0.010000
|
490 |
+
[ Mon Sep 12 22:15:12 2022 ] Eval epoch: 66
|
491 |
+
[ Mon Sep 12 22:17:41 2022 ] Mean test loss of 796 batches: 2.4010677337646484.
|
492 |
+
[ Mon Sep 12 22:17:42 2022 ] Top1: 56.48%
|
493 |
+
[ Mon Sep 12 22:17:42 2022 ] Top5: 85.45%
|
494 |
+
[ Mon Sep 12 22:17:42 2022 ] Training epoch: 67
|
495 |
+
[ Mon Sep 12 22:18:19 2022 ] Batch(61/243) done. Loss: 0.0483 lr:0.010000
|
496 |
+
[ Mon Sep 12 22:19:12 2022 ] Batch(161/243) done. Loss: 0.0378 lr:0.010000
|
497 |
+
[ Mon Sep 12 22:19:55 2022 ] Eval epoch: 67
|
498 |
+
[ Mon Sep 12 22:22:25 2022 ] Mean test loss of 796 batches: 2.4693613052368164.
|
499 |
+
[ Mon Sep 12 22:22:25 2022 ] Top1: 55.82%
|
500 |
+
[ Mon Sep 12 22:22:25 2022 ] Top5: 85.19%
|
501 |
+
[ Mon Sep 12 22:22:26 2022 ] Training epoch: 68
|
502 |
+
[ Mon Sep 12 22:22:39 2022 ] Batch(18/243) done. Loss: 0.0381 lr:0.010000
|
503 |
+
[ Mon Sep 12 22:23:32 2022 ] Batch(118/243) done. Loss: 0.0438 lr:0.010000
|
504 |
+
[ Mon Sep 12 22:24:25 2022 ] Batch(218/243) done. Loss: 0.0356 lr:0.010000
|
505 |
+
[ Mon Sep 12 22:24:38 2022 ] Eval epoch: 68
|
506 |
+
[ Mon Sep 12 22:27:08 2022 ] Mean test loss of 796 batches: 2.441556692123413.
|
507 |
+
[ Mon Sep 12 22:27:09 2022 ] Top1: 56.56%
|
508 |
+
[ Mon Sep 12 22:27:09 2022 ] Top5: 85.39%
|
509 |
+
[ Mon Sep 12 22:27:09 2022 ] Training epoch: 69
|
510 |
+
[ Mon Sep 12 22:27:53 2022 ] Batch(75/243) done. Loss: 0.0639 lr:0.010000
|
511 |
+
[ Mon Sep 12 22:28:46 2022 ] Batch(175/243) done. Loss: 0.0246 lr:0.010000
|
512 |
+
[ Mon Sep 12 22:29:21 2022 ] Eval epoch: 69
|
513 |
+
[ Mon Sep 12 22:31:50 2022 ] Mean test loss of 796 batches: 2.452775239944458.
|
514 |
+
[ Mon Sep 12 22:31:50 2022 ] Top1: 56.46%
|
515 |
+
[ Mon Sep 12 22:31:50 2022 ] Top5: 85.42%
|
516 |
+
[ Mon Sep 12 22:31:51 2022 ] Training epoch: 70
|
517 |
+
[ Mon Sep 12 22:32:11 2022 ] Batch(32/243) done. Loss: 0.0645 lr:0.010000
|
518 |
+
[ Mon Sep 12 22:33:03 2022 ] Batch(132/243) done. Loss: 0.0337 lr:0.010000
|
519 |
+
[ Mon Sep 12 22:33:57 2022 ] Batch(232/243) done. Loss: 0.1710 lr:0.010000
|
520 |
+
[ Mon Sep 12 22:34:02 2022 ] Eval epoch: 70
|
521 |
+
[ Mon Sep 12 22:36:30 2022 ] Mean test loss of 796 batches: 2.438685178756714.
|
522 |
+
[ Mon Sep 12 22:36:31 2022 ] Top1: 56.90%
|
523 |
+
[ Mon Sep 12 22:36:31 2022 ] Top5: 85.70%
|
524 |
+
[ Mon Sep 12 22:36:31 2022 ] Training epoch: 71
|
525 |
+
[ Mon Sep 12 22:37:22 2022 ] Batch(89/243) done. Loss: 0.0785 lr:0.010000
[ Mon Sep 12 22:38:14 2022 ] Batch(189/243) done. Loss: 0.0159 lr:0.010000
[ Mon Sep 12 22:38:43 2022 ] Eval epoch: 71
[ Mon Sep 12 22:41:11 2022 ] Mean test loss of 796 batches: 2.4772839546203613.
[ Mon Sep 12 22:41:11 2022 ] Top1: 56.68%
[ Mon Sep 12 22:41:12 2022 ] Top5: 85.52%
[ Mon Sep 12 22:41:12 2022 ] Training epoch: 72
[ Mon Sep 12 22:41:39 2022 ] Batch(46/243) done. Loss: 0.0166 lr:0.010000
[ Mon Sep 12 22:42:32 2022 ] Batch(146/243) done. Loss: 0.1071 lr:0.010000
[ Mon Sep 12 22:43:23 2022 ] Eval epoch: 72
[ Mon Sep 12 22:45:51 2022 ] Mean test loss of 796 batches: 2.5877044200897217.
[ Mon Sep 12 22:45:51 2022 ] Top1: 55.97%
[ Mon Sep 12 22:45:52 2022 ] Top5: 85.10%
[ Mon Sep 12 22:45:52 2022 ] Training epoch: 73
[ Mon Sep 12 22:45:57 2022 ] Batch(3/243) done. Loss: 0.0275 lr:0.010000
[ Mon Sep 12 22:46:49 2022 ] Batch(103/243) done. Loss: 0.0644 lr:0.010000
[ Mon Sep 12 22:47:42 2022 ] Batch(203/243) done. Loss: 0.0433 lr:0.010000
[ Mon Sep 12 22:48:03 2022 ] Eval epoch: 73
[ Mon Sep 12 22:50:32 2022 ] Mean test loss of 796 batches: 2.651918649673462.
[ Mon Sep 12 22:50:32 2022 ] Top1: 55.48%
[ Mon Sep 12 22:50:32 2022 ] Top5: 84.69%
[ Mon Sep 12 22:50:33 2022 ] Training epoch: 74
[ Mon Sep 12 22:51:07 2022 ] Batch(60/243) done. Loss: 0.0305 lr:0.010000
[ Mon Sep 12 22:52:00 2022 ] Batch(160/243) done. Loss: 0.0308 lr:0.010000
[ Mon Sep 12 22:52:43 2022 ] Eval epoch: 74
[ Mon Sep 12 22:55:12 2022 ] Mean test loss of 796 batches: 2.5842037200927734.
[ Mon Sep 12 22:55:12 2022 ] Top1: 56.39%
[ Mon Sep 12 22:55:12 2022 ] Top5: 85.13%
[ Mon Sep 12 22:55:13 2022 ] Training epoch: 75
[ Mon Sep 12 22:55:25 2022 ] Batch(17/243) done. Loss: 0.0366 lr:0.010000
[ Mon Sep 12 22:56:18 2022 ] Batch(117/243) done. Loss: 0.0302 lr:0.010000
[ Mon Sep 12 22:57:10 2022 ] Batch(217/243) done. Loss: 0.0747 lr:0.010000
[ Mon Sep 12 22:57:24 2022 ] Eval epoch: 75
[ Mon Sep 12 22:59:52 2022 ] Mean test loss of 796 batches: 2.570859909057617.
[ Mon Sep 12 22:59:53 2022 ] Top1: 56.14%
[ Mon Sep 12 22:59:53 2022 ] Top5: 85.25%
[ Mon Sep 12 22:59:53 2022 ] Training epoch: 76
[ Mon Sep 12 23:00:36 2022 ] Batch(74/243) done. Loss: 0.0796 lr:0.010000
[ Mon Sep 12 23:01:28 2022 ] Batch(174/243) done. Loss: 0.0257 lr:0.010000
[ Mon Sep 12 23:02:05 2022 ] Eval epoch: 76
[ Mon Sep 12 23:04:33 2022 ] Mean test loss of 796 batches: 2.592128038406372.
[ Mon Sep 12 23:04:34 2022 ] Top1: 55.84%
[ Mon Sep 12 23:04:34 2022 ] Top5: 85.30%
[ Mon Sep 12 23:04:34 2022 ] Training epoch: 77
[ Mon Sep 12 23:04:54 2022 ] Batch(31/243) done. Loss: 0.0744 lr:0.010000
[ Mon Sep 12 23:05:46 2022 ] Batch(131/243) done. Loss: 0.1931 lr:0.010000
[ Mon Sep 12 23:06:39 2022 ] Batch(231/243) done. Loss: 0.1079 lr:0.010000
[ Mon Sep 12 23:06:45 2022 ] Eval epoch: 77
[ Mon Sep 12 23:09:14 2022 ] Mean test loss of 796 batches: 2.520390033721924.
[ Mon Sep 12 23:09:15 2022 ] Top1: 57.01%
[ Mon Sep 12 23:09:15 2022 ] Top5: 85.66%
[ Mon Sep 12 23:09:15 2022 ] Training epoch: 78
[ Mon Sep 12 23:10:05 2022 ] Batch(88/243) done. Loss: 0.0603 lr:0.010000
[ Mon Sep 12 23:10:57 2022 ] Batch(188/243) done. Loss: 0.0310 lr:0.010000
[ Mon Sep 12 23:11:26 2022 ] Eval epoch: 78
[ Mon Sep 12 23:13:55 2022 ] Mean test loss of 796 batches: 2.6386399269104004.
[ Mon Sep 12 23:13:55 2022 ] Top1: 56.20%
[ Mon Sep 12 23:13:55 2022 ] Top5: 85.21%
[ Mon Sep 12 23:13:56 2022 ] Training epoch: 79
[ Mon Sep 12 23:14:22 2022 ] Batch(45/243) done. Loss: 0.0765 lr:0.010000
[ Mon Sep 12 23:15:15 2022 ] Batch(145/243) done. Loss: 0.0186 lr:0.010000
[ Mon Sep 12 23:16:06 2022 ] Eval epoch: 79
[ Mon Sep 12 23:18:35 2022 ] Mean test loss of 796 batches: 2.681422233581543.
[ Mon Sep 12 23:18:36 2022 ] Top1: 55.56%
[ Mon Sep 12 23:18:36 2022 ] Top5: 84.93%
[ Mon Sep 12 23:18:36 2022 ] Training epoch: 80
[ Mon Sep 12 23:18:40 2022 ] Batch(2/243) done. Loss: 0.0715 lr:0.010000
[ Mon Sep 12 23:19:33 2022 ] Batch(102/243) done. Loss: 0.0329 lr:0.010000
[ Mon Sep 12 23:20:26 2022 ] Batch(202/243) done. Loss: 0.0207 lr:0.010000
[ Mon Sep 12 23:20:47 2022 ] Eval epoch: 80
[ Mon Sep 12 23:23:16 2022 ] Mean test loss of 796 batches: 2.7165815830230713.
[ Mon Sep 12 23:23:16 2022 ] Top1: 55.60%
[ Mon Sep 12 23:23:16 2022 ] Top5: 84.92%
[ Mon Sep 12 23:23:17 2022 ] Training epoch: 81
[ Mon Sep 12 23:23:51 2022 ] Batch(59/243) done. Loss: 0.0494 lr:0.001000
[ Mon Sep 12 23:24:44 2022 ] Batch(159/243) done. Loss: 0.0567 lr:0.001000
[ Mon Sep 12 23:25:28 2022 ] Eval epoch: 81
[ Mon Sep 12 23:27:56 2022 ] Mean test loss of 796 batches: 2.6882925033569336.
[ Mon Sep 12 23:27:56 2022 ] Top1: 55.76%
[ Mon Sep 12 23:27:57 2022 ] Top5: 85.11%
[ Mon Sep 12 23:27:57 2022 ] Training epoch: 82
[ Mon Sep 12 23:28:08 2022 ] Batch(16/243) done. Loss: 0.0430 lr:0.001000
[ Mon Sep 12 23:29:01 2022 ] Batch(116/243) done. Loss: 0.0271 lr:0.001000
[ Mon Sep 12 23:29:54 2022 ] Batch(216/243) done. Loss: 0.0803 lr:0.001000
[ Mon Sep 12 23:30:08 2022 ] Eval epoch: 82
[ Mon Sep 12 23:32:37 2022 ] Mean test loss of 796 batches: 2.646885633468628.
[ Mon Sep 12 23:32:37 2022 ] Top1: 56.28%
[ Mon Sep 12 23:32:38 2022 ] Top5: 85.33%
[ Mon Sep 12 23:32:38 2022 ] Training epoch: 83
[ Mon Sep 12 23:33:20 2022 ] Batch(73/243) done. Loss: 0.1074 lr:0.001000
[ Mon Sep 12 23:34:12 2022 ] Batch(173/243) done. Loss: 0.0116 lr:0.001000
[ Mon Sep 12 23:34:49 2022 ] Eval epoch: 83
[ Mon Sep 12 23:37:18 2022 ] Mean test loss of 796 batches: 2.597583770751953.
[ Mon Sep 12 23:37:19 2022 ] Top1: 56.53%
[ Mon Sep 12 23:37:19 2022 ] Top5: 85.37%
[ Mon Sep 12 23:37:19 2022 ] Training epoch: 84
[ Mon Sep 12 23:37:38 2022 ] Batch(30/243) done. Loss: 0.0481 lr:0.001000
[ Mon Sep 12 23:38:31 2022 ] Batch(130/243) done. Loss: 0.0732 lr:0.001000
[ Mon Sep 12 23:39:24 2022 ] Batch(230/243) done. Loss: 0.0687 lr:0.001000
[ Mon Sep 12 23:39:31 2022 ] Eval epoch: 84
[ Mon Sep 12 23:41:59 2022 ] Mean test loss of 796 batches: 2.664794683456421.
[ Mon Sep 12 23:41:59 2022 ] Top1: 56.09%
[ Mon Sep 12 23:42:00 2022 ] Top5: 85.21%
[ Mon Sep 12 23:42:00 2022 ] Training epoch: 85
[ Mon Sep 12 23:42:49 2022 ] Batch(87/243) done. Loss: 0.0488 lr:0.001000
[ Mon Sep 12 23:43:42 2022 ] Batch(187/243) done. Loss: 0.0218 lr:0.001000
[ Mon Sep 12 23:44:11 2022 ] Eval epoch: 85
[ Mon Sep 12 23:46:40 2022 ] Mean test loss of 796 batches: 2.662086009979248.
[ Mon Sep 12 23:46:40 2022 ] Top1: 56.15%
[ Mon Sep 12 23:46:40 2022 ] Top5: 85.20%
[ Mon Sep 12 23:46:41 2022 ] Training epoch: 86
[ Mon Sep 12 23:47:07 2022 ] Batch(44/243) done. Loss: 0.0633 lr:0.001000
[ Mon Sep 12 23:48:00 2022 ] Batch(144/243) done. Loss: 0.0310 lr:0.001000
[ Mon Sep 12 23:48:52 2022 ] Eval epoch: 86
[ Mon Sep 12 23:51:20 2022 ] Mean test loss of 796 batches: 2.6869003772735596.
[ Mon Sep 12 23:51:21 2022 ] Top1: 56.17%
[ Mon Sep 12 23:51:21 2022 ] Top5: 85.22%
[ Mon Sep 12 23:51:21 2022 ] Training epoch: 87
[ Mon Sep 12 23:51:25 2022 ] Batch(1/243) done. Loss: 0.1027 lr:0.001000
[ Mon Sep 12 23:52:18 2022 ] Batch(101/243) done. Loss: 0.0944 lr:0.001000
[ Mon Sep 12 23:53:11 2022 ] Batch(201/243) done. Loss: 0.1088 lr:0.001000
[ Mon Sep 12 23:53:32 2022 ] Eval epoch: 87
[ Mon Sep 12 23:56:00 2022 ] Mean test loss of 796 batches: 2.6837778091430664.
[ Mon Sep 12 23:56:01 2022 ] Top1: 56.26%
[ Mon Sep 12 23:56:01 2022 ] Top5: 85.23%
[ Mon Sep 12 23:56:01 2022 ] Training epoch: 88
[ Mon Sep 12 23:56:35 2022 ] Batch(58/243) done. Loss: 0.0394 lr:0.001000
[ Mon Sep 12 23:57:28 2022 ] Batch(158/243) done. Loss: 0.0608 lr:0.001000
[ Mon Sep 12 23:58:12 2022 ] Eval epoch: 88
[ Tue Sep 13 00:00:41 2022 ] Mean test loss of 796 batches: 2.6194639205932617.
[ Tue Sep 13 00:00:41 2022 ] Top1: 56.45%
[ Tue Sep 13 00:00:41 2022 ] Top5: 85.51%
[ Tue Sep 13 00:00:42 2022 ] Training epoch: 89
[ Tue Sep 13 00:00:53 2022 ] Batch(15/243) done. Loss: 0.0463 lr:0.001000
[ Tue Sep 13 00:01:46 2022 ] Batch(115/243) done. Loss: 0.0355 lr:0.001000
[ Tue Sep 13 00:02:39 2022 ] Batch(215/243) done. Loss: 0.0294 lr:0.001000
[ Tue Sep 13 00:02:53 2022 ] Eval epoch: 89
[ Tue Sep 13 00:05:23 2022 ] Mean test loss of 796 batches: 2.6501572132110596.
[ Tue Sep 13 00:05:24 2022 ] Top1: 56.37%
[ Tue Sep 13 00:05:24 2022 ] Top5: 85.40%
[ Tue Sep 13 00:05:24 2022 ] Training epoch: 90
[ Tue Sep 13 00:06:05 2022 ] Batch(72/243) done. Loss: 0.0114 lr:0.001000
[ Tue Sep 13 00:06:58 2022 ] Batch(172/243) done. Loss: 0.1095 lr:0.001000
[ Tue Sep 13 00:07:35 2022 ] Eval epoch: 90
[ Tue Sep 13 00:10:03 2022 ] Mean test loss of 796 batches: 2.677971363067627.
[ Tue Sep 13 00:10:04 2022 ] Top1: 56.01%
[ Tue Sep 13 00:10:04 2022 ] Top5: 85.13%
[ Tue Sep 13 00:10:04 2022 ] Training epoch: 91
[ Tue Sep 13 00:10:23 2022 ] Batch(29/243) done. Loss: 0.0262 lr:0.001000
[ Tue Sep 13 00:11:15 2022 ] Batch(129/243) done. Loss: 0.0242 lr:0.001000
[ Tue Sep 13 00:12:08 2022 ] Batch(229/243) done. Loss: 0.0689 lr:0.001000
[ Tue Sep 13 00:12:15 2022 ] Eval epoch: 91
[ Tue Sep 13 00:14:44 2022 ] Mean test loss of 796 batches: 2.676117420196533.
[ Tue Sep 13 00:14:44 2022 ] Top1: 56.30%
[ Tue Sep 13 00:14:44 2022 ] Top5: 85.33%
[ Tue Sep 13 00:14:45 2022 ] Training epoch: 92
[ Tue Sep 13 00:15:33 2022 ] Batch(86/243) done. Loss: 0.0536 lr:0.001000
[ Tue Sep 13 00:16:26 2022 ] Batch(186/243) done. Loss: 0.0407 lr:0.001000
[ Tue Sep 13 00:16:56 2022 ] Eval epoch: 92
[ Tue Sep 13 00:19:24 2022 ] Mean test loss of 796 batches: 2.6445698738098145.
[ Tue Sep 13 00:19:25 2022 ] Top1: 56.10%
[ Tue Sep 13 00:19:25 2022 ] Top5: 85.17%
[ Tue Sep 13 00:19:25 2022 ] Training epoch: 93
[ Tue Sep 13 00:19:51 2022 ] Batch(43/243) done. Loss: 0.0268 lr:0.001000
[ Tue Sep 13 00:20:44 2022 ] Batch(143/243) done. Loss: 0.0451 lr:0.001000
[ Tue Sep 13 00:21:36 2022 ] Eval epoch: 93
[ Tue Sep 13 00:24:04 2022 ] Mean test loss of 796 batches: 2.6552574634552.
[ Tue Sep 13 00:24:05 2022 ] Top1: 56.38%
[ Tue Sep 13 00:24:05 2022 ] Top5: 85.42%
[ Tue Sep 13 00:24:05 2022 ] Training epoch: 94
[ Tue Sep 13 00:24:08 2022 ] Batch(0/243) done. Loss: 0.1094 lr:0.001000
[ Tue Sep 13 00:25:01 2022 ] Batch(100/243) done. Loss: 0.0419 lr:0.001000
[ Tue Sep 13 00:25:53 2022 ] Batch(200/243) done. Loss: 0.0359 lr:0.001000
[ Tue Sep 13 00:26:16 2022 ] Eval epoch: 94
[ Tue Sep 13 00:28:44 2022 ] Mean test loss of 796 batches: 2.687640428543091.
[ Tue Sep 13 00:28:44 2022 ] Top1: 56.05%
[ Tue Sep 13 00:28:44 2022 ] Top5: 85.21%
[ Tue Sep 13 00:28:45 2022 ] Training epoch: 95
[ Tue Sep 13 00:29:18 2022 ] Batch(57/243) done. Loss: 0.1638 lr:0.001000
[ Tue Sep 13 00:30:11 2022 ] Batch(157/243) done. Loss: 0.0316 lr:0.001000
[ Tue Sep 13 00:30:55 2022 ] Eval epoch: 95
[ Tue Sep 13 00:33:24 2022 ] Mean test loss of 796 batches: 2.674198865890503.
[ Tue Sep 13 00:33:25 2022 ] Top1: 56.44%
[ Tue Sep 13 00:33:25 2022 ] Top5: 85.35%
[ Tue Sep 13 00:33:25 2022 ] Training epoch: 96
[ Tue Sep 13 00:33:36 2022 ] Batch(14/243) done. Loss: 0.0519 lr:0.001000
[ Tue Sep 13 00:34:29 2022 ] Batch(114/243) done. Loss: 0.0252 lr:0.001000
[ Tue Sep 13 00:35:21 2022 ] Batch(214/243) done. Loss: 0.0330 lr:0.001000
[ Tue Sep 13 00:35:36 2022 ] Eval epoch: 96
[ Tue Sep 13 00:38:05 2022 ] Mean test loss of 796 batches: 2.697723865509033.
[ Tue Sep 13 00:38:05 2022 ] Top1: 55.86%
[ Tue Sep 13 00:38:05 2022 ] Top5: 85.19%
[ Tue Sep 13 00:38:06 2022 ] Training epoch: 97
[ Tue Sep 13 00:38:46 2022 ] Batch(71/243) done. Loss: 0.0756 lr:0.001000
[ Tue Sep 13 00:39:39 2022 ] Batch(171/243) done. Loss: 0.0225 lr:0.001000
[ Tue Sep 13 00:40:16 2022 ] Eval epoch: 97
[ Tue Sep 13 00:42:45 2022 ] Mean test loss of 796 batches: 2.7128243446350098.
[ Tue Sep 13 00:42:45 2022 ] Top1: 55.78%
[ Tue Sep 13 00:42:45 2022 ] Top5: 85.03%
[ Tue Sep 13 00:42:46 2022 ] Training epoch: 98
[ Tue Sep 13 00:43:04 2022 ] Batch(28/243) done. Loss: 0.0676 lr:0.001000
[ Tue Sep 13 00:43:57 2022 ] Batch(128/243) done. Loss: 0.0412 lr:0.001000
[ Tue Sep 13 00:44:49 2022 ] Batch(228/243) done. Loss: 0.0327 lr:0.001000
[ Tue Sep 13 00:44:57 2022 ] Eval epoch: 98
[ Tue Sep 13 00:47:25 2022 ] Mean test loss of 796 batches: 2.72189998626709.
[ Tue Sep 13 00:47:26 2022 ] Top1: 55.88%
[ Tue Sep 13 00:47:26 2022 ] Top5: 84.96%
[ Tue Sep 13 00:47:26 2022 ] Training epoch: 99
[ Tue Sep 13 00:48:15 2022 ] Batch(85/243) done. Loss: 0.0767 lr:0.001000
[ Tue Sep 13 00:49:07 2022 ] Batch(185/243) done. Loss: 0.1553 lr:0.001000
[ Tue Sep 13 00:49:37 2022 ] Eval epoch: 99
[ Tue Sep 13 00:52:06 2022 ] Mean test loss of 796 batches: 2.7023370265960693.
[ Tue Sep 13 00:52:06 2022 ] Top1: 56.12%
[ Tue Sep 13 00:52:07 2022 ] Top5: 84.94%
[ Tue Sep 13 00:52:07 2022 ] Training epoch: 100
[ Tue Sep 13 00:52:33 2022 ] Batch(42/243) done. Loss: 0.0636 lr:0.001000
[ Tue Sep 13 00:53:25 2022 ] Batch(142/243) done. Loss: 0.0703 lr:0.001000
[ Tue Sep 13 00:54:18 2022 ] Batch(242/243) done. Loss: 0.0472 lr:0.001000
[ Tue Sep 13 00:54:19 2022 ] Eval epoch: 100
[ Tue Sep 13 00:56:47 2022 ] Mean test loss of 796 batches: 2.7367427349090576.
[ Tue Sep 13 00:56:48 2022 ] Top1: 56.10%
[ Tue Sep 13 00:56:48 2022 ] Top5: 85.17%
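Note: every eval block in these logs follows the same three-line pattern (mean test loss, Top1, Top5), so the best accuracy of any run in this upload can be recovered from its log.txt with a short script. A minimal sketch, not part of the original files; the log path below is one of the files in this commit:

import re

# Pull every per-epoch Top-1 value out of a log.txt from this upload.
# The "[ <timestamp> ] Top1: NN.NN%" format is copied from the lines above.
top1_re = re.compile(r'\] Top1: ([0-9.]+)%')

accs = []
with open('ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/log.txt') as f:
    for line in f:
        m = top1_re.search(line)
        if m:
            accs.append(float(m.group(1)))

print(f'{len(accs)} evals, best Top-1: {max(accs):.2f}%')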
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/config.yaml
ADDED
@@ -0,0 +1,59 @@
Experiment_name: ntu_bone_motion_xsub
base_lr: 0.1
batch_size: 64
config: ./config/nturgbd-cross-subject/train_bone_motion.yaml
device:
- 6
- 7
eval_interval: 5
feeder: feeders.feeder.Feeder
groups: 8
ignore_weights: []
keep_rate: 0.9
log_interval: 100
model: model.decouple_gcn.Model
model_args:
  block_size: 41
  graph: graph.ntu_rgb_d.Graph
  graph_args:
    labeling_mode: spatial
  groups: 16
  num_class: 60
  num_person: 2
  num_point: 25
model_saved_name: ./save_models/ntu_bone_motion_xsub
nesterov: true
num_epoch: 100
num_worker: 32
only_train_epoch: 1
only_train_part: true
optimizer: SGD
phase: train
print_log: true
save_interval: 2
save_score: false
seed: 1
show_topk:
- 1
- 5
start_epoch: 0
step:
- 60
- 80
test_batch_size: 64
test_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone_motion.npy
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
train_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone_motion.npy
  debug: false
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
  normalization: false
  random_choose: false
  random_move: false
  random_shift: false
  window_size: -1
warm_up_epoch: 0
weight_decay: 0.0001
weights: null
work_dir: ./work_dir/ntu_bone_motion_xsub
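The file above is ordinary YAML, so the run's hyperparameters can be read back programmatically. A minimal sketch, assuming PyYAML is installed and a local copy of the file; the keys are exactly the ones listed above:

import yaml

# Load the training config shown above and inspect a few fields.
with open('config.yaml') as f:  # hypothetical local copy of the file above
    cfg = yaml.safe_load(f)

print(cfg['model'])       # 'model.decouple_gcn.Model'
print(cfg['model_args'])  # the kwargs handed to the Model class below
print(cfg['step'])        # [60, 80]: matches the lr drops 0.1 -> 0.01 -> 0.001 in the logs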
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import math
from model.dropSke import DropBlock_Ske
from model.dropT import DropBlockT_1d


def import_class(name):
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod


def conv_branch_init(conv):
    weight = conv.weight
    n = weight.size(0)
    k1 = weight.size(1)
    k2 = weight.size(2)
    nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
    nn.init.constant(conv.bias, 0)


def conv_init(conv):
    nn.init.kaiming_normal(conv.weight, mode='fan_out')
    nn.init.constant(conv.bias, 0)


def bn_init(bn, scale):
    nn.init.constant(bn.weight, scale)
    nn.init.constant(bn.bias, 0)


class unit_tcn(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
        super(unit_tcn, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

        self.dropS = DropBlock_Ske(num_point=num_point)
        self.dropT = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob, A):
        x = self.bn(self.conv(x))
        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
        return x


class unit_tcn_skip(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
        super(unit_tcn_skip, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

    def forward(self, x):
        x = self.bn(self.conv(x))
        return x


class unit_gcn(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
        super(unit_gcn, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_point = num_point
        self.groups = groups
        self.num_subset = num_subset
        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)

        if in_channels != out_channels:
            self.down = nn.Sequential(
                nn.Conv2d(in_channels, out_channels, 1),
                nn.BatchNorm2d(out_channels)
            )
        else:
            self.down = lambda x: x

        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                bn_init(m, 1)
        bn_init(self.bn, 1e-6)

        self.Linear_weight = nn.Parameter(torch.zeros(
            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
            0.5 / (out_channels * num_subset)))

        self.Linear_bias = nn.Parameter(torch.zeros(
            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.constant(self.Linear_bias, 1e-6)

        eye_array = []
        for i in range(out_channels):
            eye_array.append(torch.eye(num_point))
        self.eyes = nn.Parameter(torch.tensor(torch.stack(
            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]

    def norm(self, A):
        b, c, h, w = A.size()
        A = A.view(c, self.num_point, self.num_point)
        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
        D_list_12 = (D_list + 0.001)**(-1)
        D_12 = self.eyes * D_list_12
        A = torch.bmm(A, D_12).view(b, c, h, w)
        return A

    def forward(self, x0):
        learn_A = self.DecoupleA.repeat(
            1, self.out_channels // self.groups, 1, 1)
        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)

        x = torch.einsum(
            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
        x = x + self.Linear_bias
        x = self.bn0(x)

        n, kc, t, v = x.size()
        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))

        x = self.bn(x)
        x += self.down(x0)
        x = self.relu(x)
        return x


class TCN_GCN_unit(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
        super(TCN_GCN_unit, self).__init__()
        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
        self.tcn1 = unit_tcn(out_channels, out_channels,
                             stride=stride, num_point=num_point)
        self.relu = nn.ReLU()

        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)

        if not residual:
            self.residual = lambda x: 0

        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x

        else:
            self.residual = unit_tcn_skip(
                in_channels, out_channels, kernel_size=1, stride=stride)
        self.dropSke = DropBlock_Ske(num_point=num_point)
        self.dropT_skip = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob):
        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
        return self.relu(x)


class Model(nn.Module):
    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
        super(Model, self).__init__()

        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)

        A = self.graph.A
        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)

        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
                               block_size, residual=False)
        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l5 = TCN_GCN_unit(
            64, 128, A, groups, num_point, block_size, stride=2)
        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l8 = TCN_GCN_unit(128, 256, A, groups,
                               num_point, block_size, stride=2)
        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)

        self.fc = nn.Linear(256, num_class)
        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
        bn_init(self.data_bn, 1)

    def forward(self, x, keep_prob=0.9):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(
            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        x = self.l1(x, 1.0)
        x = self.l2(x, 1.0)
        x = self.l3(x, 1.0)
        x = self.l4(x, 1.0)
        x = self.l5(x, 1.0)
        x = self.l6(x, 1.0)
        x = self.l7(x, keep_prob)
        x = self.l8(x, keep_prob)
        x = self.l9(x, keep_prob)
        x = self.l10(x, keep_prob)

        # N*M,C,T,V
        c_new = x.size(1)
        x = x.reshape(N, M, c_new, -1)
        x = x.mean(3).mean(1)

        return self.fc(x)
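For reference, a minimal usage sketch for the Model class above, with the constructor arguments taken from model_args in the config; it assumes the repository's model/ and graph/ packages are importable and, because several parameters are created with device='cuda', a CUDA-capable machine:

import torch
from model.decouple_gcn import Model

# Instantiate with the model_args from the config above.
model = Model(num_class=60, num_point=25, num_person=2, groups=16,
              block_size=41, graph='graph.ntu_rgb_d.Graph',
              graph_args={'labeling_mode': 'spatial'}).cuda()

# Dummy skeleton batch: (N, C, T, V, M) = (batch, coords, frames, joints, bodies).
x = torch.randn(2, 3, 64, 25, 2, device='cuda')
logits = model(x, keep_prob=0.9)  # keep_prob matches keep_rate in the config
print(logits.shape)               # torch.Size([2, 60])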
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3791fa834c3f09decb16e0f7ec4152d246b53aaa6cb3ba55b7804744365641bd
size 4979902
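The three lines above are a Git LFS pointer, not the pickle itself; the real best_acc.pkl has to be fetched before it can be loaded. A sketch, assuming git-lfs is installed; what the pickle contains (per-sample eval results is an assumption) depends on what the training script saved:

# git lfs pull --include "ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/eval_results/best_acc.pkl"

import pickle

with open('ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/eval_results/best_acc.pkl', 'rb') as f:
    results = pickle.load(f)  # assumption: the eval scores saved at the best-accuracy epoch
print(type(results))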
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_motion_xsub/log.txt
ADDED
@@ -0,0 +1,626 @@
[ Tue Sep 13 10:03:55 2022 ] Parameters:
{'work_dir': './work_dir/ntu_bone_motion_xsub', 'model_saved_name': './save_models/ntu_bone_motion_xsub', 'Experiment_name': 'ntu_bone_motion_xsub', 'config': './config/nturgbd-cross-subject/train_bone_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [6, 7], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}

[ Tue Sep 13 10:03:55 2022 ] Training epoch: 1
[ Tue Sep 13 10:04:44 2022 ] Batch(99/123) done. Loss: 2.9891 lr:0.100000
[ Tue Sep 13 10:04:55 2022 ] Eval epoch: 1
[ Tue Sep 13 10:05:45 2022 ] Mean test loss of 258 batches: 5.3130412101745605.
[ Tue Sep 13 10:05:45 2022 ] Top1: 4.62%
[ Tue Sep 13 10:05:45 2022 ] Top5: 17.12%
[ Tue Sep 13 10:05:46 2022 ] Training epoch: 2
[ Tue Sep 13 10:06:29 2022 ] Batch(76/123) done. Loss: 2.5252 lr:0.100000
[ Tue Sep 13 10:06:54 2022 ] Eval epoch: 2
[ Tue Sep 13 10:07:45 2022 ] Mean test loss of 258 batches: 4.381324291229248.
[ Tue Sep 13 10:07:45 2022 ] Top1: 8.96%
[ Tue Sep 13 10:07:45 2022 ] Top5: 27.62%
[ Tue Sep 13 10:07:45 2022 ] Training epoch: 3
[ Tue Sep 13 10:08:16 2022 ] Batch(53/123) done. Loss: 2.8846 lr:0.100000
[ Tue Sep 13 10:08:54 2022 ] Eval epoch: 3
[ Tue Sep 13 10:09:44 2022 ] Mean test loss of 258 batches: 4.099518299102783.
[ Tue Sep 13 10:09:44 2022 ] Top1: 13.61%
[ Tue Sep 13 10:09:44 2022 ] Top5: 42.49%
[ Tue Sep 13 10:09:44 2022 ] Training epoch: 4
[ Tue Sep 13 10:10:03 2022 ] Batch(30/123) done. Loss: 1.9974 lr:0.100000
[ Tue Sep 13 10:10:53 2022 ] Eval epoch: 4
[ Tue Sep 13 10:11:43 2022 ] Mean test loss of 258 batches: 3.317542791366577.
[ Tue Sep 13 10:11:43 2022 ] Top1: 17.44%
[ Tue Sep 13 10:11:43 2022 ] Top5: 48.21%
[ Tue Sep 13 10:11:43 2022 ] Training epoch: 5
[ Tue Sep 13 10:11:50 2022 ] Batch(7/123) done. Loss: 1.7207 lr:0.100000
[ Tue Sep 13 10:12:44 2022 ] Batch(107/123) done. Loss: 1.3655 lr:0.100000
[ Tue Sep 13 10:12:52 2022 ] Eval epoch: 5
[ Tue Sep 13 10:13:42 2022 ] Mean test loss of 258 batches: 3.480855703353882.
[ Tue Sep 13 10:13:42 2022 ] Top1: 22.24%
[ Tue Sep 13 10:13:42 2022 ] Top5: 59.80%
[ Tue Sep 13 10:13:42 2022 ] Training epoch: 6
[ Tue Sep 13 10:14:30 2022 ] Batch(84/123) done. Loss: 1.3335 lr:0.100000
[ Tue Sep 13 10:14:50 2022 ] Eval epoch: 6
[ Tue Sep 13 10:15:41 2022 ] Mean test loss of 258 batches: 6.322797775268555.
[ Tue Sep 13 10:15:41 2022 ] Top1: 12.08%
[ Tue Sep 13 10:15:41 2022 ] Top5: 34.04%
[ Tue Sep 13 10:15:41 2022 ] Training epoch: 7
[ Tue Sep 13 10:16:16 2022 ] Batch(61/123) done. Loss: 1.4482 lr:0.100000
[ Tue Sep 13 10:16:49 2022 ] Eval epoch: 7
[ Tue Sep 13 10:17:40 2022 ] Mean test loss of 258 batches: 3.362936496734619.
[ Tue Sep 13 10:17:40 2022 ] Top1: 24.81%
[ Tue Sep 13 10:17:40 2022 ] Top5: 59.66%
[ Tue Sep 13 10:17:40 2022 ] Training epoch: 8
[ Tue Sep 13 10:18:03 2022 ] Batch(38/123) done. Loss: 1.2326 lr:0.100000
[ Tue Sep 13 10:18:48 2022 ] Eval epoch: 8
[ Tue Sep 13 10:19:38 2022 ] Mean test loss of 258 batches: 2.9007482528686523.
[ Tue Sep 13 10:19:38 2022 ] Top1: 30.63%
[ Tue Sep 13 10:19:39 2022 ] Top5: 68.46%
[ Tue Sep 13 10:19:39 2022 ] Training epoch: 9
[ Tue Sep 13 10:19:50 2022 ] Batch(15/123) done. Loss: 1.0663 lr:0.100000
[ Tue Sep 13 10:20:43 2022 ] Batch(115/123) done. Loss: 0.8887 lr:0.100000
[ Tue Sep 13 10:20:47 2022 ] Eval epoch: 9
[ Tue Sep 13 10:21:38 2022 ] Mean test loss of 258 batches: 3.1942429542541504.
[ Tue Sep 13 10:21:38 2022 ] Top1: 28.76%
[ Tue Sep 13 10:21:38 2022 ] Top5: 64.62%
[ Tue Sep 13 10:21:38 2022 ] Training epoch: 10
[ Tue Sep 13 10:22:30 2022 ] Batch(92/123) done. Loss: 0.9215 lr:0.100000
[ Tue Sep 13 10:22:46 2022 ] Eval epoch: 10
[ Tue Sep 13 10:23:36 2022 ] Mean test loss of 258 batches: 3.464066982269287.
[ Tue Sep 13 10:23:36 2022 ] Top1: 24.20%
[ Tue Sep 13 10:23:37 2022 ] Top5: 57.41%
[ Tue Sep 13 10:23:37 2022 ] Training epoch: 11
[ Tue Sep 13 10:24:17 2022 ] Batch(69/123) done. Loss: 1.1655 lr:0.100000
[ Tue Sep 13 10:24:45 2022 ] Eval epoch: 11
[ Tue Sep 13 10:25:35 2022 ] Mean test loss of 258 batches: 4.239729881286621.
[ Tue Sep 13 10:25:35 2022 ] Top1: 26.44%
[ Tue Sep 13 10:25:36 2022 ] Top5: 61.07%
[ Tue Sep 13 10:25:36 2022 ] Training epoch: 12
[ Tue Sep 13 10:26:03 2022 ] Batch(46/123) done. Loss: 0.8782 lr:0.100000
[ Tue Sep 13 10:26:44 2022 ] Eval epoch: 12
[ Tue Sep 13 10:27:34 2022 ] Mean test loss of 258 batches: 2.6224913597106934.
[ Tue Sep 13 10:27:34 2022 ] Top1: 40.12%
[ Tue Sep 13 10:27:35 2022 ] Top5: 77.62%
[ Tue Sep 13 10:27:35 2022 ] Training epoch: 13
[ Tue Sep 13 10:27:50 2022 ] Batch(23/123) done. Loss: 1.0548 lr:0.100000
[ Tue Sep 13 10:28:43 2022 ] Eval epoch: 13
[ Tue Sep 13 10:29:33 2022 ] Mean test loss of 258 batches: 2.6538028717041016.
[ Tue Sep 13 10:29:33 2022 ] Top1: 34.23%
[ Tue Sep 13 10:29:34 2022 ] Top5: 70.93%
[ Tue Sep 13 10:29:34 2022 ] Training epoch: 14
[ Tue Sep 13 10:29:37 2022 ] Batch(0/123) done. Loss: 0.8820 lr:0.100000
[ Tue Sep 13 10:30:30 2022 ] Batch(100/123) done. Loss: 1.2009 lr:0.100000
[ Tue Sep 13 10:30:42 2022 ] Eval epoch: 14
[ Tue Sep 13 10:31:32 2022 ] Mean test loss of 258 batches: 3.0920660495758057.
[ Tue Sep 13 10:31:32 2022 ] Top1: 30.11%
[ Tue Sep 13 10:31:32 2022 ] Top5: 65.30%
[ Tue Sep 13 10:31:33 2022 ] Training epoch: 15
[ Tue Sep 13 10:32:17 2022 ] Batch(77/123) done. Loss: 0.8715 lr:0.100000
[ Tue Sep 13 10:32:41 2022 ] Eval epoch: 15
[ Tue Sep 13 10:33:31 2022 ] Mean test loss of 258 batches: 2.1878817081451416.
[ Tue Sep 13 10:33:31 2022 ] Top1: 40.13%
[ Tue Sep 13 10:33:31 2022 ] Top5: 78.24%
[ Tue Sep 13 10:33:31 2022 ] Training epoch: 16
[ Tue Sep 13 10:34:03 2022 ] Batch(54/123) done. Loss: 0.6270 lr:0.100000
[ Tue Sep 13 10:34:40 2022 ] Eval epoch: 16
[ Tue Sep 13 10:35:30 2022 ] Mean test loss of 258 batches: 2.6736347675323486.
[ Tue Sep 13 10:35:30 2022 ] Top1: 37.96%
[ Tue Sep 13 10:35:30 2022 ] Top5: 75.54%
[ Tue Sep 13 10:35:30 2022 ] Training epoch: 17
[ Tue Sep 13 10:35:50 2022 ] Batch(31/123) done. Loss: 0.6887 lr:0.100000
[ Tue Sep 13 10:36:38 2022 ] Eval epoch: 17
[ Tue Sep 13 10:37:29 2022 ] Mean test loss of 258 batches: 3.669713258743286.
[ Tue Sep 13 10:37:29 2022 ] Top1: 37.02%
[ Tue Sep 13 10:37:29 2022 ] Top5: 76.65%
[ Tue Sep 13 10:37:29 2022 ] Training epoch: 18
[ Tue Sep 13 10:37:36 2022 ] Batch(8/123) done. Loss: 0.8408 lr:0.100000
[ Tue Sep 13 10:38:29 2022 ] Batch(108/123) done. Loss: 0.8188 lr:0.100000
[ Tue Sep 13 10:38:37 2022 ] Eval epoch: 18
[ Tue Sep 13 10:39:28 2022 ] Mean test loss of 258 batches: 3.194699764251709.
[ Tue Sep 13 10:39:28 2022 ] Top1: 42.10%
[ Tue Sep 13 10:39:28 2022 ] Top5: 79.03%
[ Tue Sep 13 10:39:28 2022 ] Training epoch: 19
[ Tue Sep 13 10:40:16 2022 ] Batch(85/123) done. Loss: 0.8010 lr:0.100000
[ Tue Sep 13 10:40:36 2022 ] Eval epoch: 19
[ Tue Sep 13 10:41:27 2022 ] Mean test loss of 258 batches: 2.3766753673553467.
[ Tue Sep 13 10:41:27 2022 ] Top1: 42.75%
[ Tue Sep 13 10:41:27 2022 ] Top5: 80.23%
[ Tue Sep 13 10:41:27 2022 ] Training epoch: 20
[ Tue Sep 13 10:42:03 2022 ] Batch(62/123) done. Loss: 1.0108 lr:0.100000
[ Tue Sep 13 10:42:35 2022 ] Eval epoch: 20
[ Tue Sep 13 10:43:25 2022 ] Mean test loss of 258 batches: 3.601966142654419.
[ Tue Sep 13 10:43:25 2022 ] Top1: 39.66%
[ Tue Sep 13 10:43:25 2022 ] Top5: 77.24%
[ Tue Sep 13 10:43:26 2022 ] Training epoch: 21
[ Tue Sep 13 10:43:49 2022 ] Batch(39/123) done. Loss: 0.4037 lr:0.100000
[ Tue Sep 13 10:44:34 2022 ] Eval epoch: 21
[ Tue Sep 13 10:45:24 2022 ] Mean test loss of 258 batches: 2.5317587852478027.
[ Tue Sep 13 10:45:24 2022 ] Top1: 40.53%
[ Tue Sep 13 10:45:24 2022 ] Top5: 82.08%
[ Tue Sep 13 10:45:24 2022 ] Training epoch: 22
[ Tue Sep 13 10:45:36 2022 ] Batch(16/123) done. Loss: 0.6688 lr:0.100000
[ Tue Sep 13 10:46:29 2022 ] Batch(116/123) done. Loss: 0.6076 lr:0.100000
[ Tue Sep 13 10:46:33 2022 ] Eval epoch: 22
[ Tue Sep 13 10:47:23 2022 ] Mean test loss of 258 batches: 2.8892345428466797.
[ Tue Sep 13 10:47:23 2022 ] Top1: 39.92%
[ Tue Sep 13 10:47:23 2022 ] Top5: 76.73%
[ Tue Sep 13 10:47:23 2022 ] Training epoch: 23
[ Tue Sep 13 10:48:15 2022 ] Batch(93/123) done. Loss: 0.5551 lr:0.100000
[ Tue Sep 13 10:48:31 2022 ] Eval epoch: 23
[ Tue Sep 13 10:49:21 2022 ] Mean test loss of 258 batches: 3.017786741256714.
[ Tue Sep 13 10:49:21 2022 ] Top1: 41.66%
[ Tue Sep 13 10:49:22 2022 ] Top5: 77.39%
[ Tue Sep 13 10:49:22 2022 ] Training epoch: 24
[ Tue Sep 13 10:50:02 2022 ] Batch(70/123) done. Loss: 0.5009 lr:0.100000
[ Tue Sep 13 10:50:30 2022 ] Eval epoch: 24
[ Tue Sep 13 10:51:20 2022 ] Mean test loss of 258 batches: 3.0127227306365967.
[ Tue Sep 13 10:51:20 2022 ] Top1: 37.76%
[ Tue Sep 13 10:51:20 2022 ] Top5: 76.32%
[ Tue Sep 13 10:51:20 2022 ] Training epoch: 25
[ Tue Sep 13 10:51:49 2022 ] Batch(47/123) done. Loss: 0.5799 lr:0.100000
[ Tue Sep 13 10:52:29 2022 ] Eval epoch: 25
[ Tue Sep 13 10:53:19 2022 ] Mean test loss of 258 batches: 2.429211139678955.
[ Tue Sep 13 10:53:19 2022 ] Top1: 47.58%
[ Tue Sep 13 10:53:19 2022 ] Top5: 82.33%
[ Tue Sep 13 10:53:19 2022 ] Training epoch: 26
[ Tue Sep 13 10:53:35 2022 ] Batch(24/123) done. Loss: 0.3484 lr:0.100000
[ Tue Sep 13 10:54:28 2022 ] Eval epoch: 26
[ Tue Sep 13 10:55:18 2022 ] Mean test loss of 258 batches: 2.745936393737793.
[ Tue Sep 13 10:55:18 2022 ] Top1: 44.03%
[ Tue Sep 13 10:55:18 2022 ] Top5: 80.53%
[ Tue Sep 13 10:55:18 2022 ] Training epoch: 27
[ Tue Sep 13 10:55:22 2022 ] Batch(1/123) done. Loss: 0.5052 lr:0.100000
[ Tue Sep 13 10:56:15 2022 ] Batch(101/123) done. Loss: 0.4921 lr:0.100000
[ Tue Sep 13 10:56:27 2022 ] Eval epoch: 27
[ Tue Sep 13 10:57:17 2022 ] Mean test loss of 258 batches: 2.7233641147613525.
[ Tue Sep 13 10:57:17 2022 ] Top1: 43.19%
[ Tue Sep 13 10:57:17 2022 ] Top5: 82.20%
[ Tue Sep 13 10:57:17 2022 ] Training epoch: 28
[ Tue Sep 13 10:58:02 2022 ] Batch(78/123) done. Loss: 0.5670 lr:0.100000
[ Tue Sep 13 10:58:25 2022 ] Eval epoch: 28
[ Tue Sep 13 10:59:16 2022 ] Mean test loss of 258 batches: 39.702354431152344.
[ Tue Sep 13 10:59:16 2022 ] Top1: 3.87%
[ Tue Sep 13 10:59:16 2022 ] Top5: 15.35%
[ Tue Sep 13 10:59:16 2022 ] Training epoch: 29
[ Tue Sep 13 10:59:48 2022 ] Batch(55/123) done. Loss: 0.5970 lr:0.100000
[ Tue Sep 13 11:00:24 2022 ] Eval epoch: 29
[ Tue Sep 13 11:01:14 2022 ] Mean test loss of 258 batches: 4.118558883666992.
[ Tue Sep 13 11:01:15 2022 ] Top1: 34.68%
[ Tue Sep 13 11:01:15 2022 ] Top5: 68.70%
[ Tue Sep 13 11:01:15 2022 ] Training epoch: 30
[ Tue Sep 13 11:01:35 2022 ] Batch(32/123) done. Loss: 0.3338 lr:0.100000
[ Tue Sep 13 11:02:23 2022 ] Eval epoch: 30
[ Tue Sep 13 11:03:13 2022 ] Mean test loss of 258 batches: 3.561509847640991.
[ Tue Sep 13 11:03:13 2022 ] Top1: 42.97%
[ Tue Sep 13 11:03:13 2022 ] Top5: 80.15%
[ Tue Sep 13 11:03:14 2022 ] Training epoch: 31
[ Tue Sep 13 11:03:21 2022 ] Batch(9/123) done. Loss: 0.2743 lr:0.100000
[ Tue Sep 13 11:04:15 2022 ] Batch(109/123) done. Loss: 0.3693 lr:0.100000
[ Tue Sep 13 11:04:22 2022 ] Eval epoch: 31
[ Tue Sep 13 11:05:12 2022 ] Mean test loss of 258 batches: 2.743839740753174.
[ Tue Sep 13 11:05:12 2022 ] Top1: 40.47%
[ Tue Sep 13 11:05:12 2022 ] Top5: 80.02%
[ Tue Sep 13 11:05:12 2022 ] Training epoch: 32
[ Tue Sep 13 11:06:01 2022 ] Batch(86/123) done. Loss: 0.3306 lr:0.100000
[ Tue Sep 13 11:06:21 2022 ] Eval epoch: 32
[ Tue Sep 13 11:07:11 2022 ] Mean test loss of 258 batches: 6.932523727416992.
[ Tue Sep 13 11:07:11 2022 ] Top1: 25.40%
[ Tue Sep 13 11:07:11 2022 ] Top5: 58.93%
[ Tue Sep 13 11:07:11 2022 ] Training epoch: 33
[ Tue Sep 13 11:07:48 2022 ] Batch(63/123) done. Loss: 0.2861 lr:0.100000
[ Tue Sep 13 11:08:20 2022 ] Eval epoch: 33
[ Tue Sep 13 11:09:10 2022 ] Mean test loss of 258 batches: 2.9469125270843506.
[ Tue Sep 13 11:09:10 2022 ] Top1: 47.30%
[ Tue Sep 13 11:09:11 2022 ] Top5: 84.76%
[ Tue Sep 13 11:09:11 2022 ] Training epoch: 34
[ Tue Sep 13 11:09:35 2022 ] Batch(40/123) done. Loss: 0.3769 lr:0.100000
[ Tue Sep 13 11:10:19 2022 ] Eval epoch: 34
[ Tue Sep 13 11:11:09 2022 ] Mean test loss of 258 batches: 2.7189626693725586.
[ Tue Sep 13 11:11:09 2022 ] Top1: 45.64%
[ Tue Sep 13 11:11:09 2022 ] Top5: 81.31%
[ Tue Sep 13 11:11:09 2022 ] Training epoch: 35
[ Tue Sep 13 11:11:22 2022 ] Batch(17/123) done. Loss: 0.7309 lr:0.100000
[ Tue Sep 13 11:12:15 2022 ] Batch(117/123) done. Loss: 0.3021 lr:0.100000
[ Tue Sep 13 11:12:18 2022 ] Eval epoch: 35
[ Tue Sep 13 11:13:08 2022 ] Mean test loss of 258 batches: 4.895280838012695.
[ Tue Sep 13 11:13:08 2022 ] Top1: 29.75%
[ Tue Sep 13 11:13:08 2022 ] Top5: 58.14%
[ Tue Sep 13 11:13:08 2022 ] Training epoch: 36
[ Tue Sep 13 11:14:01 2022 ] Batch(94/123) done. Loss: 0.3497 lr:0.100000
[ Tue Sep 13 11:14:16 2022 ] Eval epoch: 36
[ Tue Sep 13 11:15:06 2022 ] Mean test loss of 258 batches: 3.470442056655884.
[ Tue Sep 13 11:15:06 2022 ] Top1: 37.30%
[ Tue Sep 13 11:15:06 2022 ] Top5: 76.48%
[ Tue Sep 13 11:15:07 2022 ] Training epoch: 37
[ Tue Sep 13 11:15:47 2022 ] Batch(71/123) done. Loss: 0.2233 lr:0.100000
[ Tue Sep 13 11:16:15 2022 ] Eval epoch: 37
[ Tue Sep 13 11:17:05 2022 ] Mean test loss of 258 batches: 8.42135238647461.
[ Tue Sep 13 11:17:05 2022 ] Top1: 20.71%
[ Tue Sep 13 11:17:05 2022 ] Top5: 52.13%
[ Tue Sep 13 11:17:05 2022 ] Training epoch: 38
[ Tue Sep 13 11:17:34 2022 ] Batch(48/123) done. Loss: 0.3567 lr:0.100000
[ Tue Sep 13 11:18:13 2022 ] Eval epoch: 38
[ Tue Sep 13 11:19:04 2022 ] Mean test loss of 258 batches: 6.156604766845703.
[ Tue Sep 13 11:19:04 2022 ] Top1: 32.16%
[ Tue Sep 13 11:19:04 2022 ] Top5: 61.78%
[ Tue Sep 13 11:19:04 2022 ] Training epoch: 39
[ Tue Sep 13 11:19:21 2022 ] Batch(25/123) done. Loss: 0.2740 lr:0.100000
[ Tue Sep 13 11:20:13 2022 ] Eval epoch: 39
[ Tue Sep 13 11:21:03 2022 ] Mean test loss of 258 batches: 3.301473379135132.
[ Tue Sep 13 11:21:03 2022 ] Top1: 44.66%
[ Tue Sep 13 11:21:04 2022 ] Top5: 79.78%
[ Tue Sep 13 11:21:04 2022 ] Training epoch: 40
[ Tue Sep 13 11:21:08 2022 ] Batch(2/123) done. Loss: 0.4649 lr:0.100000
[ Tue Sep 13 11:22:01 2022 ] Batch(102/123) done. Loss: 0.4007 lr:0.100000
[ Tue Sep 13 11:22:11 2022 ] Eval epoch: 40
[ Tue Sep 13 11:23:02 2022 ] Mean test loss of 258 batches: 4.057832717895508.
[ Tue Sep 13 11:23:02 2022 ] Top1: 42.69%
[ Tue Sep 13 11:23:02 2022 ] Top5: 79.61%
[ Tue Sep 13 11:23:02 2022 ] Training epoch: 41
[ Tue Sep 13 11:23:47 2022 ] Batch(79/123) done. Loss: 0.3378 lr:0.100000
[ Tue Sep 13 11:24:11 2022 ] Eval epoch: 41
[ Tue Sep 13 11:25:01 2022 ] Mean test loss of 258 batches: 4.960639476776123.
[ Tue Sep 13 11:25:01 2022 ] Top1: 34.41%
[ Tue Sep 13 11:25:01 2022 ] Top5: 74.06%
[ Tue Sep 13 11:25:01 2022 ] Training epoch: 42
[ Tue Sep 13 11:25:34 2022 ] Batch(56/123) done. Loss: 0.0757 lr:0.100000
[ Tue Sep 13 11:26:10 2022 ] Eval epoch: 42
[ Tue Sep 13 11:27:00 2022 ] Mean test loss of 258 batches: 6.401731491088867.
[ Tue Sep 13 11:27:00 2022 ] Top1: 36.89%
[ Tue Sep 13 11:27:00 2022 ] Top5: 75.11%
[ Tue Sep 13 11:27:01 2022 ] Training epoch: 43
[ Tue Sep 13 11:27:21 2022 ] Batch(33/123) done. Loss: 0.2113 lr:0.100000
[ Tue Sep 13 11:28:09 2022 ] Eval epoch: 43
[ Tue Sep 13 11:28:59 2022 ] Mean test loss of 258 batches: 2.578129768371582.
[ Tue Sep 13 11:28:59 2022 ] Top1: 49.45%
[ Tue Sep 13 11:28:59 2022 ] Top5: 84.18%
[ Tue Sep 13 11:28:59 2022 ] Training epoch: 44
[ Tue Sep 13 11:29:08 2022 ] Batch(10/123) done. Loss: 0.1640 lr:0.100000
[ Tue Sep 13 11:30:01 2022 ] Batch(110/123) done. Loss: 0.2625 lr:0.100000
[ Tue Sep 13 11:30:07 2022 ] Eval epoch: 44
[ Tue Sep 13 11:30:58 2022 ] Mean test loss of 258 batches: 3.3866872787475586.
[ Tue Sep 13 11:30:58 2022 ] Top1: 46.43%
[ Tue Sep 13 11:30:58 2022 ] Top5: 78.41%
[ Tue Sep 13 11:30:58 2022 ] Training epoch: 45
[ Tue Sep 13 11:31:47 2022 ] Batch(87/123) done. Loss: 0.2016 lr:0.100000
[ Tue Sep 13 11:32:06 2022 ] Eval epoch: 45
[ Tue Sep 13 11:32:56 2022 ] Mean test loss of 258 batches: 2.310946226119995.
[ Tue Sep 13 11:32:56 2022 ] Top1: 47.83%
[ Tue Sep 13 11:32:56 2022 ] Top5: 82.22%
[ Tue Sep 13 11:32:57 2022 ] Training epoch: 46
[ Tue Sep 13 11:33:34 2022 ] Batch(64/123) done. Loss: 0.1365 lr:0.100000
[ Tue Sep 13 11:34:05 2022 ] Eval epoch: 46
[ Tue Sep 13 11:34:55 2022 ] Mean test loss of 258 batches: 3.6921119689941406.
[ Tue Sep 13 11:34:55 2022 ] Top1: 45.54%
[ Tue Sep 13 11:34:55 2022 ] Top5: 78.01%
[ Tue Sep 13 11:34:55 2022 ] Training epoch: 47
[ Tue Sep 13 11:35:20 2022 ] Batch(41/123) done. Loss: 0.1992 lr:0.100000
[ Tue Sep 13 11:36:04 2022 ] Eval epoch: 47
[ Tue Sep 13 11:36:54 2022 ] Mean test loss of 258 batches: 3.4982993602752686.
[ Tue Sep 13 11:36:54 2022 ] Top1: 41.20%
[ Tue Sep 13 11:36:54 2022 ] Top5: 78.97%
[ Tue Sep 13 11:36:54 2022 ] Training epoch: 48
[ Tue Sep 13 11:37:07 2022 ] Batch(18/123) done. Loss: 0.1265 lr:0.100000
[ Tue Sep 13 11:38:01 2022 ] Batch(118/123) done. Loss: 0.3362 lr:0.100000
[ Tue Sep 13 11:38:03 2022 ] Eval epoch: 48
[ Tue Sep 13 11:38:53 2022 ] Mean test loss of 258 batches: 6.748397350311279.
[ Tue Sep 13 11:38:53 2022 ] Top1: 31.89%
[ Tue Sep 13 11:38:53 2022 ] Top5: 63.62%
[ Tue Sep 13 11:38:53 2022 ] Training epoch: 49
[ Tue Sep 13 11:39:47 2022 ] Batch(95/123) done. Loss: 0.3824 lr:0.100000
[ Tue Sep 13 11:40:01 2022 ] Eval epoch: 49
[ Tue Sep 13 11:40:52 2022 ] Mean test loss of 258 batches: 4.883387565612793.
[ Tue Sep 13 11:40:52 2022 ] Top1: 31.64%
[ Tue Sep 13 11:40:52 2022 ] Top5: 64.11%
[ Tue Sep 13 11:40:52 2022 ] Training epoch: 50
[ Tue Sep 13 11:41:34 2022 ] Batch(72/123) done. Loss: 0.0751 lr:0.100000
[ Tue Sep 13 11:42:00 2022 ] Eval epoch: 50
[ Tue Sep 13 11:42:50 2022 ] Mean test loss of 258 batches: 3.0714542865753174.
[ Tue Sep 13 11:42:51 2022 ] Top1: 49.65%
[ Tue Sep 13 11:42:51 2022 ] Top5: 82.91%
[ Tue Sep 13 11:42:51 2022 ] Training epoch: 51
[ Tue Sep 13 11:43:20 2022 ] Batch(49/123) done. Loss: 0.1486 lr:0.100000
[ Tue Sep 13 11:43:59 2022 ] Eval epoch: 51
[ Tue Sep 13 11:44:49 2022 ] Mean test loss of 258 batches: 2.816194772720337.
[ Tue Sep 13 11:44:49 2022 ] Top1: 53.70%
[ Tue Sep 13 11:44:49 2022 ] Top5: 86.68%
[ Tue Sep 13 11:44:49 2022 ] Training epoch: 52
[ Tue Sep 13 11:45:06 2022 ] Batch(26/123) done. Loss: 0.2205 lr:0.100000
[ Tue Sep 13 11:45:57 2022 ] Eval epoch: 52
[ Tue Sep 13 11:46:48 2022 ] Mean test loss of 258 batches: 3.8996641635894775.
[ Tue Sep 13 11:46:48 2022 ] Top1: 43.43%
[ Tue Sep 13 11:46:48 2022 ] Top5: 74.88%
[ Tue Sep 13 11:46:48 2022 ] Training epoch: 53
[ Tue Sep 13 11:46:53 2022 ] Batch(3/123) done. Loss: 0.1119 lr:0.100000
[ Tue Sep 13 11:47:46 2022 ] Batch(103/123) done. Loss: 0.0956 lr:0.100000
[ Tue Sep 13 11:47:56 2022 ] Eval epoch: 53
[ Tue Sep 13 11:48:46 2022 ] Mean test loss of 258 batches: 8.677329063415527.
[ Tue Sep 13 11:48:46 2022 ] Top1: 29.01%
[ Tue Sep 13 11:48:47 2022 ] Top5: 61.95%
[ Tue Sep 13 11:48:47 2022 ] Training epoch: 54
[ Tue Sep 13 11:49:33 2022 ] Batch(80/123) done. Loss: 0.1684 lr:0.100000
[ Tue Sep 13 11:49:55 2022 ] Eval epoch: 54
[ Tue Sep 13 11:50:46 2022 ] Mean test loss of 258 batches: 3.663032293319702.
[ Tue Sep 13 11:50:46 2022 ] Top1: 40.10%
[ Tue Sep 13 11:50:46 2022 ] Top5: 73.03%
[ Tue Sep 13 11:50:46 2022 ] Training epoch: 55
[ Tue Sep 13 11:51:19 2022 ] Batch(57/123) done. Loss: 0.4345 lr:0.100000
[ Tue Sep 13 11:51:54 2022 ] Eval epoch: 55
[ Tue Sep 13 11:52:44 2022 ] Mean test loss of 258 batches: 5.364523410797119.
[ Tue Sep 13 11:52:44 2022 ] Top1: 44.34%
[ Tue Sep 13 11:52:45 2022 ] Top5: 76.92%
[ Tue Sep 13 11:52:45 2022 ] Training epoch: 56
[ Tue Sep 13 11:53:06 2022 ] Batch(34/123) done. Loss: 0.1112 lr:0.100000
[ Tue Sep 13 11:53:53 2022 ] Eval epoch: 56
[ Tue Sep 13 11:54:43 2022 ] Mean test loss of 258 batches: 6.318780422210693.
[ Tue Sep 13 11:54:43 2022 ] Top1: 35.79%
[ Tue Sep 13 11:54:43 2022 ] Top5: 69.65%
[ Tue Sep 13 11:54:43 2022 ] Training epoch: 57
[ Tue Sep 13 11:54:52 2022 ] Batch(11/123) done. Loss: 0.2920 lr:0.100000
[ Tue Sep 13 11:55:46 2022 ] Batch(111/123) done. Loss: 0.2156 lr:0.100000
[ Tue Sep 13 11:55:52 2022 ] Eval epoch: 57
[ Tue Sep 13 11:56:42 2022 ] Mean test loss of 258 batches: 3.506568670272827.
[ Tue Sep 13 11:56:42 2022 ] Top1: 45.98%
[ Tue Sep 13 11:56:42 2022 ] Top5: 79.67%
[ Tue Sep 13 11:56:42 2022 ] Training epoch: 58
[ Tue Sep 13 11:57:32 2022 ] Batch(88/123) done. Loss: 0.2862 lr:0.100000
[ Tue Sep 13 11:57:50 2022 ] Eval epoch: 58
[ Tue Sep 13 11:58:41 2022 ] Mean test loss of 258 batches: 2.384162425994873.
[ Tue Sep 13 11:58:41 2022 ] Top1: 51.17%
[ Tue Sep 13 11:58:41 2022 ] Top5: 84.31%
[ Tue Sep 13 11:58:41 2022 ] Training epoch: 59
[ Tue Sep 13 11:59:19 2022 ] Batch(65/123) done. Loss: 0.2303 lr:0.100000
[ Tue Sep 13 11:59:50 2022 ] Eval epoch: 59
[ Tue Sep 13 12:00:40 2022 ] Mean test loss of 258 batches: 49.66264724731445.
[ Tue Sep 13 12:00:40 2022 ] Top1: 5.65%
[ Tue Sep 13 12:00:40 2022 ] Top5: 19.45%
[ Tue Sep 13 12:00:40 2022 ] Training epoch: 60
[ Tue Sep 13 12:01:06 2022 ] Batch(42/123) done. Loss: 0.2216 lr:0.100000
[ Tue Sep 13 12:01:49 2022 ] Eval epoch: 60
[ Tue Sep 13 12:02:39 2022 ] Mean test loss of 258 batches: 4.438194751739502.
[ Tue Sep 13 12:02:39 2022 ] Top1: 45.27%
[ Tue Sep 13 12:02:39 2022 ] Top5: 78.47%
[ Tue Sep 13 12:02:39 2022 ] Training epoch: 61
[ Tue Sep 13 12:02:52 2022 ] Batch(19/123) done. Loss: 0.1225 lr:0.010000
[ Tue Sep 13 12:03:46 2022 ] Batch(119/123) done. Loss: 0.0254 lr:0.010000
[ Tue Sep 13 12:03:47 2022 ] Eval epoch: 61
[ Tue Sep 13 12:04:38 2022 ] Mean test loss of 258 batches: 2.1404409408569336.
[ Tue Sep 13 12:04:38 2022 ] Top1: 60.50%
[ Tue Sep 13 12:04:38 2022 ] Top5: 89.84%
[ Tue Sep 13 12:04:38 2022 ] Training epoch: 62
[ Tue Sep 13 12:05:33 2022 ] Batch(96/123) done. Loss: 0.0363 lr:0.010000
[ Tue Sep 13 12:05:47 2022 ] Eval epoch: 62
[ Tue Sep 13 12:06:37 2022 ] Mean test loss of 258 batches: 1.9845335483551025.
[ Tue Sep 13 12:06:37 2022 ] Top1: 62.73%
[ Tue Sep 13 12:06:37 2022 ] Top5: 90.69%
[ Tue Sep 13 12:06:37 2022 ] Training epoch: 63
[ Tue Sep 13 12:07:19 2022 ] Batch(73/123) done. Loss: 0.0227 lr:0.010000
[ Tue Sep 13 12:07:46 2022 ] Eval epoch: 63
[ Tue Sep 13 12:08:36 2022 ] Mean test loss of 258 batches: 2.0647549629211426.
[ Tue Sep 13 12:08:36 2022 ] Top1: 62.84%
[ Tue Sep 13 12:08:36 2022 ] Top5: 90.76%
[ Tue Sep 13 12:08:36 2022 ] Training epoch: 64
[ Tue Sep 13 12:09:06 2022 ] Batch(50/123) done. Loss: 0.0432 lr:0.010000
[ Tue Sep 13 12:09:45 2022 ] Eval epoch: 64
[ Tue Sep 13 12:10:35 2022 ] Mean test loss of 258 batches: 2.042346477508545.
[ Tue Sep 13 12:10:35 2022 ] Top1: 63.35%
[ Tue Sep 13 12:10:35 2022 ] Top5: 90.82%
[ Tue Sep 13 12:10:35 2022 ] Training epoch: 65
[ Tue Sep 13 12:10:53 2022 ] Batch(27/123) done. Loss: 0.0294 lr:0.010000
[ Tue Sep 13 12:11:43 2022 ] Eval epoch: 65
[ Tue Sep 13 12:12:34 2022 ] Mean test loss of 258 batches: 2.1357216835021973.
[ Tue Sep 13 12:12:34 2022 ] Top1: 62.56%
[ Tue Sep 13 12:12:34 2022 ] Top5: 90.68%
[ Tue Sep 13 12:12:34 2022 ] Training epoch: 66
[ Tue Sep 13 12:12:39 2022 ] Batch(4/123) done. Loss: 0.0282 lr:0.010000
[ Tue Sep 13 12:13:32 2022 ] Batch(104/123) done. Loss: 0.0211 lr:0.010000
[ Tue Sep 13 12:13:42 2022 ] Eval epoch: 66
[ Tue Sep 13 12:14:32 2022 ] Mean test loss of 258 batches: 2.2517709732055664.
[ Tue Sep 13 12:14:32 2022 ] Top1: 60.81%
[ Tue Sep 13 12:14:33 2022 ] Top5: 89.84%
[ Tue Sep 13 12:14:33 2022 ] Training epoch: 67
[ Tue Sep 13 12:15:19 2022 ] Batch(81/123) done. Loss: 0.0229 lr:0.010000
[ Tue Sep 13 12:15:41 2022 ] Eval epoch: 67
[ Tue Sep 13 12:16:31 2022 ] Mean test loss of 258 batches: 2.1569643020629883.
[ Tue Sep 13 12:16:31 2022 ] Top1: 62.36%
[ Tue Sep 13 12:16:31 2022 ] Top5: 90.28%
[ Tue Sep 13 12:16:31 2022 ] Training epoch: 68
[ Tue Sep 13 12:17:05 2022 ] Batch(58/123) done. Loss: 0.0267 lr:0.010000
[ Tue Sep 13 12:17:39 2022 ] Eval epoch: 68
[ Tue Sep 13 12:18:30 2022 ] Mean test loss of 258 batches: 2.168156385421753.
[ Tue Sep 13 12:18:30 2022 ] Top1: 62.16%
[ Tue Sep 13 12:18:30 2022 ] Top5: 90.57%
[ Tue Sep 13 12:18:30 2022 ] Training epoch: 69
[ Tue Sep 13 12:18:52 2022 ] Batch(35/123) done. Loss: 0.0098 lr:0.010000
[ Tue Sep 13 12:19:38 2022 ] Eval epoch: 69
[ Tue Sep 13 12:20:28 2022 ] Mean test loss of 258 batches: 2.1420187950134277.
[ Tue Sep 13 12:20:28 2022 ] Top1: 62.91%
[ Tue Sep 13 12:20:29 2022 ] Top5: 90.71%
[ Tue Sep 13 12:20:29 2022 ] Training epoch: 70
[ Tue Sep 13 12:20:38 2022 ] Batch(12/123) done. Loss: 0.0279 lr:0.010000
[ Tue Sep 13 12:21:31 2022 ] Batch(112/123) done. Loss: 0.0426 lr:0.010000
[ Tue Sep 13 12:21:37 2022 ] Eval epoch: 70
[ Tue Sep 13 12:22:27 2022 ] Mean test loss of 258 batches: 2.2135446071624756.
[ Tue Sep 13 12:22:27 2022 ] Top1: 62.32%
[ Tue Sep 13 12:22:27 2022 ] Top5: 90.07%
[ Tue Sep 13 12:22:27 2022 ] Training epoch: 71
[ Tue Sep 13 12:23:18 2022 ] Batch(89/123) done. Loss: 0.0542 lr:0.010000
[ Tue Sep 13 12:23:36 2022 ] Eval epoch: 71
[ Tue Sep 13 12:24:26 2022 ] Mean test loss of 258 batches: 2.23406720161438.
[ Tue Sep 13 12:24:26 2022 ] Top1: 62.39%
[ Tue Sep 13 12:24:26 2022 ] Top5: 90.33%
[ Tue Sep 13 12:24:27 2022 ] Training epoch: 72
[ Tue Sep 13 12:25:05 2022 ] Batch(66/123) done. Loss: 0.0167 lr:0.010000
[ Tue Sep 13 12:25:35 2022 ] Eval epoch: 72
[ Tue Sep 13 12:26:25 2022 ] Mean test loss of 258 batches: 2.200518846511841.
[ Tue Sep 13 12:26:25 2022 ] Top1: 62.86%
[ Tue Sep 13 12:26:25 2022 ] Top5: 90.67%
[ Tue Sep 13 12:26:25 2022 ] Training epoch: 73
[ Tue Sep 13 12:26:51 2022 ] Batch(43/123) done. Loss: 0.0226 lr:0.010000
[ Tue Sep 13 12:27:33 2022 ] Eval epoch: 73
[ Tue Sep 13 12:28:24 2022 ] Mean test loss of 258 batches: 2.1740310192108154.
[ Tue Sep 13 12:28:24 2022 ] Top1: 63.28%
[ Tue Sep 13 12:28:24 2022 ] Top5: 90.87%
[ Tue Sep 13 12:28:24 2022 ] Training epoch: 74
[ Tue Sep 13 12:28:38 2022 ] Batch(20/123) done. Loss: 0.0171 lr:0.010000
[ Tue Sep 13 12:29:31 2022 ] Batch(120/123) done. Loss: 0.0205 lr:0.010000
[ Tue Sep 13 12:29:32 2022 ] Eval epoch: 74
[ Tue Sep 13 12:30:22 2022 ] Mean test loss of 258 batches: 2.2767016887664795.
[ Tue Sep 13 12:30:22 2022 ] Top1: 62.21%
[ Tue Sep 13 12:30:23 2022 ] Top5: 90.45%
[ Tue Sep 13 12:30:23 2022 ] Training epoch: 75
[ Tue Sep 13 12:31:17 2022 ] Batch(97/123) done. Loss: 0.0200 lr:0.010000
[ Tue Sep 13 12:31:31 2022 ] Eval epoch: 75
[ Tue Sep 13 12:32:21 2022 ] Mean test loss of 258 batches: 2.21061372756958.
[ Tue Sep 13 12:32:21 2022 ] Top1: 62.63%
[ Tue Sep 13 12:32:21 2022 ] Top5: 90.77%
[ Tue Sep 13 12:32:21 2022 ] Training epoch: 76
[ Tue Sep 13 12:33:04 2022 ] Batch(74/123) done. Loss: 0.0267 lr:0.010000
[ Tue Sep 13 12:33:30 2022 ] Eval epoch: 76
[ Tue Sep 13 12:34:20 2022 ] Mean test loss of 258 batches: 2.37528920173645.
[ Tue Sep 13 12:34:20 2022 ] Top1: 62.09%
[ Tue Sep 13 12:34:20 2022 ] Top5: 90.17%
[ Tue Sep 13 12:34:20 2022 ] Training epoch: 77
[ Tue Sep 13 12:34:51 2022 ] Batch(51/123) done. Loss: 0.0355 lr:0.010000
[ Tue Sep 13 12:35:29 2022 ] Eval epoch: 77
[ Tue Sep 13 12:36:19 2022 ] Mean test loss of 258 batches: 2.359511375427246.
[ Tue Sep 13 12:36:19 2022 ] Top1: 61.38%
[ Tue Sep 13 12:36:19 2022 ] Top5: 90.07%
[ Tue Sep 13 12:36:19 2022 ] Training epoch: 78
[ Tue Sep 13 12:36:37 2022 ] Batch(28/123) done. Loss: 0.0080 lr:0.010000
[ Tue Sep 13 12:37:27 2022 ] Eval epoch: 78
[ Tue Sep 13 12:38:18 2022 ] Mean test loss of 258 batches: 2.328934907913208.
[ Tue Sep 13 12:38:18 2022 ] Top1: 62.75%
[ Tue Sep 13 12:38:18 2022 ] Top5: 90.51%
[ Tue Sep 13 12:38:18 2022 ] Training epoch: 79
[ Tue Sep 13 12:38:23 2022 ] Batch(5/123) done. Loss: 0.0317 lr:0.010000
[ Tue Sep 13 12:39:17 2022 ] Batch(105/123) done. Loss: 0.0051 lr:0.010000
[ Tue Sep 13 12:39:26 2022 ] Eval epoch: 79
|
493 |
+
[ Tue Sep 13 12:40:16 2022 ] Mean test loss of 258 batches: 2.375866651535034.
|
494 |
+
[ Tue Sep 13 12:40:16 2022 ] Top1: 62.17%
|
495 |
+
[ Tue Sep 13 12:40:16 2022 ] Top5: 90.34%
|
496 |
+
[ Tue Sep 13 12:40:16 2022 ] Training epoch: 80
|
497 |
+
[ Tue Sep 13 12:41:03 2022 ] Batch(82/123) done. Loss: 0.0211 lr:0.010000
|
498 |
+
[ Tue Sep 13 12:41:25 2022 ] Eval epoch: 80
|
499 |
+
[ Tue Sep 13 12:42:16 2022 ] Mean test loss of 258 batches: 2.460627555847168.
|
500 |
+
[ Tue Sep 13 12:42:16 2022 ] Top1: 61.86%
|
501 |
+
[ Tue Sep 13 12:42:16 2022 ] Top5: 90.10%
|
502 |
+
[ Tue Sep 13 12:42:16 2022 ] Training epoch: 81
|
503 |
+
[ Tue Sep 13 12:42:50 2022 ] Batch(59/123) done. Loss: 0.0085 lr:0.001000
|
504 |
+
[ Tue Sep 13 12:43:24 2022 ] Eval epoch: 81
|
505 |
+
[ Tue Sep 13 12:44:15 2022 ] Mean test loss of 258 batches: 2.350675106048584.
|
506 |
+
[ Tue Sep 13 12:44:15 2022 ] Top1: 62.21%
|
507 |
+
[ Tue Sep 13 12:44:15 2022 ] Top5: 90.43%
|
508 |
+
[ Tue Sep 13 12:44:15 2022 ] Training epoch: 82
|
509 |
+
[ Tue Sep 13 12:44:37 2022 ] Batch(36/123) done. Loss: 0.0070 lr:0.001000
|
510 |
+
[ Tue Sep 13 12:45:23 2022 ] Eval epoch: 82
|
511 |
+
[ Tue Sep 13 12:46:14 2022 ] Mean test loss of 258 batches: 2.3446762561798096.
|
512 |
+
[ Tue Sep 13 12:46:14 2022 ] Top1: 62.28%
|
513 |
+
[ Tue Sep 13 12:46:14 2022 ] Top5: 90.34%
|
514 |
+
[ Tue Sep 13 12:46:14 2022 ] Training epoch: 83
|
515 |
+
[ Tue Sep 13 12:46:24 2022 ] Batch(13/123) done. Loss: 0.0161 lr:0.001000
|
516 |
+
[ Tue Sep 13 12:47:17 2022 ] Batch(113/123) done. Loss: 0.0418 lr:0.001000
|
517 |
+
[ Tue Sep 13 12:47:22 2022 ] Eval epoch: 83
|
518 |
+
[ Tue Sep 13 12:48:13 2022 ] Mean test loss of 258 batches: 2.4073634147644043.
|
519 |
+
[ Tue Sep 13 12:48:13 2022 ] Top1: 61.75%
|
520 |
+
[ Tue Sep 13 12:48:13 2022 ] Top5: 90.11%
|
521 |
+
[ Tue Sep 13 12:48:13 2022 ] Training epoch: 84
|
522 |
+
[ Tue Sep 13 12:49:04 2022 ] Batch(90/123) done. Loss: 0.0404 lr:0.001000
|
523 |
+
[ Tue Sep 13 12:49:21 2022 ] Eval epoch: 84
|
524 |
+
[ Tue Sep 13 12:50:12 2022 ] Mean test loss of 258 batches: 2.3524913787841797.
|
525 |
+
[ Tue Sep 13 12:50:12 2022 ] Top1: 62.50%
|
526 |
+
[ Tue Sep 13 12:50:12 2022 ] Top5: 90.44%
|
527 |
+
[ Tue Sep 13 12:50:12 2022 ] Training epoch: 85
|
528 |
+
[ Tue Sep 13 12:50:51 2022 ] Batch(67/123) done. Loss: 0.0288 lr:0.001000
|
529 |
+
[ Tue Sep 13 12:51:20 2022 ] Eval epoch: 85
|
530 |
+
[ Tue Sep 13 12:52:11 2022 ] Mean test loss of 258 batches: 2.361574172973633.
|
531 |
+
[ Tue Sep 13 12:52:11 2022 ] Top1: 62.50%
|
532 |
+
[ Tue Sep 13 12:52:11 2022 ] Top5: 90.63%
|
533 |
+
[ Tue Sep 13 12:52:11 2022 ] Training epoch: 86
|
534 |
+
[ Tue Sep 13 12:52:37 2022 ] Batch(44/123) done. Loss: 0.0119 lr:0.001000
|
535 |
+
[ Tue Sep 13 12:53:19 2022 ] Eval epoch: 86
|
536 |
+
[ Tue Sep 13 12:54:09 2022 ] Mean test loss of 258 batches: 2.2960431575775146.
|
537 |
+
[ Tue Sep 13 12:54:09 2022 ] Top1: 62.96%
|
538 |
+
[ Tue Sep 13 12:54:09 2022 ] Top5: 90.70%
|
539 |
+
[ Tue Sep 13 12:54:09 2022 ] Training epoch: 87
|
540 |
+
[ Tue Sep 13 12:54:24 2022 ] Batch(21/123) done. Loss: 0.0492 lr:0.001000
|
541 |
+
[ Tue Sep 13 12:55:17 2022 ] Batch(121/123) done. Loss: 0.0159 lr:0.001000
|
542 |
+
[ Tue Sep 13 12:55:18 2022 ] Eval epoch: 87
|
543 |
+
[ Tue Sep 13 12:56:08 2022 ] Mean test loss of 258 batches: 2.3140199184417725.
|
544 |
+
[ Tue Sep 13 12:56:08 2022 ] Top1: 62.86%
|
545 |
+
[ Tue Sep 13 12:56:08 2022 ] Top5: 90.69%
|
546 |
+
[ Tue Sep 13 12:56:08 2022 ] Training epoch: 88
|
547 |
+
[ Tue Sep 13 12:57:03 2022 ] Batch(98/123) done. Loss: 0.0238 lr:0.001000
|
548 |
+
[ Tue Sep 13 12:57:16 2022 ] Eval epoch: 88
|
549 |
+
[ Tue Sep 13 12:58:07 2022 ] Mean test loss of 258 batches: 2.533504009246826.
|
550 |
+
[ Tue Sep 13 12:58:07 2022 ] Top1: 60.68%
|
551 |
+
[ Tue Sep 13 12:58:07 2022 ] Top5: 89.55%
|
552 |
+
[ Tue Sep 13 12:58:07 2022 ] Training epoch: 89
|
553 |
+
[ Tue Sep 13 12:58:50 2022 ] Batch(75/123) done. Loss: 0.0200 lr:0.001000
|
554 |
+
[ Tue Sep 13 12:59:15 2022 ] Eval epoch: 89
|
555 |
+
[ Tue Sep 13 13:00:05 2022 ] Mean test loss of 258 batches: 2.3764359951019287.
|
556 |
+
[ Tue Sep 13 13:00:06 2022 ] Top1: 62.40%
|
557 |
+
[ Tue Sep 13 13:00:06 2022 ] Top5: 90.54%
|
558 |
+
[ Tue Sep 13 13:00:06 2022 ] Training epoch: 90
|
559 |
+
[ Tue Sep 13 13:00:36 2022 ] Batch(52/123) done. Loss: 0.0210 lr:0.001000
|
560 |
+
[ Tue Sep 13 13:01:14 2022 ] Eval epoch: 90
|
561 |
+
[ Tue Sep 13 13:02:05 2022 ] Mean test loss of 258 batches: 2.293139934539795.
|
562 |
+
[ Tue Sep 13 13:02:05 2022 ] Top1: 63.02%
|
563 |
+
[ Tue Sep 13 13:02:05 2022 ] Top5: 90.78%
|
564 |
+
[ Tue Sep 13 13:02:05 2022 ] Training epoch: 91
|
565 |
+
[ Tue Sep 13 13:02:23 2022 ] Batch(29/123) done. Loss: 0.0316 lr:0.001000
|
566 |
+
[ Tue Sep 13 13:03:13 2022 ] Eval epoch: 91
|
567 |
+
[ Tue Sep 13 13:04:03 2022 ] Mean test loss of 258 batches: 2.3484678268432617.
|
568 |
+
[ Tue Sep 13 13:04:03 2022 ] Top1: 62.70%
|
569 |
+
[ Tue Sep 13 13:04:04 2022 ] Top5: 90.44%
|
570 |
+
[ Tue Sep 13 13:04:04 2022 ] Training epoch: 92
|
571 |
+
[ Tue Sep 13 13:04:10 2022 ] Batch(6/123) done. Loss: 0.0527 lr:0.001000
|
572 |
+
[ Tue Sep 13 13:05:03 2022 ] Batch(106/123) done. Loss: 0.0304 lr:0.001000
|
573 |
+
[ Tue Sep 13 13:05:12 2022 ] Eval epoch: 92
|
574 |
+
[ Tue Sep 13 13:06:03 2022 ] Mean test loss of 258 batches: 2.3265533447265625.
|
575 |
+
[ Tue Sep 13 13:06:03 2022 ] Top1: 62.99%
|
576 |
+
[ Tue Sep 13 13:06:03 2022 ] Top5: 90.57%
|
577 |
+
[ Tue Sep 13 13:06:03 2022 ] Training epoch: 93
|
578 |
+
[ Tue Sep 13 13:06:50 2022 ] Batch(83/123) done. Loss: 0.0153 lr:0.001000
|
579 |
+
[ Tue Sep 13 13:07:11 2022 ] Eval epoch: 93
|
580 |
+
[ Tue Sep 13 13:08:02 2022 ] Mean test loss of 258 batches: 2.3389744758605957.
|
581 |
+
[ Tue Sep 13 13:08:02 2022 ] Top1: 62.42%
|
582 |
+
[ Tue Sep 13 13:08:02 2022 ] Top5: 90.62%
|
583 |
+
[ Tue Sep 13 13:08:02 2022 ] Training epoch: 94
|
584 |
+
[ Tue Sep 13 13:08:37 2022 ] Batch(60/123) done. Loss: 0.0240 lr:0.001000
|
585 |
+
[ Tue Sep 13 13:09:10 2022 ] Eval epoch: 94
|
586 |
+
[ Tue Sep 13 13:10:00 2022 ] Mean test loss of 258 batches: 2.357041835784912.
|
587 |
+
[ Tue Sep 13 13:10:01 2022 ] Top1: 62.46%
|
588 |
+
[ Tue Sep 13 13:10:01 2022 ] Top5: 90.50%
|
589 |
+
[ Tue Sep 13 13:10:01 2022 ] Training epoch: 95
|
590 |
+
[ Tue Sep 13 13:10:23 2022 ] Batch(37/123) done. Loss: 0.0098 lr:0.001000
|
591 |
+
[ Tue Sep 13 13:11:09 2022 ] Eval epoch: 95
|
592 |
+
[ Tue Sep 13 13:11:59 2022 ] Mean test loss of 258 batches: 2.3830180168151855.
|
593 |
+
[ Tue Sep 13 13:11:59 2022 ] Top1: 62.68%
|
594 |
+
[ Tue Sep 13 13:11:59 2022 ] Top5: 90.45%
|
595 |
+
[ Tue Sep 13 13:11:59 2022 ] Training epoch: 96
|
596 |
+
[ Tue Sep 13 13:12:10 2022 ] Batch(14/123) done. Loss: 0.0331 lr:0.001000
|
597 |
+
[ Tue Sep 13 13:13:03 2022 ] Batch(114/123) done. Loss: 0.0135 lr:0.001000
|
598 |
+
[ Tue Sep 13 13:13:08 2022 ] Eval epoch: 96
|
599 |
+
[ Tue Sep 13 13:13:58 2022 ] Mean test loss of 258 batches: 2.356888771057129.
|
600 |
+
[ Tue Sep 13 13:13:58 2022 ] Top1: 62.37%
|
601 |
+
[ Tue Sep 13 13:13:58 2022 ] Top5: 90.57%
|
602 |
+
[ Tue Sep 13 13:13:58 2022 ] Training epoch: 97
|
603 |
+
[ Tue Sep 13 13:14:49 2022 ] Batch(91/123) done. Loss: 0.0269 lr:0.001000
|
604 |
+
[ Tue Sep 13 13:15:06 2022 ] Eval epoch: 97
|
605 |
+
[ Tue Sep 13 13:15:56 2022 ] Mean test loss of 258 batches: 2.3394312858581543.
|
606 |
+
[ Tue Sep 13 13:15:56 2022 ] Top1: 62.86%
|
607 |
+
[ Tue Sep 13 13:15:57 2022 ] Top5: 90.85%
|
608 |
+
[ Tue Sep 13 13:15:57 2022 ] Training epoch: 98
|
609 |
+
[ Tue Sep 13 13:16:36 2022 ] Batch(68/123) done. Loss: 0.0099 lr:0.001000
|
610 |
+
[ Tue Sep 13 13:17:05 2022 ] Eval epoch: 98
|
611 |
+
[ Tue Sep 13 13:17:55 2022 ] Mean test loss of 258 batches: 2.516493082046509.
|
612 |
+
[ Tue Sep 13 13:17:55 2022 ] Top1: 60.17%
|
613 |
+
[ Tue Sep 13 13:17:55 2022 ] Top5: 89.34%
|
614 |
+
[ Tue Sep 13 13:17:55 2022 ] Training epoch: 99
|
615 |
+
[ Tue Sep 13 13:18:23 2022 ] Batch(45/123) done. Loss: 0.0344 lr:0.001000
|
616 |
+
[ Tue Sep 13 13:19:04 2022 ] Eval epoch: 99
|
617 |
+
[ Tue Sep 13 13:19:54 2022 ] Mean test loss of 258 batches: 2.386345863342285.
|
618 |
+
[ Tue Sep 13 13:19:54 2022 ] Top1: 61.84%
|
619 |
+
[ Tue Sep 13 13:19:54 2022 ] Top5: 90.10%
|
620 |
+
[ Tue Sep 13 13:19:54 2022 ] Training epoch: 100
|
621 |
+
[ Tue Sep 13 13:20:09 2022 ] Batch(22/123) done. Loss: 0.0235 lr:0.001000
|
622 |
+
[ Tue Sep 13 13:21:02 2022 ] Batch(122/123) done. Loss: 0.2032 lr:0.001000
|
623 |
+
[ Tue Sep 13 13:21:03 2022 ] Eval epoch: 100
|
624 |
+
[ Tue Sep 13 13:21:53 2022 ] Mean test loss of 258 batches: 2.4683544635772705.
|
625 |
+
[ Tue Sep 13 13:21:53 2022 ] Top1: 60.51%
|
626 |
+
[ Tue Sep 13 13:21:53 2022 ] Top5: 89.61%
|
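
The training log above (and the ones that follow) is regular enough to post-process into an accuracy curve. A minimal sketch, assuming the logs keep the exact "Eval epoch:" / "Top1:" wording used throughout this upload; the path and the helper name are illustrative, not part of the repo:

import re

def parse_top1(path):
    """Return (epoch, top1) pairs from a DC-GCN-style training log."""
    epoch_re = re.compile(r"\] Eval epoch: (\d+)")
    top1_re = re.compile(r"\] Top1: ([\d.]+)%")
    pairs, epoch = [], None
    with open(path) as f:
        for line in f:
            m = epoch_re.search(line)
            if m:
                epoch = int(m.group(1))   # remember which eval we are in
                continue
            m = top1_re.search(line)
            if m and epoch is not None:
                pairs.append((epoch, float(m.group(1))))
                epoch = None              # consume one Top1 per eval block
    return pairs

if __name__ == "__main__":
    curve = parse_top1("ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/log.txt")
    best = max(curve, key=lambda p: p[1])
    print(f"best Top1 = {best[1]:.2f}% at epoch {best[0]}")
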
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/config.yaml
ADDED
@@ -0,0 +1,59 @@
+Experiment_name: ntu_bone_xsub
+base_lr: 0.1
+batch_size: 64
+config: ./config/nturgbd-cross-subject/train_bone.yaml
+device:
+- 4
+- 5
+eval_interval: 5
+feeder: feeders.feeder.Feeder
+groups: 8
+ignore_weights: []
+keep_rate: 0.9
+log_interval: 100
+model: model.decouple_gcn.Model
+model_args:
+  block_size: 41
+  graph: graph.ntu_rgb_d.Graph
+  graph_args:
+    labeling_mode: spatial
+  groups: 16
+  num_class: 60
+  num_person: 2
+  num_point: 25
+model_saved_name: ./save_models/ntu_bone_xsub
+nesterov: true
+num_epoch: 100
+num_worker: 32
+only_train_epoch: 1
+only_train_part: true
+optimizer: SGD
+phase: train
+print_log: true
+save_interval: 2
+save_score: false
+seed: 1
+show_topk:
+- 1
+- 5
+start_epoch: 0
+step:
+- 60
+- 80
+test_batch_size: 64
+test_feeder_args:
+  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone.npy
+  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
+train_feeder_args:
+  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone.npy
+  debug: false
+  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
+  normalization: false
+  random_choose: false
+  random_move: false
+  random_shift: false
+  window_size: -1
+warm_up_epoch: 0
+weight_decay: 0.0001
+weights: null
+work_dir: ./work_dir/ntu_bone_xsub
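
The YAML above is the run configuration the training script consumed. A minimal sketch of how such a config is typically turned into a model object, assuming PyYAML is installed and the repo's model/graph packages are importable; import_class mirrors the helper defined in decouple_gcn.py below:

import yaml

def import_class(name):
    # Resolve a dotted path such as 'model.decouple_gcn.Model'.
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod

with open('ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/config.yaml') as f:
    cfg = yaml.safe_load(f)

Model = import_class(cfg['model'])   # model.decouple_gcn.Model
model = Model(**cfg['model_args'])   # num_class=60, groups=16, block_size=41, ...

Note that in this sketch only model_args reaches the constructor, so the model sees groups: 16; the top-level groups: 8 is a separate training-script knob.
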
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.autograd import Variable
+import numpy as np
+import math
+from model.dropSke import DropBlock_Ske
+from model.dropT import DropBlockT_1d
+
+
+def import_class(name):
+    components = name.split('.')
+    mod = __import__(components[0])
+    for comp in components[1:]:
+        mod = getattr(mod, comp)
+    return mod
+
+
+def conv_branch_init(conv):
+    weight = conv.weight
+    n = weight.size(0)
+    k1 = weight.size(1)
+    k2 = weight.size(2)
+    nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
+    nn.init.constant(conv.bias, 0)
+
+
+def conv_init(conv):
+    nn.init.kaiming_normal(conv.weight, mode='fan_out')
+    nn.init.constant(conv.bias, 0)
+
+
+def bn_init(bn, scale):
+    nn.init.constant(bn.weight, scale)
+    nn.init.constant(bn.bias, 0)
+
+
+class unit_tcn(nn.Module):
+    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
+        super(unit_tcn, self).__init__()
+        pad = int((kernel_size - 1) / 2)
+        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                              stride=(stride, 1))
+
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+        conv_init(self.conv)
+        bn_init(self.bn, 1)
+
+        self.dropS = DropBlock_Ske(num_point=num_point)
+        self.dropT = DropBlockT_1d(block_size=block_size)
+
+    def forward(self, x, keep_prob, A):
+        x = self.bn(self.conv(x))
+        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
+        return x
+
+
+class unit_tcn_skip(nn.Module):
+    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+        super(unit_tcn_skip, self).__init__()
+        pad = int((kernel_size - 1) / 2)
+        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                              stride=(stride, 1))
+
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+        conv_init(self.conv)
+        bn_init(self.bn, 1)
+
+    def forward(self, x):
+        x = self.bn(self.conv(x))
+        return x
+
+
+class unit_gcn(nn.Module):
+    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
+        super(unit_gcn, self).__init__()
+        self.in_channels = in_channels
+        self.out_channels = out_channels
+        self.num_point = num_point
+        self.groups = groups
+        self.num_subset = num_subset
+        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
+            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)
+
+        if in_channels != out_channels:
+            self.down = nn.Sequential(
+                nn.Conv2d(in_channels, out_channels, 1),
+                nn.BatchNorm2d(out_channels)
+            )
+        else:
+            self.down = lambda x: x
+
+        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                conv_init(m)
+            elif isinstance(m, nn.BatchNorm2d):
+                bn_init(m, 1)
+        bn_init(self.bn, 1e-6)
+
+        self.Linear_weight = nn.Parameter(torch.zeros(
+            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
+        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
+            0.5 / (out_channels * num_subset)))
+
+        self.Linear_bias = nn.Parameter(torch.zeros(
+            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
+        nn.init.constant(self.Linear_bias, 1e-6)
+
+        eye_array = []
+        for i in range(out_channels):
+            eye_array.append(torch.eye(num_point))
+        self.eyes = nn.Parameter(torch.tensor(torch.stack(
+            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]
+
+    def norm(self, A):
+        b, c, h, w = A.size()
+        A = A.view(c, self.num_point, self.num_point)
+        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
+        D_list_12 = (D_list + 0.001)**(-1)
+        D_12 = self.eyes * D_list_12
+        A = torch.bmm(A, D_12).view(b, c, h, w)
+        return A
+
+    def forward(self, x0):
+        learn_A = self.DecoupleA.repeat(
+            1, self.out_channels // self.groups, 1, 1)
+        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
+            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)
+
+        x = torch.einsum(
+            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
+        x = x + self.Linear_bias
+        x = self.bn0(x)
+
+        n, kc, t, v = x.size()
+        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
+        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))
+
+        x = self.bn(x)
+        x += self.down(x0)
+        x = self.relu(x)
+        return x
+
+
+class TCN_GCN_unit(nn.Module):
+    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
+        super(TCN_GCN_unit, self).__init__()
+        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
+        self.tcn1 = unit_tcn(out_channels, out_channels,
+                             stride=stride, num_point=num_point)
+        self.relu = nn.ReLU()
+
+        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
+            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)
+
+        if not residual:
+            self.residual = lambda x: 0
+
+        elif (in_channels == out_channels) and (stride == 1):
+            self.residual = lambda x: x
+
+        else:
+            self.residual = unit_tcn_skip(
+                in_channels, out_channels, kernel_size=1, stride=stride)
+        self.dropSke = DropBlock_Ske(num_point=num_point)
+        self.dropT_skip = DropBlockT_1d(block_size=block_size)
+
+    def forward(self, x, keep_prob):
+        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
+            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
+        return self.relu(x)
+
+
+class Model(nn.Module):
+    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
+        super(Model, self).__init__()
+
+        if graph is None:
+            raise ValueError()
+        else:
+            Graph = import_class(graph)
+            self.graph = Graph(**graph_args)
+
+        A = self.graph.A
+        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
+
+        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
+                               block_size, residual=False)
+        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l5 = TCN_GCN_unit(
+            64, 128, A, groups, num_point, block_size, stride=2)
+        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
+        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
+        self.l8 = TCN_GCN_unit(128, 256, A, groups,
+                               num_point, block_size, stride=2)
+        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
+        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
+
+        self.fc = nn.Linear(256, num_class)
+        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
+        bn_init(self.data_bn, 1)
+
+    def forward(self, x, keep_prob=0.9):
+        N, C, T, V, M = x.size()
+
+        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
+        x = self.data_bn(x)
+        x = x.view(N, M, V, C, T).permute(
+            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
+
+        x = self.l1(x, 1.0)
+        x = self.l2(x, 1.0)
+        x = self.l3(x, 1.0)
+        x = self.l4(x, 1.0)
+        x = self.l5(x, 1.0)
+        x = self.l6(x, 1.0)
+        x = self.l7(x, keep_prob)
+        x = self.l8(x, keep_prob)
+        x = self.l9(x, keep_prob)
+        x = self.l10(x, keep_prob)
+
+        # N*M,C,T,V
+        c_new = x.size(1)
+        x = x.reshape(N, M, c_new, -1)
+        x = x.mean(3).mean(1)
+
+        return self.fc(x)
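
One step of decouple_gcn.py worth isolating is unit_gcn.norm, which right-normalizes each channel's learned adjacency by a smoothed inverse-degree diagonal, A_hat = A @ D^-1. A standalone sketch with random stand-in values, shapes following the [c,25,25] comment in the file:

import torch

c, v = 4, 25                              # channels, joints (stand-in sizes)
A = torch.rand(c, v, v)                   # per-channel adjacency, random here
eyes = torch.eye(v).repeat(c, 1, 1)       # [c, 25, 25] identity stack
deg = A.sum(dim=1).view(c, 1, v)          # column degree of each joint
D_inv = eyes * (deg + 0.001) ** (-1)      # diagonal of smoothed inverse degrees
A_hat = torch.bmm(A, D_inv)               # A @ D^-1, as in unit_gcn.norm

# Each column of A_hat now sums to ~1 (up to the 0.001 smoothing term),
# so the three normalized subsets combined in forward() act as averaging
# operators over neighboring joints.
print(A_hat.sum(dim=1)[0, :3])
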
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea3dc32c58240f32971f5233f9f3164840ae3822b8c8bad7232fdf9ff4d227dd
+size 4979902
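
The three lines above are a Git LFS pointer, not the pickle itself; only the hash and byte size live in the diff. After cloning with git-lfs installed (or running `git lfs pull`), the real ~4.9 MB file is in place. What the pickle contains is not stated in this upload, so the sketch below only loads and inspects it:

import pickle

path = 'ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/eval_results/best_acc.pkl'
with open(path, 'rb') as f:
    obj = pickle.load(f)   # contents unspecified here; inspect before use
print(type(obj))
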
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_bone_xsub/log.txt
ADDED
@@ -0,0 +1,626 @@
+[ Tue Sep 13 10:03:48 2022 ] Parameters:
+{'work_dir': './work_dir/ntu_bone_xsub', 'model_saved_name': './save_models/ntu_bone_xsub', 'Experiment_name': 'ntu_bone_xsub', 'config': './config/nturgbd-cross-subject/train_bone.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [4, 5], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
+
+[ Tue Sep 13 10:03:48 2022 ] Training epoch: 1
+[ Tue Sep 13 10:04:38 2022 ] Batch(99/123) done. Loss: 3.4120 lr:0.100000
+[ Tue Sep 13 10:04:48 2022 ] Eval epoch: 1
+[ Tue Sep 13 10:05:38 2022 ] Mean test loss of 258 batches: 4.742095947265625.
+[ Tue Sep 13 10:05:38 2022 ] Top1: 4.19%
+[ Tue Sep 13 10:05:38 2022 ] Top5: 14.60%
+[ Tue Sep 13 10:05:38 2022 ] Training epoch: 2
+[ Tue Sep 13 10:06:22 2022 ] Batch(76/123) done. Loss: 2.7536 lr:0.100000
+[ Tue Sep 13 10:06:46 2022 ] Eval epoch: 2
+[ Tue Sep 13 10:07:36 2022 ] Mean test loss of 258 batches: 4.383267402648926.
+[ Tue Sep 13 10:07:36 2022 ] Top1: 7.83%
+[ Tue Sep 13 10:07:36 2022 ] Top5: 27.60%
+[ Tue Sep 13 10:07:36 2022 ] Training epoch: 3
+[ Tue Sep 13 10:08:08 2022 ] Batch(53/123) done. Loss: 2.8688 lr:0.100000
+[ Tue Sep 13 10:08:44 2022 ] Eval epoch: 3
+[ Tue Sep 13 10:09:34 2022 ] Mean test loss of 258 batches: 3.981781244277954.
+[ Tue Sep 13 10:09:34 2022 ] Top1: 11.83%
+[ Tue Sep 13 10:09:34 2022 ] Top5: 36.82%
+[ Tue Sep 13 10:09:34 2022 ] Training epoch: 4
+[ Tue Sep 13 10:09:54 2022 ] Batch(30/123) done. Loss: 2.2916 lr:0.100000
+[ Tue Sep 13 10:10:42 2022 ] Eval epoch: 4
+[ Tue Sep 13 10:11:32 2022 ] Mean test loss of 258 batches: 2.9862473011016846.
+[ Tue Sep 13 10:11:33 2022 ] Top1: 18.34%
+[ Tue Sep 13 10:11:33 2022 ] Top5: 58.25%
+[ Tue Sep 13 10:11:33 2022 ] Training epoch: 5
+[ Tue Sep 13 10:11:40 2022 ] Batch(7/123) done. Loss: 2.0996 lr:0.100000
+[ Tue Sep 13 10:12:33 2022 ] Batch(107/123) done. Loss: 1.7637 lr:0.100000
+[ Tue Sep 13 10:12:41 2022 ] Eval epoch: 5
+[ Tue Sep 13 10:13:31 2022 ] Mean test loss of 258 batches: 3.5700297355651855.
+[ Tue Sep 13 10:13:31 2022 ] Top1: 19.28%
+[ Tue Sep 13 10:13:31 2022 ] Top5: 57.46%
+[ Tue Sep 13 10:13:31 2022 ] Training epoch: 6
+[ Tue Sep 13 10:14:19 2022 ] Batch(84/123) done. Loss: 1.4333 lr:0.100000
+[ Tue Sep 13 10:14:39 2022 ] Eval epoch: 6
+[ Tue Sep 13 10:15:29 2022 ] Mean test loss of 258 batches: 2.9239397048950195.
+[ Tue Sep 13 10:15:29 2022 ] Top1: 24.19%
+[ Tue Sep 13 10:15:29 2022 ] Top5: 66.11%
+[ Tue Sep 13 10:15:29 2022 ] Training epoch: 7
+[ Tue Sep 13 10:16:05 2022 ] Batch(61/123) done. Loss: 1.5305 lr:0.100000
+[ Tue Sep 13 10:16:37 2022 ] Eval epoch: 7
+[ Tue Sep 13 10:17:27 2022 ] Mean test loss of 258 batches: 2.789067268371582.
+[ Tue Sep 13 10:17:27 2022 ] Top1: 24.83%
+[ Tue Sep 13 10:17:27 2022 ] Top5: 64.15%
+[ Tue Sep 13 10:17:27 2022 ] Training epoch: 8
+[ Tue Sep 13 10:17:51 2022 ] Batch(38/123) done. Loss: 1.2631 lr:0.100000
+[ Tue Sep 13 10:18:35 2022 ] Eval epoch: 8
+[ Tue Sep 13 10:19:25 2022 ] Mean test loss of 258 batches: 2.5744893550872803.
+[ Tue Sep 13 10:19:25 2022 ] Top1: 32.38%
+[ Tue Sep 13 10:19:25 2022 ] Top5: 71.55%
+[ Tue Sep 13 10:19:25 2022 ] Training epoch: 9
+[ Tue Sep 13 10:19:37 2022 ] Batch(15/123) done. Loss: 1.2294 lr:0.100000
+[ Tue Sep 13 10:20:29 2022 ] Batch(115/123) done. Loss: 1.0048 lr:0.100000
+[ Tue Sep 13 10:20:33 2022 ] Eval epoch: 9
+[ Tue Sep 13 10:21:24 2022 ] Mean test loss of 258 batches: 2.6254217624664307.
+[ Tue Sep 13 10:21:24 2022 ] Top1: 30.32%
+[ Tue Sep 13 10:21:24 2022 ] Top5: 70.99%
+[ Tue Sep 13 10:21:24 2022 ] Training epoch: 10
+[ Tue Sep 13 10:22:15 2022 ] Batch(92/123) done. Loss: 0.9860 lr:0.100000
+[ Tue Sep 13 10:22:31 2022 ] Eval epoch: 10
+[ Tue Sep 13 10:23:22 2022 ] Mean test loss of 258 batches: 3.1184659004211426.
+[ Tue Sep 13 10:23:22 2022 ] Top1: 29.32%
+[ Tue Sep 13 10:23:22 2022 ] Top5: 67.40%
+[ Tue Sep 13 10:23:22 2022 ] Training epoch: 11
+[ Tue Sep 13 10:24:02 2022 ] Batch(69/123) done. Loss: 1.1361 lr:0.100000
+[ Tue Sep 13 10:24:30 2022 ] Eval epoch: 11
+[ Tue Sep 13 10:25:20 2022 ] Mean test loss of 258 batches: 2.622861385345459.
+[ Tue Sep 13 10:25:20 2022 ] Top1: 35.36%
+[ Tue Sep 13 10:25:20 2022 ] Top5: 74.38%
+[ Tue Sep 13 10:25:20 2022 ] Training epoch: 12
+[ Tue Sep 13 10:25:48 2022 ] Batch(46/123) done. Loss: 1.0729 lr:0.100000
+[ Tue Sep 13 10:26:28 2022 ] Eval epoch: 12
+[ Tue Sep 13 10:27:19 2022 ] Mean test loss of 258 batches: 2.334763765335083.
+[ Tue Sep 13 10:27:19 2022 ] Top1: 38.00%
+[ Tue Sep 13 10:27:19 2022 ] Top5: 78.23%
+[ Tue Sep 13 10:27:19 2022 ] Training epoch: 13
+[ Tue Sep 13 10:27:34 2022 ] Batch(23/123) done. Loss: 0.7770 lr:0.100000
+[ Tue Sep 13 10:28:26 2022 ] Eval epoch: 13
+[ Tue Sep 13 10:29:17 2022 ] Mean test loss of 258 batches: 2.3467118740081787.
+[ Tue Sep 13 10:29:17 2022 ] Top1: 36.44%
+[ Tue Sep 13 10:29:17 2022 ] Top5: 73.37%
+[ Tue Sep 13 10:29:17 2022 ] Training epoch: 14
+[ Tue Sep 13 10:29:21 2022 ] Batch(0/123) done. Loss: 0.7911 lr:0.100000
+[ Tue Sep 13 10:30:13 2022 ] Batch(100/123) done. Loss: 1.0416 lr:0.100000
+[ Tue Sep 13 10:30:25 2022 ] Eval epoch: 14
+[ Tue Sep 13 10:31:15 2022 ] Mean test loss of 258 batches: 2.2429208755493164.
+[ Tue Sep 13 10:31:15 2022 ] Top1: 37.54%
+[ Tue Sep 13 10:31:15 2022 ] Top5: 77.70%
+[ Tue Sep 13 10:31:15 2022 ] Training epoch: 15
+[ Tue Sep 13 10:31:59 2022 ] Batch(77/123) done. Loss: 0.8666 lr:0.100000
+[ Tue Sep 13 10:32:23 2022 ] Eval epoch: 15
+[ Tue Sep 13 10:33:13 2022 ] Mean test loss of 258 batches: 2.169420003890991.
+[ Tue Sep 13 10:33:13 2022 ] Top1: 44.99%
+[ Tue Sep 13 10:33:13 2022 ] Top5: 82.46%
+[ Tue Sep 13 10:33:13 2022 ] Training epoch: 16
+[ Tue Sep 13 10:33:45 2022 ] Batch(54/123) done. Loss: 0.5842 lr:0.100000
+[ Tue Sep 13 10:34:21 2022 ] Eval epoch: 16
+[ Tue Sep 13 10:35:12 2022 ] Mean test loss of 258 batches: 1.9139314889907837.
+[ Tue Sep 13 10:35:12 2022 ] Top1: 46.06%
+[ Tue Sep 13 10:35:12 2022 ] Top5: 84.39%
+[ Tue Sep 13 10:35:12 2022 ] Training epoch: 17
+[ Tue Sep 13 10:35:32 2022 ] Batch(31/123) done. Loss: 0.6978 lr:0.100000
+[ Tue Sep 13 10:36:20 2022 ] Eval epoch: 17
+[ Tue Sep 13 10:37:10 2022 ] Mean test loss of 258 batches: 2.0210671424865723.
+[ Tue Sep 13 10:37:10 2022 ] Top1: 46.59%
+[ Tue Sep 13 10:37:10 2022 ] Top5: 81.80%
+[ Tue Sep 13 10:37:10 2022 ] Training epoch: 18
+[ Tue Sep 13 10:37:18 2022 ] Batch(8/123) done. Loss: 0.9801 lr:0.100000
+[ Tue Sep 13 10:38:11 2022 ] Batch(108/123) done. Loss: 0.8500 lr:0.100000
+[ Tue Sep 13 10:38:18 2022 ] Eval epoch: 18
+[ Tue Sep 13 10:39:09 2022 ] Mean test loss of 258 batches: 2.0550177097320557.
+[ Tue Sep 13 10:39:09 2022 ] Top1: 45.24%
+[ Tue Sep 13 10:39:09 2022 ] Top5: 82.14%
+[ Tue Sep 13 10:39:09 2022 ] Training epoch: 19
+[ Tue Sep 13 10:39:57 2022 ] Batch(85/123) done. Loss: 0.9634 lr:0.100000
+[ Tue Sep 13 10:40:17 2022 ] Eval epoch: 19
+[ Tue Sep 13 10:41:07 2022 ] Mean test loss of 258 batches: 2.3090696334838867.
+[ Tue Sep 13 10:41:07 2022 ] Top1: 44.53%
+[ Tue Sep 13 10:41:07 2022 ] Top5: 82.95%
+[ Tue Sep 13 10:41:07 2022 ] Training epoch: 20
+[ Tue Sep 13 10:41:43 2022 ] Batch(62/123) done. Loss: 1.0622 lr:0.100000
+[ Tue Sep 13 10:42:15 2022 ] Eval epoch: 20
+[ Tue Sep 13 10:43:05 2022 ] Mean test loss of 258 batches: 2.014781951904297.
+[ Tue Sep 13 10:43:05 2022 ] Top1: 47.81%
+[ Tue Sep 13 10:43:05 2022 ] Top5: 82.90%
+[ Tue Sep 13 10:43:05 2022 ] Training epoch: 21
+[ Tue Sep 13 10:43:29 2022 ] Batch(39/123) done. Loss: 0.7243 lr:0.100000
+[ Tue Sep 13 10:44:13 2022 ] Eval epoch: 21
+[ Tue Sep 13 10:45:03 2022 ] Mean test loss of 258 batches: 2.071990728378296.
+[ Tue Sep 13 10:45:03 2022 ] Top1: 47.46%
+[ Tue Sep 13 10:45:03 2022 ] Top5: 83.40%
+[ Tue Sep 13 10:45:03 2022 ] Training epoch: 22
+[ Tue Sep 13 10:45:15 2022 ] Batch(16/123) done. Loss: 0.6351 lr:0.100000
+[ Tue Sep 13 10:46:08 2022 ] Batch(116/123) done. Loss: 0.5568 lr:0.100000
+[ Tue Sep 13 10:46:11 2022 ] Eval epoch: 22
+[ Tue Sep 13 10:47:01 2022 ] Mean test loss of 258 batches: 1.7614084482192993.
+[ Tue Sep 13 10:47:01 2022 ] Top1: 52.73%
+[ Tue Sep 13 10:47:01 2022 ] Top5: 86.97%
+[ Tue Sep 13 10:47:01 2022 ] Training epoch: 23
+[ Tue Sep 13 10:47:54 2022 ] Batch(93/123) done. Loss: 0.7639 lr:0.100000
+[ Tue Sep 13 10:48:09 2022 ] Eval epoch: 23
+[ Tue Sep 13 10:48:59 2022 ] Mean test loss of 258 batches: 9.128730773925781.
+[ Tue Sep 13 10:48:59 2022 ] Top1: 20.48%
+[ Tue Sep 13 10:48:59 2022 ] Top5: 53.67%
+[ Tue Sep 13 10:48:59 2022 ] Training epoch: 24
+[ Tue Sep 13 10:49:40 2022 ] Batch(70/123) done. Loss: 0.5800 lr:0.100000
+[ Tue Sep 13 10:50:08 2022 ] Eval epoch: 24
+[ Tue Sep 13 10:50:58 2022 ] Mean test loss of 258 batches: 1.7863599061965942.
+[ Tue Sep 13 10:50:58 2022 ] Top1: 53.04%
+[ Tue Sep 13 10:50:58 2022 ] Top5: 86.94%
+[ Tue Sep 13 10:50:58 2022 ] Training epoch: 25
+[ Tue Sep 13 10:51:26 2022 ] Batch(47/123) done. Loss: 0.4499 lr:0.100000
+[ Tue Sep 13 10:52:06 2022 ] Eval epoch: 25
+[ Tue Sep 13 10:52:56 2022 ] Mean test loss of 258 batches: 2.5577802658081055.
+[ Tue Sep 13 10:52:56 2022 ] Top1: 44.30%
+[ Tue Sep 13 10:52:56 2022 ] Top5: 82.32%
+[ Tue Sep 13 10:52:56 2022 ] Training epoch: 26
+[ Tue Sep 13 10:53:12 2022 ] Batch(24/123) done. Loss: 0.5527 lr:0.100000
+[ Tue Sep 13 10:54:04 2022 ] Eval epoch: 26
+[ Tue Sep 13 10:54:55 2022 ] Mean test loss of 258 batches: 1.9369274377822876.
+[ Tue Sep 13 10:54:55 2022 ] Top1: 52.55%
+[ Tue Sep 13 10:54:55 2022 ] Top5: 86.72%
+[ Tue Sep 13 10:54:55 2022 ] Training epoch: 27
+[ Tue Sep 13 10:54:59 2022 ] Batch(1/123) done. Loss: 0.4778 lr:0.100000
+[ Tue Sep 13 10:55:51 2022 ] Batch(101/123) done. Loss: 0.3616 lr:0.100000
+[ Tue Sep 13 10:56:03 2022 ] Eval epoch: 27
+[ Tue Sep 13 10:56:53 2022 ] Mean test loss of 258 batches: 2.151625633239746.
+[ Tue Sep 13 10:56:53 2022 ] Top1: 51.28%
+[ Tue Sep 13 10:56:53 2022 ] Top5: 85.77%
+[ Tue Sep 13 10:56:53 2022 ] Training epoch: 28
+[ Tue Sep 13 10:57:38 2022 ] Batch(78/123) done. Loss: 0.4822 lr:0.100000
+[ Tue Sep 13 10:58:01 2022 ] Eval epoch: 28
+[ Tue Sep 13 10:58:51 2022 ] Mean test loss of 258 batches: 2.2599592208862305.
+[ Tue Sep 13 10:58:51 2022 ] Top1: 52.19%
+[ Tue Sep 13 10:58:51 2022 ] Top5: 86.54%
+[ Tue Sep 13 10:58:52 2022 ] Training epoch: 29
+[ Tue Sep 13 10:59:24 2022 ] Batch(55/123) done. Loss: 0.5920 lr:0.100000
+[ Tue Sep 13 11:00:00 2022 ] Eval epoch: 29
+[ Tue Sep 13 11:00:50 2022 ] Mean test loss of 258 batches: 3.8784642219543457.
+[ Tue Sep 13 11:00:50 2022 ] Top1: 40.59%
+[ Tue Sep 13 11:00:50 2022 ] Top5: 78.65%
+[ Tue Sep 13 11:00:50 2022 ] Training epoch: 30
+[ Tue Sep 13 11:01:10 2022 ] Batch(32/123) done. Loss: 0.5519 lr:0.100000
+[ Tue Sep 13 11:01:58 2022 ] Eval epoch: 30
+[ Tue Sep 13 11:02:48 2022 ] Mean test loss of 258 batches: 1.8356668949127197.
+[ Tue Sep 13 11:02:48 2022 ] Top1: 53.78%
+[ Tue Sep 13 11:02:48 2022 ] Top5: 87.10%
+[ Tue Sep 13 11:02:48 2022 ] Training epoch: 31
+[ Tue Sep 13 11:02:57 2022 ] Batch(9/123) done. Loss: 0.4373 lr:0.100000
+[ Tue Sep 13 11:03:49 2022 ] Batch(109/123) done. Loss: 0.4378 lr:0.100000
+[ Tue Sep 13 11:03:56 2022 ] Eval epoch: 31
+[ Tue Sep 13 11:04:46 2022 ] Mean test loss of 258 batches: 3.0565996170043945.
+[ Tue Sep 13 11:04:47 2022 ] Top1: 44.87%
+[ Tue Sep 13 11:04:47 2022 ] Top5: 79.94%
+[ Tue Sep 13 11:04:47 2022 ] Training epoch: 32
+[ Tue Sep 13 11:05:36 2022 ] Batch(86/123) done. Loss: 0.3620 lr:0.100000
+[ Tue Sep 13 11:05:55 2022 ] Eval epoch: 32
+[ Tue Sep 13 11:06:45 2022 ] Mean test loss of 258 batches: 1.9578102827072144.
+[ Tue Sep 13 11:06:45 2022 ] Top1: 54.48%
+[ Tue Sep 13 11:06:45 2022 ] Top5: 87.03%
+[ Tue Sep 13 11:06:45 2022 ] Training epoch: 33
+[ Tue Sep 13 11:07:22 2022 ] Batch(63/123) done. Loss: 0.3515 lr:0.100000
+[ Tue Sep 13 11:07:53 2022 ] Eval epoch: 33
+[ Tue Sep 13 11:08:43 2022 ] Mean test loss of 258 batches: 1.8989536762237549.
+[ Tue Sep 13 11:08:43 2022 ] Top1: 56.63%
+[ Tue Sep 13 11:08:43 2022 ] Top5: 88.63%
+[ Tue Sep 13 11:08:44 2022 ] Training epoch: 34
+[ Tue Sep 13 11:09:08 2022 ] Batch(40/123) done. Loss: 0.2738 lr:0.100000
+[ Tue Sep 13 11:09:51 2022 ] Eval epoch: 34
+[ Tue Sep 13 11:10:42 2022 ] Mean test loss of 258 batches: 1.9924609661102295.
+[ Tue Sep 13 11:10:42 2022 ] Top1: 54.73%
+[ Tue Sep 13 11:10:42 2022 ] Top5: 87.83%
+[ Tue Sep 13 11:10:42 2022 ] Training epoch: 35
+[ Tue Sep 13 11:10:54 2022 ] Batch(17/123) done. Loss: 0.3679 lr:0.100000
+[ Tue Sep 13 11:11:47 2022 ] Batch(117/123) done. Loss: 0.4535 lr:0.100000
+[ Tue Sep 13 11:11:50 2022 ] Eval epoch: 35
+[ Tue Sep 13 11:12:40 2022 ] Mean test loss of 258 batches: 2.53005051612854.
+[ Tue Sep 13 11:12:40 2022 ] Top1: 50.18%
+[ Tue Sep 13 11:12:40 2022 ] Top5: 85.02%
+[ Tue Sep 13 11:12:41 2022 ] Training epoch: 36
+[ Tue Sep 13 11:13:34 2022 ] Batch(94/123) done. Loss: 0.5516 lr:0.100000
+[ Tue Sep 13 11:13:48 2022 ] Eval epoch: 36
+[ Tue Sep 13 11:14:38 2022 ] Mean test loss of 258 batches: 2.025127649307251.
+[ Tue Sep 13 11:14:39 2022 ] Top1: 54.23%
+[ Tue Sep 13 11:14:39 2022 ] Top5: 87.36%
+[ Tue Sep 13 11:14:39 2022 ] Training epoch: 37
+[ Tue Sep 13 11:15:20 2022 ] Batch(71/123) done. Loss: 0.3084 lr:0.100000
+[ Tue Sep 13 11:15:47 2022 ] Eval epoch: 37
+[ Tue Sep 13 11:16:37 2022 ] Mean test loss of 258 batches: 2.328878879547119.
+[ Tue Sep 13 11:16:37 2022 ] Top1: 51.76%
+[ Tue Sep 13 11:16:37 2022 ] Top5: 86.99%
+[ Tue Sep 13 11:16:37 2022 ] Training epoch: 38
+[ Tue Sep 13 11:17:05 2022 ] Batch(48/123) done. Loss: 0.4784 lr:0.100000
+[ Tue Sep 13 11:17:45 2022 ] Eval epoch: 38
+[ Tue Sep 13 11:18:35 2022 ] Mean test loss of 258 batches: 1.6894583702087402.
+[ Tue Sep 13 11:18:35 2022 ] Top1: 58.65%
+[ Tue Sep 13 11:18:35 2022 ] Top5: 89.92%
+[ Tue Sep 13 11:18:35 2022 ] Training epoch: 39
+[ Tue Sep 13 11:18:52 2022 ] Batch(25/123) done. Loss: 0.2738 lr:0.100000
+[ Tue Sep 13 11:19:43 2022 ] Eval epoch: 39
+[ Tue Sep 13 11:20:33 2022 ] Mean test loss of 258 batches: 2.2575957775115967.
+[ Tue Sep 13 11:20:33 2022 ] Top1: 54.05%
+[ Tue Sep 13 11:20:33 2022 ] Top5: 87.58%
+[ Tue Sep 13 11:20:33 2022 ] Training epoch: 40
+[ Tue Sep 13 11:20:38 2022 ] Batch(2/123) done. Loss: 0.4132 lr:0.100000
+[ Tue Sep 13 11:21:31 2022 ] Batch(102/123) done. Loss: 0.3322 lr:0.100000
+[ Tue Sep 13 11:21:41 2022 ] Eval epoch: 40
+[ Tue Sep 13 11:22:31 2022 ] Mean test loss of 258 batches: 2.2194809913635254.
+[ Tue Sep 13 11:22:31 2022 ] Top1: 51.83%
+[ Tue Sep 13 11:22:31 2022 ] Top5: 85.48%
+[ Tue Sep 13 11:22:31 2022 ] Training epoch: 41
+[ Tue Sep 13 11:23:16 2022 ] Batch(79/123) done. Loss: 0.2911 lr:0.100000
+[ Tue Sep 13 11:23:39 2022 ] Eval epoch: 41
+[ Tue Sep 13 11:24:29 2022 ] Mean test loss of 258 batches: 2.0743963718414307.
+[ Tue Sep 13 11:24:29 2022 ] Top1: 56.13%
+[ Tue Sep 13 11:24:29 2022 ] Top5: 86.22%
+[ Tue Sep 13 11:24:29 2022 ] Training epoch: 42
+[ Tue Sep 13 11:25:02 2022 ] Batch(56/123) done. Loss: 0.1734 lr:0.100000
+[ Tue Sep 13 11:25:37 2022 ] Eval epoch: 42
+[ Tue Sep 13 11:26:27 2022 ] Mean test loss of 258 batches: 2.168731927871704.
+[ Tue Sep 13 11:26:27 2022 ] Top1: 55.61%
+[ Tue Sep 13 11:26:27 2022 ] Top5: 88.45%
+[ Tue Sep 13 11:26:27 2022 ] Training epoch: 43
+[ Tue Sep 13 11:26:49 2022 ] Batch(33/123) done. Loss: 0.1579 lr:0.100000
+[ Tue Sep 13 11:27:36 2022 ] Eval epoch: 43
+[ Tue Sep 13 11:28:26 2022 ] Mean test loss of 258 batches: 2.3242321014404297.
+[ Tue Sep 13 11:28:26 2022 ] Top1: 53.47%
+[ Tue Sep 13 11:28:26 2022 ] Top5: 85.38%
+[ Tue Sep 13 11:28:26 2022 ] Training epoch: 44
+[ Tue Sep 13 11:28:35 2022 ] Batch(10/123) done. Loss: 0.2867 lr:0.100000
+[ Tue Sep 13 11:29:27 2022 ] Batch(110/123) done. Loss: 0.3666 lr:0.100000
+[ Tue Sep 13 11:29:34 2022 ] Eval epoch: 44
+[ Tue Sep 13 11:30:24 2022 ] Mean test loss of 258 batches: 1.895141839981079.
+[ Tue Sep 13 11:30:24 2022 ] Top1: 57.17%
+[ Tue Sep 13 11:30:24 2022 ] Top5: 88.19%
+[ Tue Sep 13 11:30:24 2022 ] Training epoch: 45
+[ Tue Sep 13 11:31:14 2022 ] Batch(87/123) done. Loss: 0.3557 lr:0.100000
+[ Tue Sep 13 11:31:32 2022 ] Eval epoch: 45
+[ Tue Sep 13 11:32:23 2022 ] Mean test loss of 258 batches: 2.182115077972412.
+[ Tue Sep 13 11:32:23 2022 ] Top1: 56.97%
+[ Tue Sep 13 11:32:23 2022 ] Top5: 87.57%
+[ Tue Sep 13 11:32:23 2022 ] Training epoch: 46
+[ Tue Sep 13 11:33:00 2022 ] Batch(64/123) done. Loss: 0.2151 lr:0.100000
+[ Tue Sep 13 11:33:31 2022 ] Eval epoch: 46
+[ Tue Sep 13 11:34:21 2022 ] Mean test loss of 258 batches: 2.016373634338379.
+[ Tue Sep 13 11:34:21 2022 ] Top1: 56.32%
+[ Tue Sep 13 11:34:21 2022 ] Top5: 88.48%
+[ Tue Sep 13 11:34:21 2022 ] Training epoch: 47
+[ Tue Sep 13 11:34:46 2022 ] Batch(41/123) done. Loss: 0.1683 lr:0.100000
+[ Tue Sep 13 11:35:29 2022 ] Eval epoch: 47
+[ Tue Sep 13 11:36:19 2022 ] Mean test loss of 258 batches: 2.290623426437378.
+[ Tue Sep 13 11:36:20 2022 ] Top1: 55.28%
+[ Tue Sep 13 11:36:20 2022 ] Top5: 88.23%
+[ Tue Sep 13 11:36:20 2022 ] Training epoch: 48
+[ Tue Sep 13 11:36:33 2022 ] Batch(18/123) done. Loss: 0.1629 lr:0.100000
+[ Tue Sep 13 11:37:25 2022 ] Batch(118/123) done. Loss: 0.2851 lr:0.100000
+[ Tue Sep 13 11:37:28 2022 ] Eval epoch: 48
+[ Tue Sep 13 11:38:18 2022 ] Mean test loss of 258 batches: 2.250434398651123.
+[ Tue Sep 13 11:38:18 2022 ] Top1: 55.78%
+[ Tue Sep 13 11:38:18 2022 ] Top5: 87.46%
+[ Tue Sep 13 11:38:18 2022 ] Training epoch: 49
+[ Tue Sep 13 11:39:12 2022 ] Batch(95/123) done. Loss: 0.2600 lr:0.100000
+[ Tue Sep 13 11:39:26 2022 ] Eval epoch: 49
+[ Tue Sep 13 11:40:16 2022 ] Mean test loss of 258 batches: 2.6489593982696533.
+[ Tue Sep 13 11:40:16 2022 ] Top1: 53.65%
+[ Tue Sep 13 11:40:16 2022 ] Top5: 85.38%
+[ Tue Sep 13 11:40:17 2022 ] Training epoch: 50
+[ Tue Sep 13 11:40:58 2022 ] Batch(72/123) done. Loss: 0.1840 lr:0.100000
+[ Tue Sep 13 11:41:24 2022 ] Eval epoch: 50
+[ Tue Sep 13 11:42:14 2022 ] Mean test loss of 258 batches: 2.3897714614868164.
+[ Tue Sep 13 11:42:15 2022 ] Top1: 53.97%
+[ Tue Sep 13 11:42:15 2022 ] Top5: 87.15%
+[ Tue Sep 13 11:42:15 2022 ] Training epoch: 51
+[ Tue Sep 13 11:42:44 2022 ] Batch(49/123) done. Loss: 0.1052 lr:0.100000
+[ Tue Sep 13 11:43:23 2022 ] Eval epoch: 51
+[ Tue Sep 13 11:44:13 2022 ] Mean test loss of 258 batches: 2.0851056575775146.
+[ Tue Sep 13 11:44:13 2022 ] Top1: 56.27%
+[ Tue Sep 13 11:44:13 2022 ] Top5: 88.23%
+[ Tue Sep 13 11:44:13 2022 ] Training epoch: 52
+[ Tue Sep 13 11:44:31 2022 ] Batch(26/123) done. Loss: 0.1345 lr:0.100000
+[ Tue Sep 13 11:45:21 2022 ] Eval epoch: 52
+[ Tue Sep 13 11:46:11 2022 ] Mean test loss of 258 batches: 2.1101233959198.
+[ Tue Sep 13 11:46:12 2022 ] Top1: 55.91%
+[ Tue Sep 13 11:46:12 2022 ] Top5: 87.54%
+[ Tue Sep 13 11:46:12 2022 ] Training epoch: 53
+[ Tue Sep 13 11:46:17 2022 ] Batch(3/123) done. Loss: 0.2526 lr:0.100000
+[ Tue Sep 13 11:47:09 2022 ] Batch(103/123) done. Loss: 0.0950 lr:0.100000
+[ Tue Sep 13 11:47:20 2022 ] Eval epoch: 53
+[ Tue Sep 13 11:48:10 2022 ] Mean test loss of 258 batches: 2.9930808544158936.
+[ Tue Sep 13 11:48:10 2022 ] Top1: 51.38%
+[ Tue Sep 13 11:48:10 2022 ] Top5: 84.08%
+[ Tue Sep 13 11:48:10 2022 ] Training epoch: 54
+[ Tue Sep 13 11:48:56 2022 ] Batch(80/123) done. Loss: 0.2940 lr:0.100000
+[ Tue Sep 13 11:49:18 2022 ] Eval epoch: 54
+[ Tue Sep 13 11:50:09 2022 ] Mean test loss of 258 batches: 2.608795404434204.
+[ Tue Sep 13 11:50:09 2022 ] Top1: 53.64%
+[ Tue Sep 13 11:50:09 2022 ] Top5: 86.75%
+[ Tue Sep 13 11:50:09 2022 ] Training epoch: 55
+[ Tue Sep 13 11:50:43 2022 ] Batch(57/123) done. Loss: 0.2380 lr:0.100000
+[ Tue Sep 13 11:51:17 2022 ] Eval epoch: 55
+[ Tue Sep 13 11:52:07 2022 ] Mean test loss of 258 batches: 2.0771994590759277.
+[ Tue Sep 13 11:52:07 2022 ] Top1: 58.88%
+[ Tue Sep 13 11:52:07 2022 ] Top5: 89.08%
+[ Tue Sep 13 11:52:07 2022 ] Training epoch: 56
+[ Tue Sep 13 11:52:29 2022 ] Batch(34/123) done. Loss: 0.1672 lr:0.100000
+[ Tue Sep 13 11:53:15 2022 ] Eval epoch: 56
+[ Tue Sep 13 11:54:05 2022 ] Mean test loss of 258 batches: 2.453749179840088.
+[ Tue Sep 13 11:54:05 2022 ] Top1: 54.90%
+[ Tue Sep 13 11:54:06 2022 ] Top5: 86.35%
+[ Tue Sep 13 11:54:06 2022 ] Training epoch: 57
+[ Tue Sep 13 11:54:15 2022 ] Batch(11/123) done. Loss: 0.1159 lr:0.100000
+[ Tue Sep 13 11:55:07 2022 ] Batch(111/123) done. Loss: 0.2282 lr:0.100000
+[ Tue Sep 13 11:55:13 2022 ] Eval epoch: 57
+[ Tue Sep 13 11:56:03 2022 ] Mean test loss of 258 batches: 2.2608907222747803.
+[ Tue Sep 13 11:56:04 2022 ] Top1: 57.20%
+[ Tue Sep 13 11:56:04 2022 ] Top5: 88.04%
+[ Tue Sep 13 11:56:04 2022 ] Training epoch: 58
+[ Tue Sep 13 11:56:54 2022 ] Batch(88/123) done. Loss: 0.3523 lr:0.100000
+[ Tue Sep 13 11:57:12 2022 ] Eval epoch: 58
+[ Tue Sep 13 11:58:02 2022 ] Mean test loss of 258 batches: 4.464827060699463.
+[ Tue Sep 13 11:58:02 2022 ] Top1: 41.90%
+[ Tue Sep 13 11:58:03 2022 ] Top5: 78.26%
+[ Tue Sep 13 11:58:03 2022 ] Training epoch: 59
+[ Tue Sep 13 11:58:40 2022 ] Batch(65/123) done. Loss: 0.2308 lr:0.100000
+[ Tue Sep 13 11:59:11 2022 ] Eval epoch: 59
+[ Tue Sep 13 12:00:00 2022 ] Mean test loss of 258 batches: 2.351600408554077.
+[ Tue Sep 13 12:00:01 2022 ] Top1: 56.53%
+[ Tue Sep 13 12:00:01 2022 ] Top5: 87.81%
+[ Tue Sep 13 12:00:01 2022 ] Training epoch: 60
+[ Tue Sep 13 12:00:26 2022 ] Batch(42/123) done. Loss: 0.2450 lr:0.100000
+[ Tue Sep 13 12:01:09 2022 ] Eval epoch: 60
+[ Tue Sep 13 12:01:59 2022 ] Mean test loss of 258 batches: 2.6445326805114746.
+[ Tue Sep 13 12:01:59 2022 ] Top1: 53.40%
+[ Tue Sep 13 12:01:59 2022 ] Top5: 86.48%
+[ Tue Sep 13 12:01:59 2022 ] Training epoch: 61
+[ Tue Sep 13 12:02:12 2022 ] Batch(19/123) done. Loss: 0.1235 lr:0.010000
+[ Tue Sep 13 12:03:05 2022 ] Batch(119/123) done. Loss: 0.0991 lr:0.010000
+[ Tue Sep 13 12:03:07 2022 ] Eval epoch: 61
+[ Tue Sep 13 12:03:57 2022 ] Mean test loss of 258 batches: 1.8522855043411255.
+[ Tue Sep 13 12:03:57 2022 ] Top1: 63.30%
+[ Tue Sep 13 12:03:57 2022 ] Top5: 90.65%
+[ Tue Sep 13 12:03:57 2022 ] Training epoch: 62
+[ Tue Sep 13 12:04:52 2022 ] Batch(96/123) done. Loss: 0.1267 lr:0.010000
+[ Tue Sep 13 12:05:06 2022 ] Eval epoch: 62
+[ Tue Sep 13 12:05:56 2022 ] Mean test loss of 258 batches: 1.808545708656311.
+[ Tue Sep 13 12:05:56 2022 ] Top1: 64.12%
+[ Tue Sep 13 12:05:56 2022 ] Top5: 91.04%
+[ Tue Sep 13 12:05:56 2022 ] Training epoch: 63
+[ Tue Sep 13 12:06:38 2022 ] Batch(73/123) done. Loss: 0.0333 lr:0.010000
+[ Tue Sep 13 12:07:04 2022 ] Eval epoch: 63
+[ Tue Sep 13 12:07:54 2022 ] Mean test loss of 258 batches: 1.7879505157470703.
+[ Tue Sep 13 12:07:54 2022 ] Top1: 65.01%
+[ Tue Sep 13 12:07:54 2022 ] Top5: 91.40%
+[ Tue Sep 13 12:07:55 2022 ] Training epoch: 64
+[ Tue Sep 13 12:08:24 2022 ] Batch(50/123) done. Loss: 0.0799 lr:0.010000
+[ Tue Sep 13 12:09:02 2022 ] Eval epoch: 64
+[ Tue Sep 13 12:09:53 2022 ] Mean test loss of 258 batches: 1.8374004364013672.
+[ Tue Sep 13 12:09:53 2022 ] Top1: 65.17%
+[ Tue Sep 13 12:09:53 2022 ] Top5: 91.44%
+[ Tue Sep 13 12:09:53 2022 ] Training epoch: 65
+[ Tue Sep 13 12:10:10 2022 ] Batch(27/123) done. Loss: 0.0559 lr:0.010000
+[ Tue Sep 13 12:11:00 2022 ] Eval epoch: 65
+[ Tue Sep 13 12:11:51 2022 ] Mean test loss of 258 batches: 1.817768931388855.
+[ Tue Sep 13 12:11:51 2022 ] Top1: 65.39%
+[ Tue Sep 13 12:11:51 2022 ] Top5: 91.55%
+[ Tue Sep 13 12:11:51 2022 ] Training epoch: 66
+[ Tue Sep 13 12:11:56 2022 ] Batch(4/123) done. Loss: 0.0192 lr:0.010000
+[ Tue Sep 13 12:12:49 2022 ] Batch(104/123) done. Loss: 0.0499 lr:0.010000
+[ Tue Sep 13 12:12:59 2022 ] Eval epoch: 66
+[ Tue Sep 13 12:13:49 2022 ] Mean test loss of 258 batches: 1.8281463384628296.
+[ Tue Sep 13 12:13:49 2022 ] Top1: 65.28%
+[ Tue Sep 13 12:13:49 2022 ] Top5: 91.49%
+[ Tue Sep 13 12:13:49 2022 ] Training epoch: 67
+[ Tue Sep 13 12:14:35 2022 ] Batch(81/123) done. Loss: 0.0402 lr:0.010000
+[ Tue Sep 13 12:14:57 2022 ] Eval epoch: 67
+[ Tue Sep 13 12:15:47 2022 ] Mean test loss of 258 batches: 1.8663133382797241.
+[ Tue Sep 13 12:15:47 2022 ] Top1: 65.43%
+[ Tue Sep 13 12:15:47 2022 ] Top5: 91.58%
+[ Tue Sep 13 12:15:47 2022 ] Training epoch: 68
+[ Tue Sep 13 12:16:22 2022 ] Batch(58/123) done. Loss: 0.0263 lr:0.010000
+[ Tue Sep 13 12:16:55 2022 ] Eval epoch: 68
+[ Tue Sep 13 12:17:46 2022 ] Mean test loss of 258 batches: 1.8553483486175537.
+[ Tue Sep 13 12:17:46 2022 ] Top1: 64.97%
+[ Tue Sep 13 12:17:46 2022 ] Top5: 91.37%
+[ Tue Sep 13 12:17:46 2022 ] Training epoch: 69
+[ Tue Sep 13 12:18:08 2022 ] Batch(35/123) done. Loss: 0.0370 lr:0.010000
+[ Tue Sep 13 12:18:54 2022 ] Eval epoch: 69
+[ Tue Sep 13 12:19:44 2022 ] Mean test loss of 258 batches: 1.8636685609817505.
+[ Tue Sep 13 12:19:44 2022 ] Top1: 65.08%
+[ Tue Sep 13 12:19:44 2022 ] Top5: 91.50%
+[ Tue Sep 13 12:19:44 2022 ] Training epoch: 70
+[ Tue Sep 13 12:19:54 2022 ] Batch(12/123) done. Loss: 0.0232 lr:0.010000
+[ Tue Sep 13 12:20:47 2022 ] Batch(112/123) done. Loss: 0.0477 lr:0.010000
+[ Tue Sep 13 12:20:52 2022 ] Eval epoch: 70
+[ Tue Sep 13 12:21:42 2022 ] Mean test loss of 258 batches: 1.8849055767059326.
+[ Tue Sep 13 12:21:42 2022 ] Top1: 65.17%
+[ Tue Sep 13 12:21:42 2022 ] Top5: 91.31%
+[ Tue Sep 13 12:21:42 2022 ] Training epoch: 71
+[ Tue Sep 13 12:22:33 2022 ] Batch(89/123) done. Loss: 0.1645 lr:0.010000
+[ Tue Sep 13 12:22:50 2022 ] Eval epoch: 71
+[ Tue Sep 13 12:23:40 2022 ] Mean test loss of 258 batches: 1.9196712970733643.
+[ Tue Sep 13 12:23:40 2022 ] Top1: 64.89%
+[ Tue Sep 13 12:23:40 2022 ] Top5: 91.48%
+[ Tue Sep 13 12:23:40 2022 ] Training epoch: 72
+[ Tue Sep 13 12:24:19 2022 ] Batch(66/123) done. Loss: 0.0282 lr:0.010000
+[ Tue Sep 13 12:24:48 2022 ] Eval epoch: 72
+[ Tue Sep 13 12:25:38 2022 ] Mean test loss of 258 batches: 1.9522173404693604.
+[ Tue Sep 13 12:25:38 2022 ] Top1: 64.70%
+[ Tue Sep 13 12:25:38 2022 ] Top5: 91.11%
+[ Tue Sep 13 12:25:38 2022 ] Training epoch: 73
+[ Tue Sep 13 12:26:05 2022 ] Batch(43/123) done. Loss: 0.0220 lr:0.010000
+[ Tue Sep 13 12:26:46 2022 ] Eval epoch: 73
+[ Tue Sep 13 12:27:37 2022 ] Mean test loss of 258 batches: 1.919583797454834.
+[ Tue Sep 13 12:27:37 2022 ] Top1: 65.20%
+[ Tue Sep 13 12:27:37 2022 ] Top5: 91.27%
+[ Tue Sep 13 12:27:37 2022 ] Training epoch: 74
+[ Tue Sep 13 12:27:51 2022 ] Batch(20/123) done. Loss: 0.0347 lr:0.010000
+[ Tue Sep 13 12:28:43 2022 ] Batch(120/123) done. Loss: 0.0459 lr:0.010000
+[ Tue Sep 13 12:28:45 2022 ] Eval epoch: 74
+[ Tue Sep 13 12:29:35 2022 ] Mean test loss of 258 batches: 1.946401596069336.
+[ Tue Sep 13 12:29:35 2022 ] Top1: 64.86%
+[ Tue Sep 13 12:29:35 2022 ] Top5: 91.20%
+[ Tue Sep 13 12:29:35 2022 ] Training epoch: 75
+[ Tue Sep 13 12:30:30 2022 ] Batch(97/123) done. Loss: 0.0223 lr:0.010000
+[ Tue Sep 13 12:30:43 2022 ] Eval epoch: 75
+[ Tue Sep 13 12:31:33 2022 ] Mean test loss of 258 batches: 1.9171396493911743.
+[ Tue Sep 13 12:31:33 2022 ] Top1: 65.28%
+[ Tue Sep 13 12:31:33 2022 ] Top5: 91.25%
+[ Tue Sep 13 12:31:33 2022 ] Training epoch: 76
+[ Tue Sep 13 12:32:16 2022 ] Batch(74/123) done. Loss: 0.0539 lr:0.010000
+[ Tue Sep 13 12:32:41 2022 ] Eval epoch: 76
+[ Tue Sep 13 12:33:31 2022 ] Mean test loss of 258 batches: 1.967031717300415.
+[ Tue Sep 13 12:33:32 2022 ] Top1: 64.91%
+[ Tue Sep 13 12:33:32 2022 ] Top5: 91.35%
+[ Tue Sep 13 12:33:32 2022 ] Training epoch: 77
+[ Tue Sep 13 12:34:02 2022 ] Batch(51/123) done. Loss: 0.0534 lr:0.010000
+[ Tue Sep 13 12:34:40 2022 ] Eval epoch: 77
+[ Tue Sep 13 12:35:29 2022 ] Mean test loss of 258 batches: 1.9728081226348877.
+[ Tue Sep 13 12:35:29 2022 ] Top1: 65.31%
+[ Tue Sep 13 12:35:30 2022 ] Top5: 91.17%
+[ Tue Sep 13 12:35:30 2022 ] Training epoch: 78
|
484 |
+
[ Tue Sep 13 12:35:48 2022 ] Batch(28/123) done. Loss: 0.0317 lr:0.010000
|
485 |
+
[ Tue Sep 13 12:36:38 2022 ] Eval epoch: 78
|
486 |
+
[ Tue Sep 13 12:37:28 2022 ] Mean test loss of 258 batches: 1.949669361114502.
|
487 |
+
[ Tue Sep 13 12:37:28 2022 ] Top1: 65.11%
|
488 |
+
[ Tue Sep 13 12:37:28 2022 ] Top5: 91.25%
|
489 |
+
[ Tue Sep 13 12:37:28 2022 ] Training epoch: 79
|
490 |
+
[ Tue Sep 13 12:37:35 2022 ] Batch(5/123) done. Loss: 0.0147 lr:0.010000
|
491 |
+
[ Tue Sep 13 12:38:27 2022 ] Batch(105/123) done. Loss: 0.0136 lr:0.010000
|
492 |
+
[ Tue Sep 13 12:38:36 2022 ] Eval epoch: 79
|
493 |
+
[ Tue Sep 13 12:39:27 2022 ] Mean test loss of 258 batches: 1.9937751293182373.
|
494 |
+
[ Tue Sep 13 12:39:27 2022 ] Top1: 65.29%
|
495 |
+
[ Tue Sep 13 12:39:27 2022 ] Top5: 91.31%
|
496 |
+
[ Tue Sep 13 12:39:27 2022 ] Training epoch: 80
|
497 |
+
[ Tue Sep 13 12:40:14 2022 ] Batch(82/123) done. Loss: 0.0356 lr:0.010000
|
498 |
+
[ Tue Sep 13 12:40:35 2022 ] Eval epoch: 80
|
499 |
+
[ Tue Sep 13 12:41:25 2022 ] Mean test loss of 258 batches: 2.0242815017700195.
|
500 |
+
[ Tue Sep 13 12:41:25 2022 ] Top1: 64.96%
|
501 |
+
[ Tue Sep 13 12:41:25 2022 ] Top5: 91.13%
|
502 |
+
[ Tue Sep 13 12:41:25 2022 ] Training epoch: 81
|
503 |
+
[ Tue Sep 13 12:42:00 2022 ] Batch(59/123) done. Loss: 0.0186 lr:0.001000
|
504 |
+
[ Tue Sep 13 12:42:33 2022 ] Eval epoch: 81
|
505 |
+
[ Tue Sep 13 12:43:23 2022 ] Mean test loss of 258 batches: 1.977448582649231.
|
506 |
+
[ Tue Sep 13 12:43:24 2022 ] Top1: 65.55%
|
507 |
+
[ Tue Sep 13 12:43:24 2022 ] Top5: 91.38%
|
508 |
+
[ Tue Sep 13 12:43:24 2022 ] Training epoch: 82
|
509 |
+
[ Tue Sep 13 12:43:46 2022 ] Batch(36/123) done. Loss: 0.0176 lr:0.001000
|
510 |
+
[ Tue Sep 13 12:44:32 2022 ] Eval epoch: 82
|
511 |
+
[ Tue Sep 13 12:45:22 2022 ] Mean test loss of 258 batches: 2.012622356414795.
|
512 |
+
[ Tue Sep 13 12:45:22 2022 ] Top1: 65.12%
|
513 |
+
[ Tue Sep 13 12:45:22 2022 ] Top5: 91.13%
|
514 |
+
[ Tue Sep 13 12:45:22 2022 ] Training epoch: 83
|
515 |
+
[ Tue Sep 13 12:45:33 2022 ] Batch(13/123) done. Loss: 0.1106 lr:0.001000
|
516 |
+
[ Tue Sep 13 12:46:25 2022 ] Batch(113/123) done. Loss: 0.1164 lr:0.001000
|
517 |
+
[ Tue Sep 13 12:46:30 2022 ] Eval epoch: 83
|
518 |
+
[ Tue Sep 13 12:47:21 2022 ] Mean test loss of 258 batches: 2.000906467437744.
|
519 |
+
[ Tue Sep 13 12:47:21 2022 ] Top1: 65.42%
|
520 |
+
[ Tue Sep 13 12:47:21 2022 ] Top5: 91.39%
|
521 |
+
[ Tue Sep 13 12:47:21 2022 ] Training epoch: 84
|
522 |
+
[ Tue Sep 13 12:48:12 2022 ] Batch(90/123) done. Loss: 0.0565 lr:0.001000
|
523 |
+
[ Tue Sep 13 12:48:29 2022 ] Eval epoch: 84
|
524 |
+
[ Tue Sep 13 12:49:19 2022 ] Mean test loss of 258 batches: 2.0127618312835693.
|
525 |
+
[ Tue Sep 13 12:49:19 2022 ] Top1: 65.14%
|
526 |
+
[ Tue Sep 13 12:49:19 2022 ] Top5: 91.21%
|
527 |
+
[ Tue Sep 13 12:49:20 2022 ] Training epoch: 85
|
528 |
+
[ Tue Sep 13 12:49:59 2022 ] Batch(67/123) done. Loss: 0.0713 lr:0.001000
|
529 |
+
[ Tue Sep 13 12:50:28 2022 ] Eval epoch: 85
|
530 |
+
[ Tue Sep 13 12:51:18 2022 ] Mean test loss of 258 batches: 2.0610320568084717.
|
531 |
+
[ Tue Sep 13 12:51:18 2022 ] Top1: 64.55%
|
532 |
+
[ Tue Sep 13 12:51:18 2022 ] Top5: 91.02%
|
533 |
+
[ Tue Sep 13 12:51:19 2022 ] Training epoch: 86
|
534 |
+
[ Tue Sep 13 12:51:45 2022 ] Batch(44/123) done. Loss: 0.0331 lr:0.001000
|
535 |
+
[ Tue Sep 13 12:52:26 2022 ] Eval epoch: 86
|
536 |
+
[ Tue Sep 13 12:53:17 2022 ] Mean test loss of 258 batches: 1.9696850776672363.
|
537 |
+
[ Tue Sep 13 12:53:17 2022 ] Top1: 65.51%
|
538 |
+
[ Tue Sep 13 12:53:17 2022 ] Top5: 91.43%
|
539 |
+
[ Tue Sep 13 12:53:17 2022 ] Training epoch: 87
|
540 |
+
[ Tue Sep 13 12:53:32 2022 ] Batch(21/123) done. Loss: 0.1194 lr:0.001000
|
541 |
+
[ Tue Sep 13 12:54:24 2022 ] Batch(121/123) done. Loss: 0.0801 lr:0.001000
|
542 |
+
[ Tue Sep 13 12:54:25 2022 ] Eval epoch: 87
|
543 |
+
[ Tue Sep 13 12:55:15 2022 ] Mean test loss of 258 batches: 2.010471820831299.
|
544 |
+
[ Tue Sep 13 12:55:15 2022 ] Top1: 65.19%
|
545 |
+
[ Tue Sep 13 12:55:15 2022 ] Top5: 91.33%
|
546 |
+
[ Tue Sep 13 12:55:16 2022 ] Training epoch: 88
|
547 |
+
[ Tue Sep 13 12:56:11 2022 ] Batch(98/123) done. Loss: 0.0768 lr:0.001000
|
548 |
+
[ Tue Sep 13 12:56:24 2022 ] Eval epoch: 88
|
549 |
+
[ Tue Sep 13 12:57:15 2022 ] Mean test loss of 258 batches: 2.0580830574035645.
|
550 |
+
[ Tue Sep 13 12:57:15 2022 ] Top1: 64.70%
|
551 |
+
[ Tue Sep 13 12:57:15 2022 ] Top5: 91.13%
|
552 |
+
[ Tue Sep 13 12:57:15 2022 ] Training epoch: 89
|
553 |
+
[ Tue Sep 13 12:57:58 2022 ] Batch(75/123) done. Loss: 0.0448 lr:0.001000
|
554 |
+
[ Tue Sep 13 12:58:23 2022 ] Eval epoch: 89
|
555 |
+
[ Tue Sep 13 12:59:13 2022 ] Mean test loss of 258 batches: 2.0660269260406494.
|
556 |
+
[ Tue Sep 13 12:59:13 2022 ] Top1: 64.53%
|
557 |
+
[ Tue Sep 13 12:59:13 2022 ] Top5: 91.05%
|
558 |
+
[ Tue Sep 13 12:59:13 2022 ] Training epoch: 90
|
559 |
+
[ Tue Sep 13 12:59:44 2022 ] Batch(52/123) done. Loss: 0.0275 lr:0.001000
|
560 |
+
[ Tue Sep 13 13:00:21 2022 ] Eval epoch: 90
|
561 |
+
[ Tue Sep 13 13:01:11 2022 ] Mean test loss of 258 batches: 1.99518883228302.
|
562 |
+
[ Tue Sep 13 13:01:11 2022 ] Top1: 65.26%
|
563 |
+
[ Tue Sep 13 13:01:11 2022 ] Top5: 91.37%
|
564 |
+
[ Tue Sep 13 13:01:12 2022 ] Training epoch: 91
|
565 |
+
[ Tue Sep 13 13:01:30 2022 ] Batch(29/123) done. Loss: 0.0468 lr:0.001000
|
566 |
+
[ Tue Sep 13 13:02:20 2022 ] Eval epoch: 91
|
567 |
+
[ Tue Sep 13 13:03:10 2022 ] Mean test loss of 258 batches: 2.032771587371826.
|
568 |
+
[ Tue Sep 13 13:03:10 2022 ] Top1: 64.77%
|
569 |
+
[ Tue Sep 13 13:03:10 2022 ] Top5: 91.21%
|
570 |
+
[ Tue Sep 13 13:03:10 2022 ] Training epoch: 92
|
571 |
+
[ Tue Sep 13 13:03:17 2022 ] Batch(6/123) done. Loss: 0.0512 lr:0.001000
|
572 |
+
[ Tue Sep 13 13:04:10 2022 ] Batch(106/123) done. Loss: 0.0190 lr:0.001000
|
573 |
+
[ Tue Sep 13 13:04:18 2022 ] Eval epoch: 92
|
574 |
+
[ Tue Sep 13 13:05:08 2022 ] Mean test loss of 258 batches: 1.9861313104629517.
|
575 |
+
[ Tue Sep 13 13:05:08 2022 ] Top1: 65.41%
|
576 |
+
[ Tue Sep 13 13:05:09 2022 ] Top5: 91.44%
|
577 |
+
[ Tue Sep 13 13:05:09 2022 ] Training epoch: 93
|
578 |
+
[ Tue Sep 13 13:05:56 2022 ] Batch(83/123) done. Loss: 0.0261 lr:0.001000
|
579 |
+
[ Tue Sep 13 13:06:17 2022 ] Eval epoch: 93
|
580 |
+
[ Tue Sep 13 13:07:07 2022 ] Mean test loss of 258 batches: 2.015868663787842.
|
581 |
+
[ Tue Sep 13 13:07:07 2022 ] Top1: 65.24%
|
582 |
+
[ Tue Sep 13 13:07:07 2022 ] Top5: 91.29%
|
583 |
+
[ Tue Sep 13 13:07:07 2022 ] Training epoch: 94
|
584 |
+
[ Tue Sep 13 13:07:43 2022 ] Batch(60/123) done. Loss: 0.0185 lr:0.001000
|
585 |
+
[ Tue Sep 13 13:08:15 2022 ] Eval epoch: 94
|
586 |
+
[ Tue Sep 13 13:09:05 2022 ] Mean test loss of 258 batches: 2.033536195755005.
|
587 |
+
[ Tue Sep 13 13:09:05 2022 ] Top1: 65.03%
|
588 |
+
[ Tue Sep 13 13:09:05 2022 ] Top5: 91.31%
|
589 |
+
[ Tue Sep 13 13:09:06 2022 ] Training epoch: 95
|
590 |
+
[ Tue Sep 13 13:09:29 2022 ] Batch(37/123) done. Loss: 0.0353 lr:0.001000
|
591 |
+
[ Tue Sep 13 13:10:14 2022 ] Eval epoch: 95
|
592 |
+
[ Tue Sep 13 13:11:04 2022 ] Mean test loss of 258 batches: 2.0294594764709473.
|
593 |
+
[ Tue Sep 13 13:11:04 2022 ] Top1: 65.16%
|
594 |
+
[ Tue Sep 13 13:11:04 2022 ] Top5: 91.18%
|
595 |
+
[ Tue Sep 13 13:11:04 2022 ] Training epoch: 96
|
596 |
+
[ Tue Sep 13 13:11:15 2022 ] Batch(14/123) done. Loss: 0.0521 lr:0.001000
|
597 |
+
[ Tue Sep 13 13:12:07 2022 ] Batch(114/123) done. Loss: 0.0375 lr:0.001000
|
598 |
+
[ Tue Sep 13 13:12:12 2022 ] Eval epoch: 96
|
599 |
+
[ Tue Sep 13 13:13:02 2022 ] Mean test loss of 258 batches: 2.0271966457366943.
|
600 |
+
[ Tue Sep 13 13:13:02 2022 ] Top1: 65.34%
|
601 |
+
[ Tue Sep 13 13:13:02 2022 ] Top5: 91.39%
|
602 |
+
[ Tue Sep 13 13:13:02 2022 ] Training epoch: 97
|
603 |
+
[ Tue Sep 13 13:13:53 2022 ] Batch(91/123) done. Loss: 0.0273 lr:0.001000
|
604 |
+
[ Tue Sep 13 13:14:10 2022 ] Eval epoch: 97
|
605 |
+
[ Tue Sep 13 13:15:00 2022 ] Mean test loss of 258 batches: 2.0177931785583496.
|
606 |
+
[ Tue Sep 13 13:15:00 2022 ] Top1: 65.08%
|
607 |
+
[ Tue Sep 13 13:15:00 2022 ] Top5: 91.16%
|
608 |
+
[ Tue Sep 13 13:15:00 2022 ] Training epoch: 98
|
609 |
+
[ Tue Sep 13 13:15:39 2022 ] Batch(68/123) done. Loss: 0.0194 lr:0.001000
|
610 |
+
[ Tue Sep 13 13:16:08 2022 ] Eval epoch: 98
|
611 |
+
[ Tue Sep 13 13:16:58 2022 ] Mean test loss of 258 batches: 2.0503251552581787.
|
612 |
+
[ Tue Sep 13 13:16:58 2022 ] Top1: 64.79%
|
613 |
+
[ Tue Sep 13 13:16:58 2022 ] Top5: 91.08%
|
614 |
+
[ Tue Sep 13 13:16:58 2022 ] Training epoch: 99
|
615 |
+
[ Tue Sep 13 13:17:25 2022 ] Batch(45/123) done. Loss: 0.0339 lr:0.001000
|
616 |
+
[ Tue Sep 13 13:18:06 2022 ] Eval epoch: 99
|
617 |
+
[ Tue Sep 13 13:18:56 2022 ] Mean test loss of 258 batches: 2.0308101177215576.
|
618 |
+
[ Tue Sep 13 13:18:57 2022 ] Top1: 65.18%
|
619 |
+
[ Tue Sep 13 13:18:57 2022 ] Top5: 91.13%
|
620 |
+
[ Tue Sep 13 13:18:57 2022 ] Training epoch: 100
|
621 |
+
[ Tue Sep 13 13:19:12 2022 ] Batch(22/123) done. Loss: 0.0428 lr:0.001000
|
622 |
+
[ Tue Sep 13 13:20:05 2022 ] Batch(122/123) done. Loss: 0.0780 lr:0.001000
|
623 |
+
[ Tue Sep 13 13:20:05 2022 ] Eval epoch: 100
|
624 |
+
[ Tue Sep 13 13:20:55 2022 ] Mean test loss of 258 batches: 2.0031473636627197.
|
625 |
+
[ Tue Sep 13 13:20:55 2022 ] Top1: 65.13%
|
626 |
+
[ Tue Sep 13 13:20:55 2022 ] Top5: 91.29%
|
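Reader's note (my illustration, not part of the uploaded files): the Top1/Top5 figures in these logs are the standard top-k accuracies over the validation set. A minimal sketch of the conventional computation, assuming PyTorch:

import torch

def topk_accuracy(logits: torch.Tensor, labels: torch.Tensor, k: int) -> float:
    # logits: (N, num_class); labels: (N,)
    topk = logits.topk(k, dim=1).indices           # (N, k) highest-scoring classes
    hits = (topk == labels.unsqueeze(1)).any(1)    # true if the label is among the top k
    return hits.float().mean().item()

logits = torch.randn(64, 60)                       # dummy scores for 60 NTU classes
labels = torch.randint(0, 60, (64,))
print(topk_accuracy(logits, labels, 1), topk_accuracy(logits, labels, 5))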
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_motion_xsub/config.yaml
ADDED
@@ -0,0 +1,59 @@
Experiment_name: ntu_joint_motion_xsub
base_lr: 0.1
batch_size: 64
config: ./config/nturgbd-cross-subject/train_joint_motion.yaml
device:
- 4
- 5
eval_interval: 5
feeder: feeders.feeder.Feeder
groups: 8
ignore_weights: []
keep_rate: 0.9
log_interval: 100
model: model.decouple_gcn.Model
model_args:
  block_size: 41
  graph: graph.ntu_rgb_d.Graph
  graph_args:
    labeling_mode: spatial
  groups: 16
  num_class: 60
  num_person: 2
  num_point: 25
model_saved_name: ./save_models/ntu_joint_motion_xsub
nesterov: true
num_epoch: 100
num_worker: 32
only_train_epoch: 1
only_train_part: true
optimizer: SGD
phase: train
print_log: true
save_interval: 2
save_score: false
seed: 1
show_topk:
- 1
- 5
start_epoch: 0
step:
- 60
- 80
test_batch_size: 64
test_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint_motion.npy
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
train_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint_motion.npy
  debug: false
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
  normalization: false
  random_choose: false
  random_move: false
  random_shift: false
  window_size: -1
warm_up_epoch: 0
weight_decay: 0.0001
weights: null
work_dir: ./work_dir/ntu_joint_motion_xsub
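Reader's note (my sketch, not part of the uploaded files): a config like the one above is typically consumed by loading the YAML and resolving the dotted model path. This assumes PyYAML is installed and that the repo's model/ and graph/ packages are importable from the working directory:

import yaml

with open('config.yaml') as f:
    cfg = yaml.safe_load(f)

def import_class(name):
    # Same dotted-path import helper as in decouple_gcn.py below.
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod

Model = import_class(cfg['model'])    # resolves model.decouple_gcn.Model
model = Model(**cfg['model_args'])    # num_class=60, num_point=25, groups=16, ...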
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_motion_xsub/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import math
from model.dropSke import DropBlock_Ske
from model.dropT import DropBlockT_1d


def import_class(name):
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod


def conv_branch_init(conv):
    weight = conv.weight
    n = weight.size(0)
    k1 = weight.size(1)
    k2 = weight.size(2)
    nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
    nn.init.constant(conv.bias, 0)


def conv_init(conv):
    nn.init.kaiming_normal(conv.weight, mode='fan_out')
    nn.init.constant(conv.bias, 0)


def bn_init(bn, scale):
    nn.init.constant(bn.weight, scale)
    nn.init.constant(bn.bias, 0)


class unit_tcn(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
        super(unit_tcn, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

        self.dropS = DropBlock_Ske(num_point=num_point)
        self.dropT = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob, A):
        x = self.bn(self.conv(x))
        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
        return x


class unit_tcn_skip(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
        super(unit_tcn_skip, self).__init__()
        pad = int((kernel_size - 1) / 2)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
                              stride=(stride, 1))

        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        conv_init(self.conv)
        bn_init(self.bn, 1)

    def forward(self, x):
        x = self.bn(self.conv(x))
        return x


class unit_gcn(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
        super(unit_gcn, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_point = num_point
        self.groups = groups
        self.num_subset = num_subset
        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)

        if in_channels != out_channels:
            self.down = nn.Sequential(
                nn.Conv2d(in_channels, out_channels, 1),
                nn.BatchNorm2d(out_channels)
            )
        else:
            self.down = lambda x: x

        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                bn_init(m, 1)
        bn_init(self.bn, 1e-6)

        self.Linear_weight = nn.Parameter(torch.zeros(
            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
            0.5 / (out_channels * num_subset)))

        self.Linear_bias = nn.Parameter(torch.zeros(
            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
        nn.init.constant(self.Linear_bias, 1e-6)

        eye_array = []
        for i in range(out_channels):
            eye_array.append(torch.eye(num_point))
        self.eyes = nn.Parameter(torch.tensor(torch.stack(
            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]

    def norm(self, A):
        b, c, h, w = A.size()
        A = A.view(c, self.num_point, self.num_point)
        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
        D_list_12 = (D_list + 0.001)**(-1)
        D_12 = self.eyes * D_list_12
        A = torch.bmm(A, D_12).view(b, c, h, w)
        return A

    def forward(self, x0):
        learn_A = self.DecoupleA.repeat(
            1, self.out_channels // self.groups, 1, 1)
        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)

        x = torch.einsum(
            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
        x = x + self.Linear_bias
        x = self.bn0(x)

        n, kc, t, v = x.size()
        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))

        x = self.bn(x)
        x += self.down(x0)
        x = self.relu(x)
        return x


class TCN_GCN_unit(nn.Module):
    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
        super(TCN_GCN_unit, self).__init__()
        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
        self.tcn1 = unit_tcn(out_channels, out_channels,
                             stride=stride, num_point=num_point)
        self.relu = nn.ReLU()

        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)

        if not residual:
            self.residual = lambda x: 0

        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x

        else:
            self.residual = unit_tcn_skip(
                in_channels, out_channels, kernel_size=1, stride=stride)
        self.dropSke = DropBlock_Ske(num_point=num_point)
        self.dropT_skip = DropBlockT_1d(block_size=block_size)

    def forward(self, x, keep_prob):
        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
        return self.relu(x)


class Model(nn.Module):
    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
        super(Model, self).__init__()

        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)

        A = self.graph.A
        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)

        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
                               block_size, residual=False)
        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
        self.l5 = TCN_GCN_unit(
            64, 128, A, groups, num_point, block_size, stride=2)
        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
        self.l8 = TCN_GCN_unit(128, 256, A, groups,
                               num_point, block_size, stride=2)
        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)

        self.fc = nn.Linear(256, num_class)
        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
        bn_init(self.data_bn, 1)

    def forward(self, x, keep_prob=0.9):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(
            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        x = self.l1(x, 1.0)
        x = self.l2(x, 1.0)
        x = self.l3(x, 1.0)
        x = self.l4(x, 1.0)
        x = self.l5(x, 1.0)
        x = self.l6(x, 1.0)
        x = self.l7(x, keep_prob)
        x = self.l8(x, keep_prob)
        x = self.l9(x, keep_prob)
        x = self.l10(x, keep_prob)

        # N*M,C,T,V
        c_new = x.size(1)
        x = x.reshape(N, M, c_new, -1)
        x = x.mean(3).mean(1)

        return self.fc(x)
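Reader's note (my smoke-test sketch, not part of the upload): the Model above expects skeleton input laid out as (N, C, T, V, M). Several parameters in the module hardcode device='cuda', so this assumes a CUDA-capable machine with the repo's graph package on the path:

import torch
from model.decouple_gcn import Model

model = Model(num_class=60, num_point=25, num_person=2, groups=8,
              block_size=41, graph='graph.ntu_rgb_d.Graph',
              graph_args={'labeling_mode': 'spatial'}).cuda()

# (N, C, T, V, M): batch, xyz channels, frames, 25 joints, 2 bodies.
x = torch.randn(2, 3, 64, 25, 2).cuda()
logits = model(x, keep_prob=0.9)
print(logits.shape)  # torch.Size([2, 60])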
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_motion_xsub/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1e80ca63a7e7eaab88a1f1a22672471a1ed95998eb32386b95211268824ea1ba
size 4979902
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_motion_xsub/log.txt
ADDED
@@ -0,0 +1,626 @@
[ Wed Sep 14 08:59:06 2022 ] Parameters:
{'work_dir': './work_dir/ntu_joint_motion_xsub', 'model_saved_name': './save_models/ntu_joint_motion_xsub', 'Experiment_name': 'ntu_joint_motion_xsub', 'config': './config/nturgbd-cross-subject/train_joint_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [4, 5], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}

[ Wed Sep 14 08:59:06 2022 ] Training epoch: 1
[ Wed Sep 14 08:59:56 2022 ] Batch(99/123) done. Loss: 2.9676 lr:0.100000
[ Wed Sep 14 09:00:06 2022 ] Eval epoch: 1
[ Wed Sep 14 09:00:56 2022 ] Mean test loss of 258 batches: 7.117305278778076.
[ Wed Sep 14 09:00:56 2022 ] Top1: 4.80%
[ Wed Sep 14 09:00:56 2022 ] Top5: 19.06%
[ Wed Sep 14 09:00:56 2022 ] Training epoch: 2
[ Wed Sep 14 09:01:40 2022 ] Batch(76/123) done. Loss: 2.6146 lr:0.100000
[ Wed Sep 14 09:02:04 2022 ] Eval epoch: 2
[ Wed Sep 14 09:02:55 2022 ] Mean test loss of 258 batches: 5.275425434112549.
[ Wed Sep 14 09:02:55 2022 ] Top1: 10.10%
[ Wed Sep 14 09:02:55 2022 ] Top5: 28.08%
[ Wed Sep 14 09:02:55 2022 ] Training epoch: 3
[ Wed Sep 14 09:03:26 2022 ] Batch(53/123) done. Loss: 2.8425 lr:0.100000
[ Wed Sep 14 09:04:03 2022 ] Eval epoch: 3
[ Wed Sep 14 09:04:53 2022 ] Mean test loss of 258 batches: 4.473618507385254.
[ Wed Sep 14 09:04:53 2022 ] Top1: 10.51%
[ Wed Sep 14 09:04:53 2022 ] Top5: 34.15%
[ Wed Sep 14 09:04:54 2022 ] Training epoch: 4
[ Wed Sep 14 09:05:13 2022 ] Batch(30/123) done. Loss: 2.3812 lr:0.100000
[ Wed Sep 14 09:06:01 2022 ] Eval epoch: 4
[ Wed Sep 14 09:06:51 2022 ] Mean test loss of 258 batches: 4.136472702026367.
[ Wed Sep 14 09:06:52 2022 ] Top1: 15.39%
[ Wed Sep 14 09:06:52 2022 ] Top5: 41.83%
[ Wed Sep 14 09:06:52 2022 ] Training epoch: 5
[ Wed Sep 14 09:06:59 2022 ] Batch(7/123) done. Loss: 2.0284 lr:0.100000
[ Wed Sep 14 09:07:51 2022 ] Batch(107/123) done. Loss: 1.6414 lr:0.100000
[ Wed Sep 14 09:08:00 2022 ] Eval epoch: 5
[ Wed Sep 14 09:08:50 2022 ] Mean test loss of 258 batches: 3.7556955814361572.
[ Wed Sep 14 09:08:50 2022 ] Top1: 20.32%
[ Wed Sep 14 09:08:50 2022 ] Top5: 48.43%
[ Wed Sep 14 09:08:50 2022 ] Training epoch: 6
[ Wed Sep 14 09:09:38 2022 ] Batch(84/123) done. Loss: 1.3047 lr:0.100000
[ Wed Sep 14 09:09:58 2022 ] Eval epoch: 6
[ Wed Sep 14 09:10:48 2022 ] Mean test loss of 258 batches: 3.519871473312378.
[ Wed Sep 14 09:10:48 2022 ] Top1: 21.75%
[ Wed Sep 14 09:10:49 2022 ] Top5: 53.95%
[ Wed Sep 14 09:10:49 2022 ] Training epoch: 7
[ Wed Sep 14 09:11:24 2022 ] Batch(61/123) done. Loss: 1.5018 lr:0.100000
[ Wed Sep 14 09:11:57 2022 ] Eval epoch: 7
[ Wed Sep 14 09:12:47 2022 ] Mean test loss of 258 batches: 3.1029582023620605.
[ Wed Sep 14 09:12:47 2022 ] Top1: 28.23%
[ Wed Sep 14 09:12:47 2022 ] Top5: 59.74%
[ Wed Sep 14 09:12:47 2022 ] Training epoch: 8
[ Wed Sep 14 09:13:11 2022 ] Batch(38/123) done. Loss: 1.2094 lr:0.100000
[ Wed Sep 14 09:13:55 2022 ] Eval epoch: 8
[ Wed Sep 14 09:14:46 2022 ] Mean test loss of 258 batches: 3.524228572845459.
[ Wed Sep 14 09:14:46 2022 ] Top1: 26.87%
[ Wed Sep 14 09:14:46 2022 ] Top5: 59.79%
[ Wed Sep 14 09:14:46 2022 ] Training epoch: 9
[ Wed Sep 14 09:14:58 2022 ] Batch(15/123) done. Loss: 1.1210 lr:0.100000
[ Wed Sep 14 09:15:50 2022 ] Batch(115/123) done. Loss: 1.0431 lr:0.100000
[ Wed Sep 14 09:15:54 2022 ] Eval epoch: 9
[ Wed Sep 14 09:16:44 2022 ] Mean test loss of 258 batches: 3.543813705444336.
[ Wed Sep 14 09:16:44 2022 ] Top1: 31.44%
[ Wed Sep 14 09:16:44 2022 ] Top5: 68.33%
[ Wed Sep 14 09:16:45 2022 ] Training epoch: 10
[ Wed Sep 14 09:17:37 2022 ] Batch(92/123) done. Loss: 0.9419 lr:0.100000
[ Wed Sep 14 09:17:53 2022 ] Eval epoch: 10
[ Wed Sep 14 09:18:43 2022 ] Mean test loss of 258 batches: 2.7530345916748047.
[ Wed Sep 14 09:18:43 2022 ] Top1: 36.08%
[ Wed Sep 14 09:18:44 2022 ] Top5: 70.69%
[ Wed Sep 14 09:18:44 2022 ] Training epoch: 11
[ Wed Sep 14 09:19:23 2022 ] Batch(69/123) done. Loss: 1.0906 lr:0.100000
[ Wed Sep 14 09:19:52 2022 ] Eval epoch: 11
[ Wed Sep 14 09:20:42 2022 ] Mean test loss of 258 batches: 3.238889217376709.
[ Wed Sep 14 09:20:42 2022 ] Top1: 33.14%
[ Wed Sep 14 09:20:42 2022 ] Top5: 67.65%
[ Wed Sep 14 09:20:42 2022 ] Training epoch: 12
[ Wed Sep 14 09:21:10 2022 ] Batch(46/123) done. Loss: 0.8957 lr:0.100000
[ Wed Sep 14 09:21:50 2022 ] Eval epoch: 12
[ Wed Sep 14 09:22:41 2022 ] Mean test loss of 258 batches: 2.5152828693389893.
[ Wed Sep 14 09:22:41 2022 ] Top1: 42.08%
[ Wed Sep 14 09:22:41 2022 ] Top5: 78.97%
[ Wed Sep 14 09:22:41 2022 ] Training epoch: 13
[ Wed Sep 14 09:22:57 2022 ] Batch(23/123) done. Loss: 1.0458 lr:0.100000
[ Wed Sep 14 09:23:49 2022 ] Eval epoch: 13
[ Wed Sep 14 09:24:39 2022 ] Mean test loss of 258 batches: 2.6714367866516113.
[ Wed Sep 14 09:24:39 2022 ] Top1: 37.28%
[ Wed Sep 14 09:24:39 2022 ] Top5: 71.97%
[ Wed Sep 14 09:24:39 2022 ] Training epoch: 14
[ Wed Sep 14 09:24:43 2022 ] Batch(0/123) done. Loss: 0.9178 lr:0.100000
[ Wed Sep 14 09:25:35 2022 ] Batch(100/123) done. Loss: 1.0160 lr:0.100000
[ Wed Sep 14 09:25:47 2022 ] Eval epoch: 14
[ Wed Sep 14 09:26:38 2022 ] Mean test loss of 258 batches: 2.4809772968292236.
[ Wed Sep 14 09:26:38 2022 ] Top1: 39.05%
[ Wed Sep 14 09:26:38 2022 ] Top5: 77.75%
[ Wed Sep 14 09:26:38 2022 ] Training epoch: 15
[ Wed Sep 14 09:27:22 2022 ] Batch(77/123) done. Loss: 0.9060 lr:0.100000
[ Wed Sep 14 09:27:46 2022 ] Eval epoch: 15
[ Wed Sep 14 09:28:36 2022 ] Mean test loss of 258 batches: 2.2054946422576904.
[ Wed Sep 14 09:28:36 2022 ] Top1: 43.96%
[ Wed Sep 14 09:28:37 2022 ] Top5: 81.79%
[ Wed Sep 14 09:28:37 2022 ] Training epoch: 16
[ Wed Sep 14 09:29:08 2022 ] Batch(54/123) done. Loss: 0.6082 lr:0.100000
[ Wed Sep 14 09:29:44 2022 ] Eval epoch: 16
[ Wed Sep 14 09:30:35 2022 ] Mean test loss of 258 batches: 2.413510322570801.
[ Wed Sep 14 09:30:35 2022 ] Top1: 41.93%
[ Wed Sep 14 09:30:35 2022 ] Top5: 79.76%
[ Wed Sep 14 09:30:35 2022 ] Training epoch: 17
[ Wed Sep 14 09:30:55 2022 ] Batch(31/123) done. Loss: 0.8388 lr:0.100000
[ Wed Sep 14 09:31:43 2022 ] Eval epoch: 17
[ Wed Sep 14 09:32:34 2022 ] Mean test loss of 258 batches: 2.462135076522827.
[ Wed Sep 14 09:32:34 2022 ] Top1: 44.52%
[ Wed Sep 14 09:32:34 2022 ] Top5: 80.72%
[ Wed Sep 14 09:32:34 2022 ] Training epoch: 18
[ Wed Sep 14 09:32:42 2022 ] Batch(8/123) done. Loss: 0.9092 lr:0.100000
[ Wed Sep 14 09:33:34 2022 ] Batch(108/123) done. Loss: 0.7070 lr:0.100000
[ Wed Sep 14 09:33:42 2022 ] Eval epoch: 18
[ Wed Sep 14 09:34:32 2022 ] Mean test loss of 258 batches: 2.302579164505005.
[ Wed Sep 14 09:34:32 2022 ] Top1: 48.50%
[ Wed Sep 14 09:34:32 2022 ] Top5: 84.71%
[ Wed Sep 14 09:34:32 2022 ] Training epoch: 19
[ Wed Sep 14 09:35:21 2022 ] Batch(85/123) done. Loss: 0.7225 lr:0.100000
[ Wed Sep 14 09:35:40 2022 ] Eval epoch: 19
[ Wed Sep 14 09:36:31 2022 ] Mean test loss of 258 batches: 2.287689208984375.
[ Wed Sep 14 09:36:31 2022 ] Top1: 45.81%
[ Wed Sep 14 09:36:31 2022 ] Top5: 83.19%
[ Wed Sep 14 09:36:31 2022 ] Training epoch: 20
[ Wed Sep 14 09:37:07 2022 ] Batch(62/123) done. Loss: 0.8596 lr:0.100000
[ Wed Sep 14 09:37:39 2022 ] Eval epoch: 20
[ Wed Sep 14 09:38:30 2022 ] Mean test loss of 258 batches: 6.094878196716309.
[ Wed Sep 14 09:38:30 2022 ] Top1: 24.93%
[ Wed Sep 14 09:38:30 2022 ] Top5: 63.79%
[ Wed Sep 14 09:38:30 2022 ] Training epoch: 21
[ Wed Sep 14 09:38:54 2022 ] Batch(39/123) done. Loss: 0.5370 lr:0.100000
[ Wed Sep 14 09:39:38 2022 ] Eval epoch: 21
[ Wed Sep 14 09:40:28 2022 ] Mean test loss of 258 batches: 2.607907772064209.
[ Wed Sep 14 09:40:28 2022 ] Top1: 46.86%
[ Wed Sep 14 09:40:28 2022 ] Top5: 83.54%
[ Wed Sep 14 09:40:29 2022 ] Training epoch: 22
[ Wed Sep 14 09:40:41 2022 ] Batch(16/123) done. Loss: 0.5816 lr:0.100000
[ Wed Sep 14 09:41:33 2022 ] Batch(116/123) done. Loss: 0.5359 lr:0.100000
[ Wed Sep 14 09:41:37 2022 ] Eval epoch: 22
[ Wed Sep 14 09:42:27 2022 ] Mean test loss of 258 batches: 2.0424587726593018.
[ Wed Sep 14 09:42:27 2022 ] Top1: 52.03%
[ Wed Sep 14 09:42:27 2022 ] Top5: 84.37%
[ Wed Sep 14 09:42:27 2022 ] Training epoch: 23
[ Wed Sep 14 09:43:20 2022 ] Batch(93/123) done. Loss: 0.5305 lr:0.100000
[ Wed Sep 14 09:43:35 2022 ] Eval epoch: 23
[ Wed Sep 14 09:44:25 2022 ] Mean test loss of 258 batches: 2.3085105419158936.
[ Wed Sep 14 09:44:25 2022 ] Top1: 47.98%
[ Wed Sep 14 09:44:25 2022 ] Top5: 82.33%
[ Wed Sep 14 09:44:26 2022 ] Training epoch: 24
[ Wed Sep 14 09:45:06 2022 ] Batch(70/123) done. Loss: 0.6515 lr:0.100000
[ Wed Sep 14 09:45:33 2022 ] Eval epoch: 24
[ Wed Sep 14 09:46:24 2022 ] Mean test loss of 258 batches: 3.1666781902313232.
[ Wed Sep 14 09:46:24 2022 ] Top1: 38.96%
[ Wed Sep 14 09:46:24 2022 ] Top5: 73.57%
[ Wed Sep 14 09:46:24 2022 ] Training epoch: 25
[ Wed Sep 14 09:46:52 2022 ] Batch(47/123) done. Loss: 0.5286 lr:0.100000
[ Wed Sep 14 09:47:32 2022 ] Eval epoch: 25
[ Wed Sep 14 09:48:22 2022 ] Mean test loss of 258 batches: 2.881316900253296.
[ Wed Sep 14 09:48:22 2022 ] Top1: 41.57%
[ Wed Sep 14 09:48:22 2022 ] Top5: 77.86%
[ Wed Sep 14 09:48:22 2022 ] Training epoch: 26
[ Wed Sep 14 09:48:39 2022 ] Batch(24/123) done. Loss: 0.3656 lr:0.100000
[ Wed Sep 14 09:49:30 2022 ] Eval epoch: 26
[ Wed Sep 14 09:50:21 2022 ] Mean test loss of 258 batches: 2.0794057846069336.
[ Wed Sep 14 09:50:21 2022 ] Top1: 53.21%
[ Wed Sep 14 09:50:21 2022 ] Top5: 86.81%
[ Wed Sep 14 09:50:21 2022 ] Training epoch: 27
[ Wed Sep 14 09:50:25 2022 ] Batch(1/123) done. Loss: 0.5163 lr:0.100000
[ Wed Sep 14 09:51:18 2022 ] Batch(101/123) done. Loss: 0.3626 lr:0.100000
[ Wed Sep 14 09:51:29 2022 ] Eval epoch: 27
[ Wed Sep 14 09:52:20 2022 ] Mean test loss of 258 batches: 8.668159484863281.
[ Wed Sep 14 09:52:20 2022 ] Top1: 25.24%
[ Wed Sep 14 09:52:20 2022 ] Top5: 53.81%
[ Wed Sep 14 09:52:20 2022 ] Training epoch: 28
[ Wed Sep 14 09:53:05 2022 ] Batch(78/123) done. Loss: 0.5547 lr:0.100000
[ Wed Sep 14 09:53:28 2022 ] Eval epoch: 28
[ Wed Sep 14 09:54:19 2022 ] Mean test loss of 258 batches: 2.150930643081665.
[ Wed Sep 14 09:54:19 2022 ] Top1: 49.72%
[ Wed Sep 14 09:54:19 2022 ] Top5: 81.52%
[ Wed Sep 14 09:54:19 2022 ] Training epoch: 29
[ Wed Sep 14 09:54:52 2022 ] Batch(55/123) done. Loss: 0.4950 lr:0.100000
[ Wed Sep 14 09:55:27 2022 ] Eval epoch: 29
[ Wed Sep 14 09:56:18 2022 ] Mean test loss of 258 batches: 2.4052159786224365.
[ Wed Sep 14 09:56:18 2022 ] Top1: 50.33%
[ Wed Sep 14 09:56:18 2022 ] Top5: 83.42%
[ Wed Sep 14 09:56:18 2022 ] Training epoch: 30
[ Wed Sep 14 09:56:38 2022 ] Batch(32/123) done. Loss: 0.3554 lr:0.100000
[ Wed Sep 14 09:57:26 2022 ] Eval epoch: 30
[ Wed Sep 14 09:58:17 2022 ] Mean test loss of 258 batches: 1.9445937871932983.
[ Wed Sep 14 09:58:17 2022 ] Top1: 55.87%
[ Wed Sep 14 09:58:17 2022 ] Top5: 88.66%
[ Wed Sep 14 09:58:17 2022 ] Training epoch: 31
[ Wed Sep 14 09:58:25 2022 ] Batch(9/123) done. Loss: 0.2407 lr:0.100000
[ Wed Sep 14 09:59:18 2022 ] Batch(109/123) done. Loss: 0.6598 lr:0.100000
[ Wed Sep 14 09:59:25 2022 ] Eval epoch: 31
[ Wed Sep 14 10:00:15 2022 ] Mean test loss of 258 batches: 4.852235317230225.
[ Wed Sep 14 10:00:16 2022 ] Top1: 29.85%
[ Wed Sep 14 10:00:16 2022 ] Top5: 64.09%
[ Wed Sep 14 10:00:16 2022 ] Training epoch: 32
[ Wed Sep 14 10:01:05 2022 ] Batch(86/123) done. Loss: 0.3530 lr:0.100000
[ Wed Sep 14 10:01:24 2022 ] Eval epoch: 32
[ Wed Sep 14 10:02:14 2022 ] Mean test loss of 258 batches: 2.033247470855713.
[ Wed Sep 14 10:02:14 2022 ] Top1: 53.42%
[ Wed Sep 14 10:02:14 2022 ] Top5: 87.76%
[ Wed Sep 14 10:02:15 2022 ] Training epoch: 33
[ Wed Sep 14 10:02:51 2022 ] Batch(63/123) done. Loss: 0.3615 lr:0.100000
[ Wed Sep 14 10:03:23 2022 ] Eval epoch: 33
[ Wed Sep 14 10:04:13 2022 ] Mean test loss of 258 batches: 1.970260500907898.
[ Wed Sep 14 10:04:13 2022 ] Top1: 50.72%
[ Wed Sep 14 10:04:13 2022 ] Top5: 82.19%
[ Wed Sep 14 10:04:13 2022 ] Training epoch: 34
[ Wed Sep 14 10:04:38 2022 ] Batch(40/123) done. Loss: 0.3227 lr:0.100000
[ Wed Sep 14 10:05:21 2022 ] Eval epoch: 34
[ Wed Sep 14 10:06:11 2022 ] Mean test loss of 258 batches: 3.3851633071899414.
[ Wed Sep 14 10:06:11 2022 ] Top1: 42.80%
[ Wed Sep 14 10:06:11 2022 ] Top5: 73.22%
[ Wed Sep 14 10:06:12 2022 ] Training epoch: 35
[ Wed Sep 14 10:06:25 2022 ] Batch(17/123) done. Loss: 1.0034 lr:0.100000
[ Wed Sep 14 10:07:17 2022 ] Batch(117/123) done. Loss: 0.3550 lr:0.100000
[ Wed Sep 14 10:07:20 2022 ] Eval epoch: 35
[ Wed Sep 14 10:08:11 2022 ] Mean test loss of 258 batches: 2.9214372634887695.
[ Wed Sep 14 10:08:11 2022 ] Top1: 44.47%
[ Wed Sep 14 10:08:11 2022 ] Top5: 80.00%
[ Wed Sep 14 10:08:11 2022 ] Training epoch: 36
[ Wed Sep 14 10:09:04 2022 ] Batch(94/123) done. Loss: 0.5148 lr:0.100000
[ Wed Sep 14 10:09:19 2022 ] Eval epoch: 36
[ Wed Sep 14 10:10:10 2022 ] Mean test loss of 258 batches: 1.9721895456314087.
[ Wed Sep 14 10:10:10 2022 ] Top1: 55.58%
[ Wed Sep 14 10:10:10 2022 ] Top5: 87.91%
[ Wed Sep 14 10:10:10 2022 ] Training epoch: 37
[ Wed Sep 14 10:10:51 2022 ] Batch(71/123) done. Loss: 0.2195 lr:0.100000
[ Wed Sep 14 10:11:19 2022 ] Eval epoch: 37
[ Wed Sep 14 10:12:09 2022 ] Mean test loss of 258 batches: 2.8524513244628906.
[ Wed Sep 14 10:12:09 2022 ] Top1: 40.61%
[ Wed Sep 14 10:12:10 2022 ] Top5: 77.65%
[ Wed Sep 14 10:12:10 2022 ] Training epoch: 38
[ Wed Sep 14 10:12:38 2022 ] Batch(48/123) done. Loss: 0.2998 lr:0.100000
[ Wed Sep 14 10:13:18 2022 ] Eval epoch: 38
[ Wed Sep 14 10:14:08 2022 ] Mean test loss of 258 batches: 2.7291102409362793.
[ Wed Sep 14 10:14:08 2022 ] Top1: 45.87%
[ Wed Sep 14 10:14:08 2022 ] Top5: 79.30%
[ Wed Sep 14 10:14:08 2022 ] Training epoch: 39
[ Wed Sep 14 10:14:24 2022 ] Batch(25/123) done. Loss: 0.3353 lr:0.100000
[ Wed Sep 14 10:15:16 2022 ] Eval epoch: 39
[ Wed Sep 14 10:16:06 2022 ] Mean test loss of 258 batches: 2.7411115169525146.
[ Wed Sep 14 10:16:06 2022 ] Top1: 46.88%
[ Wed Sep 14 10:16:06 2022 ] Top5: 77.58%
[ Wed Sep 14 10:16:06 2022 ] Training epoch: 40
[ Wed Sep 14 10:16:10 2022 ] Batch(2/123) done. Loss: 0.3090 lr:0.100000
[ Wed Sep 14 10:17:03 2022 ] Batch(102/123) done. Loss: 0.4225 lr:0.100000
[ Wed Sep 14 10:17:14 2022 ] Eval epoch: 40
[ Wed Sep 14 10:18:04 2022 ] Mean test loss of 258 batches: 2.272958993911743.
[ Wed Sep 14 10:18:04 2022 ] Top1: 55.48%
[ Wed Sep 14 10:18:04 2022 ] Top5: 84.42%
[ Wed Sep 14 10:18:04 2022 ] Training epoch: 41
[ Wed Sep 14 10:18:49 2022 ] Batch(79/123) done. Loss: 0.2367 lr:0.100000
[ Wed Sep 14 10:19:12 2022 ] Eval epoch: 41
[ Wed Sep 14 10:20:02 2022 ] Mean test loss of 258 batches: 2.334578275680542.
[ Wed Sep 14 10:20:02 2022 ] Top1: 50.41%
[ Wed Sep 14 10:20:02 2022 ] Top5: 85.12%
[ Wed Sep 14 10:20:02 2022 ] Training epoch: 42
[ Wed Sep 14 10:20:36 2022 ] Batch(56/123) done. Loss: 0.2050 lr:0.100000
[ Wed Sep 14 10:21:11 2022 ] Eval epoch: 42
[ Wed Sep 14 10:22:01 2022 ] Mean test loss of 258 batches: 2.800611734390259.
[ Wed Sep 14 10:22:01 2022 ] Top1: 48.37%
[ Wed Sep 14 10:22:01 2022 ] Top5: 81.51%
[ Wed Sep 14 10:22:01 2022 ] Training epoch: 43
[ Wed Sep 14 10:22:22 2022 ] Batch(33/123) done. Loss: 0.2342 lr:0.100000
[ Wed Sep 14 10:23:09 2022 ] Eval epoch: 43
[ Wed Sep 14 10:24:00 2022 ] Mean test loss of 258 batches: 2.851212739944458.
[ Wed Sep 14 10:24:00 2022 ] Top1: 48.91%
[ Wed Sep 14 10:24:00 2022 ] Top5: 82.86%
[ Wed Sep 14 10:24:00 2022 ] Training epoch: 44
[ Wed Sep 14 10:24:09 2022 ] Batch(10/123) done. Loss: 0.1688 lr:0.100000
[ Wed Sep 14 10:25:01 2022 ] Batch(110/123) done. Loss: 0.1909 lr:0.100000
[ Wed Sep 14 10:25:08 2022 ] Eval epoch: 44
[ Wed Sep 14 10:25:58 2022 ] Mean test loss of 258 batches: 2.710599184036255.
[ Wed Sep 14 10:25:59 2022 ] Top1: 49.56%
[ Wed Sep 14 10:25:59 2022 ] Top5: 80.95%
[ Wed Sep 14 10:25:59 2022 ] Training epoch: 45
[ Wed Sep 14 10:26:48 2022 ] Batch(87/123) done. Loss: 0.2594 lr:0.100000
[ Wed Sep 14 10:27:07 2022 ] Eval epoch: 45
[ Wed Sep 14 10:27:57 2022 ] Mean test loss of 258 batches: 2.7569780349731445.
[ Wed Sep 14 10:27:57 2022 ] Top1: 51.54%
[ Wed Sep 14 10:27:57 2022 ] Top5: 84.37%
[ Wed Sep 14 10:27:57 2022 ] Training epoch: 46
[ Wed Sep 14 10:28:34 2022 ] Batch(64/123) done. Loss: 0.1981 lr:0.100000
[ Wed Sep 14 10:29:05 2022 ] Eval epoch: 46
[ Wed Sep 14 10:29:55 2022 ] Mean test loss of 258 batches: 2.514928102493286.
[ Wed Sep 14 10:29:55 2022 ] Top1: 52.87%
[ Wed Sep 14 10:29:56 2022 ] Top5: 84.62%
[ Wed Sep 14 10:29:56 2022 ] Training epoch: 47
[ Wed Sep 14 10:30:21 2022 ] Batch(41/123) done. Loss: 0.2048 lr:0.100000
[ Wed Sep 14 10:31:04 2022 ] Eval epoch: 47
[ Wed Sep 14 10:31:54 2022 ] Mean test loss of 258 batches: 2.7253973484039307.
[ Wed Sep 14 10:31:54 2022 ] Top1: 47.93%
[ Wed Sep 14 10:31:54 2022 ] Top5: 83.28%
[ Wed Sep 14 10:31:54 2022 ] Training epoch: 48
[ Wed Sep 14 10:32:07 2022 ] Batch(18/123) done. Loss: 0.1297 lr:0.100000
[ Wed Sep 14 10:33:00 2022 ] Batch(118/123) done. Loss: 0.2993 lr:0.100000
[ Wed Sep 14 10:33:03 2022 ] Eval epoch: 48
[ Wed Sep 14 10:33:53 2022 ] Mean test loss of 258 batches: 2.5282280445098877.
[ Wed Sep 14 10:33:53 2022 ] Top1: 53.72%
[ Wed Sep 14 10:33:53 2022 ] Top5: 85.70%
[ Wed Sep 14 10:33:53 2022 ] Training epoch: 49
[ Wed Sep 14 10:34:46 2022 ] Batch(95/123) done. Loss: 0.2121 lr:0.100000
[ Wed Sep 14 10:35:01 2022 ] Eval epoch: 49
[ Wed Sep 14 10:35:51 2022 ] Mean test loss of 258 batches: 2.4360342025756836.
[ Wed Sep 14 10:35:51 2022 ] Top1: 56.73%
[ Wed Sep 14 10:35:52 2022 ] Top5: 85.95%
[ Wed Sep 14 10:35:52 2022 ] Training epoch: 50
[ Wed Sep 14 10:36:33 2022 ] Batch(72/123) done. Loss: 0.1184 lr:0.100000
[ Wed Sep 14 10:37:00 2022 ] Eval epoch: 50
[ Wed Sep 14 10:37:50 2022 ] Mean test loss of 258 batches: 6.697039604187012.
[ Wed Sep 14 10:37:50 2022 ] Top1: 30.44%
[ Wed Sep 14 10:37:50 2022 ] Top5: 64.86%
[ Wed Sep 14 10:37:50 2022 ] Training epoch: 51
[ Wed Sep 14 10:38:19 2022 ] Batch(49/123) done. Loss: 0.1056 lr:0.100000
[ Wed Sep 14 10:38:58 2022 ] Eval epoch: 51
[ Wed Sep 14 10:39:48 2022 ] Mean test loss of 258 batches: 2.337132453918457.
[ Wed Sep 14 10:39:48 2022 ] Top1: 54.66%
[ Wed Sep 14 10:39:49 2022 ] Top5: 85.56%
[ Wed Sep 14 10:39:49 2022 ] Training epoch: 52
[ Wed Sep 14 10:40:06 2022 ] Batch(26/123) done. Loss: 0.1039 lr:0.100000
[ Wed Sep 14 10:40:57 2022 ] Eval epoch: 52
[ Wed Sep 14 10:41:47 2022 ] Mean test loss of 258 batches: 2.2631611824035645.
[ Wed Sep 14 10:41:47 2022 ] Top1: 57.53%
[ Wed Sep 14 10:41:47 2022 ] Top5: 87.88%
[ Wed Sep 14 10:41:47 2022 ] Training epoch: 53
[ Wed Sep 14 10:41:52 2022 ] Batch(3/123) done. Loss: 0.1295 lr:0.100000
[ Wed Sep 14 10:42:45 2022 ] Batch(103/123) done. Loss: 0.2321 lr:0.100000
[ Wed Sep 14 10:42:55 2022 ] Eval epoch: 53
[ Wed Sep 14 10:43:45 2022 ] Mean test loss of 258 batches: 3.587123155593872.
[ Wed Sep 14 10:43:45 2022 ] Top1: 48.50%
[ Wed Sep 14 10:43:45 2022 ] Top5: 80.49%
[ Wed Sep 14 10:43:45 2022 ] Training epoch: 54
[ Wed Sep 14 10:44:31 2022 ] Batch(80/123) done. Loss: 0.3542 lr:0.100000
[ Wed Sep 14 10:44:54 2022 ] Eval epoch: 54
[ Wed Sep 14 10:45:44 2022 ] Mean test loss of 258 batches: 2.8351242542266846.
[ Wed Sep 14 10:45:44 2022 ] Top1: 51.29%
[ Wed Sep 14 10:45:44 2022 ] Top5: 84.38%
[ Wed Sep 14 10:45:44 2022 ] Training epoch: 55
[ Wed Sep 14 10:46:18 2022 ] Batch(57/123) done. Loss: 0.3892 lr:0.100000
[ Wed Sep 14 10:46:52 2022 ] Eval epoch: 55
[ Wed Sep 14 10:47:43 2022 ] Mean test loss of 258 batches: 3.4230153560638428.
[ Wed Sep 14 10:47:43 2022 ] Top1: 46.41%
[ Wed Sep 14 10:47:43 2022 ] Top5: 74.94%
[ Wed Sep 14 10:47:43 2022 ] Training epoch: 56
[ Wed Sep 14 10:48:05 2022 ] Batch(34/123) done. Loss: 0.2057 lr:0.100000
[ Wed Sep 14 10:48:51 2022 ] Eval epoch: 56
[ Wed Sep 14 10:49:41 2022 ] Mean test loss of 258 batches: 3.243438482284546.
[ Wed Sep 14 10:49:41 2022 ] Top1: 48.62%
[ Wed Sep 14 10:49:41 2022 ] Top5: 81.65%
[ Wed Sep 14 10:49:41 2022 ] Training epoch: 57
[ Wed Sep 14 10:49:51 2022 ] Batch(11/123) done. Loss: 0.1141 lr:0.100000
[ Wed Sep 14 10:50:44 2022 ] Batch(111/123) done. Loss: 0.2407 lr:0.100000
[ Wed Sep 14 10:50:50 2022 ] Eval epoch: 57
[ Wed Sep 14 10:51:40 2022 ] Mean test loss of 258 batches: 2.709395170211792.
[ Wed Sep 14 10:51:40 2022 ] Top1: 54.94%
[ Wed Sep 14 10:51:40 2022 ] Top5: 86.71%
[ Wed Sep 14 10:51:41 2022 ] Training epoch: 58
[ Wed Sep 14 10:52:31 2022 ] Batch(88/123) done. Loss: 0.3724 lr:0.100000
[ Wed Sep 14 10:52:49 2022 ] Eval epoch: 58
[ Wed Sep 14 10:53:39 2022 ] Mean test loss of 258 batches: 2.4786291122436523.
[ Wed Sep 14 10:53:39 2022 ] Top1: 55.64%
[ Wed Sep 14 10:53:39 2022 ] Top5: 86.99%
[ Wed Sep 14 10:53:39 2022 ] Training epoch: 59
[ Wed Sep 14 10:54:17 2022 ] Batch(65/123) done. Loss: 0.2332 lr:0.100000
[ Wed Sep 14 10:54:47 2022 ] Eval epoch: 59
[ Wed Sep 14 10:55:38 2022 ] Mean test loss of 258 batches: 2.4485208988189697.
[ Wed Sep 14 10:55:38 2022 ] Top1: 58.89%
[ Wed Sep 14 10:55:38 2022 ] Top5: 87.28%
[ Wed Sep 14 10:55:38 2022 ] Training epoch: 60
[ Wed Sep 14 10:56:04 2022 ] Batch(42/123) done. Loss: 0.3334 lr:0.100000
[ Wed Sep 14 10:56:46 2022 ] Eval epoch: 60
[ Wed Sep 14 10:57:36 2022 ] Mean test loss of 258 batches: 3.0010201930999756.
[ Wed Sep 14 10:57:37 2022 ] Top1: 53.13%
[ Wed Sep 14 10:57:37 2022 ] Top5: 81.94%
[ Wed Sep 14 10:57:37 2022 ] Training epoch: 61
[ Wed Sep 14 10:57:50 2022 ] Batch(19/123) done. Loss: 0.1282 lr:0.010000
[ Wed Sep 14 10:58:43 2022 ] Batch(119/123) done. Loss: 0.0618 lr:0.010000
[ Wed Sep 14 10:58:45 2022 ] Eval epoch: 61
[ Wed Sep 14 10:59:35 2022 ] Mean test loss of 258 batches: 1.9603235721588135.
[ Wed Sep 14 10:59:35 2022 ] Top1: 62.39%
[ Wed Sep 14 10:59:35 2022 ] Top5: 90.20%
[ Wed Sep 14 10:59:35 2022 ] Training epoch: 62
[ Wed Sep 14 11:00:29 2022 ] Batch(96/123) done. Loss: 0.0595 lr:0.010000
[ Wed Sep 14 11:00:43 2022 ] Eval epoch: 62
[ Wed Sep 14 11:01:34 2022 ] Mean test loss of 258 batches: 1.8955367803573608.
[ Wed Sep 14 11:01:34 2022 ] Top1: 63.66%
[ Wed Sep 14 11:01:34 2022 ] Top5: 90.93%
[ Wed Sep 14 11:01:34 2022 ] Training epoch: 63
[ Wed Sep 14 11:02:16 2022 ] Batch(73/123) done. Loss: 0.0512 lr:0.010000
[ Wed Sep 14 11:02:42 2022 ] Eval epoch: 63
[ Wed Sep 14 11:03:33 2022 ] Mean test loss of 258 batches: 1.8982775211334229.
[ Wed Sep 14 11:03:33 2022 ] Top1: 64.49%
[ Wed Sep 14 11:03:33 2022 ] Top5: 91.25%
[ Wed Sep 14 11:03:33 2022 ] Training epoch: 64
[ Wed Sep 14 11:04:03 2022 ] Batch(50/123) done. Loss: 0.0530 lr:0.010000
[ Wed Sep 14 11:04:41 2022 ] Eval epoch: 64
[ Wed Sep 14 11:05:31 2022 ] Mean test loss of 258 batches: 1.9569075107574463.
[ Wed Sep 14 11:05:31 2022 ] Top1: 64.34%
[ Wed Sep 14 11:05:31 2022 ] Top5: 91.17%
[ Wed Sep 14 11:05:31 2022 ] Training epoch: 65
[ Wed Sep 14 11:05:49 2022 ] Batch(27/123) done. Loss: 0.0265 lr:0.010000
[ Wed Sep 14 11:06:39 2022 ] Eval epoch: 65
[ Wed Sep 14 11:07:30 2022 ] Mean test loss of 258 batches: 1.949947714805603.
[ Wed Sep 14 11:07:30 2022 ] Top1: 64.49%
[ Wed Sep 14 11:07:30 2022 ] Top5: 91.01%
[ Wed Sep 14 11:07:30 2022 ] Training epoch: 66
[ Wed Sep 14 11:07:36 2022 ] Batch(4/123) done. Loss: 0.0709 lr:0.010000
[ Wed Sep 14 11:08:28 2022 ] Batch(104/123) done. Loss: 0.0198 lr:0.010000
[ Wed Sep 14 11:08:38 2022 ] Eval epoch: 66
[ Wed Sep 14 11:09:29 2022 ] Mean test loss of 258 batches: 2.0349206924438477.
[ Wed Sep 14 11:09:29 2022 ] Top1: 62.64%
[ Wed Sep 14 11:09:29 2022 ] Top5: 90.13%
[ Wed Sep 14 11:09:29 2022 ] Training epoch: 67
[ Wed Sep 14 11:10:15 2022 ] Batch(81/123) done. Loss: 0.0433 lr:0.010000
[ Wed Sep 14 11:10:37 2022 ] Eval epoch: 67
[ Wed Sep 14 11:11:28 2022 ] Mean test loss of 258 batches: 1.9164913892745972.
[ Wed Sep 14 11:11:28 2022 ] Top1: 64.89%
[ Wed Sep 14 11:11:28 2022 ] Top5: 90.84%
[ Wed Sep 14 11:11:28 2022 ] Training epoch: 68
[ Wed Sep 14 11:12:02 2022 ] Batch(58/123) done. Loss: 0.0351 lr:0.010000
[ Wed Sep 14 11:12:36 2022 ] Eval epoch: 68
[ Wed Sep 14 11:13:27 2022 ] Mean test loss of 258 batches: 2.004775047302246.
[ Wed Sep 14 11:13:27 2022 ] Top1: 64.33%
[ Wed Sep 14 11:13:27 2022 ] Top5: 91.14%
[ Wed Sep 14 11:13:27 2022 ] Training epoch: 69
[ Wed Sep 14 11:13:49 2022 ] Batch(35/123) done. Loss: 0.0100 lr:0.010000
[ Wed Sep 14 11:14:35 2022 ] Eval epoch: 69
[ Wed Sep 14 11:15:25 2022 ] Mean test loss of 258 batches: 1.8820568323135376.
[ Wed Sep 14 11:15:25 2022 ] Top1: 65.26%
[ Wed Sep 14 11:15:25 2022 ] Top5: 91.45%
[ Wed Sep 14 11:15:25 2022 ] Training epoch: 70
[ Wed Sep 14 11:15:35 2022 ] Batch(12/123) done. Loss: 0.0176 lr:0.010000
[ Wed Sep 14 11:16:28 2022 ] Batch(112/123) done. Loss: 0.0529 lr:0.010000
[ Wed Sep 14 11:16:34 2022 ] Eval epoch: 70
[ Wed Sep 14 11:17:24 2022 ] Mean test loss of 258 batches: 1.9909242391586304.
[ Wed Sep 14 11:17:24 2022 ] Top1: 64.18%
[ Wed Sep 14 11:17:24 2022 ] Top5: 91.01%
[ Wed Sep 14 11:17:24 2022 ] Training epoch: 71
[ Wed Sep 14 11:18:15 2022 ] Batch(89/123) done. Loss: 0.0750 lr:0.010000
[ Wed Sep 14 11:18:32 2022 ] Eval epoch: 71
[ Wed Sep 14 11:19:23 2022 ] Mean test loss of 258 batches: 2.0400452613830566.
[ Wed Sep 14 11:19:23 2022 ] Top1: 64.12%
[ Wed Sep 14 11:19:23 2022 ] Top5: 91.12%
[ Wed Sep 14 11:19:23 2022 ] Training epoch: 72
[ Wed Sep 14 11:20:02 2022 ] Batch(66/123) done. Loss: 0.0289 lr:0.010000
[ Wed Sep 14 11:20:31 2022 ] Eval epoch: 72
[ Wed Sep 14 11:21:22 2022 ] Mean test loss of 258 batches: 1.9922386407852173.
[ Wed Sep 14 11:21:22 2022 ] Top1: 65.43%
[ Wed Sep 14 11:21:22 2022 ] Top5: 91.14%
[ Wed Sep 14 11:21:22 2022 ] Training epoch: 73
[ Wed Sep 14 11:21:49 2022 ] Batch(43/123) done. Loss: 0.0306 lr:0.010000
[ Wed Sep 14 11:22:30 2022 ] Eval epoch: 73
[ Wed Sep 14 11:23:21 2022 ] Mean test loss of 258 batches: 1.9337546825408936.
[ Wed Sep 14 11:23:21 2022 ] Top1: 65.52%
[ Wed Sep 14 11:23:21 2022 ] Top5: 91.03%
[ Wed Sep 14 11:23:21 2022 ] Training epoch: 74
[ Wed Sep 14 11:23:35 2022 ] Batch(20/123) done. Loss: 0.0443 lr:0.010000
[ Wed Sep 14 11:24:28 2022 ] Batch(120/123) done. Loss: 0.0212 lr:0.010000
[ Wed Sep 14 11:24:29 2022 ] Eval epoch: 74
[ Wed Sep 14 11:25:20 2022 ] Mean test loss of 258 batches: 1.9641762971878052.
[ Wed Sep 14 11:25:20 2022 ] Top1: 64.78%
[ Wed Sep 14 11:25:20 2022 ] Top5: 91.12%
[ Wed Sep 14 11:25:20 2022 ] Training epoch: 75
[ Wed Sep 14 11:26:15 2022 ] Batch(97/123) done. Loss: 0.1005 lr:0.010000
[ Wed Sep 14 11:26:28 2022 ] Eval epoch: 75
[ Wed Sep 14 11:27:18 2022 ] Mean test loss of 258 batches: 2.0250115394592285.
[ Wed Sep 14 11:27:18 2022 ] Top1: 65.42%
[ Wed Sep 14 11:27:19 2022 ] Top5: 91.19%
[ Wed Sep 14 11:27:19 2022 ] Training epoch: 76
|
472 |
+
[ Wed Sep 14 11:28:01 2022 ] Batch(74/123) done. Loss: 0.0445 lr:0.010000
|
473 |
+
[ Wed Sep 14 11:28:27 2022 ] Eval epoch: 76
|
474 |
+
[ Wed Sep 14 11:29:18 2022 ] Mean test loss of 258 batches: 2.028303861618042.
|
475 |
+
[ Wed Sep 14 11:29:18 2022 ] Top1: 64.91%
|
476 |
+
[ Wed Sep 14 11:29:18 2022 ] Top5: 91.10%
|
477 |
+
[ Wed Sep 14 11:29:18 2022 ] Training epoch: 77
|
478 |
+
[ Wed Sep 14 11:29:49 2022 ] Batch(51/123) done. Loss: 0.0450 lr:0.010000
|
479 |
+
[ Wed Sep 14 11:30:26 2022 ] Eval epoch: 77
|
480 |
+
[ Wed Sep 14 11:31:17 2022 ] Mean test loss of 258 batches: 2.0982468128204346.
|
481 |
+
[ Wed Sep 14 11:31:17 2022 ] Top1: 63.76%
|
482 |
+
[ Wed Sep 14 11:31:17 2022 ] Top5: 90.35%
|
483 |
+
[ Wed Sep 14 11:31:17 2022 ] Training epoch: 78
|
484 |
+
[ Wed Sep 14 11:31:35 2022 ] Batch(28/123) done. Loss: 0.0178 lr:0.010000
|
485 |
+
[ Wed Sep 14 11:32:25 2022 ] Eval epoch: 78
|
486 |
+
[ Wed Sep 14 11:33:15 2022 ] Mean test loss of 258 batches: 2.0096275806427.
|
487 |
+
[ Wed Sep 14 11:33:15 2022 ] Top1: 65.12%
|
488 |
+
[ Wed Sep 14 11:33:15 2022 ] Top5: 91.05%
|
489 |
+
[ Wed Sep 14 11:33:15 2022 ] Training epoch: 79
|
490 |
+
[ Wed Sep 14 11:33:22 2022 ] Batch(5/123) done. Loss: 0.0282 lr:0.010000
|
491 |
+
[ Wed Sep 14 11:34:15 2022 ] Batch(105/123) done. Loss: 0.0274 lr:0.010000
|
492 |
+
[ Wed Sep 14 11:34:24 2022 ] Eval epoch: 79
|
493 |
+
[ Wed Sep 14 11:35:14 2022 ] Mean test loss of 258 batches: 2.0404365062713623.
|
494 |
+
[ Wed Sep 14 11:35:14 2022 ] Top1: 64.80%
|
495 |
+
[ Wed Sep 14 11:35:14 2022 ] Top5: 90.87%
|
496 |
+
[ Wed Sep 14 11:35:14 2022 ] Training epoch: 80
|
497 |
+
[ Wed Sep 14 11:36:01 2022 ] Batch(82/123) done. Loss: 0.0332 lr:0.010000
|
498 |
+
[ Wed Sep 14 11:36:22 2022 ] Eval epoch: 80
|
499 |
+
[ Wed Sep 14 11:37:13 2022 ] Mean test loss of 258 batches: 2.062880754470825.
|
500 |
+
[ Wed Sep 14 11:37:13 2022 ] Top1: 64.70%
|
501 |
+
[ Wed Sep 14 11:37:13 2022 ] Top5: 90.80%
|
502 |
+
[ Wed Sep 14 11:37:13 2022 ] Training epoch: 81
|
503 |
+
[ Wed Sep 14 11:37:48 2022 ] Batch(59/123) done. Loss: 0.0180 lr:0.001000
|
504 |
+
[ Wed Sep 14 11:38:21 2022 ] Eval epoch: 81
|
505 |
+
[ Wed Sep 14 11:39:12 2022 ] Mean test loss of 258 batches: 2.0670080184936523.
|
506 |
+
[ Wed Sep 14 11:39:12 2022 ] Top1: 64.45%
|
507 |
+
[ Wed Sep 14 11:39:12 2022 ] Top5: 90.50%
|
508 |
+
[ Wed Sep 14 11:39:12 2022 ] Training epoch: 82
|
509 |
+
[ Wed Sep 14 11:39:34 2022 ] Batch(36/123) done. Loss: 0.0049 lr:0.001000
|
510 |
+
[ Wed Sep 14 11:40:20 2022 ] Eval epoch: 82
|
511 |
+
[ Wed Sep 14 11:41:10 2022 ] Mean test loss of 258 batches: 2.0475635528564453.
|
512 |
+
[ Wed Sep 14 11:41:10 2022 ] Top1: 64.64%
|
513 |
+
[ Wed Sep 14 11:41:11 2022 ] Top5: 90.76%
|
514 |
+
[ Wed Sep 14 11:41:11 2022 ] Training epoch: 83
|
515 |
+
[ Wed Sep 14 11:41:21 2022 ] Batch(13/123) done. Loss: 0.0343 lr:0.001000
|
516 |
+
[ Wed Sep 14 11:42:14 2022 ] Batch(113/123) done. Loss: 0.0986 lr:0.001000
|
517 |
+
[ Wed Sep 14 11:42:19 2022 ] Eval epoch: 83
|
518 |
+
[ Wed Sep 14 11:43:09 2022 ] Mean test loss of 258 batches: 2.1397147178649902.
|
519 |
+
[ Wed Sep 14 11:43:09 2022 ] Top1: 63.35%
|
520 |
+
[ Wed Sep 14 11:43:09 2022 ] Top5: 90.08%
|
521 |
+
[ Wed Sep 14 11:43:09 2022 ] Training epoch: 84
|
522 |
+
[ Wed Sep 14 11:44:00 2022 ] Batch(90/123) done. Loss: 0.0335 lr:0.001000
|
523 |
+
[ Wed Sep 14 11:44:17 2022 ] Eval epoch: 84
|
524 |
+
[ Wed Sep 14 11:45:07 2022 ] Mean test loss of 258 batches: 2.033123731613159.
|
525 |
+
[ Wed Sep 14 11:45:08 2022 ] Top1: 65.30%
|
526 |
+
[ Wed Sep 14 11:45:08 2022 ] Top5: 91.11%
|
527 |
+
[ Wed Sep 14 11:45:08 2022 ] Training epoch: 85
|
528 |
+
[ Wed Sep 14 11:45:47 2022 ] Batch(67/123) done. Loss: 0.0603 lr:0.001000
|
529 |
+
[ Wed Sep 14 11:46:16 2022 ] Eval epoch: 85
|
530 |
+
[ Wed Sep 14 11:47:06 2022 ] Mean test loss of 258 batches: 2.0243470668792725.
|
531 |
+
[ Wed Sep 14 11:47:06 2022 ] Top1: 65.12%
|
532 |
+
[ Wed Sep 14 11:47:06 2022 ] Top5: 91.20%
|
533 |
+
[ Wed Sep 14 11:47:06 2022 ] Training epoch: 86
|
534 |
+
[ Wed Sep 14 11:47:33 2022 ] Batch(44/123) done. Loss: 0.0162 lr:0.001000
|
535 |
+
[ Wed Sep 14 11:48:14 2022 ] Eval epoch: 86
|
536 |
+
[ Wed Sep 14 11:49:05 2022 ] Mean test loss of 258 batches: 2.013662338256836.
|
537 |
+
[ Wed Sep 14 11:49:05 2022 ] Top1: 65.12%
|
538 |
+
[ Wed Sep 14 11:49:05 2022 ] Top5: 91.19%
|
539 |
+
[ Wed Sep 14 11:49:05 2022 ] Training epoch: 87
|
540 |
+
[ Wed Sep 14 11:49:20 2022 ] Batch(21/123) done. Loss: 0.0551 lr:0.001000
|
541 |
+
[ Wed Sep 14 11:50:13 2022 ] Batch(121/123) done. Loss: 0.0238 lr:0.001000
|
542 |
+
[ Wed Sep 14 11:50:13 2022 ] Eval epoch: 87
|
543 |
+
[ Wed Sep 14 11:51:04 2022 ] Mean test loss of 258 batches: 2.044766426086426.
|
544 |
+
[ Wed Sep 14 11:51:04 2022 ] Top1: 64.66%
|
545 |
+
[ Wed Sep 14 11:51:04 2022 ] Top5: 90.71%
|
546 |
+
[ Wed Sep 14 11:51:04 2022 ] Training epoch: 88
|
547 |
+
[ Wed Sep 14 11:51:59 2022 ] Batch(98/123) done. Loss: 0.0383 lr:0.001000
|
548 |
+
[ Wed Sep 14 11:52:12 2022 ] Eval epoch: 88
|
549 |
+
[ Wed Sep 14 11:53:03 2022 ] Mean test loss of 258 batches: 2.2156059741973877.
|
550 |
+
[ Wed Sep 14 11:53:03 2022 ] Top1: 61.84%
|
551 |
+
[ Wed Sep 14 11:53:03 2022 ] Top5: 89.54%
|
552 |
+
[ Wed Sep 14 11:53:03 2022 ] Training epoch: 89
|
553 |
+
[ Wed Sep 14 11:53:46 2022 ] Batch(75/123) done. Loss: 0.0369 lr:0.001000
|
554 |
+
[ Wed Sep 14 11:54:11 2022 ] Eval epoch: 89
|
555 |
+
[ Wed Sep 14 11:55:02 2022 ] Mean test loss of 258 batches: 2.0188987255096436.
|
556 |
+
[ Wed Sep 14 11:55:02 2022 ] Top1: 65.18%
|
557 |
+
[ Wed Sep 14 11:55:02 2022 ] Top5: 91.04%
|
558 |
+
[ Wed Sep 14 11:55:02 2022 ] Training epoch: 90
|
559 |
+
[ Wed Sep 14 11:55:33 2022 ] Batch(52/123) done. Loss: 0.0521 lr:0.001000
|
560 |
+
[ Wed Sep 14 11:56:10 2022 ] Eval epoch: 90
|
561 |
+
[ Wed Sep 14 11:57:00 2022 ] Mean test loss of 258 batches: 2.026618480682373.
|
562 |
+
[ Wed Sep 14 11:57:00 2022 ] Top1: 65.04%
|
563 |
+
[ Wed Sep 14 11:57:01 2022 ] Top5: 91.18%
|
564 |
+
[ Wed Sep 14 11:57:01 2022 ] Training epoch: 91
|
565 |
+
[ Wed Sep 14 11:57:19 2022 ] Batch(29/123) done. Loss: 0.0974 lr:0.001000
|
566 |
+
[ Wed Sep 14 11:58:09 2022 ] Eval epoch: 91
|
567 |
+
[ Wed Sep 14 11:58:59 2022 ] Mean test loss of 258 batches: 2.0772697925567627.
|
568 |
+
[ Wed Sep 14 11:58:59 2022 ] Top1: 64.63%
|
569 |
+
[ Wed Sep 14 11:58:59 2022 ] Top5: 90.98%
|
570 |
+
[ Wed Sep 14 11:58:59 2022 ] Training epoch: 92
|
571 |
+
[ Wed Sep 14 11:59:06 2022 ] Batch(6/123) done. Loss: 0.0350 lr:0.001000
|
572 |
+
[ Wed Sep 14 11:59:59 2022 ] Batch(106/123) done. Loss: 0.0306 lr:0.001000
|
573 |
+
[ Wed Sep 14 12:00:07 2022 ] Eval epoch: 92
|
574 |
+
[ Wed Sep 14 12:00:57 2022 ] Mean test loss of 258 batches: 2.028740882873535.
|
575 |
+
[ Wed Sep 14 12:00:57 2022 ] Top1: 64.95%
|
576 |
+
[ Wed Sep 14 12:00:58 2022 ] Top5: 90.94%
|
577 |
+
[ Wed Sep 14 12:00:58 2022 ] Training epoch: 93
|
578 |
+
[ Wed Sep 14 12:01:45 2022 ] Batch(83/123) done. Loss: 0.0421 lr:0.001000
|
579 |
+
[ Wed Sep 14 12:02:06 2022 ] Eval epoch: 93
|
580 |
+
[ Wed Sep 14 12:02:57 2022 ] Mean test loss of 258 batches: 2.0420658588409424.
|
581 |
+
[ Wed Sep 14 12:02:57 2022 ] Top1: 65.03%
|
582 |
+
[ Wed Sep 14 12:02:57 2022 ] Top5: 91.18%
|
583 |
+
[ Wed Sep 14 12:02:57 2022 ] Training epoch: 94
|
584 |
+
[ Wed Sep 14 12:03:32 2022 ] Batch(60/123) done. Loss: 0.0374 lr:0.001000
|
585 |
+
[ Wed Sep 14 12:04:05 2022 ] Eval epoch: 94
|
586 |
+
[ Wed Sep 14 12:04:55 2022 ] Mean test loss of 258 batches: 2.0979018211364746.
|
587 |
+
[ Wed Sep 14 12:04:55 2022 ] Top1: 64.39%
|
588 |
+
[ Wed Sep 14 12:04:55 2022 ] Top5: 90.70%
|
589 |
+
[ Wed Sep 14 12:04:55 2022 ] Training epoch: 95
|
590 |
+
[ Wed Sep 14 12:05:19 2022 ] Batch(37/123) done. Loss: 0.0155 lr:0.001000
|
591 |
+
[ Wed Sep 14 12:06:04 2022 ] Eval epoch: 95
|
592 |
+
[ Wed Sep 14 12:06:54 2022 ] Mean test loss of 258 batches: 2.1317577362060547.
|
593 |
+
[ Wed Sep 14 12:06:54 2022 ] Top1: 64.32%
|
594 |
+
[ Wed Sep 14 12:06:54 2022 ] Top5: 90.88%
|
595 |
+
[ Wed Sep 14 12:06:54 2022 ] Training epoch: 96
|
596 |
+
[ Wed Sep 14 12:07:05 2022 ] Batch(14/123) done. Loss: 0.0402 lr:0.001000
|
597 |
+
[ Wed Sep 14 12:07:58 2022 ] Batch(114/123) done. Loss: 0.0214 lr:0.001000
|
598 |
+
[ Wed Sep 14 12:08:02 2022 ] Eval epoch: 96
|
599 |
+
[ Wed Sep 14 12:08:53 2022 ] Mean test loss of 258 batches: 2.0478529930114746.
|
600 |
+
[ Wed Sep 14 12:08:53 2022 ] Top1: 64.88%
|
601 |
+
[ Wed Sep 14 12:08:53 2022 ] Top5: 90.83%
|
602 |
+
[ Wed Sep 14 12:08:53 2022 ] Training epoch: 97
|
603 |
+
[ Wed Sep 14 12:09:45 2022 ] Batch(91/123) done. Loss: 0.0384 lr:0.001000
|
604 |
+
[ Wed Sep 14 12:10:01 2022 ] Eval epoch: 97
|
605 |
+
[ Wed Sep 14 12:10:52 2022 ] Mean test loss of 258 batches: 2.002946615219116.
|
606 |
+
[ Wed Sep 14 12:10:52 2022 ] Top1: 65.23%
|
607 |
+
[ Wed Sep 14 12:10:52 2022 ] Top5: 91.10%
|
608 |
+
[ Wed Sep 14 12:10:52 2022 ] Training epoch: 98
|
609 |
+
[ Wed Sep 14 12:11:31 2022 ] Batch(68/123) done. Loss: 0.0264 lr:0.001000
|
610 |
+
[ Wed Sep 14 12:12:00 2022 ] Eval epoch: 98
|
611 |
+
[ Wed Sep 14 12:12:50 2022 ] Mean test loss of 258 batches: 2.1867194175720215.
|
612 |
+
[ Wed Sep 14 12:12:50 2022 ] Top1: 62.01%
|
613 |
+
[ Wed Sep 14 12:12:50 2022 ] Top5: 89.53%
|
614 |
+
[ Wed Sep 14 12:12:50 2022 ] Training epoch: 99
|
615 |
+
[ Wed Sep 14 12:13:18 2022 ] Batch(45/123) done. Loss: 0.0695 lr:0.001000
|
616 |
+
[ Wed Sep 14 12:13:59 2022 ] Eval epoch: 99
|
617 |
+
[ Wed Sep 14 12:14:49 2022 ] Mean test loss of 258 batches: 2.1064836978912354.
|
618 |
+
[ Wed Sep 14 12:14:49 2022 ] Top1: 64.20%
|
619 |
+
[ Wed Sep 14 12:14:49 2022 ] Top5: 90.53%
|
620 |
+
[ Wed Sep 14 12:14:49 2022 ] Training epoch: 100
|
621 |
+
[ Wed Sep 14 12:15:04 2022 ] Batch(22/123) done. Loss: 0.0574 lr:0.001000
|
622 |
+
[ Wed Sep 14 12:15:57 2022 ] Batch(122/123) done. Loss: 0.0998 lr:0.001000
|
623 |
+
[ Wed Sep 14 12:15:58 2022 ] Eval epoch: 100
|
624 |
+
[ Wed Sep 14 12:16:48 2022 ] Mean test loss of 258 batches: 2.2521514892578125.
|
625 |
+
[ Wed Sep 14 12:16:48 2022 ] Top1: 61.13%
|
626 |
+
[ Wed Sep 14 12:16:48 2022 ] Top5: 89.39%
|
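Note on the schedule visible in the log above: the logged lr drops from 0.100000 to 0.010000 at epoch 61 and to 0.001000 at epoch 81, which matches the step: [60, 80] milestones recorded in the configs in this commit. As a rough, hypothetical sketch only (the repo's actual training script is not part of this diff, and momentum is an assumption since the config does not record it):

import torch

model = torch.nn.Linear(10, 60)  # stand-in module; the real run trains model.decouple_gcn.Model
optimizer = torch.optim.SGD(model.parameters(), lr=0.1, momentum=0.9,  # momentum=0.9 assumed
                            nesterov=True, weight_decay=0.0001)        # nesterov/weight_decay per config
scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[60, 80], gamma=0.1)

for epoch in range(100):  # num_epoch: 100
    # ... one training epoch plus eval would run here ...
    scheduler.step()      # lr: 0.1 -> 0.01 after epoch 60 -> 0.001 after epoch 80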
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_xsub/config.yaml
ADDED
@@ -0,0 +1,59 @@
+Experiment_name: ntu_joint_xsub
+base_lr: 0.1
+batch_size: 64
+config: ./config/nturgbd-cross-subject/train_joint.yaml
+device:
+- 2
+- 3
+eval_interval: 5
+feeder: feeders.feeder.Feeder
+groups: 8
+ignore_weights: []
+keep_rate: 0.9
+log_interval: 100
+model: model.decouple_gcn.Model
+model_args:
+  block_size: 41
+  graph: graph.ntu_rgb_d.Graph
+  graph_args:
+    labeling_mode: spatial
+  groups: 16
+  num_class: 60
+  num_person: 2
+  num_point: 25
+model_saved_name: ./save_models/ntu_joint_xsub
+nesterov: true
+num_epoch: 100
+num_worker: 32
+only_train_epoch: 1
+only_train_part: true
+optimizer: SGD
+phase: train
+print_log: true
+save_interval: 2
+save_score: false
+seed: 1
+show_topk:
+- 1
+- 5
+start_epoch: 0
+step:
+- 60
+- 80
+test_batch_size: 64
+test_feeder_args:
+  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy
+  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
+train_feeder_args:
+  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy
+  debug: false
+  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
+  normalization: false
+  random_choose: false
+  random_move: false
+  random_shift: false
+  window_size: -1
+warm_up_epoch: 0
+weight_decay: 0.0001
+weights: null
+work_dir: ./work_dir/ntu_joint_xsub
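For orientation, a minimal sketch of how a config like the one above would typically be consumed; this assumes PyYAML and that the repo root is on sys.path, and the file path and loading code are illustrative rather than the repo's actual entry point:

import yaml

def import_class(name):
    # same dotted-path import pattern as in decouple_gcn.py below
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod

with open('config.yaml') as f:      # illustrative path
    cfg = yaml.safe_load(f)

Model = import_class(cfg['model'])  # -> model.decouple_gcn.Model
model = Model(**cfg['model_args'])  # num_class=60, num_point=25, groups=16, ...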
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_xsub/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.autograd import Variable
+import numpy as np
+import math
+from model.dropSke import DropBlock_Ske
+from model.dropT import DropBlockT_1d
+
+
+def import_class(name):
+    components = name.split('.')
+    mod = __import__(components[0])
+    for comp in components[1:]:
+        mod = getattr(mod, comp)
+    return mod
+
+
+def conv_branch_init(conv):
+    weight = conv.weight
+    n = weight.size(0)
+    k1 = weight.size(1)
+    k2 = weight.size(2)
+    nn.init.normal(weight, 0, math.sqrt(2. / (n * k1 * k2)))
+    nn.init.constant(conv.bias, 0)
+
+
+def conv_init(conv):
+    nn.init.kaiming_normal(conv.weight, mode='fan_out')
+    nn.init.constant(conv.bias, 0)
+
+
+def bn_init(bn, scale):
+    nn.init.constant(bn.weight, scale)
+    nn.init.constant(bn.bias, 0)
+
+
+class unit_tcn(nn.Module):
+    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
+        super(unit_tcn, self).__init__()
+        pad = int((kernel_size - 1) / 2)
+        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                              stride=(stride, 1))
+
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+        conv_init(self.conv)
+        bn_init(self.bn, 1)
+
+        self.dropS = DropBlock_Ske(num_point=num_point)
+        self.dropT = DropBlockT_1d(block_size=block_size)
+
+    def forward(self, x, keep_prob, A):
+        x = self.bn(self.conv(x))
+        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
+        return x
+
+
+class unit_tcn_skip(nn.Module):
+    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+        super(unit_tcn_skip, self).__init__()
+        pad = int((kernel_size - 1) / 2)
+        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                              stride=(stride, 1))
+
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+        conv_init(self.conv)
+        bn_init(self.bn, 1)
+
+    def forward(self, x):
+        x = self.bn(self.conv(x))
+        return x
+
+
+class unit_gcn(nn.Module):
+    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
+        super(unit_gcn, self).__init__()
+        self.in_channels = in_channels
+        self.out_channels = out_channels
+        self.num_point = num_point
+        self.groups = groups
+        self.num_subset = num_subset
+        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
+            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)
+
+        if in_channels != out_channels:
+            self.down = nn.Sequential(
+                nn.Conv2d(in_channels, out_channels, 1),
+                nn.BatchNorm2d(out_channels)
+            )
+        else:
+            self.down = lambda x: x
+
+        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                conv_init(m)
+            elif isinstance(m, nn.BatchNorm2d):
+                bn_init(m, 1)
+        bn_init(self.bn, 1e-6)
+
+        self.Linear_weight = nn.Parameter(torch.zeros(
+            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
+        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
+            0.5 / (out_channels * num_subset)))
+
+        self.Linear_bias = nn.Parameter(torch.zeros(
+            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
+        nn.init.constant(self.Linear_bias, 1e-6)
+
+        eye_array = []
+        for i in range(out_channels):
+            eye_array.append(torch.eye(num_point))
+        self.eyes = nn.Parameter(torch.tensor(torch.stack(
+            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]
+
+    def norm(self, A):
+        b, c, h, w = A.size()
+        A = A.view(c, self.num_point, self.num_point)
+        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
+        D_list_12 = (D_list + 0.001)**(-1)
+        D_12 = self.eyes * D_list_12
+        A = torch.bmm(A, D_12).view(b, c, h, w)
+        return A
+
+    def forward(self, x0):
+        learn_A = self.DecoupleA.repeat(
+            1, self.out_channels // self.groups, 1, 1)
+        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
+            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)
+
+        x = torch.einsum(
+            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
+        x = x + self.Linear_bias
+        x = self.bn0(x)
+
+        n, kc, t, v = x.size()
+        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
+        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))
+
+        x = self.bn(x)
+        x += self.down(x0)
+        x = self.relu(x)
+        return x
+
+
+class TCN_GCN_unit(nn.Module):
+    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
+        super(TCN_GCN_unit, self).__init__()
+        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
+        self.tcn1 = unit_tcn(out_channels, out_channels,
+                             stride=stride, num_point=num_point)
+        self.relu = nn.ReLU()
+
+        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
+            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)
+
+        if not residual:
+            self.residual = lambda x: 0
+
+        elif (in_channels == out_channels) and (stride == 1):
+            self.residual = lambda x: x
+
+        else:
+            self.residual = unit_tcn_skip(
+                in_channels, out_channels, kernel_size=1, stride=stride)
+        self.dropSke = DropBlock_Ske(num_point=num_point)
+        self.dropT_skip = DropBlockT_1d(block_size=block_size)
+
+    def forward(self, x, keep_prob):
+        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
+            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
+        return self.relu(x)
+
+
+class Model(nn.Module):
+    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
+        super(Model, self).__init__()
+
+        if graph is None:
+            raise ValueError()
+        else:
+            Graph = import_class(graph)
+            self.graph = Graph(**graph_args)
+
+        A = self.graph.A
+        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
+
+        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
+                               block_size, residual=False)
+        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l5 = TCN_GCN_unit(
+            64, 128, A, groups, num_point, block_size, stride=2)
+        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
+        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
+        self.l8 = TCN_GCN_unit(128, 256, A, groups,
+                               num_point, block_size, stride=2)
+        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
+        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
+
+        self.fc = nn.Linear(256, num_class)
+        nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
+        bn_init(self.data_bn, 1)
+
+    def forward(self, x, keep_prob=0.9):
+        N, C, T, V, M = x.size()
+
+        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
+        x = self.data_bn(x)
+        x = x.view(N, M, V, C, T).permute(
+            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
+
+        x = self.l1(x, 1.0)
+        x = self.l2(x, 1.0)
+        x = self.l3(x, 1.0)
+        x = self.l4(x, 1.0)
+        x = self.l5(x, 1.0)
+        x = self.l6(x, 1.0)
+        x = self.l7(x, keep_prob)
+        x = self.l8(x, keep_prob)
+        x = self.l9(x, keep_prob)
+        x = self.l10(x, keep_prob)
+
+        # N*M,C,T,V
+        c_new = x.size(1)
+        x = x.reshape(N, M, c_new, -1)
+        x = x.mean(3).mean(1)
+
+        return self.fc(x)
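A quick smoke test of the module above, under stated assumptions: the repo's graph package must be importable, and a CUDA device is required because unit_gcn and TCN_GCN_unit allocate parameters with device='cuda'. Input layout is (N, C, T, V, M) = (batch, channels, frames, joints, bodies), matching the permutes in Model.forward; the shapes below are illustrative:

import torch
from model.decouple_gcn import Model  # the file above

model = Model(num_class=60, num_point=25, num_person=2, groups=16,
              block_size=41, graph='graph.ntu_rgb_d.Graph',
              graph_args={'labeling_mode': 'spatial'}).cuda()

x = torch.randn(2, 3, 64, 25, 2).cuda()  # 2 clips, 3 channels, 64 frames, 25 joints, 2 bodies
logits = model(x, keep_prob=0.9)         # keep_rate: 0.9 in the config
print(logits.shape)                      # torch.Size([2, 60])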
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_xsub/eval_results/best_acc.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2236ff2f1107d378f01f43fcea2695b74d41cdb119a970aaf352e77acb9fd636
+size 4979902
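The three lines above are the entire checked-in content of best_acc.pkl: a Git LFS pointer, with the ~4.9 MB pickle itself living in LFS storage. A small illustrative parser for this pointer format (not part of the repo):

def parse_lfs_pointer(text):
    # each pointer line is 'key value'; oid carries the sha256 of the real file
    fields = dict(line.split(' ', 1) for line in text.strip().splitlines())
    return fields['version'], fields['oid'], int(fields['size'])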
ckpt/Others/DC-GCN+ADG/ntu60_xsub/ntu_joint_xsub/log.txt
ADDED
@@ -0,0 +1,626 @@
+[ Wed Sep 14 08:59:00 2022 ] Parameters:
+{'work_dir': './work_dir/ntu_joint_xsub', 'model_saved_name': './save_models/ntu_joint_xsub', 'Experiment_name': 'ntu_joint_xsub', 'config': './config/nturgbd-cross-subject/train_joint.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.decouple_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'groups': 16, 'block_size': 41, 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80], 'device': [2, 3], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 100, 'weight_decay': 0.0001, 'keep_rate': 0.9, 'groups': 8, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
+
+[ Wed Sep 14 08:59:00 2022 ] Training epoch: 1
+[ Wed Sep 14 08:59:50 2022 ] Batch(99/123) done. Loss: 2.9147 lr:0.100000
+[ Wed Sep 14 09:00:00 2022 ] Eval epoch: 1
+[ Wed Sep 14 09:00:50 2022 ] Mean test loss of 258 batches: 4.591220378875732.
+[ Wed Sep 14 09:00:50 2022 ] Top1: 6.49%
+[ Wed Sep 14 09:00:50 2022 ] Top5: 23.94%
+[ Wed Sep 14 09:00:50 2022 ] Training epoch: 2
+[ Wed Sep 14 09:01:34 2022 ] Batch(76/123) done. Loss: 2.3112 lr:0.100000
+[ Wed Sep 14 09:01:58 2022 ] Eval epoch: 2
+[ Wed Sep 14 09:02:48 2022 ] Mean test loss of 258 batches: 4.086060523986816.
+[ Wed Sep 14 09:02:48 2022 ] Top1: 10.88%
+[ Wed Sep 14 09:02:48 2022 ] Top5: 34.88%
+[ Wed Sep 14 09:02:48 2022 ] Training epoch: 3
+[ Wed Sep 14 09:03:20 2022 ] Batch(53/123) done. Loss: 2.6104 lr:0.100000
+[ Wed Sep 14 09:03:56 2022 ] Eval epoch: 3
+[ Wed Sep 14 09:04:46 2022 ] Mean test loss of 258 batches: 3.353515148162842.
+[ Wed Sep 14 09:04:47 2022 ] Top1: 15.84%
+[ Wed Sep 14 09:04:47 2022 ] Top5: 45.24%
+[ Wed Sep 14 09:04:47 2022 ] Training epoch: 4
+[ Wed Sep 14 09:05:06 2022 ] Batch(30/123) done. Loss: 2.2817 lr:0.100000
+[ Wed Sep 14 09:05:55 2022 ] Eval epoch: 4
+[ Wed Sep 14 09:06:45 2022 ] Mean test loss of 258 batches: 3.0355422496795654.
+[ Wed Sep 14 09:06:45 2022 ] Top1: 20.13%
+[ Wed Sep 14 09:06:45 2022 ] Top5: 58.54%
+[ Wed Sep 14 09:06:45 2022 ] Training epoch: 5
+[ Wed Sep 14 09:06:52 2022 ] Batch(7/123) done. Loss: 1.8547 lr:0.100000
+[ Wed Sep 14 09:07:45 2022 ] Batch(107/123) done. Loss: 1.6255 lr:0.100000
+[ Wed Sep 14 09:07:53 2022 ] Eval epoch: 5
+[ Wed Sep 14 09:08:43 2022 ] Mean test loss of 258 batches: 2.7248806953430176.
+[ Wed Sep 14 09:08:43 2022 ] Top1: 24.51%
+[ Wed Sep 14 09:08:43 2022 ] Top5: 59.93%
+[ Wed Sep 14 09:08:43 2022 ] Training epoch: 6
+[ Wed Sep 14 09:09:31 2022 ] Batch(84/123) done. Loss: 1.4542 lr:0.100000
+[ Wed Sep 14 09:09:51 2022 ] Eval epoch: 6
+[ Wed Sep 14 09:10:41 2022 ] Mean test loss of 258 batches: 2.7830545902252197.
+[ Wed Sep 14 09:10:41 2022 ] Top1: 25.60%
+[ Wed Sep 14 09:10:41 2022 ] Top5: 66.76%
+[ Wed Sep 14 09:10:41 2022 ] Training epoch: 7
+[ Wed Sep 14 09:11:17 2022 ] Batch(61/123) done. Loss: 1.4214 lr:0.100000
+[ Wed Sep 14 09:11:49 2022 ] Eval epoch: 7
+[ Wed Sep 14 09:12:40 2022 ] Mean test loss of 258 batches: 2.7432169914245605.
+[ Wed Sep 14 09:12:40 2022 ] Top1: 26.01%
+[ Wed Sep 14 09:12:40 2022 ] Top5: 66.35%
+[ Wed Sep 14 09:12:40 2022 ] Training epoch: 8
+[ Wed Sep 14 09:13:03 2022 ] Batch(38/123) done. Loss: 1.4682 lr:0.100000
+[ Wed Sep 14 09:13:48 2022 ] Eval epoch: 8
+[ Wed Sep 14 09:14:38 2022 ] Mean test loss of 258 batches: 2.611168622970581.
+[ Wed Sep 14 09:14:38 2022 ] Top1: 32.43%
+[ Wed Sep 14 09:14:38 2022 ] Top5: 68.30%
+[ Wed Sep 14 09:14:38 2022 ] Training epoch: 9
+[ Wed Sep 14 09:14:49 2022 ] Batch(15/123) done. Loss: 1.3381 lr:0.100000
+[ Wed Sep 14 09:15:42 2022 ] Batch(115/123) done. Loss: 0.9783 lr:0.100000
+[ Wed Sep 14 09:15:46 2022 ] Eval epoch: 9
+[ Wed Sep 14 09:16:36 2022 ] Mean test loss of 258 batches: 2.5825960636138916.
+[ Wed Sep 14 09:16:36 2022 ] Top1: 31.28%
+[ Wed Sep 14 09:16:37 2022 ] Top5: 70.43%
+[ Wed Sep 14 09:16:37 2022 ] Training epoch: 10
+[ Wed Sep 14 09:17:29 2022 ] Batch(92/123) done. Loss: 0.8202 lr:0.100000
+[ Wed Sep 14 09:17:45 2022 ] Eval epoch: 10
+[ Wed Sep 14 09:18:35 2022 ] Mean test loss of 258 batches: 2.5349206924438477.
+[ Wed Sep 14 09:18:35 2022 ] Top1: 38.55%
+[ Wed Sep 14 09:18:35 2022 ] Top5: 73.22%
+[ Wed Sep 14 09:18:35 2022 ] Training epoch: 11
+[ Wed Sep 14 09:19:15 2022 ] Batch(69/123) done. Loss: 1.0928 lr:0.100000
+[ Wed Sep 14 09:19:43 2022 ] Eval epoch: 11
+[ Wed Sep 14 09:20:33 2022 ] Mean test loss of 258 batches: 2.245619773864746.
+[ Wed Sep 14 09:20:33 2022 ] Top1: 39.81%
+[ Wed Sep 14 09:20:33 2022 ] Top5: 78.21%
+[ Wed Sep 14 09:20:33 2022 ] Training epoch: 12
+[ Wed Sep 14 09:21:01 2022 ] Batch(46/123) done. Loss: 1.0304 lr:0.100000
+[ Wed Sep 14 09:21:41 2022 ] Eval epoch: 12
+[ Wed Sep 14 09:22:31 2022 ] Mean test loss of 258 batches: 2.614882469177246.
+[ Wed Sep 14 09:22:31 2022 ] Top1: 38.28%
+[ Wed Sep 14 09:22:31 2022 ] Top5: 76.38%
+[ Wed Sep 14 09:22:32 2022 ] Training epoch: 13
+[ Wed Sep 14 09:22:47 2022 ] Batch(23/123) done. Loss: 0.8045 lr:0.100000
+[ Wed Sep 14 09:23:39 2022 ] Eval epoch: 13
+[ Wed Sep 14 09:24:30 2022 ] Mean test loss of 258 batches: 2.154670238494873.
+[ Wed Sep 14 09:24:30 2022 ] Top1: 43.33%
+[ Wed Sep 14 09:24:30 2022 ] Top5: 79.09%
+[ Wed Sep 14 09:24:30 2022 ] Training epoch: 14
+[ Wed Sep 14 09:24:33 2022 ] Batch(0/123) done. Loss: 0.8124 lr:0.100000
+[ Wed Sep 14 09:25:26 2022 ] Batch(100/123) done. Loss: 1.0300 lr:0.100000
+[ Wed Sep 14 09:25:38 2022 ] Eval epoch: 14
+[ Wed Sep 14 09:26:28 2022 ] Mean test loss of 258 batches: 2.1980268955230713.
+[ Wed Sep 14 09:26:28 2022 ] Top1: 40.46%
+[ Wed Sep 14 09:26:28 2022 ] Top5: 79.35%
+[ Wed Sep 14 09:26:28 2022 ] Training epoch: 15
+[ Wed Sep 14 09:27:12 2022 ] Batch(77/123) done. Loss: 1.0310 lr:0.100000
+[ Wed Sep 14 09:27:36 2022 ] Eval epoch: 15
+[ Wed Sep 14 09:28:26 2022 ] Mean test loss of 258 batches: 2.291968584060669.
+[ Wed Sep 14 09:28:26 2022 ] Top1: 42.55%
+[ Wed Sep 14 09:28:26 2022 ] Top5: 79.31%
+[ Wed Sep 14 09:28:26 2022 ] Training epoch: 16
+[ Wed Sep 14 09:28:58 2022 ] Batch(54/123) done. Loss: 0.7916 lr:0.100000
+[ Wed Sep 14 09:29:34 2022 ] Eval epoch: 16
+[ Wed Sep 14 09:30:24 2022 ] Mean test loss of 258 batches: 2.040419578552246.
+[ Wed Sep 14 09:30:24 2022 ] Top1: 44.35%
+[ Wed Sep 14 09:30:24 2022 ] Top5: 83.03%
+[ Wed Sep 14 09:30:24 2022 ] Training epoch: 17
+[ Wed Sep 14 09:30:44 2022 ] Batch(31/123) done. Loss: 0.6790 lr:0.100000
+[ Wed Sep 14 09:31:32 2022 ] Eval epoch: 17
+[ Wed Sep 14 09:32:22 2022 ] Mean test loss of 258 batches: 2.0747108459472656.
+[ Wed Sep 14 09:32:22 2022 ] Top1: 43.29%
+[ Wed Sep 14 09:32:23 2022 ] Top5: 80.58%
+[ Wed Sep 14 09:32:23 2022 ] Training epoch: 18
+[ Wed Sep 14 09:32:30 2022 ] Batch(8/123) done. Loss: 0.9900 lr:0.100000
+[ Wed Sep 14 09:33:23 2022 ] Batch(108/123) done. Loss: 0.9786 lr:0.100000
+[ Wed Sep 14 09:33:31 2022 ] Eval epoch: 18
+[ Wed Sep 14 09:34:21 2022 ] Mean test loss of 258 batches: 2.5247063636779785.
+[ Wed Sep 14 09:34:21 2022 ] Top1: 40.60%
+[ Wed Sep 14 09:34:21 2022 ] Top5: 78.68%
+[ Wed Sep 14 09:34:21 2022 ] Training epoch: 19
+[ Wed Sep 14 09:35:09 2022 ] Batch(85/123) done. Loss: 1.0015 lr:0.100000
+[ Wed Sep 14 09:35:29 2022 ] Eval epoch: 19
+[ Wed Sep 14 09:36:19 2022 ] Mean test loss of 258 batches: 2.1536636352539062.
+[ Wed Sep 14 09:36:19 2022 ] Top1: 44.80%
+[ Wed Sep 14 09:36:19 2022 ] Top5: 82.30%
+[ Wed Sep 14 09:36:19 2022 ] Training epoch: 20
+[ Wed Sep 14 09:36:55 2022 ] Batch(62/123) done. Loss: 1.1036 lr:0.100000
+[ Wed Sep 14 09:37:27 2022 ] Eval epoch: 20
+[ Wed Sep 14 09:38:18 2022 ] Mean test loss of 258 batches: 1.6829923391342163.
+[ Wed Sep 14 09:38:18 2022 ] Top1: 55.40%
+[ Wed Sep 14 09:38:18 2022 ] Top5: 88.28%
+[ Wed Sep 14 09:38:18 2022 ] Training epoch: 21
+[ Wed Sep 14 09:38:42 2022 ] Batch(39/123) done. Loss: 0.7845 lr:0.100000
+[ Wed Sep 14 09:39:26 2022 ] Eval epoch: 21
+[ Wed Sep 14 09:40:16 2022 ] Mean test loss of 258 batches: 2.2478978633880615.
+[ Wed Sep 14 09:40:16 2022 ] Top1: 43.64%
+[ Wed Sep 14 09:40:16 2022 ] Top5: 82.79%
+[ Wed Sep 14 09:40:16 2022 ] Training epoch: 22
+[ Wed Sep 14 09:40:28 2022 ] Batch(16/123) done. Loss: 0.7186 lr:0.100000
+[ Wed Sep 14 09:41:21 2022 ] Batch(116/123) done. Loss: 0.6766 lr:0.100000
+[ Wed Sep 14 09:41:24 2022 ] Eval epoch: 22
+[ Wed Sep 14 09:42:14 2022 ] Mean test loss of 258 batches: 2.2252414226531982.
+[ Wed Sep 14 09:42:14 2022 ] Top1: 48.07%
+[ Wed Sep 14 09:42:14 2022 ] Top5: 84.36%
+[ Wed Sep 14 09:42:14 2022 ] Training epoch: 23
+[ Wed Sep 14 09:43:07 2022 ] Batch(93/123) done. Loss: 0.9349 lr:0.100000
+[ Wed Sep 14 09:43:22 2022 ] Eval epoch: 23
+[ Wed Sep 14 09:44:12 2022 ] Mean test loss of 258 batches: 1.8568469285964966.
+[ Wed Sep 14 09:44:12 2022 ] Top1: 53.35%
+[ Wed Sep 14 09:44:13 2022 ] Top5: 86.43%
+[ Wed Sep 14 09:44:13 2022 ] Training epoch: 24
+[ Wed Sep 14 09:44:53 2022 ] Batch(70/123) done. Loss: 0.6512 lr:0.100000
+[ Wed Sep 14 09:45:21 2022 ] Eval epoch: 24
+[ Wed Sep 14 09:46:11 2022 ] Mean test loss of 258 batches: 1.7404369115829468.
+[ Wed Sep 14 09:46:11 2022 ] Top1: 54.24%
+[ Wed Sep 14 09:46:11 2022 ] Top5: 87.77%
+[ Wed Sep 14 09:46:11 2022 ] Training epoch: 25
+[ Wed Sep 14 09:46:39 2022 ] Batch(47/123) done. Loss: 0.4268 lr:0.100000
+[ Wed Sep 14 09:47:19 2022 ] Eval epoch: 25
+[ Wed Sep 14 09:48:09 2022 ] Mean test loss of 258 batches: 1.952316403388977.
+[ Wed Sep 14 09:48:09 2022 ] Top1: 53.28%
+[ Wed Sep 14 09:48:09 2022 ] Top5: 87.27%
+[ Wed Sep 14 09:48:09 2022 ] Training epoch: 26
+[ Wed Sep 14 09:48:25 2022 ] Batch(24/123) done. Loss: 0.4414 lr:0.100000
+[ Wed Sep 14 09:49:17 2022 ] Eval epoch: 26
+[ Wed Sep 14 09:50:07 2022 ] Mean test loss of 258 batches: 1.8949825763702393.
+[ Wed Sep 14 09:50:07 2022 ] Top1: 53.51%
+[ Wed Sep 14 09:50:07 2022 ] Top5: 86.95%
+[ Wed Sep 14 09:50:07 2022 ] Training epoch: 27
+[ Wed Sep 14 09:50:11 2022 ] Batch(1/123) done. Loss: 0.5545 lr:0.100000
+[ Wed Sep 14 09:51:04 2022 ] Batch(101/123) done. Loss: 0.3903 lr:0.100000
+[ Wed Sep 14 09:51:15 2022 ] Eval epoch: 27
+[ Wed Sep 14 09:52:05 2022 ] Mean test loss of 258 batches: 1.7793006896972656.
+[ Wed Sep 14 09:52:05 2022 ] Top1: 55.94%
+[ Wed Sep 14 09:52:05 2022 ] Top5: 87.00%
+[ Wed Sep 14 09:52:05 2022 ] Training epoch: 28
+[ Wed Sep 14 09:52:50 2022 ] Batch(78/123) done. Loss: 0.8556 lr:0.100000
+[ Wed Sep 14 09:53:13 2022 ] Eval epoch: 28
+[ Wed Sep 14 09:54:03 2022 ] Mean test loss of 258 batches: 2.0563740730285645.
+[ Wed Sep 14 09:54:03 2022 ] Top1: 52.83%
+[ Wed Sep 14 09:54:04 2022 ] Top5: 87.34%
+[ Wed Sep 14 09:54:04 2022 ] Training epoch: 29
+[ Wed Sep 14 09:54:36 2022 ] Batch(55/123) done. Loss: 0.7766 lr:0.100000
+[ Wed Sep 14 09:55:12 2022 ] Eval epoch: 29
+[ Wed Sep 14 09:56:01 2022 ] Mean test loss of 258 batches: 2.5048208236694336.
+[ Wed Sep 14 09:56:01 2022 ] Top1: 46.99%
+[ Wed Sep 14 09:56:01 2022 ] Top5: 83.80%
+[ Wed Sep 14 09:56:02 2022 ] Training epoch: 30
+[ Wed Sep 14 09:56:22 2022 ] Batch(32/123) done. Loss: 0.4454 lr:0.100000
+[ Wed Sep 14 09:57:10 2022 ] Eval epoch: 30
+[ Wed Sep 14 09:58:00 2022 ] Mean test loss of 258 batches: 2.186023473739624.
+[ Wed Sep 14 09:58:00 2022 ] Top1: 50.51%
+[ Wed Sep 14 09:58:00 2022 ] Top5: 85.93%
+[ Wed Sep 14 09:58:00 2022 ] Training epoch: 31
+[ Wed Sep 14 09:58:08 2022 ] Batch(9/123) done. Loss: 0.4499 lr:0.100000
+[ Wed Sep 14 09:59:01 2022 ] Batch(109/123) done. Loss: 0.4952 lr:0.100000
+[ Wed Sep 14 09:59:08 2022 ] Eval epoch: 31
+[ Wed Sep 14 09:59:59 2022 ] Mean test loss of 258 batches: 1.4914491176605225.
+[ Wed Sep 14 09:59:59 2022 ] Top1: 60.33%
+[ Wed Sep 14 09:59:59 2022 ] Top5: 90.76%
+[ Wed Sep 14 09:59:59 2022 ] Training epoch: 32
+[ Wed Sep 14 10:00:48 2022 ] Batch(86/123) done. Loss: 0.5064 lr:0.100000
+[ Wed Sep 14 10:01:07 2022 ] Eval epoch: 32
+[ Wed Sep 14 10:01:57 2022 ] Mean test loss of 258 batches: 1.968911051750183.
+[ Wed Sep 14 10:01:57 2022 ] Top1: 53.23%
+[ Wed Sep 14 10:01:58 2022 ] Top5: 87.66%
+[ Wed Sep 14 10:01:58 2022 ] Training epoch: 33
+[ Wed Sep 14 10:02:34 2022 ] Batch(63/123) done. Loss: 0.4501 lr:0.100000
+[ Wed Sep 14 10:03:06 2022 ] Eval epoch: 33
+[ Wed Sep 14 10:03:56 2022 ] Mean test loss of 258 batches: 1.7173289060592651.
+[ Wed Sep 14 10:03:56 2022 ] Top1: 55.98%
+[ Wed Sep 14 10:03:56 2022 ] Top5: 87.16%
+[ Wed Sep 14 10:03:56 2022 ] Training epoch: 34
+[ Wed Sep 14 10:04:21 2022 ] Batch(40/123) done. Loss: 0.4133 lr:0.100000
+[ Wed Sep 14 10:05:04 2022 ] Eval epoch: 34
+[ Wed Sep 14 10:05:54 2022 ] Mean test loss of 258 batches: 1.647443413734436.
+[ Wed Sep 14 10:05:54 2022 ] Top1: 59.80%
+[ Wed Sep 14 10:05:55 2022 ] Top5: 89.44%
+[ Wed Sep 14 10:05:55 2022 ] Training epoch: 35
+[ Wed Sep 14 10:06:07 2022 ] Batch(17/123) done. Loss: 0.4764 lr:0.100000
+[ Wed Sep 14 10:06:59 2022 ] Batch(117/123) done. Loss: 0.5666 lr:0.100000
+[ Wed Sep 14 10:07:02 2022 ] Eval epoch: 35
+[ Wed Sep 14 10:07:52 2022 ] Mean test loss of 258 batches: 1.9978245496749878.
+[ Wed Sep 14 10:07:52 2022 ] Top1: 54.96%
+[ Wed Sep 14 10:07:52 2022 ] Top5: 86.92%
+[ Wed Sep 14 10:07:52 2022 ] Training epoch: 36
+[ Wed Sep 14 10:08:45 2022 ] Batch(94/123) done. Loss: 0.7286 lr:0.100000
+[ Wed Sep 14 10:09:00 2022 ] Eval epoch: 36
+[ Wed Sep 14 10:09:51 2022 ] Mean test loss of 258 batches: 1.6905821561813354.
+[ Wed Sep 14 10:09:51 2022 ] Top1: 59.84%
+[ Wed Sep 14 10:09:51 2022 ] Top5: 90.23%
+[ Wed Sep 14 10:09:51 2022 ] Training epoch: 37
+[ Wed Sep 14 10:10:32 2022 ] Batch(71/123) done. Loss: 0.5124 lr:0.100000
+[ Wed Sep 14 10:10:59 2022 ] Eval epoch: 37
+[ Wed Sep 14 10:11:49 2022 ] Mean test loss of 258 batches: 2.075540065765381.
+[ Wed Sep 14 10:11:49 2022 ] Top1: 55.08%
+[ Wed Sep 14 10:11:49 2022 ] Top5: 88.20%
+[ Wed Sep 14 10:11:49 2022 ] Training epoch: 38
+[ Wed Sep 14 10:12:18 2022 ] Batch(48/123) done. Loss: 0.3995 lr:0.100000
+[ Wed Sep 14 10:12:57 2022 ] Eval epoch: 38
+[ Wed Sep 14 10:13:47 2022 ] Mean test loss of 258 batches: 1.8056854009628296.
+[ Wed Sep 14 10:13:47 2022 ] Top1: 56.98%
+[ Wed Sep 14 10:13:47 2022 ] Top5: 88.25%
+[ Wed Sep 14 10:13:48 2022 ] Training epoch: 39
+[ Wed Sep 14 10:14:04 2022 ] Batch(25/123) done. Loss: 0.2991 lr:0.100000
+[ Wed Sep 14 10:14:55 2022 ] Eval epoch: 39
+[ Wed Sep 14 10:15:45 2022 ] Mean test loss of 258 batches: 1.9381791353225708.
+[ Wed Sep 14 10:15:46 2022 ] Top1: 56.47%
+[ Wed Sep 14 10:15:46 2022 ] Top5: 88.34%
+[ Wed Sep 14 10:15:46 2022 ] Training epoch: 40
+[ Wed Sep 14 10:15:50 2022 ] Batch(2/123) done. Loss: 0.4283 lr:0.100000
+[ Wed Sep 14 10:16:43 2022 ] Batch(102/123) done. Loss: 0.5978 lr:0.100000
+[ Wed Sep 14 10:16:54 2022 ] Eval epoch: 40
+[ Wed Sep 14 10:17:44 2022 ] Mean test loss of 258 batches: 2.035595417022705.
+[ Wed Sep 14 10:17:44 2022 ] Top1: 57.83%
+[ Wed Sep 14 10:17:44 2022 ] Top5: 88.69%
+[ Wed Sep 14 10:17:44 2022 ] Training epoch: 41
+[ Wed Sep 14 10:18:29 2022 ] Batch(79/123) done. Loss: 0.2991 lr:0.100000
+[ Wed Sep 14 10:18:52 2022 ] Eval epoch: 41
+[ Wed Sep 14 10:19:42 2022 ] Mean test loss of 258 batches: 1.82920241355896.
+[ Wed Sep 14 10:19:42 2022 ] Top1: 57.34%
+[ Wed Sep 14 10:19:42 2022 ] Top5: 88.00%
+[ Wed Sep 14 10:19:42 2022 ] Training epoch: 42
+[ Wed Sep 14 10:20:15 2022 ] Batch(56/123) done. Loss: 0.2271 lr:0.100000
+[ Wed Sep 14 10:20:50 2022 ] Eval epoch: 42
+[ Wed Sep 14 10:21:41 2022 ] Mean test loss of 258 batches: 1.6885477304458618.
+[ Wed Sep 14 10:21:41 2022 ] Top1: 59.30%
+[ Wed Sep 14 10:21:41 2022 ] Top5: 89.62%
+[ Wed Sep 14 10:21:41 2022 ] Training epoch: 43
+[ Wed Sep 14 10:22:02 2022 ] Batch(33/123) done. Loss: 0.3189 lr:0.100000
+[ Wed Sep 14 10:22:49 2022 ] Eval epoch: 43
+[ Wed Sep 14 10:23:39 2022 ] Mean test loss of 258 batches: 2.392216205596924.
+[ Wed Sep 14 10:23:39 2022 ] Top1: 55.12%
+[ Wed Sep 14 10:23:39 2022 ] Top5: 87.38%
+[ Wed Sep 14 10:23:39 2022 ] Training epoch: 44
+[ Wed Sep 14 10:23:48 2022 ] Batch(10/123) done. Loss: 0.2906 lr:0.100000
+[ Wed Sep 14 10:24:41 2022 ] Batch(110/123) done. Loss: 0.2826 lr:0.100000
+[ Wed Sep 14 10:24:47 2022 ] Eval epoch: 44
+[ Wed Sep 14 10:25:37 2022 ] Mean test loss of 258 batches: 1.8153728246688843.
+[ Wed Sep 14 10:25:37 2022 ] Top1: 57.09%
+[ Wed Sep 14 10:25:37 2022 ] Top5: 88.91%
+[ Wed Sep 14 10:25:37 2022 ] Training epoch: 45
+[ Wed Sep 14 10:26:27 2022 ] Batch(87/123) done. Loss: 0.3844 lr:0.100000
+[ Wed Sep 14 10:26:46 2022 ] Eval epoch: 45
+[ Wed Sep 14 10:27:36 2022 ] Mean test loss of 258 batches: 1.9515217542648315.
+[ Wed Sep 14 10:27:36 2022 ] Top1: 54.25%
+[ Wed Sep 14 10:27:36 2022 ] Top5: 87.61%
+[ Wed Sep 14 10:27:36 2022 ] Training epoch: 46
+[ Wed Sep 14 10:28:13 2022 ] Batch(64/123) done. Loss: 0.3033 lr:0.100000
+[ Wed Sep 14 10:28:44 2022 ] Eval epoch: 46
+[ Wed Sep 14 10:29:34 2022 ] Mean test loss of 258 batches: 1.7243938446044922.
+[ Wed Sep 14 10:29:34 2022 ] Top1: 59.54%
+[ Wed Sep 14 10:29:34 2022 ] Top5: 89.23%
+[ Wed Sep 14 10:29:34 2022 ] Training epoch: 47
+[ Wed Sep 14 10:29:59 2022 ] Batch(41/123) done. Loss: 0.2655 lr:0.100000
+[ Wed Sep 14 10:30:42 2022 ] Eval epoch: 47
+[ Wed Sep 14 10:31:33 2022 ] Mean test loss of 258 batches: 2.1055798530578613.
+[ Wed Sep 14 10:31:33 2022 ] Top1: 55.53%
+[ Wed Sep 14 10:31:33 2022 ] Top5: 87.32%
+[ Wed Sep 14 10:31:33 2022 ] Training epoch: 48
+[ Wed Sep 14 10:31:46 2022 ] Batch(18/123) done. Loss: 0.2439 lr:0.100000
+[ Wed Sep 14 10:32:38 2022 ] Batch(118/123) done. Loss: 0.4248 lr:0.100000
+[ Wed Sep 14 10:32:41 2022 ] Eval epoch: 48
+[ Wed Sep 14 10:33:30 2022 ] Mean test loss of 258 batches: 1.7812795639038086.
+[ Wed Sep 14 10:33:31 2022 ] Top1: 60.28%
+[ Wed Sep 14 10:33:31 2022 ] Top5: 89.31%
+[ Wed Sep 14 10:33:31 2022 ] Training epoch: 49
+[ Wed Sep 14 10:34:24 2022 ] Batch(95/123) done. Loss: 0.3316 lr:0.100000
+[ Wed Sep 14 10:34:39 2022 ] Eval epoch: 49
+[ Wed Sep 14 10:35:29 2022 ] Mean test loss of 258 batches: 1.7424616813659668.
+[ Wed Sep 14 10:35:29 2022 ] Top1: 62.07%
+[ Wed Sep 14 10:35:29 2022 ] Top5: 90.17%
+[ Wed Sep 14 10:35:29 2022 ] Training epoch: 50
+[ Wed Sep 14 10:36:10 2022 ] Batch(72/123) done. Loss: 0.2719 lr:0.100000
+[ Wed Sep 14 10:36:37 2022 ] Eval epoch: 50
+[ Wed Sep 14 10:37:27 2022 ] Mean test loss of 258 batches: 2.0730063915252686.
+[ Wed Sep 14 10:37:27 2022 ] Top1: 57.09%
+[ Wed Sep 14 10:37:27 2022 ] Top5: 88.33%
+[ Wed Sep 14 10:37:27 2022 ] Training epoch: 51
+[ Wed Sep 14 10:37:57 2022 ] Batch(49/123) done. Loss: 0.1891 lr:0.100000
+[ Wed Sep 14 10:38:35 2022 ] Eval epoch: 51
+[ Wed Sep 14 10:39:25 2022 ] Mean test loss of 258 batches: 2.068854331970215.
+[ Wed Sep 14 10:39:25 2022 ] Top1: 56.49%
+[ Wed Sep 14 10:39:25 2022 ] Top5: 87.82%
+[ Wed Sep 14 10:39:25 2022 ] Training epoch: 52
+[ Wed Sep 14 10:39:43 2022 ] Batch(26/123) done. Loss: 0.2361 lr:0.100000
+[ Wed Sep 14 10:40:34 2022 ] Eval epoch: 52
+[ Wed Sep 14 10:41:24 2022 ] Mean test loss of 258 batches: 1.7761597633361816.
+[ Wed Sep 14 10:41:24 2022 ] Top1: 60.53%
+[ Wed Sep 14 10:41:24 2022 ] Top5: 89.23%
+[ Wed Sep 14 10:41:24 2022 ] Training epoch: 53
+[ Wed Sep 14 10:41:29 2022 ] Batch(3/123) done. Loss: 0.2845 lr:0.100000
+[ Wed Sep 14 10:42:22 2022 ] Batch(103/123) done. Loss: 0.3599 lr:0.100000
+[ Wed Sep 14 10:42:32 2022 ] Eval epoch: 53
+[ Wed Sep 14 10:43:22 2022 ] Mean test loss of 258 batches: 1.8274670839309692.
+[ Wed Sep 14 10:43:22 2022 ] Top1: 59.37%
+[ Wed Sep 14 10:43:22 2022 ] Top5: 89.29%
+[ Wed Sep 14 10:43:22 2022 ] Training epoch: 54
+[ Wed Sep 14 10:44:07 2022 ] Batch(80/123) done. Loss: 0.3570 lr:0.100000
+[ Wed Sep 14 10:44:30 2022 ] Eval epoch: 54
+[ Wed Sep 14 10:45:20 2022 ] Mean test loss of 258 batches: 2.603060007095337.
+[ Wed Sep 14 10:45:20 2022 ] Top1: 52.80%
+[ Wed Sep 14 10:45:20 2022 ] Top5: 85.49%
+[ Wed Sep 14 10:45:20 2022 ] Training epoch: 55
+[ Wed Sep 14 10:45:54 2022 ] Batch(57/123) done. Loss: 0.3911 lr:0.100000
+[ Wed Sep 14 10:46:28 2022 ] Eval epoch: 55
+[ Wed Sep 14 10:47:19 2022 ] Mean test loss of 258 batches: 2.296555995941162.
+[ Wed Sep 14 10:47:19 2022 ] Top1: 55.17%
+[ Wed Sep 14 10:47:19 2022 ] Top5: 88.06%
+[ Wed Sep 14 10:47:19 2022 ] Training epoch: 56
+[ Wed Sep 14 10:47:40 2022 ] Batch(34/123) done. Loss: 0.3296 lr:0.100000
+[ Wed Sep 14 10:48:26 2022 ] Eval epoch: 56
+[ Wed Sep 14 10:49:17 2022 ] Mean test loss of 258 batches: 2.9580914974212646.
+[ Wed Sep 14 10:49:17 2022 ] Top1: 50.03%
+[ Wed Sep 14 10:49:17 2022 ] Top5: 84.24%
+[ Wed Sep 14 10:49:17 2022 ] Training epoch: 57
+[ Wed Sep 14 10:49:26 2022 ] Batch(11/123) done. Loss: 0.3893 lr:0.100000
+[ Wed Sep 14 10:50:19 2022 ] Batch(111/123) done. Loss: 0.2722 lr:0.100000
+[ Wed Sep 14 10:50:25 2022 ] Eval epoch: 57
+[ Wed Sep 14 10:51:15 2022 ] Mean test loss of 258 batches: 2.132084608078003.
+[ Wed Sep 14 10:51:15 2022 ] Top1: 56.30%
+[ Wed Sep 14 10:51:15 2022 ] Top5: 87.72%
+[ Wed Sep 14 10:51:15 2022 ] Training epoch: 58
+[ Wed Sep 14 10:52:05 2022 ] Batch(88/123) done. Loss: 0.2534 lr:0.100000
+[ Wed Sep 14 10:52:23 2022 ] Eval epoch: 58
+[ Wed Sep 14 10:53:14 2022 ] Mean test loss of 258 batches: 65.98489379882812.
+[ Wed Sep 14 10:53:14 2022 ] Top1: 3.87%
+[ Wed Sep 14 10:53:14 2022 ] Top5: 15.19%
+[ Wed Sep 14 10:53:14 2022 ] Training epoch: 59
+[ Wed Sep 14 10:53:51 2022 ] Batch(65/123) done. Loss: 0.2358 lr:0.100000
+[ Wed Sep 14 10:54:22 2022 ] Eval epoch: 59
+[ Wed Sep 14 10:55:12 2022 ] Mean test loss of 258 batches: 2.2219696044921875.
+[ Wed Sep 14 10:55:12 2022 ] Top1: 58.53%
+[ Wed Sep 14 10:55:12 2022 ] Top5: 87.46%
+[ Wed Sep 14 10:55:12 2022 ] Training epoch: 60
+[ Wed Sep 14 10:55:38 2022 ] Batch(42/123) done. Loss: 0.2937 lr:0.100000
+[ Wed Sep 14 10:56:20 2022 ] Eval epoch: 60
+[ Wed Sep 14 10:57:11 2022 ] Mean test loss of 258 batches: 1.9551914930343628.
+[ Wed Sep 14 10:57:11 2022 ] Top1: 58.57%
+[ Wed Sep 14 10:57:11 2022 ] Top5: 87.32%
+[ Wed Sep 14 10:57:11 2022 ] Training epoch: 61
+[ Wed Sep 14 10:57:24 2022 ] Batch(19/123) done. Loss: 0.1494 lr:0.010000
+[ Wed Sep 14 10:58:17 2022 ] Batch(119/123) done. Loss: 0.1193 lr:0.010000
+[ Wed Sep 14 10:58:19 2022 ] Eval epoch: 61
+[ Wed Sep 14 10:59:09 2022 ] Mean test loss of 258 batches: 1.575683832168579.
+[ Wed Sep 14 10:59:09 2022 ] Top1: 66.74%
+[ Wed Sep 14 10:59:09 2022 ] Top5: 91.60%
+[ Wed Sep 14 10:59:09 2022 ] Training epoch: 62
+[ Wed Sep 14 11:00:03 2022 ] Batch(96/123) done. Loss: 0.0999 lr:0.010000
+[ Wed Sep 14 11:00:17 2022 ] Eval epoch: 62
+[ Wed Sep 14 11:01:07 2022 ] Mean test loss of 258 batches: 1.536460280418396.
+[ Wed Sep 14 11:01:07 2022 ] Top1: 67.30%
+[ Wed Sep 14 11:01:07 2022 ] Top5: 91.82%
+[ Wed Sep 14 11:01:07 2022 ] Training epoch: 63
+[ Wed Sep 14 11:01:49 2022 ] Batch(73/123) done. Loss: 0.0839 lr:0.010000
+[ Wed Sep 14 11:02:15 2022 ] Eval epoch: 63
+[ Wed Sep 14 11:03:05 2022 ] Mean test loss of 258 batches: 1.558850646018982.
+[ Wed Sep 14 11:03:05 2022 ] Top1: 67.97%
+[ Wed Sep 14 11:03:05 2022 ] Top5: 91.76%
+[ Wed Sep 14 11:03:05 2022 ] Training epoch: 64
+[ Wed Sep 14 11:03:35 2022 ] Batch(50/123) done. Loss: 0.0853 lr:0.010000
+[ Wed Sep 14 11:04:13 2022 ] Eval epoch: 64
+[ Wed Sep 14 11:05:03 2022 ] Mean test loss of 258 batches: 1.6455943584442139.
+[ Wed Sep 14 11:05:03 2022 ] Top1: 67.59%
+[ Wed Sep 14 11:05:03 2022 ] Top5: 91.65%
+[ Wed Sep 14 11:05:03 2022 ] Training epoch: 65
+[ Wed Sep 14 11:05:21 2022 ] Batch(27/123) done. Loss: 0.0390 lr:0.010000
+[ Wed Sep 14 11:06:11 2022 ] Eval epoch: 65
+[ Wed Sep 14 11:07:01 2022 ] Mean test loss of 258 batches: 1.562364935874939.
+[ Wed Sep 14 11:07:01 2022 ] Top1: 68.21%
+[ Wed Sep 14 11:07:01 2022 ] Top5: 92.05%
+[ Wed Sep 14 11:07:01 2022 ] Training epoch: 66
+[ Wed Sep 14 11:07:06 2022 ] Batch(4/123) done. Loss: 0.0553 lr:0.010000
+[ Wed Sep 14 11:07:59 2022 ] Batch(104/123) done. Loss: 0.0542 lr:0.010000
+[ Wed Sep 14 11:08:09 2022 ] Eval epoch: 66
+[ Wed Sep 14 11:08:59 2022 ] Mean test loss of 258 batches: 1.6343063116073608.
+[ Wed Sep 14 11:08:59 2022 ] Top1: 67.62%
+[ Wed Sep 14 11:08:59 2022 ] Top5: 91.82%
+[ Wed Sep 14 11:08:59 2022 ] Training epoch: 67
+[ Wed Sep 14 11:09:46 2022 ] Batch(81/123) done. Loss: 0.0217 lr:0.010000
+[ Wed Sep 14 11:10:08 2022 ] Eval epoch: 67
+[ Wed Sep 14 11:10:58 2022 ] Mean test loss of 258 batches: 1.6032357215881348.
+[ Wed Sep 14 11:10:58 2022 ] Top1: 68.16%
+[ Wed Sep 14 11:10:58 2022 ] Top5: 92.14%
+[ Wed Sep 14 11:10:58 2022 ] Training epoch: 68
+[ Wed Sep 14 11:11:32 2022 ] Batch(58/123) done. Loss: 0.0329 lr:0.010000
+[ Wed Sep 14 11:12:06 2022 ] Eval epoch: 68
+[ Wed Sep 14 11:12:56 2022 ] Mean test loss of 258 batches: 1.5968663692474365.
+[ Wed Sep 14 11:12:56 2022 ] Top1: 67.98%
+[ Wed Sep 14 11:12:56 2022 ] Top5: 91.98%
+[ Wed Sep 14 11:12:56 2022 ] Training epoch: 69
+[ Wed Sep 14 11:13:18 2022 ] Batch(35/123) done. Loss: 0.0294 lr:0.010000
+[ Wed Sep 14 11:14:05 2022 ] Eval epoch: 69
+[ Wed Sep 14 11:14:54 2022 ] Mean test loss of 258 batches: 1.6394085884094238.
+[ Wed Sep 14 11:14:55 2022 ] Top1: 68.04%
+[ Wed Sep 14 11:14:55 2022 ] Top5: 92.00%
+[ Wed Sep 14 11:14:55 2022 ] Training epoch: 70
+[ Wed Sep 14 11:15:05 2022 ] Batch(12/123) done. Loss: 0.0688 lr:0.010000
+[ Wed Sep 14 11:15:57 2022 ] Batch(112/123) done. Loss: 0.0644 lr:0.010000
+[ Wed Sep 14 11:16:03 2022 ] Eval epoch: 70
+[ Wed Sep 14 11:16:53 2022 ] Mean test loss of 258 batches: 1.662743330001831.
+[ Wed Sep 14 11:16:53 2022 ] Top1: 68.26%
+[ Wed Sep 14 11:16:53 2022 ] Top5: 91.95%
+[ Wed Sep 14 11:16:53 2022 ] Training epoch: 71
+[ Wed Sep 14 11:17:44 2022 ] Batch(89/123) done. Loss: 0.1076 lr:0.010000
+[ Wed Sep 14 11:18:01 2022 ] Eval epoch: 71
+[ Wed Sep 14 11:18:52 2022 ] Mean test loss of 258 batches: 1.7534395456314087.
+[ Wed Sep 14 11:18:52 2022 ] Top1: 67.43%
+[ Wed Sep 14 11:18:52 2022 ] Top5: 91.75%
+[ Wed Sep 14 11:18:52 2022 ] Training epoch: 72
+[ Wed Sep 14 11:19:30 2022 ] Batch(66/123) done. Loss: 0.0462 lr:0.010000
+[ Wed Sep 14 11:20:00 2022 ] Eval epoch: 72
+[ Wed Sep 14 11:20:50 2022 ] Mean test loss of 258 batches: 1.6716774702072144.
+[ Wed Sep 14 11:20:50 2022 ] Top1: 68.33%
+[ Wed Sep 14 11:20:50 2022 ] Top5: 91.84%
+[ Wed Sep 14 11:20:50 2022 ] Training epoch: 73
+[ Wed Sep 14 11:21:16 2022 ] Batch(43/123) done. Loss: 0.0303 lr:0.010000
+[ Wed Sep 14 11:21:58 2022 ] Eval epoch: 73
+[ Wed Sep 14 11:22:48 2022 ] Mean test loss of 258 batches: 1.625872254371643.
+[ Wed Sep 14 11:22:48 2022 ] Top1: 68.91%
+[ Wed Sep 14 11:22:48 2022 ] Top5: 92.24%
+[ Wed Sep 14 11:22:49 2022 ] Training epoch: 74
+[ Wed Sep 14 11:23:02 2022 ] Batch(20/123) done. Loss: 0.0452 lr:0.010000
+[ Wed Sep 14 11:23:55 2022 ] Batch(120/123) done. Loss: 0.0478 lr:0.010000
|
461 |
+
[ Wed Sep 14 11:23:57 2022 ] Eval epoch: 74
|
462 |
+
[ Wed Sep 14 11:24:47 2022 ] Mean test loss of 258 batches: 1.6459804773330688.
|
463 |
+
[ Wed Sep 14 11:24:47 2022 ] Top1: 68.80%
|
464 |
+
[ Wed Sep 14 11:24:47 2022 ] Top5: 92.11%
|
465 |
+
[ Wed Sep 14 11:24:47 2022 ] Training epoch: 75
|
466 |
+
[ Wed Sep 14 11:25:42 2022 ] Batch(97/123) done. Loss: 0.0224 lr:0.010000
|
467 |
+
[ Wed Sep 14 11:25:55 2022 ] Eval epoch: 75
|
468 |
+
[ Wed Sep 14 11:26:45 2022 ] Mean test loss of 258 batches: 1.661178469657898.
|
469 |
+
[ Wed Sep 14 11:26:45 2022 ] Top1: 68.80%
|
470 |
+
[ Wed Sep 14 11:26:45 2022 ] Top5: 92.26%
|
471 |
+
[ Wed Sep 14 11:26:45 2022 ] Training epoch: 76
|
472 |
+
[ Wed Sep 14 11:27:28 2022 ] Batch(74/123) done. Loss: 0.1159 lr:0.010000
|
473 |
+
[ Wed Sep 14 11:27:53 2022 ] Eval epoch: 76
|
474 |
+
[ Wed Sep 14 11:28:43 2022 ] Mean test loss of 258 batches: 1.709254503250122.
|
475 |
+
[ Wed Sep 14 11:28:43 2022 ] Top1: 68.46%
|
476 |
+
[ Wed Sep 14 11:28:43 2022 ] Top5: 91.94%
|
477 |
+
[ Wed Sep 14 11:28:44 2022 ] Training epoch: 77
|
478 |
+
[ Wed Sep 14 11:29:14 2022 ] Batch(51/123) done. Loss: 0.0600 lr:0.010000
|
479 |
+
[ Wed Sep 14 11:29:52 2022 ] Eval epoch: 77
|
480 |
+
[ Wed Sep 14 11:30:42 2022 ] Mean test loss of 258 batches: 1.7816194295883179.
|
481 |
+
[ Wed Sep 14 11:30:42 2022 ] Top1: 68.09%
|
482 |
+
[ Wed Sep 14 11:30:42 2022 ] Top5: 91.47%
|
483 |
+
[ Wed Sep 14 11:30:42 2022 ] Training epoch: 78
|
484 |
+
[ Wed Sep 14 11:31:00 2022 ] Batch(28/123) done. Loss: 0.0498 lr:0.010000
|
485 |
+
[ Wed Sep 14 11:31:50 2022 ] Eval epoch: 78
|
486 |
+
[ Wed Sep 14 11:32:40 2022 ] Mean test loss of 258 batches: 1.7139428853988647.
|
487 |
+
[ Wed Sep 14 11:32:40 2022 ] Top1: 68.27%
|
488 |
+
[ Wed Sep 14 11:32:40 2022 ] Top5: 91.90%
|
489 |
+
[ Wed Sep 14 11:32:41 2022 ] Training epoch: 79
|
490 |
+
[ Wed Sep 14 11:32:46 2022 ] Batch(5/123) done. Loss: 0.0601 lr:0.010000
|
491 |
+
[ Wed Sep 14 11:33:39 2022 ] Batch(105/123) done. Loss: 0.0715 lr:0.010000
|
492 |
+
[ Wed Sep 14 11:33:48 2022 ] Eval epoch: 79
|
493 |
+
[ Wed Sep 14 11:34:38 2022 ] Mean test loss of 258 batches: 1.718898057937622.
|
494 |
+
[ Wed Sep 14 11:34:39 2022 ] Top1: 68.59%
|
495 |
+
[ Wed Sep 14 11:34:39 2022 ] Top5: 92.04%
|
496 |
+
[ Wed Sep 14 11:34:39 2022 ] Training epoch: 80
|
497 |
+
[ Wed Sep 14 11:35:25 2022 ] Batch(82/123) done. Loss: 0.0313 lr:0.010000
|
498 |
+
[ Wed Sep 14 11:35:46 2022 ] Eval epoch: 80
|
499 |
+
[ Wed Sep 14 11:36:36 2022 ] Mean test loss of 258 batches: 1.7329745292663574.
|
500 |
+
[ Wed Sep 14 11:36:36 2022 ] Top1: 68.54%
|
501 |
+
[ Wed Sep 14 11:36:36 2022 ] Top5: 92.01%
|
502 |
+
[ Wed Sep 14 11:36:36 2022 ] Training epoch: 81
|
503 |
+
[ Wed Sep 14 11:37:10 2022 ] Batch(59/123) done. Loss: 0.0474 lr:0.001000
|
504 |
+
[ Wed Sep 14 11:37:44 2022 ] Eval epoch: 81
|
505 |
+
[ Wed Sep 14 11:38:34 2022 ] Mean test loss of 258 batches: 1.7133427858352661.
|
506 |
+
[ Wed Sep 14 11:38:34 2022 ] Top1: 68.72%
|
507 |
+
[ Wed Sep 14 11:38:34 2022 ] Top5: 92.05%
|
508 |
+
[ Wed Sep 14 11:38:34 2022 ] Training epoch: 82
|
509 |
+
[ Wed Sep 14 11:38:57 2022 ] Batch(36/123) done. Loss: 0.0127 lr:0.001000
|
510 |
+
[ Wed Sep 14 11:39:43 2022 ] Eval epoch: 82
|
511 |
+
[ Wed Sep 14 11:40:33 2022 ] Mean test loss of 258 batches: 1.688591718673706.
|
512 |
+
[ Wed Sep 14 11:40:33 2022 ] Top1: 68.88%
|
513 |
+
[ Wed Sep 14 11:40:33 2022 ] Top5: 92.21%
|
514 |
+
[ Wed Sep 14 11:40:33 2022 ] Training epoch: 83
|
515 |
+
[ Wed Sep 14 11:40:43 2022 ] Batch(13/123) done. Loss: 0.1167 lr:0.001000
|
516 |
+
[ Wed Sep 14 11:41:36 2022 ] Batch(113/123) done. Loss: 0.0909 lr:0.001000
|
517 |
+
[ Wed Sep 14 11:41:41 2022 ] Eval epoch: 83
|
518 |
+
[ Wed Sep 14 11:42:31 2022 ] Mean test loss of 258 batches: 1.7332514524459839.
|
519 |
+
[ Wed Sep 14 11:42:31 2022 ] Top1: 68.36%
|
520 |
+
[ Wed Sep 14 11:42:31 2022 ] Top5: 92.05%
|
521 |
+
[ Wed Sep 14 11:42:31 2022 ] Training epoch: 84
|
522 |
+
[ Wed Sep 14 11:43:22 2022 ] Batch(90/123) done. Loss: 0.0397 lr:0.001000
|
523 |
+
[ Wed Sep 14 11:43:39 2022 ] Eval epoch: 84
|
524 |
+
[ Wed Sep 14 11:44:29 2022 ] Mean test loss of 258 batches: 1.7026885747909546.
|
525 |
+
[ Wed Sep 14 11:44:29 2022 ] Top1: 68.63%
|
526 |
+
[ Wed Sep 14 11:44:29 2022 ] Top5: 92.21%
|
527 |
+
[ Wed Sep 14 11:44:29 2022 ] Training epoch: 85
|
528 |
+
[ Wed Sep 14 11:45:08 2022 ] Batch(67/123) done. Loss: 0.0991 lr:0.001000
|
529 |
+
[ Wed Sep 14 11:45:37 2022 ] Eval epoch: 85
|
530 |
+
[ Wed Sep 14 11:46:27 2022 ] Mean test loss of 258 batches: 1.7128080129623413.
|
531 |
+
[ Wed Sep 14 11:46:27 2022 ] Top1: 68.54%
|
532 |
+
[ Wed Sep 14 11:46:28 2022 ] Top5: 92.25%
|
533 |
+
[ Wed Sep 14 11:46:28 2022 ] Training epoch: 86
|
534 |
+
[ Wed Sep 14 11:46:54 2022 ] Batch(44/123) done. Loss: 0.0424 lr:0.001000
|
535 |
+
[ Wed Sep 14 11:47:36 2022 ] Eval epoch: 86
|
536 |
+
[ Wed Sep 14 11:48:26 2022 ] Mean test loss of 258 batches: 1.675726294517517.
|
537 |
+
[ Wed Sep 14 11:48:26 2022 ] Top1: 68.90%
|
538 |
+
[ Wed Sep 14 11:48:26 2022 ] Top5: 92.18%
|
539 |
+
[ Wed Sep 14 11:48:26 2022 ] Training epoch: 87
|
540 |
+
[ Wed Sep 14 11:48:40 2022 ] Batch(21/123) done. Loss: 0.0922 lr:0.001000
|
541 |
+
[ Wed Sep 14 11:49:33 2022 ] Batch(121/123) done. Loss: 0.0637 lr:0.001000
|
542 |
+
[ Wed Sep 14 11:49:34 2022 ] Eval epoch: 87
|
543 |
+
[ Wed Sep 14 11:50:24 2022 ] Mean test loss of 258 batches: 1.7134418487548828.
|
544 |
+
[ Wed Sep 14 11:50:24 2022 ] Top1: 68.70%
|
545 |
+
[ Wed Sep 14 11:50:24 2022 ] Top5: 92.15%
|
546 |
+
[ Wed Sep 14 11:50:24 2022 ] Training epoch: 88
|
547 |
+
[ Wed Sep 14 11:51:20 2022 ] Batch(98/123) done. Loss: 0.0572 lr:0.001000
|
548 |
+
[ Wed Sep 14 11:51:33 2022 ] Eval epoch: 88
|
549 |
+
[ Wed Sep 14 11:52:23 2022 ] Mean test loss of 258 batches: 1.7478185892105103.
|
550 |
+
[ Wed Sep 14 11:52:23 2022 ] Top1: 68.37%
|
551 |
+
[ Wed Sep 14 11:52:23 2022 ] Top5: 92.06%
|
552 |
+
[ Wed Sep 14 11:52:23 2022 ] Training epoch: 89
|
553 |
+
[ Wed Sep 14 11:53:06 2022 ] Batch(75/123) done. Loss: 0.0702 lr:0.001000
|
554 |
+
[ Wed Sep 14 11:53:31 2022 ] Eval epoch: 89
|
555 |
+
[ Wed Sep 14 11:54:21 2022 ] Mean test loss of 258 batches: 1.714134931564331.
|
556 |
+
[ Wed Sep 14 11:54:21 2022 ] Top1: 68.56%
|
557 |
+
[ Wed Sep 14 11:54:21 2022 ] Top5: 92.05%
|
558 |
+
[ Wed Sep 14 11:54:21 2022 ] Training epoch: 90
|
559 |
+
[ Wed Sep 14 11:54:52 2022 ] Batch(52/123) done. Loss: 0.0289 lr:0.001000
|
560 |
+
[ Wed Sep 14 11:55:29 2022 ] Eval epoch: 90
|
561 |
+
[ Wed Sep 14 11:56:20 2022 ] Mean test loss of 258 batches: 1.685438871383667.
|
562 |
+
[ Wed Sep 14 11:56:20 2022 ] Top1: 68.59%
|
563 |
+
[ Wed Sep 14 11:56:20 2022 ] Top5: 92.16%
|
564 |
+
[ Wed Sep 14 11:56:20 2022 ] Training epoch: 91
|
565 |
+
[ Wed Sep 14 11:56:39 2022 ] Batch(29/123) done. Loss: 0.1004 lr:0.001000
|
566 |
+
[ Wed Sep 14 11:57:28 2022 ] Eval epoch: 91
|
567 |
+
[ Wed Sep 14 11:58:18 2022 ] Mean test loss of 258 batches: 1.7459733486175537.
|
568 |
+
[ Wed Sep 14 11:58:18 2022 ] Top1: 68.23%
|
569 |
+
[ Wed Sep 14 11:58:18 2022 ] Top5: 92.06%
|
570 |
+
[ Wed Sep 14 11:58:18 2022 ] Training epoch: 92
|
571 |
+
[ Wed Sep 14 11:58:25 2022 ] Batch(6/123) done. Loss: 0.0909 lr:0.001000
|
572 |
+
[ Wed Sep 14 11:59:18 2022 ] Batch(106/123) done. Loss: 0.0383 lr:0.001000
|
573 |
+
[ Wed Sep 14 11:59:26 2022 ] Eval epoch: 92
|
574 |
+
[ Wed Sep 14 12:00:16 2022 ] Mean test loss of 258 batches: 1.7359580993652344.
|
575 |
+
[ Wed Sep 14 12:00:16 2022 ] Top1: 68.49%
|
576 |
+
[ Wed Sep 14 12:00:16 2022 ] Top5: 92.02%
|
577 |
+
[ Wed Sep 14 12:00:16 2022 ] Training epoch: 93
|
578 |
+
[ Wed Sep 14 12:01:03 2022 ] Batch(83/123) done. Loss: 0.0526 lr:0.001000
|
579 |
+
[ Wed Sep 14 12:01:24 2022 ] Eval epoch: 93
|
580 |
+
[ Wed Sep 14 12:02:14 2022 ] Mean test loss of 258 batches: 1.6857571601867676.
|
581 |
+
[ Wed Sep 14 12:02:14 2022 ] Top1: 68.85%
|
582 |
+
[ Wed Sep 14 12:02:14 2022 ] Top5: 92.33%
|
583 |
+
[ Wed Sep 14 12:02:14 2022 ] Training epoch: 94
|
584 |
+
[ Wed Sep 14 12:02:49 2022 ] Batch(60/123) done. Loss: 0.0618 lr:0.001000
|
585 |
+
[ Wed Sep 14 12:03:22 2022 ] Eval epoch: 94
|
586 |
+
[ Wed Sep 14 12:04:12 2022 ] Mean test loss of 258 batches: 1.7157808542251587.
|
587 |
+
[ Wed Sep 14 12:04:12 2022 ] Top1: 68.72%
|
588 |
+
[ Wed Sep 14 12:04:12 2022 ] Top5: 92.14%
|
589 |
+
[ Wed Sep 14 12:04:12 2022 ] Training epoch: 95
|
590 |
+
[ Wed Sep 14 12:04:35 2022 ] Batch(37/123) done. Loss: 0.0258 lr:0.001000
|
591 |
+
[ Wed Sep 14 12:05:20 2022 ] Eval epoch: 95
|
592 |
+
[ Wed Sep 14 12:06:10 2022 ] Mean test loss of 258 batches: 1.7361732721328735.
|
593 |
+
[ Wed Sep 14 12:06:10 2022 ] Top1: 68.57%
|
594 |
+
[ Wed Sep 14 12:06:10 2022 ] Top5: 92.00%
|
595 |
+
[ Wed Sep 14 12:06:10 2022 ] Training epoch: 96
|
596 |
+
[ Wed Sep 14 12:06:21 2022 ] Batch(14/123) done. Loss: 0.0354 lr:0.001000
|
597 |
+
[ Wed Sep 14 12:07:14 2022 ] Batch(114/123) done. Loss: 0.0514 lr:0.001000
|
598 |
+
[ Wed Sep 14 12:07:19 2022 ] Eval epoch: 96
|
599 |
+
[ Wed Sep 14 12:08:09 2022 ] Mean test loss of 258 batches: 1.7357374429702759.
|
600 |
+
[ Wed Sep 14 12:08:09 2022 ] Top1: 68.52%
|
601 |
+
[ Wed Sep 14 12:08:09 2022 ] Top5: 92.08%
|
602 |
+
[ Wed Sep 14 12:08:09 2022 ] Training epoch: 97
|
603 |
+
[ Wed Sep 14 12:09:01 2022 ] Batch(91/123) done. Loss: 0.0434 lr:0.001000
|
604 |
+
[ Wed Sep 14 12:09:17 2022 ] Eval epoch: 97
|
605 |
+
[ Wed Sep 14 12:10:07 2022 ] Mean test loss of 258 batches: 1.7196378707885742.
|
606 |
+
[ Wed Sep 14 12:10:07 2022 ] Top1: 68.52%
|
607 |
+
[ Wed Sep 14 12:10:08 2022 ] Top5: 92.22%
|
608 |
+
[ Wed Sep 14 12:10:08 2022 ] Training epoch: 98
|
609 |
+
[ Wed Sep 14 12:10:47 2022 ] Batch(68/123) done. Loss: 0.0164 lr:0.001000
|
610 |
+
[ Wed Sep 14 12:11:16 2022 ] Eval epoch: 98
|
611 |
+
[ Wed Sep 14 12:12:06 2022 ] Mean test loss of 258 batches: 1.734071969985962.
|
612 |
+
[ Wed Sep 14 12:12:06 2022 ] Top1: 68.67%
|
613 |
+
[ Wed Sep 14 12:12:06 2022 ] Top5: 91.99%
|
614 |
+
[ Wed Sep 14 12:12:06 2022 ] Training epoch: 99
|
615 |
+
[ Wed Sep 14 12:12:33 2022 ] Batch(45/123) done. Loss: 0.0544 lr:0.001000
|
616 |
+
[ Wed Sep 14 12:13:14 2022 ] Eval epoch: 99
|
617 |
+
[ Wed Sep 14 12:14:04 2022 ] Mean test loss of 258 batches: 1.715172529220581.
|
618 |
+
[ Wed Sep 14 12:14:04 2022 ] Top1: 68.75%
|
619 |
+
[ Wed Sep 14 12:14:04 2022 ] Top5: 91.98%
|
620 |
+
[ Wed Sep 14 12:14:04 2022 ] Training epoch: 100
|
621 |
+
[ Wed Sep 14 12:14:19 2022 ] Batch(22/123) done. Loss: 0.0687 lr:0.001000
|
622 |
+
[ Wed Sep 14 12:15:12 2022 ] Batch(122/123) done. Loss: 0.1128 lr:0.001000
|
623 |
+
[ Wed Sep 14 12:15:12 2022 ] Eval epoch: 100
|
624 |
+
[ Wed Sep 14 12:16:02 2022 ] Mean test loss of 258 batches: 1.6926435232162476.
|
625 |
+
[ Wed Sep 14 12:16:02 2022 ] Top1: 68.85%
|
626 |
+
[ Wed Sep 14 12:16:02 2022 ] Top5: 92.01%
|
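
The lr column in the log above follows the step schedule used by these runs (base_lr: 0.1 with milestones step: [60, 80] in the config below): 0.1 through epoch 60, 0.01 from epoch 61, and 0.001 from epoch 81. A minimal sketch of that decay rule, for orientation only (this is not the repo's training script; names are illustrative):

    def step_lr(epoch, base_lr=0.1, steps=(60, 80), gamma=0.1):
        # one factor-of-gamma decay for every milestone the epoch has passed
        return base_lr * gamma ** sum(epoch > s for s in steps)

    for e in (60, 61, 80, 81):
        print(e, step_lr(e))  # -> 0.1, 0.01, 0.01, 0.001 (up to float rounding), matching the log
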
ckpt/Others/DC-GCN+ADG/ntu60_xview/ntu_bone_motion_xview/config.yaml
ADDED
@@ -0,0 +1,59 @@
+Experiment_name: ntu_bone_motion_xview
+base_lr: 0.1
+batch_size: 64
+config: ./config/nturgbd-cross-view/train_bone_motion.yaml
+device:
+- 2
+- 3
+eval_interval: 5
+feeder: feeders.feeder.Feeder
+groups: 8
+ignore_weights: []
+keep_rate: 0.9
+log_interval: 100
+model: model.decouple_gcn.Model
+model_args:
+  block_size: 41
+  graph: graph.ntu_rgb_d.Graph
+  graph_args:
+    labeling_mode: spatial
+  groups: 8
+  num_class: 60
+  num_person: 2
+  num_point: 25
+model_saved_name: ./save_models/ntu_bone_motion_xview
+nesterov: true
+num_epoch: 100
+num_worker: 32
+only_train_epoch: 1
+only_train_part: true
+optimizer: SGD
+phase: train
+print_log: true
+save_interval: 2
+save_score: false
+seed: 1
+show_topk:
+- 1
+- 5
+start_epoch: 0
+step:
+- 60
+- 80
+test_batch_size: 64
+test_feeder_args:
+  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_bone_motion.npy
+  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl
+train_feeder_args:
+  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_bone_motion.npy
+  debug: false
+  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl
+  normalization: false
+  random_choose: false
+  random_move: false
+  random_shift: false
+  window_size: -1
+warm_up_epoch: 0
+weight_decay: 0.0001
+weights: null
+work_dir: ./work_dir/ntu_bone_motion_xview
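
Since the config is a plain YAML dump, downstream code can load it with PyYAML and resolve the dotted model: path the same way import_class in decouple_gcn.py (next file) does. A minimal loading sketch, assuming the file is saved locally as config.yaml and the repo's model/ and graph/ packages are importable (both assumptions, not part of this upload):

    import yaml

    def import_class(name):
        # resolve a dotted path such as 'model.decouple_gcn.Model'
        components = name.split('.')
        mod = __import__(components[0])
        for comp in components[1:]:
            mod = getattr(mod, comp)
        return mod

    with open('config.yaml') as f:
        cfg = yaml.safe_load(f)

    Model = import_class(cfg['model'])   # model.decouple_gcn.Model
    model = Model(**cfg['model_args'])   # num_class=60, num_point=25, ...
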
ckpt/Others/DC-GCN+ADG/ntu60_xview/ntu_bone_motion_xview/decouple_gcn.py
ADDED
@@ -0,0 +1,235 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.autograd import Variable
+import numpy as np
+import math
+from model.dropSke import DropBlock_Ske
+from model.dropT import DropBlockT_1d
+
+
+def import_class(name):
+    components = name.split('.')
+    mod = __import__(components[0])
+    for comp in components[1:]:
+        mod = getattr(mod, comp)
+    return mod
+
+
+def conv_branch_init(conv):
+    weight = conv.weight
+    n = weight.size(0)
+    k1 = weight.size(1)
+    k2 = weight.size(2)
+    nn.init.normal_(weight, 0, math.sqrt(2. / (n * k1 * k2)))
+    nn.init.constant_(conv.bias, 0)
+
+
+def conv_init(conv):
+    nn.init.kaiming_normal_(conv.weight, mode='fan_out')
+    nn.init.constant_(conv.bias, 0)
+
+
+def bn_init(bn, scale):
+    nn.init.constant_(bn.weight, scale)
+    nn.init.constant_(bn.bias, 0)
+
+
+class unit_tcn(nn.Module):
+    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1, num_point=25, block_size=41):
+        super(unit_tcn, self).__init__()
+        pad = int((kernel_size - 1) / 2)
+        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                              stride=(stride, 1))
+
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+        conv_init(self.conv)
+        bn_init(self.bn, 1)
+
+        self.dropS = DropBlock_Ske(num_point=num_point)
+        self.dropT = DropBlockT_1d(block_size=block_size)
+
+    def forward(self, x, keep_prob, A):
+        x = self.bn(self.conv(x))
+        x = self.dropT(self.dropS(x, keep_prob, A), keep_prob)
+        return x
+
+
+class unit_tcn_skip(nn.Module):
+    def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+        super(unit_tcn_skip, self).__init__()
+        pad = int((kernel_size - 1) / 2)
+        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                              stride=(stride, 1))
+
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+        conv_init(self.conv)
+        bn_init(self.bn, 1)
+
+    def forward(self, x):
+        x = self.bn(self.conv(x))
+        return x
+
+
+class unit_gcn(nn.Module):
+    def __init__(self, in_channels, out_channels, A, groups, num_point, coff_embedding=4, num_subset=3):
+        super(unit_gcn, self).__init__()
+        self.in_channels = in_channels
+        self.out_channels = out_channels
+        self.num_point = num_point
+        self.groups = groups
+        self.num_subset = num_subset
+        self.DecoupleA = nn.Parameter(torch.tensor(np.reshape(A.astype(np.float32), [
+            3, 1, num_point, num_point]), dtype=torch.float32, requires_grad=True).repeat(1, groups, 1, 1), requires_grad=True)
+
+        if in_channels != out_channels:
+            self.down = nn.Sequential(
+                nn.Conv2d(in_channels, out_channels, 1),
+                nn.BatchNorm2d(out_channels)
+            )
+        else:
+            self.down = lambda x: x
+
+        self.bn0 = nn.BatchNorm2d(out_channels * num_subset)
+        self.bn = nn.BatchNorm2d(out_channels)
+        self.relu = nn.ReLU()
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                conv_init(m)
+            elif isinstance(m, nn.BatchNorm2d):
+                bn_init(m, 1)
+        bn_init(self.bn, 1e-6)
+
+        self.Linear_weight = nn.Parameter(torch.zeros(
+            in_channels, out_channels * num_subset, requires_grad=True, device='cuda'), requires_grad=True)
+        nn.init.normal_(self.Linear_weight, 0, math.sqrt(
+            0.5 / (out_channels * num_subset)))
+
+        self.Linear_bias = nn.Parameter(torch.zeros(
+            1, out_channels * num_subset, 1, 1, requires_grad=True, device='cuda'), requires_grad=True)
+        nn.init.constant_(self.Linear_bias, 1e-6)
+
+        eye_array = []
+        for i in range(out_channels):
+            eye_array.append(torch.eye(num_point))
+        self.eyes = nn.Parameter(torch.tensor(torch.stack(
+            eye_array), requires_grad=False, device='cuda'), requires_grad=False)  # [c,25,25]
+
+    def norm(self, A):
+        b, c, h, w = A.size()
+        A = A.view(c, self.num_point, self.num_point)
+        D_list = torch.sum(A, 1).view(c, 1, self.num_point)
+        D_list_12 = (D_list + 0.001)**(-1)
+        D_12 = self.eyes * D_list_12
+        A = torch.bmm(A, D_12).view(b, c, h, w)
+        return A
+
+    def forward(self, x0):
+        learn_A = self.DecoupleA.repeat(
+            1, self.out_channels // self.groups, 1, 1)
+        norm_learn_A = torch.cat([self.norm(learn_A[0:1, ...]), self.norm(
+            learn_A[1:2, ...]), self.norm(learn_A[2:3, ...])], 0)
+
+        x = torch.einsum(
+            'nctw,cd->ndtw', (x0, self.Linear_weight)).contiguous()
+        x = x + self.Linear_bias
+        x = self.bn0(x)
+
+        n, kc, t, v = x.size()
+        x = x.view(n, self.num_subset, kc // self.num_subset, t, v)
+        x = torch.einsum('nkctv,kcvw->nctw', (x, norm_learn_A))
+
+        x = self.bn(x)
+        x += self.down(x0)
+        x = self.relu(x)
+        return x
+
+
+class TCN_GCN_unit(nn.Module):
+    def __init__(self, in_channels, out_channels, A, groups, num_point, block_size, stride=1, residual=True):
+        super(TCN_GCN_unit, self).__init__()
+        self.gcn1 = unit_gcn(in_channels, out_channels, A, groups, num_point)
+        self.tcn1 = unit_tcn(out_channels, out_channels,
+                             stride=stride, num_point=num_point)
+        self.relu = nn.ReLU()
+
+        self.A = nn.Parameter(torch.tensor(np.sum(np.reshape(A.astype(np.float32), [
+            3, num_point, num_point]), axis=0), dtype=torch.float32, requires_grad=False, device='cuda'), requires_grad=False)
+
+        if not residual:
+            self.residual = lambda x: 0
+
+        elif (in_channels == out_channels) and (stride == 1):
+            self.residual = lambda x: x
+
+        else:
+            self.residual = unit_tcn_skip(
+                in_channels, out_channels, kernel_size=1, stride=stride)
+        self.dropSke = DropBlock_Ske(num_point=num_point)
+        self.dropT_skip = DropBlockT_1d(block_size=block_size)
+
+    def forward(self, x, keep_prob):
+        x = self.tcn1(self.gcn1(x), keep_prob, self.A) + self.dropT_skip(
+            self.dropSke(self.residual(x), keep_prob, self.A), keep_prob)
+        return self.relu(x)
+
+
+class Model(nn.Module):
+    def __init__(self, num_class=60, num_point=25, num_person=2, groups=8, block_size=41, graph=None, graph_args=dict(), in_channels=3):
+        super(Model, self).__init__()
+
+        if graph is None:
+            raise ValueError()
+        else:
+            Graph = import_class(graph)
+            self.graph = Graph(**graph_args)
+
+        A = self.graph.A
+        self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
+
+        self.l1 = TCN_GCN_unit(3, 64, A, groups, num_point,
+                               block_size, residual=False)
+        self.l2 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l3 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l4 = TCN_GCN_unit(64, 64, A, groups, num_point, block_size)
+        self.l5 = TCN_GCN_unit(
+            64, 128, A, groups, num_point, block_size, stride=2)
+        self.l6 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
+        self.l7 = TCN_GCN_unit(128, 128, A, groups, num_point, block_size)
+        self.l8 = TCN_GCN_unit(128, 256, A, groups,
+                               num_point, block_size, stride=2)
+        self.l9 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
+        self.l10 = TCN_GCN_unit(256, 256, A, groups, num_point, block_size)
+
+        self.fc = nn.Linear(256, num_class)
+        nn.init.normal_(self.fc.weight, 0, math.sqrt(2. / num_class))
+        bn_init(self.data_bn, 1)
+
+    def forward(self, x, keep_prob=0.9):
+        N, C, T, V, M = x.size()
+
+        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
+        x = self.data_bn(x)
+        x = x.view(N, M, V, C, T).permute(
+            0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
+
+        x = self.l1(x, 1.0)
+        x = self.l2(x, 1.0)
+        x = self.l3(x, 1.0)
+        x = self.l4(x, 1.0)
+        x = self.l5(x, 1.0)
+        x = self.l6(x, 1.0)
+        x = self.l7(x, keep_prob)
+        x = self.l8(x, keep_prob)
+        x = self.l9(x, keep_prob)
+        x = self.l10(x, keep_prob)
+
+        # N*M,C,T,V
+        c_new = x.size(1)
+        x = x.reshape(N, M, c_new, -1)
+        x = x.mean(3).mean(1)
+
+        return self.fc(x)
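
One detail of Model.forward worth flagging: the (N, C, T, V, M) input is flattened to (N, M*V*C, T) so data_bn normalizes each (body, joint, channel) feature over the batch and time axes, then regrouped to (N*M, C, T, V) so the two bodies pass through the GCN stack as independent samples before the final mean over bodies. A standalone sanity check of that round trip (illustrative only; shapes follow the code above):

    import torch

    N, C, T, V, M = 2, 3, 64, 25, 2  # batch, channels, frames, joints, bodies
    x = torch.randn(N, C, T, V, M)

    flat = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
    per_body = flat.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

    # the regrouping is lossless: undoing it recovers the original tensor
    assert torch.equal(per_body.view(N, M, C, T, V).permute(0, 2, 3, 4, 1), x)
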