Iris Classification with a Neural Network
Environment: TensorFlow 2.3.0, Python 3.7.13, NumPy 1.21.5
(1) Preparing the data
1. Loading the dataset
from sklearn import datasets
# Install it first if missing: pip install scikit-learn
x_data = datasets.load_iris().data # all input features of the iris dataset
y_data = datasets.load_iris().target # all labels of the iris dataset
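As an optional sanity check (not part of the original steps), you can print the array shapes to confirm what was loaded: 150 samples with 4 features each, and 150 integer labels from {0, 1, 2}.
print(x_data.shape)  # (150, 4)
print(y_data.shape)  # (150,)
print(set(y_data))   # the three class ids 0, 1, 2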
2. Shuffling the dataset
import numpy as np
import tensorflow as tf
np.random.seed(116) # use the same seed so input features and labels stay paired one-to-one
np.random.shuffle(x_data)
np.random.seed(116)
np.random.shuffle(y_data)
tf.random.set_seed(116)
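Why the double seeding keeps features and labels aligned: reseeding the NumPy generator right before each shuffle makes both calls draw the same permutation. A minimal toy sketch (the arrays here are illustrative only):
a = np.arange(6)        # toy "features"
b = np.arange(6) * 10   # toy "labels", paired with a by position
np.random.seed(116)
np.random.shuffle(a)
np.random.seed(116)
np.random.shuffle(b)
print(a)        # some permutation of 0..5
print(b // 10)  # the same permutation, so the pairs (a[i], b[i]) survive the shuffle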
3. Splitting into training and test sets (x_train/y_train, x_test/y_test)
# Split the shuffled data: the first 120 rows become the training set, the last 30 rows the test set
x_train = x_data[:-30]
y_train = y_data[:-30]
x_test = x_data[-30:]
y_test = y_data[-30:]
# Cast x to float32, otherwise the later matrix multiplication fails because of mismatched dtypes
x_train = tf.cast(x_train, tf.float32)
x_test = tf.cast(x_test, tf.float32)
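An optional shape check on the split (the numbers follow from the 150-sample dataset):
print(x_train.shape, y_train.shape)  # (120, 4) (120,)
print(x_test.shape, y_test.shape)    # (30, 4) (30,)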
4. Pairing (features, labels) and reading them in small batches
train_db = tf.data.Dataset.from_tensor_slices((x_train,y_train)).batch(32)
test_db = tf.data.Dataset.from_tensor_slices((x_test,y_test)).batch(32)
# Every 32 (feature, label) pairs are packed into one batch
# The data is later fed to the network one batch at a time
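With 120 training rows and a batch size of 32, each epoch yields 4 batches (32 + 32 + 32 + 24). A quick optional check:
for step, (x_batch, y_batch) in enumerate(train_db):
    print(step, x_batch.shape, y_batch.shape)  # (32, 4) for the first three batches, (24, 4) for the last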
(2) Building the neural network
5. Defining all trainable parameters of the network
w1 = tf.Variable(tf.random.truncated_normal([4,3],stddev=0.1,seed=1))
# input layer has 4 nodes, output layer has 3
# the iris samples have 4 features, and the number of output nodes equals the number of classes (3)
# so w1 is a 4-by-3 tensor
b1 = tf.Variable(tf.random.truncated_normal([3],stddev=0.1,seed=1))
# b1 must match the output dimension of w1 (the number of output nodes), so it has 3 entries
# With that, the basic structure of the network is in place
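The forward pass is y = softmax(x · w1 + b1). A shape-only sketch (the ones tensor below is just a stand-in for a batch of 32 samples):
x_demo = tf.ones([32, 4])                              # fake batch: 32 samples, 4 features
y_demo = tf.nn.softmax(tf.matmul(x_demo, w1) + b1)
print(y_demo.shape)                                    # (32, 3): one probability per class per sample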
(3) Optimizing the parameters
lr = 0.1 # learning rate
train_loss_results = [] # record the loss of every epoch here, for plotting the loss curve later
test_acc = [] # record the accuracy of every epoch here, for plotting the acc curve later
epoch = 500 # train for 500 epochs
loss_all = 0 # each epoch has 4 steps; loss_all accumulates the 4 step losses
6. Nested training loops: update the parameters inside a with block and print the current loss
for epoch in range(epoch): # dataset-level loop
    for step,(x_train,y_train) in enumerate(train_db): # batch-level loop
        with tf.GradientTape() as tape: # record operations for the gradients
            # forward pass to compute y, then the total loss
            y = tf.matmul(x_train, w1) + b1 # multiply-accumulate of the network
            y = tf.nn.softmax(y) # make y a probability distribution (comparable to the one-hot labels, so their difference gives the loss)
            y_ = tf.one_hot(y_train, depth=3) # convert labels to one-hot, convenient for loss and accuracy
            loss = tf.reduce_mean(tf.square(y_ - y)) # mean-squared-error loss: mse = mean((y_ - y)^2)
            loss_all += loss.numpy() # accumulate the loss of each step, so the per-epoch average can be computed later
        grads = tape.gradient(loss,[w1,b1]) # gradients of the loss with respect to w1 and b1
        w1.assign_sub(lr * grads[0]) # gradient-descent update of the parameters
        b1.assign_sub(lr * grads[1])
    print("Epoch {} , loss:{}".format(epoch,loss_all/4)) # print the average loss of this epoch
    # loss_all/4 is the mean step loss: the training set has 120 samples and the batch size is 32,
    # so the batch-level loop runs 4 times per epoch, hence the division by 4
    train_loss_results.append(loss_all / 4) # store the averaged loss of the 4 steps
    loss_all = 0 # reset loss_all for the next epoch
# the outer for loop iterates over the whole dataset (epochs)
# the inner for loop iterates over batches
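The divisor 4 is hard-coded for this exact split and batch size. A small variant (a sketch, not part of the original code) derives the number of steps from the dataset, so the averaging stays correct if the batch size changes:
steps_per_epoch = sum(1 for _ in train_db)  # 4 here, i.e. ceil(120 / 32)
print(steps_per_epoch)
# ... then divide by steps_per_epoch instead of the literal 4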
(4) Testing the model
We want each epoch to report how well the current model performs.
7. Run a forward pass with the current parameters, compute and display the accuracy
total_correct, total_number = 0, 0
for x_test,y_test in test_db:
    y = tf.matmul(x_test, w1) + b1 # y is the prediction
    y = tf.nn.softmax(y) # make y a probability distribution
    pred = tf.argmax(y,axis=1) # index of the largest value in y, i.e. the predicted class
    pred = tf.cast(pred,dtype=y_test.dtype) # cast to the same dtype as the labels
    correct = tf.cast(tf.equal(pred,y_test),dtype=tf.int32)
    correct = tf.reduce_sum(correct) # number of correct predictions in this batch
    total_correct += int(correct) # sum the correct predictions over all batches
    total_number += x_test.shape[0]
acc = total_correct / total_number
test_acc.append(acc)
print("Test_acc:", acc)
print("--------------------------")
## Sections 6 and 7 combined
for epoch in range(epoch): # dataset-level loop
    for step,(x_train,y_train) in enumerate(train_db): # batch-level loop
        with tf.GradientTape() as tape: # record operations for the gradients
            # forward pass to compute y, then the total loss
            y = tf.matmul(x_train, w1) + b1 # multiply-accumulate of the network
            y = tf.nn.softmax(y) # make y a probability distribution (comparable to the one-hot labels)
            y_ = tf.one_hot(y_train, depth=3) # convert labels to one-hot, convenient for loss and accuracy
            loss = tf.reduce_mean(tf.square(y_ - y)) # mean-squared-error loss: mse = mean((y_ - y)^2)
            loss_all += loss.numpy() # accumulate the loss of each step, so the per-epoch average can be computed later
        grads = tape.gradient(loss,[w1,b1]) # gradients of the loss with respect to w1 and b1
        w1.assign_sub(lr * grads[0]) # gradient-descent update of the parameters
        b1.assign_sub(lr * grads[1])
    print("Epoch {} , loss:{}".format(epoch,loss_all/4)) # print the average loss of this epoch
    # loss_all/4 is the mean step loss: 120 training samples with batch size 32 give 4 steps per epoch
    train_loss_results.append(loss_all / 4) # store the averaged loss of the 4 steps
    loss_all = 0 # reset loss_all for the next epoch
    # the outer for loop iterates over the whole dataset (epochs)
    # the inner for loop iterates over batches
    total_correct, total_number = 0, 0
    for x_test,y_test in test_db:
        y = tf.matmul(x_test, w1) + b1 # y is the prediction
        y = tf.nn.softmax(y) # make y a probability distribution
        pred = tf.argmax(y,axis=1) # index of the largest value in y, i.e. the predicted class
        pred = tf.cast(pred,dtype=y_test.dtype) # cast to the same dtype as the labels
        correct = tf.cast(tf.equal(pred,y_test),dtype=tf.int32)
        correct = tf.reduce_sum(correct) # number of correct predictions in this batch
        total_correct += int(correct) # sum the correct predictions over all batches
        total_number += x_test.shape[0]
    acc = total_correct / total_number # compute the accuracy
    test_acc.append(acc)
    print("Test_acc:", acc)
    print("--------------------------")
Epoch 0 , loss:0.282870564609766
Test_acc: 0.16666666666666666
--------------------------
Epoch 1 , loss:0.25627104938030243
Test_acc: 0.16666666666666666
--------------------------
Epoch 2 , loss:0.2263048030436039
Test_acc: 0.16666666666666666
--------------------------
Epoch 3 , loss:0.20885981991887093
Test_acc: 0.16666666666666666
--------------------------
Epoch 4 , loss:0.19758723676204681
Test_acc: 0.16666666666666666
--------------------------
Epoch 5 , loss:0.18692802637815475
Test_acc: 0.5333333333333333
--------------------------
Epoch 6 , loss:0.17684857174754143
Test_acc: 0.5333333333333333
--------------------------
Epoch 7 , loss:0.1677665412425995
Test_acc: 0.5333333333333333
--------------------------
Epoch 8 , loss:0.15983084589242935
Test_acc: 0.5333333333333333
--------------------------
Epoch 9 , loss:0.15300417318940163
Test_acc: 0.5333333333333333
--------------------------
Epoch 10 , loss:0.14716306701302528
Test_acc: 0.5333333333333333
--------------------------
Epoch 11 , loss:0.14216018468141556
Test_acc: 0.5333333333333333
--------------------------
Epoch 12 , loss:0.13785462640225887
Test_acc: 0.5333333333333333
--------------------------
Epoch 13 , loss:0.13412347622215748
Test_acc: 0.5333333333333333
--------------------------
Epoch 14 , loss:0.1308640670031309
Test_acc: 0.5333333333333333
--------------------------
Epoch 15 , loss:0.1279925461858511
Test_acc: 0.5333333333333333
--------------------------
Epoch 16 , loss:0.12544111162424088
Test_acc: 0.5333333333333333
--------------------------
Epoch 17 , loss:0.12315514497458935
Test_acc: 0.5333333333333333
--------------------------
Epoch 18 , loss:0.12109063751995564
Test_acc: 0.5333333333333333
--------------------------
Epoch 19 , loss:0.11921204812824726
Test_acc: 0.5333333333333333
--------------------------
Epoch 20 , loss:0.11749051511287689
Test_acc: 0.5333333333333333
--------------------------
Epoch 21 , loss:0.11590254865586758
Test_acc: 0.5333333333333333
--------------------------
Epoch 22 , loss:0.11442887037992477
Test_acc: 0.5333333333333333
--------------------------
Epoch 23 , loss:0.11305363290011883
Test_acc: 0.5333333333333333
--------------------------
Epoch 24 , loss:0.11176365613937378
Test_acc: 0.5333333333333333
--------------------------
Epoch 25 , loss:0.11054798401892185
Test_acc: 0.5333333333333333
--------------------------
Epoch 26 , loss:0.109397416934371
Test_acc: 0.5333333333333333
--------------------------
Epoch 27 , loss:0.10830420814454556
Test_acc: 0.5333333333333333
--------------------------
Epoch 28 , loss:0.10726181231439114
Test_acc: 0.5333333333333333
--------------------------
Epoch 29 , loss:0.10626463778316975
Test_acc: 0.5333333333333333
--------------------------
Epoch 30 , loss:0.10530793108046055
Test_acc: 0.5333333333333333
--------------------------
Epoch 31 , loss:0.10438757762312889
Test_acc: 0.5333333333333333
--------------------------
Epoch 32 , loss:0.10350004769861698
Test_acc: 0.5333333333333333
--------------------------
Epoch 33 , loss:0.10264228843152523
Test_acc: 0.5333333333333333
--------------------------
Epoch 34 , loss:0.10181162878870964
Test_acc: 0.5333333333333333
--------------------------
Epoch 35 , loss:0.10100574605166912
Test_acc: 0.5333333333333333
--------------------------
Epoch 36 , loss:0.10022261925041676
Test_acc: 0.5333333333333333
--------------------------
Epoch 37 , loss:0.09946045093238354
Test_acc: 0.5333333333333333
--------------------------
Epoch 38 , loss:0.09871766529977322
Test_acc: 0.5333333333333333
--------------------------
Epoch 39 , loss:0.09799287281930447
Test_acc: 0.5333333333333333
--------------------------
Epoch 40 , loss:0.0972848329693079
Test_acc: 0.5333333333333333
--------------------------
Epoch 41 , loss:0.09659244120121002
Test_acc: 0.5333333333333333
--------------------------
Epoch 42 , loss:0.09591471590101719
Test_acc: 0.5333333333333333
--------------------------
Epoch 43 , loss:0.09525078348815441
Test_acc: 0.5333333333333333
--------------------------
Epoch 44 , loss:0.09459986351430416
Test_acc: 0.5333333333333333
--------------------------
Epoch 45 , loss:0.09396122023463249
Test_acc: 0.5333333333333333
--------------------------
Epoch 46 , loss:0.09333424083888531
Test_acc: 0.5333333333333333
--------------------------
Epoch 47 , loss:0.09271831810474396
Test_acc: 0.5333333333333333
--------------------------
Epoch 48 , loss:0.09211294911801815
Test_acc: 0.5333333333333333
--------------------------
Epoch 49 , loss:0.09151765145361423
Test_acc: 0.5333333333333333
--------------------------
Epoch 50 , loss:0.09093198925256729
Test_acc: 0.5333333333333333
--------------------------
Epoch 51 , loss:0.09035556763410568
Test_acc: 0.5666666666666667
--------------------------
Epoch 52 , loss:0.08978801220655441
Test_acc: 0.5666666666666667
--------------------------
Epoch 53 , loss:0.08922900445759296
Test_acc: 0.6
--------------------------
Epoch 54 , loss:0.08867823518812656
Test_acc: 0.6
--------------------------
Epoch 55 , loss:0.08813541010022163
Test_acc: 0.6
--------------------------
Epoch 56 , loss:0.08760028518736362
Test_acc: 0.6
--------------------------
Epoch 57 , loss:0.08707260526716709
Test_acc: 0.6
--------------------------
Epoch 58 , loss:0.08655215427279472
Test_acc: 0.6
--------------------------
Epoch 59 , loss:0.08603872172534466
Test_acc: 0.6
--------------------------
Epoch 60 , loss:0.08553211390972137
Test_acc: 0.6
--------------------------
Epoch 61 , loss:0.08503215201199055
Test_acc: 0.6
--------------------------
Epoch 62 , loss:0.084538659080863
Test_acc: 0.6333333333333333
--------------------------
Epoch 63 , loss:0.08405148610472679
Test_acc: 0.6333333333333333
--------------------------
Epoch 64 , loss:0.08357047289609909
Test_acc: 0.6333333333333333
--------------------------
Epoch 65 , loss:0.083095483481884
Test_acc: 0.6333333333333333
--------------------------
Epoch 66 , loss:0.08262638002634048
Test_acc: 0.6333333333333333
--------------------------
Epoch 67 , loss:0.08216305263340473
Test_acc: 0.6333333333333333
--------------------------
Epoch 68 , loss:0.0817053597420454
Test_acc: 0.6333333333333333
--------------------------
Epoch 69 , loss:0.08125320449471474
Test_acc: 0.6333333333333333
--------------------------
Epoch 70 , loss:0.08080647140741348
Test_acc: 0.6333333333333333
--------------------------
Epoch 71 , loss:0.08036506734788418
Test_acc: 0.6333333333333333
--------------------------
Epoch 72 , loss:0.07992886938154697
Test_acc: 0.6666666666666666
--------------------------
Epoch 73 , loss:0.0794978104531765
Test_acc: 0.6666666666666666
--------------------------
Epoch 74 , loss:0.07907178066670895
Test_acc: 0.6666666666666666
--------------------------
Epoch 75 , loss:0.0786507111042738
Test_acc: 0.6666666666666666
--------------------------
Epoch 76 , loss:0.07823450677096844
Test_acc: 0.7
--------------------------
Epoch 77 , loss:0.0778230931609869
Test_acc: 0.7
--------------------------
Epoch 78 , loss:0.07741638459265232
Test_acc: 0.7
--------------------------
Epoch 79 , loss:0.07701431401073933
Test_acc: 0.7
--------------------------
Epoch 80 , loss:0.0766168013215065
Test_acc: 0.7
--------------------------
Epoch 81 , loss:0.07622378505766392
Test_acc: 0.7333333333333333
--------------------------
Epoch 82 , loss:0.07583519630134106
Test_acc: 0.7333333333333333
--------------------------
Epoch 83 , loss:0.0754509586840868
Test_acc: 0.7333333333333333
--------------------------
Epoch 84 , loss:0.07507101632654667
Test_acc: 0.7333333333333333
--------------------------
Epoch 85 , loss:0.07469531707465649
Test_acc: 0.7333333333333333
--------------------------
Epoch 86 , loss:0.07432377804070711
Test_acc: 0.7666666666666667
--------------------------
Epoch 87 , loss:0.07395635358989239
Test_acc: 0.7666666666666667
--------------------------
Epoch 88 , loss:0.07359297666698694
Test_acc: 0.7666666666666667
--------------------------
Epoch 89 , loss:0.07323360722512007
Test_acc: 0.7666666666666667
--------------------------
Epoch 90 , loss:0.07287816982716322
Test_acc: 0.7666666666666667
--------------------------
Epoch 91 , loss:0.07252661697566509
Test_acc: 0.7666666666666667
--------------------------
Epoch 92 , loss:0.07217890489846468
Test_acc: 0.7666666666666667
--------------------------
Epoch 93 , loss:0.07183496467769146
Test_acc: 0.8
--------------------------
Epoch 94 , loss:0.07149475440382957
Test_acc: 0.8
--------------------------
Epoch 95 , loss:0.0711582275107503
Test_acc: 0.8
--------------------------
Epoch 96 , loss:0.07082532811909914
Test_acc: 0.8
--------------------------
Epoch 97 , loss:0.07049601059406996
Test_acc: 0.8
--------------------------
Epoch 98 , loss:0.07017023116350174
Test_acc: 0.8
--------------------------
Epoch 99 , loss:0.06984793394804001
Test_acc: 0.8
--------------------------
Epoch 100 , loss:0.06952907796949148
Test_acc: 0.8666666666666667
--------------------------
Epoch 101 , loss:0.06921361479908228
Test_acc: 0.8666666666666667
--------------------------
Epoch 102 , loss:0.06890150718390942
Test_acc: 0.8666666666666667
--------------------------
Epoch 103 , loss:0.06859271135181189
Test_acc: 0.8666666666666667
--------------------------
Epoch 104 , loss:0.06828716862946749
Test_acc: 0.8666666666666667
--------------------------
Epoch 105 , loss:0.06798485666513443
Test_acc: 0.8666666666666667
--------------------------
Epoch 106 , loss:0.06768572796136141
Test_acc: 0.8666666666666667
--------------------------
Epoch 107 , loss:0.06738973222672939
Test_acc: 0.9
--------------------------
Epoch 108 , loss:0.0670968359336257
Test_acc: 0.9
--------------------------
Epoch 109 , loss:0.06680699624121189
Test_acc: 0.9
--------------------------
Epoch 110 , loss:0.06652017589658499
Test_acc: 0.9
--------------------------
Epoch 111 , loss:0.06623634044080973
Test_acc: 0.9
--------------------------
Epoch 112 , loss:0.06595543678849936
Test_acc: 0.9
--------------------------
Epoch 113 , loss:0.06567744445055723
Test_acc: 0.9
--------------------------
Epoch 114 , loss:0.06540231686085463
Test_acc: 0.9
--------------------------
Epoch 115 , loss:0.06513001583516598
Test_acc: 0.9
--------------------------
Epoch 116 , loss:0.06486051436513662
Test_acc: 0.9
--------------------------
Epoch 117 , loss:0.06459376774728298
Test_acc: 0.9
--------------------------
Epoch 118 , loss:0.06432974617928267
Test_acc: 0.9
--------------------------
Epoch 119 , loss:0.06406840588897467
Test_acc: 0.9
--------------------------
Epoch 120 , loss:0.06380972359329462
Test_acc: 0.9333333333333333
--------------------------
Epoch 121 , loss:0.0635536601766944
Test_acc: 0.9333333333333333
--------------------------
Epoch 122 , loss:0.06330018304288387
Test_acc: 0.9333333333333333
--------------------------
Epoch 123 , loss:0.06304925587028265
Test_acc: 0.9333333333333333
--------------------------
Epoch 124 , loss:0.06280085071921349
Test_acc: 0.9333333333333333
--------------------------
Epoch 125 , loss:0.06255493219941854
Test_acc: 0.9333333333333333
--------------------------
Epoch 126 , loss:0.06231147237122059
Test_acc: 0.9333333333333333
--------------------------
Epoch 127 , loss:0.06207043677568436
Test_acc: 0.9333333333333333
--------------------------
Epoch 128 , loss:0.061831796541810036
Test_acc: 0.9333333333333333
--------------------------
Epoch 129 , loss:0.06159551814198494
Test_acc: 0.9333333333333333
--------------------------
Epoch 130 , loss:0.0613615782931447
Test_acc: 0.9333333333333333
--------------------------
Epoch 131 , loss:0.061129944398999214
Test_acc: 0.9333333333333333
--------------------------
Epoch 132 , loss:0.06090058386325836
Test_acc: 0.9333333333333333
--------------------------
Epoch 133 , loss:0.060673465952277184
Test_acc: 0.9333333333333333
--------------------------
Epoch 134 , loss:0.06044857297092676
Test_acc: 0.9333333333333333
--------------------------
Epoch 135 , loss:0.06022586487233639
Test_acc: 0.9333333333333333
--------------------------
Epoch 136 , loss:0.06000532582402229
Test_acc: 0.9333333333333333
--------------------------
Epoch 137 , loss:0.05978691391646862
Test_acc: 0.9333333333333333
--------------------------
Epoch 138 , loss:0.059570605866611004
Test_acc: 0.9333333333333333
--------------------------
Epoch 139 , loss:0.05935639515519142
Test_acc: 0.9333333333333333
--------------------------
Epoch 140 , loss:0.05914423614740372
Test_acc: 0.9333333333333333
--------------------------
Epoch 141 , loss:0.058934105560183525
Test_acc: 0.9333333333333333
--------------------------
Epoch 142 , loss:0.05872597545385361
Test_acc: 0.9333333333333333
--------------------------
Epoch 143 , loss:0.058519829995930195
Test_acc: 0.9333333333333333
--------------------------
Epoch 144 , loss:0.05831564124673605
Test_acc: 0.9333333333333333
--------------------------
Epoch 145 , loss:0.05811337102204561
Test_acc: 0.9333333333333333
--------------------------
Epoch 146 , loss:0.05791301652789116
Test_acc: 0.9333333333333333
--------------------------
Epoch 147 , loss:0.05771454330533743
Test_acc: 0.9333333333333333
--------------------------
Epoch 148 , loss:0.05751792900264263
Test_acc: 0.9333333333333333
--------------------------
Epoch 149 , loss:0.05732314754277468
Test_acc: 0.9333333333333333
--------------------------
Epoch 150 , loss:0.05713018216192722
Test_acc: 0.9333333333333333
--------------------------
Epoch 151 , loss:0.056939003989100456
Test_acc: 0.9333333333333333
--------------------------
Epoch 152 , loss:0.05674959998577833
Test_acc: 0.9333333333333333
--------------------------
Epoch 153 , loss:0.05656193848699331
Test_acc: 0.9333333333333333
--------------------------
Epoch 154 , loss:0.05637600086629391
Test_acc: 0.9333333333333333
--------------------------
Epoch 155 , loss:0.05619177222251892
Test_acc: 0.9333333333333333
--------------------------
Epoch 156 , loss:0.05600921530276537
Test_acc: 0.9333333333333333
--------------------------
Epoch 157 , loss:0.055828324519097805
Test_acc: 0.9333333333333333
--------------------------
Epoch 158 , loss:0.05564907751977444
Test_acc: 0.9333333333333333
--------------------------
Epoch 159 , loss:0.0554714547470212
Test_acc: 0.9333333333333333
--------------------------
Epoch 160 , loss:0.05529542453587055
Test_acc: 0.9333333333333333
--------------------------
Epoch 161 , loss:0.055120985954999924
Test_acc: 0.9333333333333333
--------------------------
Epoch 162 , loss:0.05494810547679663
Test_acc: 0.9333333333333333
--------------------------
Epoch 163 , loss:0.054776763543486595
Test_acc: 0.9333333333333333
--------------------------
Epoch 164 , loss:0.054606955498456955
Test_acc: 0.9333333333333333
--------------------------
Epoch 165 , loss:0.05443864781409502
Test_acc: 0.9333333333333333
--------------------------
Epoch 166 , loss:0.054271834902465343
Test_acc: 0.9333333333333333
--------------------------
Epoch 167 , loss:0.054106482304632664
Test_acc: 0.9666666666666667
--------------------------
Epoch 168 , loss:0.05394259560853243
Test_acc: 0.9666666666666667
--------------------------
Epoch 169 , loss:0.05378013290464878
Test_acc: 0.9666666666666667
--------------------------
Epoch 170 , loss:0.0536190839484334
Test_acc: 0.9666666666666667
--------------------------
Epoch 171 , loss:0.05345943849533796
Test_acc: 0.9666666666666667
--------------------------
Epoch 172 , loss:0.053301187232136726
Test_acc: 0.9666666666666667
--------------------------
Epoch 173 , loss:0.05314429383724928
Test_acc: 0.9666666666666667
--------------------------
Epoch 174 , loss:0.05298875365406275
Test_acc: 0.9666666666666667
--------------------------
Epoch 175 , loss:0.05283454991877079
Test_acc: 0.9666666666666667
--------------------------
Epoch 176 , loss:0.052681658416986465
Test_acc: 0.9666666666666667
--------------------------
Epoch 177 , loss:0.05253008101135492
Test_acc: 0.9666666666666667
--------------------------
Epoch 178 , loss:0.05237978603690863
Test_acc: 0.9666666666666667
--------------------------
Epoch 179 , loss:0.052230763249099255
Test_acc: 0.9666666666666667
--------------------------
Epoch 180 , loss:0.052082995884120464
Test_acc: 0.9666666666666667
--------------------------
Epoch 181 , loss:0.05193647649139166
Test_acc: 0.9666666666666667
--------------------------
Epoch 182 , loss:0.05179118365049362
Test_acc: 0.9666666666666667
--------------------------
Epoch 183 , loss:0.05164710432291031
Test_acc: 0.9666666666666667
--------------------------
Epoch 184 , loss:0.051504223607480526
Test_acc: 0.9666666666666667
--------------------------
Epoch 185 , loss:0.051362527534365654
Test_acc: 0.9666666666666667
--------------------------
Epoch 186 , loss:0.05122200958430767
Test_acc: 0.9666666666666667
--------------------------
Epoch 187 , loss:0.05108263995498419
Test_acc: 0.9666666666666667
--------------------------
Epoch 188 , loss:0.05094442702829838
Test_acc: 0.9666666666666667
--------------------------
Epoch 189 , loss:0.05080734007060528
Test_acc: 1.0
--------------------------
Epoch 190 , loss:0.05067136324942112
Test_acc: 1.0
--------------------------
Epoch 191 , loss:0.0505365002900362
Test_acc: 1.0
--------------------------
Epoch 192 , loss:0.05040272977203131
Test_acc: 1.0
--------------------------
Epoch 193 , loss:0.050270047038793564
Test_acc: 1.0
--------------------------
Epoch 194 , loss:0.050138418562710285
Test_acc: 1.0
--------------------------
Epoch 195 , loss:0.050007857382297516
Test_acc: 1.0
--------------------------
Epoch 196 , loss:0.04987833369523287
Test_acc: 1.0
--------------------------
Epoch 197 , loss:0.04974984563887119
Test_acc: 1.0
--------------------------
Epoch 198 , loss:0.049622380174696445
Test_acc: 1.0
--------------------------
Epoch 199 , loss:0.04949592147022486
Test_acc: 1.0
--------------------------
Epoch 200 , loss:0.049370463006198406
Test_acc: 1.0
--------------------------
Epoch 201 , loss:0.049245987087488174
Test_acc: 1.0
--------------------------
Epoch 202 , loss:0.04912248905748129
Test_acc: 1.0
--------------------------
Epoch 203 , loss:0.048999947495758533
Test_acc: 1.0
--------------------------
Epoch 204 , loss:0.04887837450951338
Test_acc: 1.0
--------------------------
Epoch 205 , loss:0.04875773563981056
Test_acc: 1.0
--------------------------
Epoch 206 , loss:0.04863803368061781
Test_acc: 1.0
--------------------------
Epoch 207 , loss:0.04851924814283848
Test_acc: 1.0
--------------------------
Epoch 208 , loss:0.04840137995779514
Test_acc: 1.0
--------------------------
Epoch 209 , loss:0.048284415155649185
Test_acc: 1.0
--------------------------
Epoch 210 , loss:0.04816834069788456
Test_acc: 1.0
--------------------------
Epoch 211 , loss:0.048053150065243244
Test_acc: 1.0
--------------------------
Epoch 212 , loss:0.04793883115053177
Test_acc: 1.0
--------------------------
Epoch 213 , loss:0.04782537464052439
Test_acc: 1.0
--------------------------
Epoch 214 , loss:0.04771277587860823
Test_acc: 1.0
--------------------------
Epoch 215 , loss:0.04760102368891239
Test_acc: 1.0
--------------------------
Epoch 216 , loss:0.047490100376307964
Test_acc: 1.0
--------------------------
Epoch 217 , loss:0.04738000128418207
Test_acc: 1.0
--------------------------
Epoch 218 , loss:0.04727072920650244
Test_acc: 1.0
--------------------------
Epoch 219 , loss:0.0471622608602047
Test_acc: 1.0
--------------------------
Epoch 220 , loss:0.04705459624528885
Test_acc: 1.0
--------------------------
Epoch 221 , loss:0.04694772232323885
Test_acc: 1.0
--------------------------
Epoch 222 , loss:0.04684162884950638
Test_acc: 1.0
--------------------------
Epoch 223 , loss:0.04673631116747856
Test_acc: 1.0
--------------------------
Epoch 224 , loss:0.04663177113980055
Test_acc: 1.0
--------------------------
Epoch 225 , loss:0.046527973376214504
Test_acc: 1.0
--------------------------
Epoch 226 , loss:0.04642493650317192
Test_acc: 1.0
--------------------------
Epoch 227 , loss:0.04632263910025358
Test_acc: 1.0
--------------------------
Epoch 228 , loss:0.04622107557952404
Test_acc: 1.0
--------------------------
Epoch 229 , loss:0.046120245940983295
Test_acc: 1.0
--------------------------
Epoch 230 , loss:0.04602012876421213
Test_acc: 1.0
--------------------------
Epoch 231 , loss:0.04592072777450085
Test_acc: 1.0
--------------------------
Epoch 232 , loss:0.045822033658623695
Test_acc: 1.0
--------------------------
Epoch 233 , loss:0.045724029652774334
Test_acc: 1.0
--------------------------
Epoch 234 , loss:0.045626724138855934
Test_acc: 1.0
--------------------------
Epoch 235 , loss:0.045530098490417004
Test_acc: 1.0
--------------------------
Epoch 236 , loss:0.045434155501425266
Test_acc: 1.0
--------------------------
Epoch 237 , loss:0.04533886816352606
Test_acc: 1.0
--------------------------
Epoch 238 , loss:0.04524425510317087
Test_acc: 1.0
--------------------------
Epoch 239 , loss:0.04515030141919851
Test_acc: 1.0
--------------------------
Epoch 240 , loss:0.04505699593573809
Test_acc: 1.0
--------------------------
Epoch 241 , loss:0.044964320957660675
Test_acc: 1.0
--------------------------
Epoch 242 , loss:0.0448722867295146
Test_acc: 1.0
--------------------------
Epoch 243 , loss:0.04478088486939669
Test_acc: 1.0
--------------------------
Epoch 244 , loss:0.04469011165201664
Test_acc: 1.0
--------------------------
Epoch 245 , loss:0.04459994751960039
Test_acc: 1.0
--------------------------
Epoch 246 , loss:0.044510395266115665
Test_acc: 1.0
--------------------------
Epoch 247 , loss:0.04442145302891731
Test_acc: 1.0
--------------------------
Epoch 248 , loss:0.044333115220069885
Test_acc: 1.0
--------------------------
Epoch 249 , loss:0.04424535948783159
Test_acc: 1.0
--------------------------
Epoch 250 , loss:0.044158185832202435
Test_acc: 1.0
--------------------------
Epoch 251 , loss:0.04407161381095648
Test_acc: 1.0
--------------------------
Epoch 252 , loss:0.04398560244590044
Test_acc: 1.0
--------------------------
Epoch 253 , loss:0.04390017315745354
Test_acc: 1.0
--------------------------
Epoch 254 , loss:0.04381529614329338
Test_acc: 1.0
--------------------------
Epoch 255 , loss:0.04373098257929087
Test_acc: 1.0
--------------------------
Epoch 256 , loss:0.04364722687751055
Test_acc: 1.0
--------------------------
Epoch 257 , loss:0.04356401413679123
Test_acc: 1.0
--------------------------
Epoch 258 , loss:0.04348134249448776
Test_acc: 1.0
--------------------------
Epoch 259 , loss:0.043399215675890446
Test_acc: 1.0
--------------------------
Epoch 260 , loss:0.04331761971116066
Test_acc: 1.0
--------------------------
Epoch 261 , loss:0.04323654621839523
Test_acc: 1.0
--------------------------
Epoch 262 , loss:0.04315600264817476
Test_acc: 1.0
--------------------------
Epoch 263 , loss:0.04307597689330578
Test_acc: 1.0
--------------------------
Epoch 264 , loss:0.04299645218998194
Test_acc: 1.0
--------------------------
Epoch 265 , loss:0.04291744530200958
Test_acc: 1.0
--------------------------
Epoch 266 , loss:0.04283893667161465
Test_acc: 1.0
--------------------------
Epoch 267 , loss:0.04276093281805515
Test_acc: 1.0
--------------------------
Epoch 268 , loss:0.04268341325223446
Test_acc: 1.0
--------------------------
Epoch 269 , loss:0.042606390081346035
Test_acc: 1.0
--------------------------
Epoch 270 , loss:0.04252984933555126
Test_acc: 1.0
--------------------------
Epoch 271 , loss:0.04245378356426954
Test_acc: 1.0
--------------------------
Epoch 272 , loss:0.0423781992867589
Test_acc: 1.0
--------------------------
Epoch 273 , loss:0.04230308625847101
Test_acc: 1.0
--------------------------
Epoch 274 , loss:0.04222843889147043
Test_acc: 1.0
--------------------------
Epoch 275 , loss:0.04215425252914429
Test_acc: 1.0
--------------------------
Epoch 276 , loss:0.04208051785826683
Test_acc: 1.0
--------------------------
Epoch 277 , loss:0.04200724605470896
Test_acc: 1.0
--------------------------
Epoch 278 , loss:0.0419344175606966
Test_acc: 1.0
--------------------------
Epoch 279 , loss:0.041862040758132935
Test_acc: 1.0
--------------------------
Epoch 280 , loss:0.041790105402469635
Test_acc: 1.0
--------------------------
Epoch 281 , loss:0.04171860497444868
Test_acc: 1.0
--------------------------
Epoch 282 , loss:0.0416475348174572
Test_acc: 1.0
--------------------------
Epoch 283 , loss:0.041576895862817764
Test_acc: 1.0
--------------------------
Epoch 284 , loss:0.04150668252259493
Test_acc: 1.0
--------------------------
Epoch 285 , loss:0.04143689665943384
Test_acc: 1.0
--------------------------
Epoch 286 , loss:0.041367520578205585
Test_acc: 1.0
--------------------------
Epoch 287 , loss:0.04129855800420046
Test_acc: 1.0
--------------------------
Epoch 288 , loss:0.041230013594031334
Test_acc: 1.0
--------------------------
Epoch 289 , loss:0.041161881759762764
Test_acc: 1.0
--------------------------
Epoch 290 , loss:0.041094135493040085
Test_acc: 1.0
--------------------------
Epoch 291 , loss:0.04102680180221796
Test_acc: 1.0
--------------------------
Epoch 292 , loss:0.040959861129522324
Test_acc: 1.0
--------------------------
Epoch 293 , loss:0.040893315337598324
Test_acc: 1.0
--------------------------
Epoch 294 , loss:0.040827155113220215
Test_acc: 1.0
--------------------------
Epoch 295 , loss:0.04076138325035572
Test_acc: 1.0
--------------------------
Epoch 296 , loss:0.04069599322974682
Test_acc: 1.0
--------------------------
Epoch 297 , loss:0.04063097853213549
Test_acc: 1.0
--------------------------
Epoch 298 , loss:0.04056634288281202
Test_acc: 1.0
--------------------------
Epoch 299 , loss:0.04050208907574415
Test_acc: 1.0
--------------------------
Epoch 300 , loss:0.04043819569051266
Test_acc: 1.0
--------------------------
Epoch 301 , loss:0.040374668315052986
Test_acc: 1.0
--------------------------
Epoch 302 , loss:0.040311504155397415
Test_acc: 1.0
--------------------------
Epoch 303 , loss:0.04024870879948139
Test_acc: 1.0
--------------------------
Epoch 304 , loss:0.04018625942990184
Test_acc: 1.0
--------------------------
Epoch 305 , loss:0.0401241690851748
Test_acc: 1.0
--------------------------
Epoch 306 , loss:0.04006242612376809
Test_acc: 1.0
--------------------------
Epoch 307 , loss:0.04000102821737528
Test_acc: 1.0
--------------------------
Epoch 308 , loss:0.039939986541867256
Test_acc: 1.0
--------------------------
Epoch 309 , loss:0.039879275020211935
Test_acc: 1.0
--------------------------
Epoch 310 , loss:0.03981891227886081
Test_acc: 1.0
--------------------------
Epoch 311 , loss:0.039758887607604265
Test_acc: 1.0
--------------------------
Epoch 312 , loss:0.039699186105281115
Test_acc: 1.0
--------------------------
Epoch 313 , loss:0.03963982267305255
Test_acc: 1.0
--------------------------
Epoch 314 , loss:0.03958077589049935
Test_acc: 1.0
--------------------------
Epoch 315 , loss:0.03952206764370203
Test_acc: 1.0
--------------------------
Epoch 316 , loss:0.0394636788405478
Test_acc: 1.0
--------------------------
Epoch 317 , loss:0.039405607152730227
Test_acc: 1.0
--------------------------
Epoch 318 , loss:0.03934785583987832
Test_acc: 1.0
--------------------------
Epoch 319 , loss:0.03929041465744376
Test_acc: 1.0
--------------------------
Epoch 320 , loss:0.03923328733071685
Test_acc: 1.0
--------------------------
Epoch 321 , loss:0.039176476653665304
Test_acc: 1.0
--------------------------
Epoch 322 , loss:0.03911995841190219
Test_acc: 1.0
--------------------------
Epoch 323 , loss:0.03906376101076603
Test_acc: 1.0
--------------------------
Epoch 324 , loss:0.03900785371661186
Test_acc: 1.0
--------------------------
Epoch 325 , loss:0.038952252361923456
Test_acc: 1.0
--------------------------
Epoch 326 , loss:0.038896942511200905
Test_acc: 1.0
--------------------------
Epoch 327 , loss:0.03884193766862154
Test_acc: 1.0
--------------------------
Epoch 328 , loss:0.03878721268847585
Test_acc: 1.0
--------------------------
Epoch 329 , loss:0.03873277874663472
Test_acc: 1.0
--------------------------
Epoch 330 , loss:0.03867863770574331
Test_acc: 1.0
--------------------------
Epoch 331 , loss:0.038624780252575874
Test_acc: 1.0
--------------------------
Epoch 332 , loss:0.03857120871543884
Test_acc: 1.0
--------------------------
Epoch 333 , loss:0.038517920300364494
Test_acc: 1.0
--------------------------
Epoch 334 , loss:0.0384649015031755
Test_acc: 1.0
--------------------------
Epoch 335 , loss:0.038412167225033045
Test_acc: 1.0
--------------------------
Epoch 336 , loss:0.038359703961759806
Test_acc: 1.0
--------------------------
Epoch 337 , loss:0.03830751543864608
Test_acc: 1.0
--------------------------
Epoch 338 , loss:0.03825558861717582
Test_acc: 1.0
--------------------------
Epoch 339 , loss:0.03820393607020378
Test_acc: 1.0
--------------------------
Epoch 340 , loss:0.03815255546942353
Test_acc: 1.0
--------------------------
Epoch 341 , loss:0.03810142679139972
Test_acc: 1.0
--------------------------
Epoch 342 , loss:0.03805056540295482
Test_acc: 1.0
--------------------------
Epoch 343 , loss:0.037999965250492096
Test_acc: 1.0
--------------------------
Epoch 344 , loss:0.03794961981475353
Test_acc: 1.0
--------------------------
Epoch 345 , loss:0.03789952816441655
Test_acc: 1.0
--------------------------
Epoch 346 , loss:0.03784969728440046
Test_acc: 1.0
--------------------------
Epoch 347 , loss:0.037800111807882786
Test_acc: 1.0
--------------------------
Epoch 348 , loss:0.03775077825412154
Test_acc: 1.0
--------------------------
Epoch 349 , loss:0.03770169056952
Test_acc: 1.0
--------------------------
Epoch 350 , loss:0.0376528506167233
Test_acc: 1.0
--------------------------
Epoch 351 , loss:0.037604253739118576
Test_acc: 1.0
--------------------------
Epoch 352 , loss:0.03755589900538325
Test_acc: 1.0
--------------------------
Epoch 353 , loss:0.03750779293477535
Test_acc: 1.0
--------------------------
Epoch 354 , loss:0.03745991224423051
Test_acc: 1.0
--------------------------
Epoch 355 , loss:0.03741227509453893
Test_acc: 1.0
--------------------------
Epoch 356 , loss:0.037364871241152287
Test_acc: 1.0
--------------------------
Epoch 357 , loss:0.03731770068407059
Test_acc: 1.0
--------------------------
Epoch 358 , loss:0.03727075783535838
Test_acc: 1.0
--------------------------
Epoch 359 , loss:0.03722405293956399
Test_acc: 1.0
--------------------------
Epoch 360 , loss:0.037177571561187506
Test_acc: 1.0
--------------------------
Epoch 361 , loss:0.03713131649419665
Test_acc: 1.0
--------------------------
Epoch 362 , loss:0.03708529192954302
Test_acc: 1.0
--------------------------
Epoch 363 , loss:0.03703948762267828
Test_acc: 1.0
--------------------------
Epoch 364 , loss:0.036993893794715405
Test_acc: 1.0
--------------------------
Epoch 365 , loss:0.036948537454009056
Test_acc: 1.0
--------------------------
Epoch 366 , loss:0.03690338088199496
Test_acc: 1.0
--------------------------
Epoch 367 , loss:0.0368584580719471
Test_acc: 1.0
--------------------------
Epoch 368 , loss:0.036813742481172085
Test_acc: 1.0
--------------------------
Epoch 369 , loss:0.0367692387662828
Test_acc: 1.0
--------------------------
Epoch 370 , loss:0.03672494785860181
Test_acc: 1.0
--------------------------
Epoch 371 , loss:0.03668087022379041
Test_acc: 1.0
--------------------------
Epoch 372 , loss:0.03663699654862285
Test_acc: 1.0
--------------------------
Epoch 373 , loss:0.0365933389402926
Test_acc: 1.0
--------------------------
Epoch 374 , loss:0.0365498811006546
Test_acc: 1.0
--------------------------
Epoch 375 , loss:0.036506621632725
Test_acc: 1.0
--------------------------
Epoch 376 , loss:0.03646357636898756
Test_acc: 1.0
--------------------------
Epoch 377 , loss:0.036420723889023066
Test_acc: 1.0
--------------------------
Epoch 378 , loss:0.0363780758343637
Test_acc: 1.0
--------------------------
Epoch 379 , loss:0.0363356196321547
Test_acc: 1.0
--------------------------
Epoch 380 , loss:0.036293372977524996
Test_acc: 1.0
--------------------------
Epoch 381 , loss:0.036251313518732786
Test_acc: 1.0
--------------------------
Epoch 382 , loss:0.03620945382863283
Test_acc: 1.0
--------------------------
Epoch 383 , loss:0.03616778505966067
Test_acc: 1.0
--------------------------
Epoch 384 , loss:0.03612630208954215
Test_acc: 1.0
--------------------------
Epoch 385 , loss:0.03608501749113202
Test_acc: 1.0
--------------------------
Epoch 386 , loss:0.03604391356930137
Test_acc: 1.0
--------------------------
Epoch 387 , loss:0.03600299824029207
Test_acc: 1.0
--------------------------
Epoch 388 , loss:0.035962277092039585
Test_acc: 1.0
--------------------------
Epoch 389 , loss:0.03592173708602786
Test_acc: 1.0
--------------------------
Epoch 390 , loss:0.03588137263432145
Test_acc: 1.0
--------------------------
Epoch 391 , loss:0.03584120236337185
Test_acc: 1.0
--------------------------
Epoch 392 , loss:0.035801215562969446
Test_acc: 1.0
--------------------------
Epoch 393 , loss:0.035761401522904634
Test_acc: 1.0
--------------------------
Epoch 394 , loss:0.035721767228096724
Test_acc: 1.0
--------------------------
Epoch 395 , loss:0.035682305693626404
Test_acc: 1.0
--------------------------
Epoch 396 , loss:0.03564302809536457
Test_acc: 1.0
--------------------------
Epoch 397 , loss:0.035603920463472605
Test_acc: 1.0
--------------------------
Epoch 398 , loss:0.03556498885154724
Test_acc: 1.0
--------------------------
Epoch 399 , loss:0.035526234190911055
Test_acc: 1.0
--------------------------
Epoch 400 , loss:0.035487639252096415
Test_acc: 1.0
--------------------------
Epoch 401 , loss:0.03544922498986125
Test_acc: 1.0
--------------------------
Epoch 402 , loss:0.035410976968705654
Test_acc: 1.0
--------------------------
Epoch 403 , loss:0.0353728961199522
Test_acc: 1.0
--------------------------
Epoch 404 , loss:0.035334983840584755
Test_acc: 1.0
--------------------------
Epoch 405 , loss:0.03529723361134529
Test_acc: 1.0
--------------------------
Epoch 406 , loss:0.03525965381413698
Test_acc: 1.0
--------------------------
Epoch 407 , loss:0.03522223234176636
Test_acc: 1.0
--------------------------
Epoch 408 , loss:0.03518497571349144
Test_acc: 1.0
--------------------------
Epoch 409 , loss:0.03514788066968322
Test_acc: 1.0
--------------------------
Epoch 410 , loss:0.03511094581335783
Test_acc: 1.0
--------------------------
Epoch 411 , loss:0.035074169747531414
Test_acc: 1.0
--------------------------
Epoch 412 , loss:0.03503755433484912
Test_acc: 1.0
--------------------------
Epoch 413 , loss:0.035001086071133614
Test_acc: 1.0
--------------------------
Epoch 414 , loss:0.034964791033416986
Test_acc: 1.0
--------------------------
Epoch 415 , loss:0.034928635228425264
Test_acc: 1.0
--------------------------
Epoch 416 , loss:0.034892638213932514
Test_acc: 1.0
--------------------------
Epoch 417 , loss:0.03485679812729359
Test_acc: 1.0
--------------------------
Epoch 418 , loss:0.03482110844925046
Test_acc: 1.0
--------------------------
Epoch 419 , loss:0.034785570576786995
Test_acc: 1.0
--------------------------
Epoch 420 , loss:0.03475018125027418
Test_acc: 1.0
--------------------------
Epoch 421 , loss:0.03471494605764747
Test_acc: 1.0
--------------------------
Epoch 422 , loss:0.03467985149472952
Test_acc: 1.0
--------------------------
Epoch 423 , loss:0.03464490873739123
Test_acc: 1.0
--------------------------
Epoch 424 , loss:0.03461011219769716
Test_acc: 1.0
--------------------------
Epoch 425 , loss:0.034575455356389284
Test_acc: 1.0
--------------------------
Epoch 426 , loss:0.03454095032066107
Test_acc: 1.0
--------------------------
Epoch 427 , loss:0.03450658544898033
Test_acc: 1.0
--------------------------
Epoch 428 , loss:0.0344723598100245
Test_acc: 1.0
--------------------------
Epoch 429 , loss:0.034438283648341894
Test_acc: 1.0
--------------------------
Epoch 430 , loss:0.03440434439107776
Test_acc: 1.0
--------------------------
Epoch 431 , loss:0.0343705490231514
Test_acc: 1.0
--------------------------
Epoch 432 , loss:0.034336884040385485
Test_acc: 1.0
--------------------------
Epoch 433 , loss:0.034303362015634775
Test_acc: 1.0
--------------------------
Epoch 434 , loss:0.034269976895302534
Test_acc: 1.0
--------------------------
Epoch 435 , loss:0.03423672681674361
Test_acc: 1.0
--------------------------
Epoch 436 , loss:0.03420361038297415
Test_acc: 1.0
--------------------------
Epoch 437 , loss:0.03417063504457474
Test_acc: 1.0
--------------------------
Epoch 438 , loss:0.0341377891600132
Test_acc: 1.0
--------------------------
Epoch 439 , loss:0.03410507598891854
Test_acc: 1.0
--------------------------
Epoch 440 , loss:0.034072491340339184
Test_acc: 1.0
--------------------------
Epoch 441 , loss:0.03404005104675889
Test_acc: 1.0
--------------------------
Epoch 442 , loss:0.034007733687758446
Test_acc: 1.0
--------------------------
Epoch 443 , loss:0.03397554066032171
Test_acc: 1.0
--------------------------
Epoch 444 , loss:0.03394348407164216
Test_acc: 1.0
--------------------------
Epoch 445 , loss:0.03391155181452632
Test_acc: 1.0
--------------------------
Epoch 446 , loss:0.03387974854558706
Test_acc: 1.0
--------------------------
Epoch 447 , loss:0.03384807147085667
Test_acc: 1.0
--------------------------
Epoch 448 , loss:0.033816519659012556
Test_acc: 1.0
--------------------------
Epoch 449 , loss:0.033785090781748295
Test_acc: 1.0
--------------------------
Epoch 450 , loss:0.03375378483906388
Test_acc: 1.0
--------------------------
Epoch 451 , loss:0.033722604624927044
Test_acc: 1.0
--------------------------
Epoch 452 , loss:0.033691555727273226
Test_acc: 1.0
--------------------------
Epoch 453 , loss:0.03366061672568321
Test_acc: 1.0
--------------------------
Epoch 454 , loss:0.033629804849624634
Test_acc: 1.0
--------------------------
Epoch 455 , loss:0.03359911171719432
Test_acc: 1.0
--------------------------
Epoch 456 , loss:0.03356853872537613
Test_acc: 1.0
--------------------------
Epoch 457 , loss:0.03353808168321848
Test_acc: 1.0
--------------------------
Epoch 458 , loss:0.03350774757564068
Test_acc: 1.0
--------------------------
Epoch 459 , loss:0.03347752895206213
Test_acc: 1.0
--------------------------
Epoch 460 , loss:0.03344742488116026
Test_acc: 1.0
--------------------------
Epoch 461 , loss:0.033417444210499525
Test_acc: 1.0
--------------------------
Epoch 462 , loss:0.03338757110759616
Test_acc: 1.0
--------------------------
Epoch 463 , loss:0.03335781395435333
Test_acc: 1.0
--------------------------
Epoch 464 , loss:0.03332817042246461
Test_acc: 1.0
--------------------------
Epoch 465 , loss:0.033298646565526724
Test_acc: 1.0
--------------------------
Epoch 466 , loss:0.033269234001636505
Test_acc: 1.0
--------------------------
Epoch 467 , loss:0.0332399345934391
Test_acc: 1.0
--------------------------
Epoch 468 , loss:0.03321074414998293
Test_acc: 1.0
--------------------------
Epoch 469 , loss:0.03318166173994541
Test_acc: 1.0
--------------------------
Epoch 470 , loss:0.033152684569358826
Test_acc: 1.0
--------------------------
Epoch 471 , loss:0.033123827539384365
Test_acc: 1.0
--------------------------
Epoch 472 , loss:0.03309507854282856
Test_acc: 1.0
--------------------------
Epoch 473 , loss:0.03306643990799785
Test_acc: 1.0
--------------------------
Epoch 474 , loss:0.03303789859637618
Test_acc: 1.0
--------------------------
Epoch 475 , loss:0.03300946718081832
Test_acc: 1.0
--------------------------
Epoch 476 , loss:0.032981148455291986
Test_acc: 1.0
--------------------------
Epoch 477 , loss:0.03295292519032955
Test_acc: 1.0
--------------------------
Epoch 478 , loss:0.03292481508105993
Test_acc: 1.0
--------------------------
Epoch 479 , loss:0.03289681160822511
Test_acc: 1.0
--------------------------
Epoch 480 , loss:0.03286890313029289
Test_acc: 1.0
--------------------------
Epoch 481 , loss:0.032841107808053493
Test_acc: 1.0
--------------------------
Epoch 482 , loss:0.03281340887770057
Test_acc: 1.0
--------------------------
Epoch 483 , loss:0.032785809598863125
Test_acc: 1.0
--------------------------
Epoch 484 , loss:0.03275832487270236
Test_acc: 1.0
--------------------------
Epoch 485 , loss:0.03273092629387975
Test_acc: 1.0
--------------------------
Epoch 486 , loss:0.03270363621413708
Test_acc: 1.0
--------------------------
Epoch 487 , loss:0.03267644206061959
Test_acc: 1.0
--------------------------
Epoch 488 , loss:0.032649350352585316
Test_acc: 1.0
--------------------------
Epoch 489 , loss:0.03262235689908266
Test_acc: 1.0
--------------------------
Epoch 490 , loss:0.0325954626314342
Test_acc: 1.0
--------------------------
Epoch 491 , loss:0.032568661496043205
Test_acc: 1.0
--------------------------
Epoch 492 , loss:0.032541955821216106
Test_acc: 1.0
--------------------------
Epoch 493 , loss:0.032515356317162514
Test_acc: 1.0
--------------------------
Epoch 494 , loss:0.03248884342610836
Test_acc: 1.0
--------------------------
Epoch 495 , loss:0.032462427858263254
Test_acc: 1.0
--------------------------
Epoch 496 , loss:0.03243611566722393
Test_acc: 1.0
--------------------------
Epoch 497 , loss:0.032409884966909885
Test_acc: 1.0
--------------------------
Epoch 498 , loss:0.032383753918111324
Test_acc: 1.0
--------------------------
Epoch 499 , loss:0.03235771972686052
Test_acc: 1.0
--------------------------
(5) Visualizing acc/loss
import matplotlib.pyplot as plt
# plot the loss curve
plt.title('Loss Function Curve') # figure title
plt.xlabel('Epoch') # x-axis label
plt.ylabel('Loss') # y-axis label
plt.plot(train_loss_results, label="$Loss$") # plot the train_loss_results values point by point and connect them; legend label is Loss
plt.legend() # draw the legend
plt.show() # show the figure
# plot the accuracy curve
plt.title('Acc Curve') # figure title
plt.xlabel('Epoch') # x-axis label
plt.ylabel('Acc') # y-axis label
plt.plot(test_acc, label="$Accuracy$") # plot the test_acc values point by point and connect them; legend label is Accuracy
plt.legend()
plt.show()
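If the script runs in a non-interactive environment, a common alternative (an assumption, not part of the original post; the file name is hypothetical) is to save the figure instead of showing it:
plt.plot(train_loss_results, label="$Loss$")
plt.legend()
plt.savefig("loss_curve.png")  # write the figure to disk instead of opening a window
plt.close()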
Complete code
# -*- coding: UTF-8 -*-
# Use the iris dataset to implement forward and backward propagation and visualize the loss curve
# import the required modules
import tensorflow as tf
from sklearn import datasets
from matplotlib import pyplot as plt
import numpy as np
# load the data: input features and labels
x_data = datasets.load_iris().data
y_data = datasets.load_iris().target
# shuffle the data (the original data is ordered by class; leaving it ordered hurts accuracy)
# seed: an integer random seed; with a fixed seed the same random numbers are generated every run (kept here so everyone gets the same result)
np.random.seed(116) # use the same seed so input features and labels stay paired one-to-one
np.random.shuffle(x_data)
np.random.seed(116)
np.random.shuffle(y_data)
tf.random.set_seed(116)
# split the shuffled dataset: the first 120 rows are the training set, the last 30 rows the test set
x_train = x_data[:-30]
y_train = y_data[:-30]
x_test = x_data[-30:]
y_test = y_data[-30:]
# cast x to float32, otherwise the later matrix multiplication fails because of mismatched dtypes
x_train = tf.cast(x_train, tf.float32)
x_test = tf.cast(x_test, tf.float32)
# from_tensor_slices pairs features with labels one-to-one (the dataset is then split into batches)
train_db = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(32)
test_db = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(32)
# generate the network parameters: 4 input features, so the input layer has 4 nodes; 3 classes, so the output layer has 3 neurons
# tf.Variable() marks the parameters as trainable
# a fixed seed makes the random initialization reproducible (for teaching; omit the seed in real use)
w1 = tf.Variable(tf.random.truncated_normal([4, 3], stddev=0.1, seed=1))
b1 = tf.Variable(tf.random.truncated_normal([3], stddev=0.1, seed=1))
lr = 0.1 # learning rate
train_loss_results = [] # record the loss of every epoch here, for plotting the loss curve later
test_acc = [] # record the accuracy of every epoch here, for plotting the acc curve later
epoch = 500 # train for 500 epochs
loss_all = 0 # each epoch has 4 steps; loss_all accumulates the 4 step losses
# training part
for epoch in range(epoch): # dataset-level loop: each epoch goes through the dataset once
    for step, (x_train, y_train) in enumerate(train_db): # batch-level loop: each step processes one batch
        with tf.GradientTape() as tape: # the with block records operations for the gradients
            y = tf.matmul(x_train, w1) + b1 # multiply-accumulate of the network
            y = tf.nn.softmax(y) # make y a probability distribution (comparable to the one-hot labels)
            y_ = tf.one_hot(y_train, depth=3) # convert labels to one-hot, convenient for loss and accuracy
            loss = tf.reduce_mean(tf.square(y_ - y)) # mean-squared-error loss: mse = mean((y_ - y)^2)
            loss_all += loss.numpy() # accumulate the loss of each step, so the per-epoch average can be computed later
        # compute the gradients of the loss with respect to each parameter
        grads = tape.gradient(loss, [w1, b1])
        # gradient update: w1 = w1 - lr * w1_grad, b = b - lr * b_grad
        w1.assign_sub(lr * grads[0]) # update w1
        b1.assign_sub(lr * grads[1]) # update b1
    # print the loss once per epoch
    print("Epoch {}, loss: {}".format(epoch, loss_all/4))
    train_loss_results.append(loss_all / 4) # store the averaged loss of the 4 steps
    loss_all = 0 # reset loss_all for the next epoch
    # test part
    # total_correct counts correctly predicted samples, total_number counts test samples; both start at 0
    total_correct, total_number = 0, 0
    for x_test, y_test in test_db:
        # predict with the updated parameters
        y = tf.matmul(x_test, w1) + b1
        y = tf.nn.softmax(y)
        pred = tf.argmax(y, axis=1) # index of the largest value in y, i.e. the predicted class
        # cast pred to the dtype of y_test
        pred = tf.cast(pred, dtype=y_test.dtype)
        # correct is 1 where the prediction matches the label, 0 otherwise; cast the bool result to int
        correct = tf.cast(tf.equal(pred, y_test), dtype=tf.int32)
        # number of correct predictions in this batch
        correct = tf.reduce_sum(correct)
        # sum the correct predictions over all batches
        total_correct += int(correct)
        # total_number is the number of test samples, i.e. the number of rows of x_test (shape[0])
        total_number += x_test.shape[0]
    # the overall accuracy is total_correct / total_number
    acc = total_correct / total_number
    test_acc.append(acc)
    print("Test_acc:", acc)
    print("--------------------------")
# plot the loss curve
plt.title('Loss Function Curve') # figure title
plt.xlabel('Epoch') # x-axis label
plt.ylabel('Loss') # y-axis label
plt.plot(train_loss_results, label="$Loss$") # plot the train_loss_results values point by point and connect them; legend label is Loss
plt.legend() # draw the legend
plt.show() # show the figure
# plot the accuracy curve
plt.title('Acc Curve') # figure title
plt.xlabel('Epoch') # x-axis label
plt.ylabel('Acc') # y-axis label
plt.plot(test_acc, label="$Accuracy$") # plot the test_acc values point by point and connect them; legend label is Accuracy
plt.legend()
plt.show()
Epoch 0, loss: 0.2821310982108116
Test_acc: 0.16666666666666666
--------------------------
Epoch 1, loss: 0.25459614023566246
Test_acc: 0.16666666666666666
--------------------------
Epoch 2, loss: 0.22570250183343887
Test_acc: 0.16666666666666666
--------------------------
Epoch 3, loss: 0.21028400212526321
Test_acc: 0.16666666666666666
--------------------------
Epoch 4, loss: 0.19942265003919601
Test_acc: 0.16666666666666666
--------------------------
Epoch 5, loss: 0.18873638287186623
Test_acc: 0.5
--------------------------
Epoch 6, loss: 0.17851299419999123
Test_acc: 0.5333333333333333
--------------------------
Epoch 7, loss: 0.16922875493764877
Test_acc: 0.5333333333333333
--------------------------
Epoch 8, loss: 0.16107673197984695
Test_acc: 0.5333333333333333
--------------------------
Epoch 9, loss: 0.15404684096574783
Test_acc: 0.5333333333333333
--------------------------
Epoch 10, loss: 0.14802725985646248
Test_acc: 0.5333333333333333
--------------------------
Epoch 11, loss: 0.14287303388118744
Test_acc: 0.5333333333333333
--------------------------
Epoch 12, loss: 0.1384414155036211
Test_acc: 0.5333333333333333
--------------------------
Epoch 13, loss: 0.13460607267916203
Test_acc: 0.5333333333333333
--------------------------
Epoch 14, loss: 0.1312607266008854
Test_acc: 0.5333333333333333
--------------------------
Epoch 15, loss: 0.12831821851432323
Test_acc: 0.5333333333333333
--------------------------
Epoch 16, loss: 0.12570794858038425
Test_acc: 0.5333333333333333
--------------------------
Epoch 17, loss: 0.12337299063801765
Test_acc: 0.5333333333333333
--------------------------
Epoch 18, loss: 0.12126746959984303
Test_acc: 0.5333333333333333
--------------------------
Epoch 19, loss: 0.11935433000326157
Test_acc: 0.5333333333333333
--------------------------
Epoch 20, loss: 0.11760355532169342
Test_acc: 0.5333333333333333
--------------------------
Epoch 21, loss: 0.11599067784845829
Test_acc: 0.5333333333333333
--------------------------
Epoch 22, loss: 0.11449568346142769
Test_acc: 0.5333333333333333
--------------------------
Epoch 23, loss: 0.11310208030045033
Test_acc: 0.5333333333333333
--------------------------
Epoch 24, loss: 0.11179621517658234
Test_acc: 0.5333333333333333
--------------------------
Epoch 25, loss: 0.11056671850383282
Test_acc: 0.5333333333333333
--------------------------
Epoch 26, loss: 0.1094040796160698
Test_acc: 0.5333333333333333
--------------------------
Epoch 27, loss: 0.10830028168857098
Test_acc: 0.5333333333333333
--------------------------
Epoch 28, loss: 0.10724855586886406
Test_acc: 0.5333333333333333
--------------------------
Epoch 29, loss: 0.10624313727021217
Test_acc: 0.5333333333333333
--------------------------
Epoch 30, loss: 0.1052791029214859
Test_acc: 0.5333333333333333
--------------------------
Epoch 31, loss: 0.10435222089290619
Test_acc: 0.5333333333333333
--------------------------
Epoch 32, loss: 0.10345886647701263
Test_acc: 0.5333333333333333
--------------------------
Epoch 33, loss: 0.10259587690234184
Test_acc: 0.5333333333333333
--------------------------
Epoch 34, loss: 0.10176053084433079
Test_acc: 0.5333333333333333
--------------------------
Epoch 35, loss: 0.10095042362809181
Test_acc: 0.5333333333333333
--------------------------
Epoch 36, loss: 0.10016347281634808
Test_acc: 0.5333333333333333
--------------------------
Epoch 37, loss: 0.09939785301685333
Test_acc: 0.5333333333333333
--------------------------
Epoch 38, loss: 0.098651934415102
Test_acc: 0.5333333333333333
--------------------------
Epoch 39, loss: 0.09792428836226463
Test_acc: 0.5333333333333333
--------------------------
Epoch 40, loss: 0.09721364639699459
Test_acc: 0.5333333333333333
--------------------------
Epoch 41, loss: 0.09651889279484749
Test_acc: 0.5333333333333333
--------------------------
Epoch 42, loss: 0.09583901055157185
Test_acc: 0.5333333333333333
--------------------------
Epoch 43, loss: 0.09517310746014118
Test_acc: 0.5333333333333333
--------------------------
Epoch 44, loss: 0.09452036395668983
Test_acc: 0.5333333333333333
--------------------------
Epoch 45, loss: 0.0938800685107708
Test_acc: 0.5333333333333333
--------------------------
Epoch 46, loss: 0.09325156174600124
Test_acc: 0.5333333333333333
--------------------------
Epoch 47, loss: 0.09263424947857857
Test_acc: 0.5333333333333333
--------------------------
Epoch 48, loss: 0.09202760085463524
Test_acc: 0.5333333333333333
--------------------------
Epoch 49, loss: 0.09143111668527126
Test_acc: 0.5333333333333333
--------------------------
Epoch 50, loss: 0.09084436297416687
Test_acc: 0.5666666666666667
--------------------------
Epoch 51, loss: 0.09026693738996983
Test_acc: 0.5666666666666667
--------------------------
Epoch 52, loss: 0.08969846740365028
Test_acc: 0.5666666666666667
--------------------------
Epoch 53, loss: 0.08913861028850079
Test_acc: 0.6
--------------------------
Epoch 54, loss: 0.08858705312013626
Test_acc: 0.6
--------------------------
Epoch 55, loss: 0.08804351277649403
Test_acc: 0.6
--------------------------
Epoch 56, loss: 0.08750772476196289
Test_acc: 0.6
--------------------------
Epoch 57, loss: 0.08697944693267345
Test_acc: 0.6
--------------------------
Epoch 58, loss: 0.08645843341946602
Test_acc: 0.6
--------------------------
Epoch 59, loss: 0.08594449236989021
Test_acc: 0.6
--------------------------
Epoch 60, loss: 0.08543741516768932
Test_acc: 0.6
--------------------------
Epoch 61, loss: 0.08493702113628387
Test_acc: 0.6
--------------------------
Epoch 62, loss: 0.08444313704967499
Test_acc: 0.6333333333333333
--------------------------
Epoch 63, loss: 0.08395560085773468
Test_acc: 0.6333333333333333
--------------------------
Epoch 64, loss: 0.08347426354885101
Test_acc: 0.6333333333333333
--------------------------
Epoch 65, loss: 0.08299897983670235
Test_acc: 0.6333333333333333
--------------------------
Epoch 66, loss: 0.08252961002290249
Test_acc: 0.6333333333333333
--------------------------
Epoch 67, loss: 0.08206603676080704
Test_acc: 0.6333333333333333
--------------------------
Epoch 68, loss: 0.0816081278026104
Test_acc: 0.6333333333333333
--------------------------
Epoch 69, loss: 0.08115577697753906
Test_acc: 0.6333333333333333
--------------------------
Epoch 70, loss: 0.08070887438952923
Test_acc: 0.6333333333333333
--------------------------
Epoch 71, loss: 0.08026730641722679
Test_acc: 0.6333333333333333
--------------------------
Epoch 72, loss: 0.07983098179101944
Test_acc: 0.6666666666666666
--------------------------
Epoch 73, loss: 0.07939981482923031
Test_acc: 0.6666666666666666
--------------------------
Epoch 74, loss: 0.07897369377315044
Test_acc: 0.6666666666666666
--------------------------
Epoch 75, loss: 0.07855254411697388
Test_acc: 0.7
--------------------------
Epoch 76, loss: 0.07813627645373344
Test_acc: 0.7
--------------------------
Epoch 77, loss: 0.07772481068968773
Test_acc: 0.7
--------------------------
Epoch 78, loss: 0.07731806486845016
Test_acc: 0.7
--------------------------
Epoch 79, loss: 0.07691597566008568
Test_acc: 0.7
--------------------------
Epoch 80, loss: 0.07651845179498196
Test_acc: 0.7
--------------------------
Epoch 81, loss: 0.07612544111907482
Test_acc: 0.7333333333333333
--------------------------
Epoch 82, loss: 0.07573685608804226
Test_acc: 0.7333333333333333
--------------------------
Epoch 83, loss: 0.07535265013575554
Test_acc: 0.7333333333333333
--------------------------
Epoch 84, loss: 0.07497275061905384
Test_acc: 0.7333333333333333
--------------------------
Epoch 85, loss: 0.07459708210080862
Test_acc: 0.7666666666666667
--------------------------
Epoch 86, loss: 0.07422559335827827
Test_acc: 0.7666666666666667
--------------------------
Epoch 87, loss: 0.07385822758078575
Test_acc: 0.7666666666666667
--------------------------
Epoch 88, loss: 0.07349492330104113
Test_acc: 0.7666666666666667
--------------------------
Epoch 89, loss: 0.0731356181204319
Test_acc: 0.7666666666666667
--------------------------
Epoch 90, loss: 0.0727802598848939
Test_acc: 0.7666666666666667
--------------------------
Epoch 91, loss: 0.07242879830300808
Test_acc: 0.7666666666666667
--------------------------
Epoch 92, loss: 0.07208118122071028
Test_acc: 0.7666666666666667
--------------------------
Epoch 93, loss: 0.07173734251409769
Test_acc: 0.8
--------------------------
Epoch 94, loss: 0.07139723561704159
Test_acc: 0.8
--------------------------
Epoch 95, loss: 0.07106082048267126
Test_acc: 0.8
--------------------------
Epoch 96, loss: 0.07072803843766451
Test_acc: 0.8
--------------------------
Epoch 97, loss: 0.07039883732795715
Test_acc: 0.8
--------------------------
Epoch 98, loss: 0.07007318176329136
Test_acc: 0.8333333333333334
--------------------------
Epoch 99, loss: 0.0697510102763772
Test_acc: 0.8666666666666667
--------------------------
Epoch 100, loss: 0.06943229492753744
Test_acc: 0.8666666666666667
--------------------------
Epoch 101, loss: 0.06911696959286928
Test_acc: 0.8666666666666667
--------------------------
Epoch 102, loss: 0.06880500260740519
Test_acc: 0.8666666666666667
--------------------------
Epoch 103, loss: 0.068496348336339
Test_acc: 0.8666666666666667
--------------------------
Epoch 104, loss: 0.06819095741957426
Test_acc: 0.8666666666666667
--------------------------
Epoch 105, loss: 0.06788879353553057
Test_acc: 0.8666666666666667
--------------------------
Epoch 106, loss: 0.06758981756865978
Test_acc: 0.8666666666666667
--------------------------
Epoch 107, loss: 0.0672939782962203
Test_acc: 0.9
--------------------------
Epoch 108, loss: 0.06700124125927687
Test_acc: 0.9
--------------------------
Epoch 109, loss: 0.06671155989170074
Test_acc: 0.9
--------------------------
Epoch 110, loss: 0.06642490718513727
Test_acc: 0.9
--------------------------
Epoch 111, loss: 0.06614123564213514
Test_acc: 0.9
--------------------------
Epoch 112, loss: 0.06586050800979137
Test_acc: 0.9
--------------------------
Epoch 113, loss: 0.06558268237859011
Test_acc: 0.9
--------------------------
Epoch 114, loss: 0.06530772428959608
Test_acc: 0.9
--------------------------
Epoch 115, loss: 0.06503560300916433
Test_acc: 0.9
--------------------------
Epoch 116, loss: 0.06476627010852098
Test_acc: 0.9
--------------------------
Epoch 117, loss: 0.06449970323592424
Test_acc: 0.9333333333333333
--------------------------
Epoch 118, loss: 0.06423585396260023
Test_acc: 0.9333333333333333
--------------------------
Epoch 119, loss: 0.06397469528019428
Test_acc: 0.9333333333333333
--------------------------
Epoch 120, loss: 0.06371619179844856
Test_acc: 0.9333333333333333
--------------------------
Epoch 121, loss: 0.06346031185239553
Test_acc: 0.9333333333333333
--------------------------
Epoch 122, loss: 0.06320700887590647
Test_acc: 0.9333333333333333
--------------------------
Epoch 123, loss: 0.06295627169311047
Test_acc: 0.9333333333333333
--------------------------
Epoch 124, loss: 0.06270804442465305
Test_acc: 0.9333333333333333
--------------------------
Epoch 125, loss: 0.062462314032018185
Test_acc: 0.9333333333333333
--------------------------
Epoch 126, loss: 0.062219033017754555
Test_acc: 0.9333333333333333
--------------------------
Epoch 127, loss: 0.061978185549378395
Test_acc: 0.9333333333333333
--------------------------
Epoch 128, loss: 0.06173973437398672
Test_acc: 0.9333333333333333
--------------------------
Epoch 129, loss: 0.06150364130735397
Test_acc: 0.9333333333333333
--------------------------
Epoch 130, loss: 0.06126988586038351
Test_acc: 0.9333333333333333
--------------------------
Epoch 131, loss: 0.06103843078017235
Test_acc: 0.9333333333333333
--------------------------
Epoch 132, loss: 0.060809263959527016
Test_acc: 0.9333333333333333
--------------------------
Epoch 133, loss: 0.06058233231306076
Test_acc: 0.9333333333333333
--------------------------
Epoch 134, loss: 0.06035762373358011
Test_acc: 0.9333333333333333
--------------------------
Epoch 135, loss: 0.06013510562479496
Test_acc: 0.9333333333333333
--------------------------
Epoch 136, loss: 0.05991474352777004
Test_acc: 0.9333333333333333
--------------------------
Epoch 137, loss: 0.05969652719795704
Test_acc: 0.9333333333333333
--------------------------
Epoch 138, loss: 0.05948041472584009
Test_acc: 0.9333333333333333
--------------------------
Epoch 139, loss: 0.059266386553645134
Test_acc: 0.9333333333333333
--------------------------
Epoch 140, loss: 0.059054408222436905
Test_acc: 0.9333333333333333
--------------------------
Epoch 141, loss: 0.058844465762376785
Test_acc: 0.9333333333333333
--------------------------
Epoch 142, loss: 0.05863652750849724
Test_acc: 0.9333333333333333
--------------------------
Epoch 143, loss: 0.058430563658475876
Test_acc: 0.9333333333333333
--------------------------
Epoch 144, loss: 0.058226559311151505
Test_acc: 0.9333333333333333
--------------------------
Epoch 145, loss: 0.05802448187023401
Test_acc: 0.9333333333333333
--------------------------
Epoch 146, loss: 0.05782431084662676
Test_acc: 0.9333333333333333
--------------------------
Epoch 147, loss: 0.0576260257512331
Test_acc: 0.9333333333333333
--------------------------
Epoch 148, loss: 0.05742959305644035
Test_acc: 0.9333333333333333
--------------------------
Epoch 149, loss: 0.057234992273151875
Test_acc: 0.9333333333333333
--------------------------
Epoch 150, loss: 0.05704221595078707
Test_acc: 0.9333333333333333
--------------------------
Epoch 151, loss: 0.05685121938586235
Test_acc: 0.9333333333333333
--------------------------
Epoch 152, loss: 0.05666199326515198
Test_acc: 0.9333333333333333
--------------------------
Epoch 153, loss: 0.05647451523691416
Test_acc: 0.9333333333333333
--------------------------
Epoch 154, loss: 0.0562887629494071
Test_acc: 0.9333333333333333
--------------------------
Epoch 155, loss: 0.05610471125692129
Test_acc: 0.9333333333333333
--------------------------
Epoch 156, loss: 0.05592234432697296
Test_acc: 0.9333333333333333
--------------------------
Epoch 157, loss: 0.0557416332885623
Test_acc: 0.9333333333333333
--------------------------
Epoch 158, loss: 0.05556256324052811
Test_acc: 0.9333333333333333
--------------------------
Epoch 159, loss: 0.05538512021303177
Test_acc: 0.9333333333333333
--------------------------
Epoch 160, loss: 0.05520927160978317
Test_acc: 0.9333333333333333
--------------------------
Epoch 161, loss: 0.0550350034609437
Test_acc: 0.9333333333333333
--------------------------
Epoch 162, loss: 0.054862307384610176
Test_acc: 0.9333333333333333
--------------------------
Epoch 163, loss: 0.05469114426523447
Test_acc: 0.9333333333333333
--------------------------
Epoch 164, loss: 0.05452151037752628
Test_acc: 0.9666666666666667
--------------------------
Epoch 165, loss: 0.05435337871313095
Test_acc: 0.9666666666666667
--------------------------
Epoch 166, loss: 0.05418673437088728
Test_acc: 0.9666666666666667
--------------------------
Epoch 167, loss: 0.054021554067730904
Test_acc: 0.9666666666666667
--------------------------
Epoch 168, loss: 0.053857832215726376
Test_acc: 0.9666666666666667
--------------------------
Epoch 169, loss: 0.05369554739445448
Test_acc: 0.9666666666666667
--------------------------
Epoch 170, loss: 0.05353467632085085
Test_acc: 0.9666666666666667
--------------------------
Epoch 171, loss: 0.05337520316243172
Test_acc: 0.9666666666666667
--------------------------
Epoch 172, loss: 0.05321711581200361
Test_acc: 0.9666666666666667
--------------------------
Epoch 173, loss: 0.053060390055179596
Test_acc: 0.9666666666666667
--------------------------
Epoch 174, loss: 0.05290501844137907
Test_acc: 0.9666666666666667
--------------------------
Epoch 175, loss: 0.05275098513811827
Test_acc: 0.9666666666666667
--------------------------
Epoch 176, loss: 0.0525982566177845
Test_acc: 0.9666666666666667
--------------------------
Epoch 177, loss: 0.05244683939963579
Test_acc: 0.9666666666666667
--------------------------
Epoch 178, loss: 0.05229670740664005
Test_acc: 0.9666666666666667
--------------------------
Epoch 179, loss: 0.05214785039424896
Test_acc: 0.9666666666666667
--------------------------
Epoch 180, loss: 0.052000246942043304
Test_acc: 0.9666666666666667
--------------------------
Epoch 181, loss: 0.05185388680547476
Test_acc: 0.9666666666666667
--------------------------
Epoch 182, loss: 0.05170875135809183
Test_acc: 0.9666666666666667
--------------------------
Epoch 183, loss: 0.0515648303553462
Test_acc: 0.9666666666666667
--------------------------
Epoch 184, loss: 0.0514221116900444
Test_acc: 0.9666666666666667
--------------------------
Epoch 185, loss: 0.05128058046102524
Test_acc: 1.0
--------------------------
Epoch 186, loss: 0.05114021524786949
Test_acc: 1.0
--------------------------
Epoch 187, loss: 0.051001012325286865
Test_acc: 1.0
--------------------------
Epoch 188, loss: 0.0508629409596324
Test_acc: 1.0
--------------------------
Epoch 189, loss: 0.05072600767016411
Test_acc: 1.0
--------------------------
Epoch 190, loss: 0.05059019848704338
Test_acc: 1.0
--------------------------
Epoch 191, loss: 0.05045548640191555
Test_acc: 1.0
--------------------------
Epoch 192, loss: 0.050321875140070915
Test_acc: 1.0
--------------------------
Epoch 193, loss: 0.05018933489918709
Test_acc: 1.0
--------------------------
Epoch 194, loss: 0.05005786381661892
Test_acc: 1.0
--------------------------
Epoch 195, loss: 0.04992745537310839
Test_acc: 1.0
--------------------------
Epoch 196, loss: 0.04979807883501053
Test_acc: 1.0
--------------------------
Epoch 197, loss: 0.04966974165290594
Test_acc: 1.0
--------------------------
Epoch 198, loss: 0.04954242426902056
Test_acc: 1.0
--------------------------
Epoch 199, loss: 0.04941611457616091
Test_acc: 1.0
--------------------------
Epoch 200, loss: 0.049290805123746395
Test_acc: 1.0
--------------------------
Epoch 201, loss: 0.049166472628712654
Test_acc: 1.0
--------------------------
Epoch 202, loss: 0.04904312454164028
Test_acc: 1.0
--------------------------
Epoch 203, loss: 0.04892073106020689
Test_acc: 1.0
--------------------------
Epoch 204, loss: 0.04879929404705763
Test_acc: 1.0
--------------------------
Epoch 205, loss: 0.04867880046367645
Test_acc: 1.0
--------------------------
Epoch 206, loss: 0.04855923913419247
Test_acc: 1.0
--------------------------
Epoch 207, loss: 0.048440598882734776
Test_acc: 1.0
--------------------------
Epoch 208, loss: 0.04832286946475506
Test_acc: 1.0
--------------------------
Epoch 209, loss: 0.04820604622364044
Test_acc: 1.0
--------------------------
Epoch 210, loss: 0.04809010960161686
Test_acc: 1.0
--------------------------
Epoch 211, loss: 0.04797505959868431
Test_acc: 1.0
--------------------------
Epoch 212, loss: 0.04786087851971388
Test_acc: 1.0
--------------------------
Epoch 213, loss: 0.047747560776770115
Test_acc: 1.0
--------------------------
Epoch 214, loss: 0.04763508960604668
Test_acc: 1.0
--------------------------
Epoch 215, loss: 0.04752346687018871
Test_acc: 1.0
--------------------------
Epoch 216, loss: 0.0474126823246479
Test_acc: 1.0
--------------------------
Epoch 217, loss: 0.04730272572487593
Test_acc: 1.0
--------------------------
Epoch 218, loss: 0.04719358030706644
Test_acc: 1.0
--------------------------
Epoch 219, loss: 0.04708524979650974
Test_acc: 1.0
--------------------------
Epoch 220, loss: 0.046977708116173744
Test_acc: 1.0
--------------------------
Epoch 221, loss: 0.046870965510606766
Test_acc: 1.0
--------------------------
Epoch 222, loss: 0.04676500242203474
Test_acc: 1.0
--------------------------
Epoch 223, loss: 0.046659816056489944
Test_acc: 1.0
--------------------------
Epoch 224, loss: 0.046555391512811184
Test_acc: 1.0
--------------------------
Epoch 225, loss: 0.04645173158496618
Test_acc: 1.0
--------------------------
Epoch 226, loss: 0.046348825097084045
Test_acc: 1.0
--------------------------
Epoch 227, loss: 0.04624664504081011
Test_acc: 1.0
--------------------------
Epoch 228, loss: 0.046145214699208736
Test_acc: 1.0
--------------------------
Epoch 229, loss: 0.04604450333863497
Test_acc: 1.0
--------------------------
Epoch 230, loss: 0.04594451282173395
Test_acc: 1.0
--------------------------
Epoch 231, loss: 0.045845234766602516
Test_acc: 1.0
--------------------------
Epoch 232, loss: 0.04574666079133749
Test_acc: 1.0
--------------------------
Epoch 233, loss: 0.04564878437668085
Test_acc: 1.0
--------------------------
Epoch 234, loss: 0.04555159714072943
Test_acc: 1.0
--------------------------
Epoch 235, loss: 0.04545509163290262
Test_acc: 1.0
--------------------------
Epoch 236, loss: 0.045359269715845585
Test_acc: 1.0
--------------------------
Epoch 237, loss: 0.04526410344988108
Test_acc: 1.0
--------------------------
Epoch 238, loss: 0.04516960680484772
Test_acc: 1.0
--------------------------
Epoch 239, loss: 0.04507576581090689
Test_acc: 1.0
--------------------------
Epoch 240, loss: 0.044982570223510265
Test_acc: 1.0
--------------------------
Epoch 241, loss: 0.04489001724869013
Test_acc: 1.0
--------------------------
Epoch 242, loss: 0.04479810781776905
Test_acc: 1.0
--------------------------
Epoch 243, loss: 0.04470681864768267
Test_acc: 1.0
--------------------------
Epoch 244, loss: 0.04461614973843098
Test_acc: 1.0
--------------------------
Epoch 245, loss: 0.04452611040323973
Test_acc: 1.0
--------------------------
Epoch 246, loss: 0.04443667363375425
Test_acc: 1.0
--------------------------
Epoch 247, loss: 0.044347839429974556
Test_acc: 1.0
--------------------------
Epoch 248, loss: 0.04425961058586836
Test_acc: 1.0
--------------------------
Epoch 249, loss: 0.04417197220027447
Test_acc: 1.0
--------------------------
Epoch 250, loss: 0.044084908440709114
Test_acc: 1.0
--------------------------
Epoch 251, loss: 0.043998440727591515
Test_acc: 1.0
--------------------------
Epoch 252, loss: 0.04391253925859928
Test_acc: 1.0
--------------------------
Epoch 253, loss: 0.043827205896377563
Test_acc: 1.0
--------------------------
Epoch 254, loss: 0.04374244436621666
Test_acc: 1.0
--------------------------
Epoch 255, loss: 0.04365824069827795
Test_acc: 1.0
--------------------------
Epoch 256, loss: 0.04357459116727114
Test_acc: 1.0
--------------------------
Epoch 257, loss: 0.043491488322615623
Test_acc: 1.0
--------------------------
Epoch 258, loss: 0.043408920988440514
Test_acc: 1.0
--------------------------
Epoch 259, loss: 0.043326896615326405
Test_acc: 1.0
--------------------------
Epoch 260, loss: 0.04324540589004755
Test_acc: 1.0
--------------------------
Epoch 261, loss: 0.043164435774087906
Test_acc: 1.0
--------------------------
Epoch 262, loss: 0.04308399651199579
Test_acc: 1.0
--------------------------
Epoch 263, loss: 0.043004062958061695
Test_acc: 1.0
--------------------------
Epoch 264, loss: 0.04292465187609196
Test_acc: 1.0
--------------------------
Epoch 265, loss: 0.04284573998302221
Test_acc: 1.0
--------------------------
Epoch 266, loss: 0.04276733938604593
Test_acc: 1.0
--------------------------
Epoch 267, loss: 0.042689427733421326
Test_acc: 1.0
--------------------------
Epoch 268, loss: 0.042612009681761265
Test_acc: 1.0
--------------------------
Epoch 269, loss: 0.042535084299743176
Test_acc: 1.0
--------------------------
Epoch 270, loss: 0.04245864413678646
Test_acc: 1.0
--------------------------
Epoch 271, loss: 0.0423826826736331
Test_acc: 1.0
--------------------------
Epoch 272, loss: 0.042307195253670216
Test_acc: 1.0
--------------------------
Epoch 273, loss: 0.04223217722028494
Test_acc: 1.0
--------------------------
Epoch 274, loss: 0.0421576201915741
Test_acc: 1.0
--------------------------
Epoch 275, loss: 0.042083533480763435
Test_acc: 1.0
--------------------------
Epoch 276, loss: 0.04200989939272404
Test_acc: 1.0
--------------------------
Epoch 277, loss: 0.041936714202165604
Test_acc: 1.0
--------------------------
Epoch 278, loss: 0.041863986290991306
Test_acc: 1.0
--------------------------
Epoch 279, loss: 0.04179169982671738
Test_acc: 1.0
--------------------------
Epoch 280, loss: 0.041719854809343815
Test_acc: 1.0
--------------------------
Epoch 281, loss: 0.041648441925644875
Test_acc: 1.0
--------------------------
Epoch 282, loss: 0.04157746955752373
Test_acc: 1.0
--------------------------
Epoch 283, loss: 0.04150692094117403
Test_acc: 1.0
--------------------------
Epoch 284, loss: 0.04143680352717638
Test_acc: 1.0
--------------------------
Epoch 285, loss: 0.04136709962040186
Test_acc: 1.0
--------------------------
Epoch 286, loss: 0.04129782039672136
Test_acc: 1.0
--------------------------
Epoch 287, loss: 0.04122895374894142
Test_acc: 1.0
--------------------------
Epoch 288, loss: 0.04116049408912659
Test_acc: 1.0
--------------------------
Epoch 289, loss: 0.04109244793653488
Test_acc: 1.0
--------------------------
Epoch 290, loss: 0.04102479945868254
Test_acc: 1.0
--------------------------
Epoch 291, loss: 0.04095755238085985
Test_acc: 1.0
--------------------------
Epoch 292, loss: 0.040890694595873356
Test_acc: 1.0
--------------------------
Epoch 293, loss: 0.040824233554303646
Test_acc: 1.0
--------------------------
Epoch 294, loss: 0.040758166462183
Test_acc: 1.0
--------------------------
Epoch 295, loss: 0.040692479349672794
Test_acc: 1.0
--------------------------
Epoch 296, loss: 0.04062717128545046
Test_acc: 1.0
--------------------------
Epoch 297, loss: 0.040562248788774014
Test_acc: 1.0
--------------------------
Epoch 298, loss: 0.04049769788980484
Test_acc: 1.0
--------------------------
Epoch 299, loss: 0.04043351951986551
Test_acc: 1.0
--------------------------
Epoch 300, loss: 0.04036970995366573
Test_acc: 1.0
--------------------------
Epoch 301, loss: 0.040306271985173225
Test_acc: 1.0
--------------------------
Epoch 302, loss: 0.04024319350719452
Test_acc: 1.0
--------------------------
Epoch 303, loss: 0.04018046986311674
Test_acc: 1.0
--------------------------
Epoch 304, loss: 0.040118103846907616
Test_acc: 1.0
--------------------------
Epoch 305, loss: 0.04005609406158328
Test_acc: 1.0
--------------------------
Epoch 306, loss: 0.03999443957582116
Test_acc: 1.0
--------------------------
Epoch 307, loss: 0.03993312222883105
Test_acc: 1.0
--------------------------
Epoch 308, loss: 0.039872155059129
Test_acc: 1.0
--------------------------
Epoch 309, loss: 0.03981153108179569
Test_acc: 1.0
--------------------------
Epoch 310, loss: 0.03975124144926667
Test_acc: 1.0
--------------------------
Epoch 311, loss: 0.03969129454344511
Test_acc: 1.0
--------------------------
Epoch 312, loss: 0.03963167825713754
Test_acc: 1.0
--------------------------
Epoch 313, loss: 0.039572385139763355
Test_acc: 1.0
--------------------------
Epoch 314, loss: 0.0395134249702096
Test_acc: 1.0
--------------------------
Epoch 315, loss: 0.039454787503927946
Test_acc: 1.0
--------------------------
Epoch 316, loss: 0.039396482054144144
Test_acc: 1.0
--------------------------
Epoch 317, loss: 0.03933848813176155
Test_acc: 1.0
--------------------------
Epoch 318, loss: 0.03928081039339304
Test_acc: 1.0
--------------------------
Epoch 319, loss: 0.039223446510732174
Test_acc: 1.0
--------------------------
Epoch 320, loss: 0.039166401606053114
Test_acc: 1.0
--------------------------
Epoch 321, loss: 0.039109662640839815
Test_acc: 1.0
--------------------------
Epoch 322, loss: 0.03905322263017297
Test_acc: 1.0
--------------------------
Epoch 323, loss: 0.03899709461256862
Test_acc: 1.0
--------------------------
Epoch 324, loss: 0.03894126648083329
Test_acc: 1.0
--------------------------
Epoch 325, loss: 0.038885737769305706
Test_acc: 1.0
--------------------------
Epoch 326, loss: 0.03883049916476011
Test_acc: 1.0
--------------------------
Epoch 327, loss: 0.03877555998042226
Test_acc: 1.0
--------------------------
Epoch 328, loss: 0.03872091369703412
Test_acc: 1.0
--------------------------
Epoch 329, loss: 0.038666556123644114
Test_acc: 1.0
--------------------------
Epoch 330, loss: 0.0386124849319458
Test_acc: 1.0
--------------------------
Epoch 331, loss: 0.038558701518923044
Test_acc: 1.0
--------------------------
Epoch 332, loss: 0.03850520169362426
Test_acc: 1.0
--------------------------
Epoch 333, loss: 0.03845197660848498
Test_acc: 1.0
--------------------------
Epoch 334, loss: 0.03839903511106968
Test_acc: 1.0
--------------------------
Epoch 335, loss: 0.03834636742249131
Test_acc: 1.0
--------------------------
Epoch 336, loss: 0.03829397866502404
Test_acc: 1.0
--------------------------
Epoch 337, loss: 0.038241852074861526
Test_acc: 1.0
--------------------------
Epoch 338, loss: 0.03819000208750367
Test_acc: 1.0
--------------------------
Epoch 339, loss: 0.03813841659575701
Test_acc: 1.0
--------------------------
Epoch 340, loss: 0.038087102584540844
Test_acc: 1.0
--------------------------
Epoch 341, loss: 0.03803604608401656
Test_acc: 1.0
--------------------------
Epoch 342, loss: 0.037985255010426044
Test_acc: 1.0
--------------------------
Epoch 343, loss: 0.0379347144626081
Test_acc: 1.0
--------------------------
Epoch 344, loss: 0.03788443887606263
Test_acc: 1.0
--------------------------
Epoch 345, loss: 0.037834418937563896
Test_acc: 1.0
--------------------------
Epoch 346, loss: 0.03778465045616031
Test_acc: 1.0
--------------------------
Epoch 347, loss: 0.03773513110354543
Test_acc: 1.0
--------------------------
Epoch 348, loss: 0.03768586413934827
Test_acc: 1.0
--------------------------
Epoch 349, loss: 0.03763684118166566
Test_acc: 1.0
--------------------------
Epoch 350, loss: 0.037588071543723345
Test_acc: 1.0
--------------------------
Epoch 351, loss: 0.037539539858698845
Test_acc: 1.0
--------------------------
Epoch 352, loss: 0.03749124659225345
Test_acc: 1.0
--------------------------
Epoch 353, loss: 0.03744320431724191
Test_acc: 1.0
--------------------------
Epoch 354, loss: 0.03739539487287402
Test_acc: 1.0
--------------------------
Epoch 355, loss: 0.037347821053117514
Test_acc: 1.0
--------------------------
Epoch 356, loss: 0.037300472147762775
Test_acc: 1.0
--------------------------
Epoch 357, loss: 0.03725337469950318
Test_acc: 1.0
--------------------------
Epoch 358, loss: 0.03720649937167764
Test_acc: 1.0
--------------------------
Epoch 359, loss: 0.037159846629947424
Test_acc: 1.0
--------------------------
Epoch 360, loss: 0.03711343090981245
Test_acc: 1.0
--------------------------
Epoch 361, loss: 0.03706724522635341
Test_acc: 1.0
--------------------------
Epoch 362, loss: 0.03702127141878009
Test_acc: 1.0
--------------------------
Epoch 363, loss: 0.036975531373173
Test_acc: 1.0
--------------------------
Epoch 364, loss: 0.036930006463080645
Test_acc: 1.0
--------------------------
Epoch 365, loss: 0.036884702276438475
Test_acc: 1.0
--------------------------
Epoch 366, loss: 0.03683961182832718
Test_acc: 1.0
--------------------------
Epoch 367, loss: 0.036794747691601515
Test_acc: 1.0
--------------------------
Epoch 368, loss: 0.03675009263679385
Test_acc: 1.0
--------------------------
Epoch 369, loss: 0.03670565178617835
Test_acc: 1.0
--------------------------
Epoch 370, loss: 0.03666142327710986
Test_acc: 1.0
--------------------------
Epoch 371, loss: 0.036617396865040064
Test_acc: 1.0
--------------------------
Epoch 372, loss: 0.03657358651980758
Test_acc: 1.0
--------------------------
Epoch 373, loss: 0.03652997827157378
Test_acc: 1.0
--------------------------
Epoch 374, loss: 0.03648658422753215
Test_acc: 1.0
--------------------------
Epoch 375, loss: 0.03644339134916663
Test_acc: 1.0
--------------------------
Epoch 376, loss: 0.03640039125457406
Test_acc: 1.0
--------------------------
Epoch 377, loss: 0.03635760257020593
Test_acc: 1.0
--------------------------
Epoch 378, loss: 0.0363150117918849
Test_acc: 1.0
--------------------------
Epoch 379, loss: 0.03627262031659484
Test_acc: 1.0
--------------------------
Epoch 380, loss: 0.036230423022061586
Test_acc: 1.0
--------------------------
Epoch 381, loss: 0.03618842549622059
Test_acc: 1.0
--------------------------
Epoch 382, loss: 0.03614661982282996
Test_acc: 1.0
--------------------------
Epoch 383, loss: 0.036105002742260695
Test_acc: 1.0
--------------------------
Epoch 384, loss: 0.03606357332319021
Test_acc: 1.0
--------------------------
Epoch 385, loss: 0.036022343672811985
Test_acc: 1.0
--------------------------
Epoch 386, loss: 0.03598130075260997
Test_acc: 1.0
--------------------------
Epoch 387, loss: 0.0359404431656003
Test_acc: 1.0
--------------------------
Epoch 388, loss: 0.035899775102734566
Test_acc: 1.0
--------------------------
Epoch 389, loss: 0.03585928911343217
Test_acc: 1.0
--------------------------
Epoch 390, loss: 0.035818991251289845
Test_acc: 1.0
--------------------------
Epoch 391, loss: 0.03577886475250125
Test_acc: 1.0
--------------------------
Epoch 392, loss: 0.035738930106163025
Test_acc: 1.0
--------------------------
Epoch 393, loss: 0.035699169617146254
Test_acc: 1.0
--------------------------
Epoch 394, loss: 0.03565958747640252
Test_acc: 1.0
--------------------------
Epoch 395, loss: 0.0356201883405447
Test_acc: 1.0
--------------------------
Epoch 396, loss: 0.03558096336200833
Test_acc: 1.0
--------------------------
Epoch 397, loss: 0.035541911609470844
Test_acc: 1.0
--------------------------
Epoch 398, loss: 0.035503033082932234
Test_acc: 1.0
--------------------------
Epoch 399, loss: 0.03546432685106993
Test_acc: 1.0
--------------------------
Epoch 400, loss: 0.03542578825727105
Test_acc: 1.0
--------------------------
Epoch 401, loss: 0.03538742894306779
Test_acc: 1.0
--------------------------
Epoch 402, loss: 0.03534923028200865
Test_acc: 1.0
--------------------------
Epoch 403, loss: 0.03531120624393225
Test_acc: 1.0
--------------------------
Epoch 404, loss: 0.03527334099635482
Test_acc: 1.0
--------------------------
Epoch 405, loss: 0.035235646180808544
Test_acc: 1.0
--------------------------
Epoch 406, loss: 0.035198114812374115
Test_acc: 1.0
--------------------------
Epoch 407, loss: 0.03516074409708381
Test_acc: 1.0
--------------------------
Epoch 408, loss: 0.03512354753911495
Test_acc: 1.0
--------------------------
Epoch 409, loss: 0.035086496733129025
Test_acc: 1.0
--------------------------
Epoch 410, loss: 0.03504961961880326
Test_acc: 1.0
--------------------------
Epoch 411, loss: 0.03501288779079914
Test_acc: 1.0
--------------------------
Epoch 412, loss: 0.03497632406651974
Test_acc: 1.0
--------------------------
Epoch 413, loss: 0.034939910750836134
Test_acc: 1.0
--------------------------
Epoch 414, loss: 0.034903660882264376
Test_acc: 1.0
--------------------------
Epoch 415, loss: 0.03486755723133683
Test_acc: 1.0
--------------------------
Epoch 416, loss: 0.034831615164875984
Test_acc: 1.0
--------------------------
Epoch 417, loss: 0.03479582257568836
Test_acc: 1.0
--------------------------
Epoch 418, loss: 0.034760179463773966
Test_acc: 1.0
--------------------------
Epoch 419, loss: 0.03472469002008438
Test_acc: 1.0
--------------------------
Epoch 420, loss: 0.034689351450651884
Test_acc: 1.0
--------------------------
Epoch 421, loss: 0.03465416468679905
Test_acc: 1.0
--------------------------
Epoch 422, loss: 0.03461912088096142
Test_acc: 1.0
--------------------------
Epoch 423, loss: 0.034584226086735725
Test_acc: 1.0
--------------------------
Epoch 424, loss: 0.0345494719222188
Test_acc: 1.0
--------------------------
Epoch 425, loss: 0.03451487235724926
Test_acc: 1.0
--------------------------
Epoch 426, loss: 0.034480408765375614
Test_acc: 1.0
--------------------------
Epoch 427, loss: 0.03444609651342034
Test_acc: 1.0
--------------------------
Epoch 428, loss: 0.03441191930323839
Test_acc: 1.0
--------------------------
Epoch 429, loss: 0.034377888310700655
Test_acc: 1.0
--------------------------
Epoch 430, loss: 0.0343439974822104
Test_acc: 1.0
--------------------------
Epoch 431, loss: 0.03431024495512247
Test_acc: 1.0
--------------------------
Epoch 432, loss: 0.034276632592082024
Test_acc: 1.0
--------------------------
Epoch 433, loss: 0.03424314921721816
Test_acc: 1.0
--------------------------
Epoch 434, loss: 0.034209814853966236
Test_acc: 1.0
--------------------------
Epoch 435, loss: 0.034176611341536045
Test_acc: 1.0
--------------------------
Epoch 436, loss: 0.03414354659616947
Test_acc: 1.0
--------------------------
Epoch 437, loss: 0.034110613632947206
Test_acc: 1.0
--------------------------
Epoch 438, loss: 0.03407781524583697
Test_acc: 1.0
--------------------------
Epoch 439, loss: 0.03404514491558075
Test_acc: 1.0
--------------------------
Epoch 440, loss: 0.03401261428371072
Test_acc: 1.0
--------------------------
Epoch 441, loss: 0.03398020705208182
Test_acc: 1.0
--------------------------
Epoch 442, loss: 0.03394793579354882
Test_acc: 1.0
--------------------------
Epoch 443, loss: 0.03391578933224082
Test_acc: 1.0
--------------------------
Epoch 444, loss: 0.033883774653077126
Test_acc: 1.0
--------------------------
Epoch 445, loss: 0.03385189222171903
Test_acc: 1.0
--------------------------
Epoch 446, loss: 0.03382013039663434
Test_acc: 1.0
--------------------------
Epoch 447, loss: 0.033788496162742376
Test_acc: 1.0
--------------------------
Epoch 448, loss: 0.033756992779672146
Test_acc: 1.0
--------------------------
Epoch 449, loss: 0.03372560581192374
Test_acc: 1.0
--------------------------
Epoch 450, loss: 0.03369434829801321
Test_acc: 1.0
--------------------------
Epoch 451, loss: 0.03366321278735995
Test_acc: 1.0
--------------------------
Epoch 452, loss: 0.03363220160827041
Test_acc: 1.0
--------------------------
Epoch 453, loss: 0.03360130963847041
Test_acc: 1.0
--------------------------
Epoch 454, loss: 0.033570545725524426
Test_acc: 1.0
--------------------------
Epoch 455, loss: 0.03353988938033581
Test_acc: 1.0
--------------------------
Epoch 456, loss: 0.03350936621427536
Test_acc: 1.0
--------------------------
Epoch 457, loss: 0.03347895201295614
Test_acc: 1.0
--------------------------
Epoch 458, loss: 0.03344865795224905
Test_acc: 1.0
--------------------------
Epoch 459, loss: 0.033418478444218636
Test_acc: 1.0
--------------------------
Epoch 460, loss: 0.033388426061719656
Test_acc: 1.0
--------------------------
Epoch 461, loss: 0.033358484506607056
Test_acc: 1.0
--------------------------
Epoch 462, loss: 0.033328657038509846
Test_acc: 1.0
--------------------------
Epoch 463, loss: 0.033298940397799015
Test_acc: 1.0
--------------------------
Epoch 464, loss: 0.033269339706748724
Test_acc: 1.0
--------------------------
Epoch 465, loss: 0.03323985496535897
Test_acc: 1.0
--------------------------
Epoch 466, loss: 0.03321048337966204
Test_acc: 1.0
--------------------------
Epoch 467, loss: 0.03318122075870633
Test_acc: 1.0
--------------------------
Epoch 468, loss: 0.0331520764157176
Test_acc: 1.0
--------------------------
Epoch 469, loss: 0.033123028464615345
Test_acc: 1.0
--------------------------
Epoch 470, loss: 0.033094107173383236
Test_acc: 1.0
--------------------------
Epoch 471, loss: 0.033065286464989185
Test_acc: 1.0
--------------------------
Epoch 472, loss: 0.0330365770496428
Test_acc: 1.0
--------------------------
Epoch 473, loss: 0.03300797566771507
Test_acc: 1.0
--------------------------
Epoch 474, loss: 0.03297947999089956
Test_acc: 1.0
--------------------------
Epoch 475, loss: 0.03295109001919627
Test_acc: 1.0
--------------------------
Epoch 476, loss: 0.03292280342429876
Test_acc: 1.0
--------------------------
Epoch 477, loss: 0.03289463231340051
Test_acc: 1.0
--------------------------
Epoch 478, loss: 0.0328665585257113
Test_acc: 1.0
--------------------------
Epoch 479, loss: 0.03283859835937619
Test_acc: 1.0
--------------------------
Epoch 480, loss: 0.03281073085963726
Test_acc: 1.0
--------------------------
Epoch 481, loss: 0.03278297046199441
Test_acc: 1.0
--------------------------
Epoch 482, loss: 0.032755312509834766
Test_acc: 1.0
--------------------------
Epoch 483, loss: 0.03272775420919061
Test_acc: 1.0
--------------------------
Epoch 484, loss: 0.03270029416307807
Test_acc: 1.0
--------------------------
Epoch 485, loss: 0.032672947738319635
Test_acc: 1.0
--------------------------
Epoch 486, loss: 0.032645690720528364
Test_acc: 1.0
--------------------------
Epoch 487, loss: 0.03261853428557515
Test_acc: 1.0
--------------------------
Epoch 488, loss: 0.03259148681536317
Test_acc: 1.0
--------------------------
Epoch 489, loss: 0.03256453387439251
Test_acc: 1.0
--------------------------
Epoch 490, loss: 0.032537673600018024
Test_acc: 1.0
--------------------------
Epoch 491, loss: 0.03251091670244932
Test_acc: 1.0
--------------------------
Epoch 492, loss: 0.03248425014317036
Test_acc: 1.0
--------------------------
Epoch 493, loss: 0.0324576823040843
Test_acc: 1.0
--------------------------
Epoch 494, loss: 0.032431211322546005
Test_acc: 1.0
--------------------------
Epoch 495, loss: 0.03240483347326517
Test_acc: 1.0
--------------------------
Epoch 496, loss: 0.03237855713814497
Test_acc: 1.0
--------------------------
Epoch 497, loss: 0.03235236741602421
Test_acc: 1.0
--------------------------
Epoch 498, loss: 0.03232627175748348
Test_acc: 1.0
--------------------------
Epoch 499, loss: 0.0323002771474421
Test_acc: 1.0
--------------------------
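From the output above, the test accuracy climbs in steps (0.53 → 0.6 → 0.7 → ... → 0.97) and stays at 1.0 from roughly epoch 185 onward, while the average loss keeps shrinking slowly, so the model has essentially converged on this train/test split. The per-epoch values were also appended to train_loss_results and test_acc during training, so they can be visualized. If the script does not already plot these curves, the following is a minimal sketch; it assumes matplotlib is installed (pip install matplotlib) and that the two lists from the training loop above are still in scope:
import matplotlib.pyplot as plt  # assumption: matplotlib is available in this environment

# Loss curve: one point per epoch, each value being the average of the 4 step losses
plt.figure()
plt.title("Loss Function Curve")
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.plot(train_loss_results, label="$Loss$")
plt.legend()
plt.show()

# Accuracy curve: one point per epoch, computed on the 30-sample test set
plt.figure()
plt.title("Acc Curve")
plt.xlabel("Epoch")
plt.ylabel("Acc")
plt.plot(test_acc, label="$Accuracy$")
plt.legend()
plt.show()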