diff --git a/README.md b/README.md index 07ea68d..a8212b7 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ # SES2020spring ## Another day + +## The third day \ No newline at end of file diff --git a/pytorch_cancer_torch.py b/pytorch_cancer_torch.py new file mode 100644 index 0000000..ab65dfe --- /dev/null +++ b/pytorch_cancer_torch.py @@ -0,0 +1,85 @@ +import numpy as np +import torch +import torch.nn.functional as F +from torch.autograd import Variable +import pandas as pd +from sklearn import datasets +from sklearn.model_selection import train_test_split +from sklearn.linear_model import LogisticRegression +from sklearn import preprocessing + +#利用sklean自带的cancer数据集 +X,T= datasets.load_breast_cancer(return_X_y=True) +#数据预处理,标准化 +for i in range(X.shape[1]): + mean = np.mean(X[:,i]) + std = np.std(X[:,i]) + X[:,i] = (X[:,i] - mean)/std +#划分训练集和测试集,比例为7:3,交叉验证 +X_train,X_test,T_train,T_test = train_test_split(X,T,train_size=0.7,test_size=0.3) +x_data = Variable(torch.Tensor(X_train)) +y_data = Variable(torch.Tensor(T_train)) +x_test = Variable(torch.Tensor(X_test)) +t_test = Variable(torch.Tensor(T_test)) + +class Model(torch.nn.Module): + def __init__(self): + super(Model, self).__init__() + self.linear = torch.nn.Linear(x_data.shape[1], 1) # 30 in and 1 out + + def forward(self, x): + #y_pred = torch.sigmoid(self.linear(x)) + y_pred = self.linear(x).sigmoid() + return y_pred + +# Our model +model = Model() + +criterion = torch.nn.BCELoss(reduction="mean") +optimizer = torch.optim.SGD(model.parameters(), lr=0.01) + +f = open("result.txt", 'w') +# Training loop +for epoch in range(1000): + # Forward pass: Compute predicted y by passing x to the model + y_pred = model(x_data) + pre=y_pred.ge(0.5).float() + correct = 0 + for i in range(y_data.size(0)): + if pre[i] == y_data[i]: + correct += 1 + acc = correct / y_data.size(0) #计算Accuracy + print(epoch) + print('accuracy is',acc) + + # 计算损失 + loss = criterion(y_pred, y_data) + print(' loss is', loss.data.item()) + 
+ # Zero gradients, perform a backward pass, and update the weights. + optimizer.zero_grad() + loss.backward() + optimizer.step() + +for param in model.parameters(): + print('data is') + print(param.data) + print(param.grad) + +w = list(model.parameters()) +w0 = w[0].data.numpy() +w1 = w[1].data.numpy() + +import matplotlib.pyplot as plt + +print("Final gradient descend:", w) +# plot the data and separating line +plt.scatter(x_data[:,0], x_data[:,1], c=y_data.reshape(len(x_data)), s=100, alpha=0.7) +x_axis = np.linspace(-6, 6, 100) +y_axis = -(w1[0] + x_axis*w0[0][0]) / w0[0][1] +line_up, = plt.plot(x_axis, y_axis,'r--', label='gradient descent') +plt.legend(handles=[line_up]) +plt.xlabel('X(1)') +plt.ylabel('X(2)') +plt.show() +f.close() diff --git a/pytorch_cancer_torch_update.py b/pytorch_cancer_torch_update.py new file mode 100644 index 0000000..81949ab --- /dev/null +++ b/pytorch_cancer_torch_update.py @@ -0,0 +1,96 @@ +import numpy as np +import torch +import torch.nn.functional as F +from torch.autograd import Variable +import pandas as pd +from sklearn import datasets +from sklearn.model_selection import train_test_split +from sklearn.linear_model import LogisticRegression +from sklearn import preprocessing + +#利用sklearn自带的cancer数据集 +X,T= datasets.load_breast_cancer(return_X_y=True) +#数据预处理,标准化 +for i in range(X.shape[1]): + mean = np.mean(X[:,i]) + std = np.std(X[:,i]) + X[:,i] = (X[:,i] - mean)/std +#划分训练集和测试集,比例为7:3,交叉验证 +X_train,X_test,T_train,T_test = train_test_split(X,T,train_size=0.7,test_size=0.3) +x_data = Variable(torch.Tensor(X_train)) +y_data = Variable(torch.Tensor(T_train)) +x_test = Variable(torch.Tensor(X_test)) +t_test = Variable(torch.Tensor(T_test)) + +class Model(torch.nn.Module): + def __init__(self): + super(Model, self).__init__() + #self.hidden = torch.nn.Linear(x_data.shape[1]) + self.linear = torch.nn.Linear(x_data.shape[1], 1) # 30 in and 1 out + + def forward(self, x): + #y_pred = torch.sigmoid(self.linear(x)) + y_pred = 
self.linear(x).sigmoid() + return y_pred + +# Our model +model = Model() + +criterion = torch.nn.BCELoss(reduction="mean") +optimizer = torch.optim.SGD(model.parameters(), lr=0.01) + +f = open("result.txt", 'w') +# Training loop +for epoch in range(1000): + # Forward pass: Compute predicted y by passing x to the model + y_pred = model(x_data) + pre=y_pred.ge(0.5).float() + correct = 0 + for i in range(y_data.size(0)): + if pre[i] == y_data[i]: + correct += 1 + acc = correct / y_data.size(0) #计算Accuracy + print(epoch,file = f) + print('accuracy is',acc,file = f) + + # 计算损失 + loss = criterion(y_pred, y_data) + print(' loss is', loss.data.item(),file = f) + + # Zero gradients, perform a backward pass, and update the weights. + optimizer.zero_grad() + loss.backward() + optimizer.step() + +for f in model.parameters(): + print('data is') + print(f.data) + print(f.grad) + +w = list(model.parameters()) +w0 = w[0].data.numpy() +w1 = w[1].data.numpy() + +y_pred1 = model(x_test)#预测测试集 +pre1=y_pred1.ge(0.5).float() +correct1 = 0 +for i in range(t_test.size(0)): + if pre1[i] == t_test[i]: + correct1 += 1 +acc1 = correct1 / t_test.size(0) #计算测试集的Accuracy +print('test accuracy is',acc1) + +import matplotlib.pyplot as plt + +print("Final gradient descend:", w) +# plot the data and separating line +plt.scatter(x_data[:,0], x_data[:,1], c=y_data.reshape(len(x_data)), s=100, alpha=0.7) +x_axis = np.linspace(-6, 6, 100) +y_axis = -(w1[0] + x_axis*w0[0][0]) / w0[0][1] +line_up, = plt.plot(x_axis, y_axis,'r--', label='gradient descent') +plt.legend(handles=[line_up]) +plt.xlabel('X(1)') +plt.ylabel('X(2)') +plt.show() + + diff --git a/result.png b/result.png new file mode 100644 index 0000000..e0173ca Binary files /dev/null and b/result.png differ diff --git a/result.txt b/result.txt new file mode 100644 index 0000000..05257cf --- /dev/null +++ b/result.txt @@ -0,0 +1,3000 @@ +0 +accuracy is 0.4271356783919598 + loss is 0.862797737121582 +1 +accuracy is 0.4547738693467337 + loss is 
0.8367024064064026 +2 +accuracy is 0.4798994974874372 + loss is 0.8119094371795654 +3 +accuracy is 0.5025125628140703 + loss is 0.7883690595626831 +4 +accuracy is 0.5150753768844221 + loss is 0.766029953956604 +5 +accuracy is 0.5452261306532663 + loss is 0.7448387145996094 +6 +accuracy is 0.5628140703517588 + loss is 0.7247422337532043 +7 +accuracy is 0.585427135678392 + loss is 0.7056881189346313 +8 +accuracy is 0.6055276381909548 + loss is 0.6876235008239746 +9 +accuracy is 0.628140703517588 + loss is 0.6704968214035034 +10 +accuracy is 0.6407035175879398 + loss is 0.6542590856552124 +11 +accuracy is 0.6557788944723618 + loss is 0.6388620138168335 +12 +accuracy is 0.6708542713567839 + loss is 0.6242597699165344 +13 +accuracy is 0.6834170854271356 + loss is 0.610407829284668 +14 +accuracy is 0.7110552763819096 + loss is 0.5972638130187988 +15 +accuracy is 0.7261306532663316 + loss is 0.584787130355835 +16 +accuracy is 0.7361809045226131 + loss is 0.5729395151138306 +17 +accuracy is 0.7412060301507538 + loss is 0.5616830587387085 +18 +accuracy is 0.7437185929648241 + loss is 0.5509821176528931 +19 +accuracy is 0.7688442211055276 + loss is 0.5408033728599548 +20 +accuracy is 0.7763819095477387 + loss is 0.5311138033866882 +21 +accuracy is 0.7914572864321608 + loss is 0.5218827724456787 +22 +accuracy is 0.7989949748743719 + loss is 0.5130816102027893 +23 +accuracy is 0.8015075376884422 + loss is 0.5046820640563965 +24 +accuracy is 0.8040201005025126 + loss is 0.4966587424278259 +25 +accuracy is 0.8090452261306532 + loss is 0.48898786306381226 +26 +accuracy is 0.8140703517587939 + loss is 0.48164600133895874 +27 +accuracy is 0.821608040201005 + loss is 0.474612832069397 +28 +accuracy is 0.8266331658291457 + loss is 0.46786871552467346 +29 +accuracy is 0.8366834170854272 + loss is 0.46139538288116455 +30 +accuracy is 0.8417085427135679 + loss is 0.4551762640476227 +31 +accuracy is 0.8442211055276382 + loss is 0.4491962492465973 +32 +accuracy is 0.8442211055276382 + 
loss is 0.44344019889831543 +33 +accuracy is 0.8442211055276382 + loss is 0.4378960132598877 +34 +accuracy is 0.8467336683417085 + loss is 0.43255069851875305 +35 +accuracy is 0.8517587939698492 + loss is 0.4273930788040161 +36 +accuracy is 0.8592964824120602 + loss is 0.4224127233028412 +37 +accuracy is 0.8618090452261307 + loss is 0.41760003566741943 +38 +accuracy is 0.8618090452261307 + loss is 0.4129456877708435 +39 +accuracy is 0.8618090452261307 + loss is 0.4084414839744568 +40 +accuracy is 0.864321608040201 + loss is 0.4040803909301758 +41 +accuracy is 0.8693467336683417 + loss is 0.39985391497612 +42 +accuracy is 0.8693467336683417 + loss is 0.3957562744617462 +43 +accuracy is 0.871859296482412 + loss is 0.39178094267845154 +44 +accuracy is 0.8743718592964824 + loss is 0.3879218101501465 +45 +accuracy is 0.8768844221105527 + loss is 0.38417401909828186 +46 +accuracy is 0.8768844221105527 + loss is 0.38053157925605774 +47 +accuracy is 0.8819095477386935 + loss is 0.37699058651924133 +48 +accuracy is 0.8819095477386935 + loss is 0.37354591488838196 +49 +accuracy is 0.8819095477386935 + loss is 0.370193749666214 +50 +accuracy is 0.8819095477386935 + loss is 0.3669296205043793 +51 +accuracy is 0.8844221105527639 + loss is 0.36375024914741516 +52 +accuracy is 0.8844221105527639 + loss is 0.36065196990966797 +53 +accuracy is 0.8844221105527639 + loss is 0.3576318323612213 +54 +accuracy is 0.8894472361809045 + loss is 0.35468602180480957 +55 +accuracy is 0.8894472361809045 + loss is 0.35181212425231934 +56 +accuracy is 0.8919597989949749 + loss is 0.3490070104598999 +57 +accuracy is 0.8919597989949749 + loss is 0.3462682068347931 +58 +accuracy is 0.8919597989949749 + loss is 0.3435932695865631 +59 +accuracy is 0.8919597989949749 + loss is 0.3409797251224518 +60 +accuracy is 0.8919597989949749 + loss is 0.33842548727989197 +61 +accuracy is 0.8919597989949749 + loss is 0.3359280228614807 +62 +accuracy is 0.8919597989949749 + loss is 0.33348575234413147 +63 +accuracy 
is 0.8944723618090452 + loss is 0.331096351146698 +64 +accuracy is 0.8969849246231156 + loss is 0.32875847816467285 +65 +accuracy is 0.8969849246231156 + loss is 0.32646986842155457 +66 +accuracy is 0.8969849246231156 + loss is 0.3242292106151581 +67 +accuracy is 0.8994974874371859 + loss is 0.32203489542007446 +68 +accuracy is 0.9020100502512562 + loss is 0.31988492608070374 +69 +accuracy is 0.9020100502512562 + loss is 0.3177785575389862 +70 +accuracy is 0.9020100502512562 + loss is 0.31571394205093384 +71 +accuracy is 0.9020100502512562 + loss is 0.31368982791900635 +72 +accuracy is 0.9020100502512562 + loss is 0.31170517206192017 +73 +accuracy is 0.9045226130653267 + loss is 0.30975860357284546 +74 +accuracy is 0.9045226130653267 + loss is 0.3078487813472748 +75 +accuracy is 0.907035175879397 + loss is 0.3059748709201813 +76 +accuracy is 0.907035175879397 + loss is 0.30413559079170227 +77 +accuracy is 0.907035175879397 + loss is 0.30233025550842285 +78 +accuracy is 0.907035175879397 + loss is 0.30055761337280273 +79 +accuracy is 0.907035175879397 + loss is 0.29881665110588074 +80 +accuracy is 0.907035175879397 + loss is 0.29710668325424194 +81 +accuracy is 0.907035175879397 + loss is 0.29542672634124756 +82 +accuracy is 0.907035175879397 + loss is 0.2937760055065155 +83 +accuracy is 0.907035175879397 + loss is 0.29215335845947266 +84 +accuracy is 0.907035175879397 + loss is 0.2905586361885071 +85 +accuracy is 0.907035175879397 + loss is 0.2889906167984009 +86 +accuracy is 0.914572864321608 + loss is 0.2874489426612854 +87 +accuracy is 0.914572864321608 + loss is 0.2859327495098114 +88 +accuracy is 0.9170854271356784 + loss is 0.28444135189056396 +89 +accuracy is 0.9170854271356784 + loss is 0.2829740643501282 +90 +accuracy is 0.9170854271356784 + loss is 0.28153011202812195 +91 +accuracy is 0.9170854271356784 + loss is 0.280109167098999 +92 +accuracy is 0.9170854271356784 + loss is 0.2787104547023773 +93 +accuracy is 0.9170854271356784 + loss is 
0.27733373641967773 +94 +accuracy is 0.9170854271356784 + loss is 0.2759782373905182 +95 +accuracy is 0.9170854271356784 + loss is 0.2746436297893524 +96 +accuracy is 0.9195979899497487 + loss is 0.2733290493488312 +97 +accuracy is 0.9195979899497487 + loss is 0.27203428745269775 +98 +accuracy is 0.9195979899497487 + loss is 0.27075889706611633 +99 +accuracy is 0.9195979899497487 + loss is 0.2695021331310272 +100 +accuracy is 0.9221105527638191 + loss is 0.26826393604278564 +101 +accuracy is 0.9246231155778895 + loss is 0.2670435607433319 +102 +accuracy is 0.9271356783919598 + loss is 0.26584091782569885 +103 +accuracy is 0.9271356783919598 + loss is 0.26465508341789246 +104 +accuracy is 0.9296482412060302 + loss is 0.26348644495010376 +105 +accuracy is 0.9296482412060302 + loss is 0.26233384013175964 +106 +accuracy is 0.9321608040201005 + loss is 0.26119744777679443 +107 +accuracy is 0.9321608040201005 + loss is 0.2600764036178589 +108 +accuracy is 0.9321608040201005 + loss is 0.25897112488746643 +109 +accuracy is 0.9321608040201005 + loss is 0.2578807473182678 +110 +accuracy is 0.9321608040201005 + loss is 0.2568046748638153 +111 +accuracy is 0.9321608040201005 + loss is 0.25574323534965515 +112 +accuracy is 0.9321608040201005 + loss is 0.25469571352005005 +113 +accuracy is 0.9321608040201005 + loss is 0.2536620497703552 +114 +accuracy is 0.9321608040201005 + loss is 0.252641886472702 +115 +accuracy is 0.9321608040201005 + loss is 0.2516348361968994 +116 +accuracy is 0.9321608040201005 + loss is 0.25064074993133545 +117 +accuracy is 0.9321608040201005 + loss is 0.24965940415859222 +118 +accuracy is 0.9321608040201005 + loss is 0.24869027733802795 +119 +accuracy is 0.9346733668341709 + loss is 0.247733473777771 +120 +accuracy is 0.9371859296482412 + loss is 0.24678842723369598 +121 +accuracy is 0.9371859296482412 + loss is 0.24585512280464172 +122 +accuracy is 0.9371859296482412 + loss is 0.2449333369731903 +123 +accuracy is 0.9371859296482412 + loss is 
0.24402251839637756 +124 +accuracy is 0.9371859296482412 + loss is 0.243122860789299 +125 +accuracy is 0.9371859296482412 + loss is 0.242234006524086 +126 +accuracy is 0.9371859296482412 + loss is 0.24135585129261017 +127 +accuracy is 0.9422110552763819 + loss is 0.24048815667629242 +128 +accuracy is 0.9422110552763819 + loss is 0.23963043093681335 +129 +accuracy is 0.9422110552763819 + loss is 0.23878264427185059 +130 +accuracy is 0.9447236180904522 + loss is 0.23794466257095337 +131 +accuracy is 0.9447236180904522 + loss is 0.2371165156364441 +132 +accuracy is 0.9447236180904522 + loss is 0.23629766702651978 +133 +accuracy is 0.9447236180904522 + loss is 0.23548810184001923 +134 +accuracy is 0.9447236180904522 + loss is 0.23468779027462006 +135 +accuracy is 0.9447236180904522 + loss is 0.23389624059200287 +136 +accuracy is 0.9447236180904522 + loss is 0.2331135869026184 +137 +accuracy is 0.9447236180904522 + loss is 0.23233968019485474 +138 +accuracy is 0.9447236180904522 + loss is 0.23157402873039246 +139 +accuracy is 0.9447236180904522 + loss is 0.2308170050382614 +140 +accuracy is 0.9447236180904522 + loss is 0.23006778955459595 +141 +accuracy is 0.9472361809045227 + loss is 0.22932690382003784 +142 +accuracy is 0.9472361809045227 + loss is 0.22859390079975128 +143 +accuracy is 0.9472361809045227 + loss is 0.22786878049373627 +144 +accuracy is 0.9472361809045227 + loss is 0.22715091705322266 +145 +accuracy is 0.9472361809045227 + loss is 0.22644096612930298 +146 +accuracy is 0.9472361809045227 + loss is 0.2257383018732071 +147 +accuracy is 0.949748743718593 + loss is 0.22504277527332306 +148 +accuracy is 0.949748743718593 + loss is 0.2243545949459076 +149 +accuracy is 0.949748743718593 + loss is 0.22367310523986816 +150 +accuracy is 0.949748743718593 + loss is 0.22299884259700775 +151 +accuracy is 0.949748743718593 + loss is 0.222331240773201 +152 +accuracy is 0.949748743718593 + loss is 0.2216704934835434 +153 +accuracy is 0.949748743718593 + loss is 
0.22101610898971558 +154 +accuracy is 0.949748743718593 + loss is 0.2203683704137802 +155 +accuracy is 0.949748743718593 + loss is 0.2197267860174179 +156 +accuracy is 0.949748743718593 + loss is 0.21909180283546448 +157 +accuracy is 0.949748743718593 + loss is 0.2184627652168274 +158 +accuracy is 0.949748743718593 + loss is 0.21783995628356934 +159 +accuracy is 0.949748743718593 + loss is 0.21722300350666046 +160 +accuracy is 0.949748743718593 + loss is 0.21661220490932465 +161 +accuracy is 0.949748743718593 + loss is 0.21600691974163055 +162 +accuracy is 0.949748743718593 + loss is 0.21540731191635132 +163 +accuracy is 0.949748743718593 + loss is 0.2148135006427765 +164 +accuracy is 0.949748743718593 + loss is 0.21422523260116577 +165 +accuracy is 0.949748743718593 + loss is 0.2136424332857132 +166 +accuracy is 0.949748743718593 + loss is 0.21306495368480682 +167 +accuracy is 0.949748743718593 + loss is 0.21249274909496307 +168 +accuracy is 0.949748743718593 + loss is 0.21192577481269836 +169 +accuracy is 0.949748743718593 + loss is 0.21136409044265747 +170 +accuracy is 0.949748743718593 + loss is 0.2108074575662613 +171 +accuracy is 0.949748743718593 + loss is 0.21025560796260834 +172 +accuracy is 0.949748743718593 + loss is 0.20970875024795532 +173 +accuracy is 0.949748743718593 + loss is 0.20916680991649628 +174 +accuracy is 0.949748743718593 + loss is 0.20862969756126404 +175 +accuracy is 0.949748743718593 + loss is 0.20809723436832428 +176 +accuracy is 0.949748743718593 + loss is 0.20756956934928894 +177 +accuracy is 0.949748743718593 + loss is 0.2070462554693222 +178 +accuracy is 0.949748743718593 + loss is 0.20652757585048676 +179 +accuracy is 0.949748743718593 + loss is 0.20601332187652588 +180 +accuracy is 0.949748743718593 + loss is 0.20550347864627838 +181 +accuracy is 0.949748743718593 + loss is 0.20499807596206665 +182 +accuracy is 0.949748743718593 + loss is 0.20449677109718323 +183 +accuracy is 0.949748743718593 + loss is 0.20399978756904602 +184 
+accuracy is 0.949748743718593 + loss is 0.20350691676139832 +185 +accuracy is 0.949748743718593 + loss is 0.2030181735754013 +186 +accuracy is 0.949748743718593 + loss is 0.20253343880176544 +187 +accuracy is 0.949748743718593 + loss is 0.2020527869462967 +188 +accuracy is 0.949748743718593 + loss is 0.20157594978809357 +189 +accuracy is 0.949748743718593 + loss is 0.2011031210422516 +190 +accuracy is 0.949748743718593 + loss is 0.20063404738903046 +191 +accuracy is 0.949748743718593 + loss is 0.20016880333423615 +192 +accuracy is 0.949748743718593 + loss is 0.19970734417438507 +193 +accuracy is 0.949748743718593 + loss is 0.19924958050251007 +194 +accuracy is 0.949748743718593 + loss is 0.19879527390003204 +195 +accuracy is 0.949748743718593 + loss is 0.19834470748901367 +196 +accuracy is 0.949748743718593 + loss is 0.19789756834506989 +197 +accuracy is 0.949748743718593 + loss is 0.19745416939258575 +198 +accuracy is 0.949748743718593 + loss is 0.1970140039920807 +199 +accuracy is 0.949748743718593 + loss is 0.19657738506793976 +200 +accuracy is 0.949748743718593 + loss is 0.19614405930042267 +201 +accuracy is 0.949748743718593 + loss is 0.195714071393013 +202 +accuracy is 0.949748743718593 + loss is 0.19528740644454956 +203 +accuracy is 0.949748743718593 + loss is 0.19486410915851593 +204 +accuracy is 0.949748743718593 + loss is 0.19444377720355988 +205 +accuracy is 0.949748743718593 + loss is 0.19402669370174408 +206 +accuracy is 0.949748743718593 + loss is 0.19361282885074615 +207 +accuracy is 0.949748743718593 + loss is 0.1932019591331482 +208 +accuracy is 0.9522613065326633 + loss is 0.1927943080663681 +209 +accuracy is 0.9522613065326633 + loss is 0.19238945841789246 +210 +accuracy is 0.9522613065326633 + loss is 0.19198761880397797 +211 +accuracy is 0.9522613065326633 + loss is 0.1915888786315918 +212 +accuracy is 0.9522613065326633 + loss is 0.19119298458099365 +213 +accuracy is 0.9522613065326633 + loss is 0.19080005586147308 +214 +accuracy is 
0.9522613065326633 + loss is 0.19040967524051666 +215 +accuracy is 0.9522613065326633 + loss is 0.1900223195552826 +216 +accuracy is 0.9522613065326633 + loss is 0.189637690782547 +217 +accuracy is 0.9522613065326633 + loss is 0.18925583362579346 +218 +accuracy is 0.9522613065326633 + loss is 0.18887674808502197 +219 +accuracy is 0.9522613065326633 + loss is 0.1885000616312027 +220 +accuracy is 0.9522613065326633 + loss is 0.18812629580497742 +221 +accuracy is 0.9522613065326633 + loss is 0.1877550482749939 +222 +accuracy is 0.9522613065326633 + loss is 0.18738648295402527 +223 +accuracy is 0.9522613065326633 + loss is 0.1870204657316208 +224 +accuracy is 0.9522613065326633 + loss is 0.1866568624973297 +225 +accuracy is 0.9522613065326633 + loss is 0.1862957775592804 +226 +accuracy is 0.9522613065326633 + loss is 0.18593716621398926 +227 +accuracy is 0.9522613065326633 + loss is 0.18558107316493988 +228 +accuracy is 0.9522613065326633 + loss is 0.18522733449935913 +229 +accuracy is 0.9547738693467337 + loss is 0.18487602472305298 +230 +accuracy is 0.9547738693467337 + loss is 0.18452702462673187 +231 +accuracy is 0.957286432160804 + loss is 0.18418028950691223 +232 +accuracy is 0.957286432160804 + loss is 0.1838361918926239 +233 +accuracy is 0.957286432160804 + loss is 0.18349412083625793 +234 +accuracy is 0.957286432160804 + loss is 0.18315431475639343 +235 +accuracy is 0.957286432160804 + loss is 0.18281695246696472 +236 +accuracy is 0.957286432160804 + loss is 0.18248151242733002 +237 +accuracy is 0.957286432160804 + loss is 0.18214844167232513 +238 +accuracy is 0.9597989949748744 + loss is 0.1818176656961441 +239 +accuracy is 0.9597989949748744 + loss is 0.18148863315582275 +240 +accuracy is 0.9597989949748744 + loss is 0.18116213381290436 +241 +accuracy is 0.9597989949748744 + loss is 0.18083754181861877 +242 +accuracy is 0.9597989949748744 + loss is 0.18051496148109436 +243 +accuracy is 0.9597989949748744 + loss is 0.18019452691078186 +244 +accuracy is 
0.9597989949748744 + loss is 0.1798761785030365 +245 +accuracy is 0.9597989949748744 + loss is 0.1795598715543747 +246 +accuracy is 0.9597989949748744 + loss is 0.1792454719543457 +247 +accuracy is 0.9597989949748744 + loss is 0.17893293499946594 +248 +accuracy is 0.9597989949748744 + loss is 0.17862246930599213 +249 +accuracy is 0.9597989949748744 + loss is 0.1783139556646347 +250 +accuracy is 0.9597989949748744 + loss is 0.1780073493719101 +251 +accuracy is 0.9597989949748744 + loss is 0.17770257592201233 +252 +accuracy is 0.9597989949748744 + loss is 0.1773996353149414 +253 +accuracy is 0.9597989949748744 + loss is 0.17709873616695404 +254 +accuracy is 0.9597989949748744 + loss is 0.17679955065250397 +255 +accuracy is 0.9597989949748744 + loss is 0.17650215327739716 +256 +accuracy is 0.9597989949748744 + loss is 0.17620666325092316 +257 +accuracy is 0.9597989949748744 + loss is 0.17591287195682526 +258 +accuracy is 0.9597989949748744 + loss is 0.17562077939510345 +259 +accuracy is 0.9597989949748744 + loss is 0.17533056437969208 +260 +accuracy is 0.9597989949748744 + loss is 0.17504191398620605 +261 +accuracy is 0.9597989949748744 + loss is 0.17475517094135284 +262 +accuracy is 0.9597989949748744 + loss is 0.17447006702423096 +263 +accuracy is 0.9597989949748744 + loss is 0.17418666183948517 +264 +accuracy is 0.9597989949748744 + loss is 0.17390483617782593 +265 +accuracy is 0.9597989949748744 + loss is 0.17362461984157562 +266 +accuracy is 0.9597989949748744 + loss is 0.173346146941185 +267 +accuracy is 0.9597989949748744 + loss is 0.17306925356388092 +268 +accuracy is 0.9597989949748744 + loss is 0.1727939397096634 +269 +accuracy is 0.9597989949748744 + loss is 0.1725202053785324 +270 +accuracy is 0.9597989949748744 + loss is 0.17224808037281036 +271 +accuracy is 0.9597989949748744 + loss is 0.1719774454832077 +272 +accuracy is 0.9597989949748744 + loss is 0.17170844972133636 +273 +accuracy is 0.9597989949748744 + loss is 0.17144088447093964 +274 +accuracy is 
0.9597989949748744 + loss is 0.1711748242378235 +275 +accuracy is 0.9597989949748744 + loss is 0.17091035842895508 +276 +accuracy is 0.9597989949748744 + loss is 0.17064738273620605 +277 +accuracy is 0.9597989949748744 + loss is 0.17038585245609283 +278 +accuracy is 0.9597989949748744 + loss is 0.17012567818164825 +279 +accuracy is 0.9597989949748744 + loss is 0.1698669195175171 +280 +accuracy is 0.9597989949748744 + loss is 0.16960978507995605 +281 +accuracy is 0.9597989949748744 + loss is 0.16935402154922485 +282 +accuracy is 0.9597989949748744 + loss is 0.16909950971603394 +283 +accuracy is 0.9597989949748744 + loss is 0.1688464879989624 +284 +accuracy is 0.9597989949748744 + loss is 0.1685948520898819 +285 +accuracy is 0.9597989949748744 + loss is 0.168344646692276 +286 +accuracy is 0.9597989949748744 + loss is 0.16809557378292084 +287 +accuracy is 0.9597989949748744 + loss is 0.16784796118736267 +288 +accuracy is 0.9597989949748744 + loss is 0.16760186851024628 +289 +accuracy is 0.9597989949748744 + loss is 0.16735684871673584 +290 +accuracy is 0.9597989949748744 + loss is 0.1671133190393448 +291 +accuracy is 0.9623115577889447 + loss is 0.1668708771467209 +292 +accuracy is 0.9623115577889447 + loss is 0.1666298508644104 +293 +accuracy is 0.9623115577889447 + loss is 0.1663900762796402 +294 +accuracy is 0.9623115577889447 + loss is 0.16615153849124908 +295 +accuracy is 0.9623115577889447 + loss is 0.165914386510849 +296 +accuracy is 0.9623115577889447 + loss is 0.1656782478094101 +297 +accuracy is 0.9623115577889447 + loss is 0.16544348001480103 +298 +accuracy is 0.9623115577889447 + loss is 0.16520990431308746 +299 +accuracy is 0.9623115577889447 + loss is 0.1649775356054306 +300 +accuracy is 0.9623115577889447 + loss is 0.16474634408950806 +301 +accuracy is 0.9623115577889447 + loss is 0.16451632976531982 +302 +accuracy is 0.964824120603015 + loss is 0.16428755223751068 +303 +accuracy is 0.964824120603015 + loss is 0.1640598475933075 +304 +accuracy is 
0.964824120603015 + loss is 0.16383342444896698 +305 +accuracy is 0.964824120603015 + loss is 0.16360807418823242 +306 +accuracy is 0.964824120603015 + loss is 0.16338390111923218 +307 +accuracy is 0.964824120603015 + loss is 0.1631607562303543 +308 +accuracy is 0.964824120603015 + loss is 0.16293883323669434 +309 +accuracy is 0.964824120603015 + loss is 0.1627179980278015 +310 +accuracy is 0.964824120603015 + loss is 0.1624981313943863 +311 +accuracy is 0.964824120603015 + loss is 0.16227951645851135 +312 +accuracy is 0.964824120603015 + loss is 0.16206197440624237 +313 +accuracy is 0.964824120603015 + loss is 0.16184547543525696 +314 +accuracy is 0.964824120603015 + loss is 0.16163001954555511 +315 +accuracy is 0.964824120603015 + loss is 0.16141565144062042 +316 +accuracy is 0.964824120603015 + loss is 0.1612022966146469 +317 +accuracy is 0.964824120603015 + loss is 0.16099007427692413 +318 +accuracy is 0.964824120603015 + loss is 0.16077877581119537 +319 +accuracy is 0.964824120603015 + loss is 0.16056856513023376 +320 +accuracy is 0.964824120603015 + loss is 0.16035930812358856 +321 +accuracy is 0.964824120603015 + loss is 0.16015110909938812 +322 +accuracy is 0.964824120603015 + loss is 0.1599438339471817 +323 +accuracy is 0.964824120603015 + loss is 0.15973761677742004 +324 +accuracy is 0.964824120603015 + loss is 0.15953238308429718 +325 +accuracy is 0.964824120603015 + loss is 0.15932810306549072 +326 +accuracy is 0.964824120603015 + loss is 0.15912479162216187 +327 +accuracy is 0.964824120603015 + loss is 0.15892240405082703 +328 +accuracy is 0.964824120603015 + loss is 0.15872104465961456 +329 +accuracy is 0.964824120603015 + loss is 0.15852056443691254 +330 +accuracy is 0.964824120603015 + loss is 0.15832097828388214 +331 +accuracy is 0.964824120603015 + loss is 0.1581224650144577 +332 +accuracy is 0.964824120603015 + loss is 0.1579248011112213 +333 +accuracy is 0.964824120603015 + loss is 0.15772800147533417 +334 +accuracy is 0.964824120603015 + loss 
is 0.15753215551376343 +335 +accuracy is 0.964824120603015 + loss is 0.1573372781276703 +336 +accuracy is 0.964824120603015 + loss is 0.15714319050312042 +337 +accuracy is 0.964824120603015 + loss is 0.156949982047081 +338 +accuracy is 0.964824120603015 + loss is 0.1567576676607132 +339 +accuracy is 0.964824120603015 + loss is 0.15656626224517822 +340 +accuracy is 0.964824120603015 + loss is 0.15637581050395966 +341 +accuracy is 0.964824120603015 + loss is 0.1561860591173172 +342 +accuracy is 0.964824120603015 + loss is 0.15599726140499115 +343 +accuracy is 0.964824120603015 + loss is 0.155809223651886 +344 +accuracy is 0.964824120603015 + loss is 0.15562209486961365 +345 +accuracy is 0.964824120603015 + loss is 0.15543584525585175 +346 +accuracy is 0.964824120603015 + loss is 0.15525034070014954 +347 +accuracy is 0.964824120603015 + loss is 0.15506571531295776 +348 +accuracy is 0.964824120603015 + loss is 0.15488184988498688 +349 +accuracy is 0.964824120603015 + loss is 0.15469884872436523 +350 +accuracy is 0.964824120603015 + loss is 0.15451669692993164 +351 +accuracy is 0.964824120603015 + loss is 0.15433520078659058 +352 +accuracy is 0.964824120603015 + loss is 0.1541546732187271 +353 +accuracy is 0.964824120603015 + loss is 0.1539747565984726 +354 +accuracy is 0.964824120603015 + loss is 0.15379562973976135 +355 +accuracy is 0.964824120603015 + loss is 0.15361738204956055 +356 +accuracy is 0.964824120603015 + loss is 0.15343979001045227 +357 +accuracy is 0.964824120603015 + loss is 0.1532631367444992 +358 +accuracy is 0.964824120603015 + loss is 0.15308716893196106 +359 +accuracy is 0.964824120603015 + loss is 0.15291188657283783 +360 +accuracy is 0.964824120603015 + loss is 0.15273739397525787 +361 +accuracy is 0.964824120603015 + loss is 0.1525636613368988 +362 +accuracy is 0.964824120603015 + loss is 0.15239067375659943 +363 +accuracy is 0.964824120603015 + loss is 0.1522183120250702 +364 +accuracy is 0.964824120603015 + loss is 0.1520468145608902 +365 
+accuracy is 0.964824120603015 + loss is 0.15187610685825348 +366 +accuracy is 0.964824120603015 + loss is 0.15170590579509735 +367 +accuracy is 0.964824120603015 + loss is 0.1515364795923233 +368 +accuracy is 0.964824120603015 + loss is 0.15136784315109253 +369 +accuracy is 0.964824120603015 + loss is 0.1511998474597931 +370 +accuracy is 0.964824120603015 + loss is 0.15103252232074738 +371 +accuracy is 0.964824120603015 + loss is 0.15086597204208374 +372 +accuracy is 0.964824120603015 + loss is 0.15069997310638428 +373 +accuracy is 0.964824120603015 + loss is 0.15053482353687286 +374 +accuracy is 0.964824120603015 + loss is 0.150370255112648 +375 +accuracy is 0.964824120603015 + loss is 0.15020643174648285 +376 +accuracy is 0.964824120603015 + loss is 0.15004321932792664 +377 +accuracy is 0.964824120603015 + loss is 0.14988064765930176 +378 +accuracy is 0.964824120603015 + loss is 0.1497187465429306 +379 +accuracy is 0.964824120603015 + loss is 0.14955759048461914 +380 +accuracy is 0.964824120603015 + loss is 0.14939700067043304 +381 +accuracy is 0.964824120603015 + loss is 0.14923708140850067 +382 +accuracy is 0.964824120603015 + loss is 0.14907778799533844 +383 +accuracy is 0.964824120603015 + loss is 0.14891913533210754 +384 +accuracy is 0.964824120603015 + loss is 0.1487611085176468 +385 +accuracy is 0.964824120603015 + loss is 0.14860375225543976 +386 +accuracy is 0.964824120603015 + loss is 0.14844690263271332 +387 +accuracy is 0.964824120603015 + loss is 0.14829082787036896 +388 +accuracy is 0.964824120603015 + loss is 0.14813539385795593 +389 +accuracy is 0.964824120603015 + loss is 0.1479804664850235 +390 +accuracy is 0.964824120603015 + loss is 0.1478261500597 +391 +accuracy is 0.964824120603015 + loss is 0.14767248928546906 +392 +accuracy is 0.964824120603015 + loss is 0.1475193351507187 +393 +accuracy is 0.964824120603015 + loss is 0.14736685156822205 +394 +accuracy is 0.964824120603015 + loss is 0.14721496403217316 +395 +accuracy is 0.964824120603015 
+ loss is 0.14706361293792725 +396 +accuracy is 0.964824120603015 + loss is 0.1469128578901291 +397 +accuracy is 0.964824120603015 + loss is 0.14676274359226227 +398 +accuracy is 0.964824120603015 + loss is 0.14661318063735962 +399 +accuracy is 0.964824120603015 + loss is 0.14646421372890472 +400 +accuracy is 0.964824120603015 + loss is 0.1463157832622528 +401 +accuracy is 0.964824120603015 + loss is 0.1461678445339203 +402 +accuracy is 0.964824120603015 + loss is 0.14602060616016388 +403 +accuracy is 0.964824120603015 + loss is 0.14587388932704926 +404 +accuracy is 0.964824120603015 + loss is 0.14572760462760925 +405 +accuracy is 0.964824120603015 + loss is 0.14558206498622894 +406 +accuracy is 0.964824120603015 + loss is 0.14543703198432922 +407 +accuracy is 0.964824120603015 + loss is 0.1452924609184265 +408 +accuracy is 0.964824120603015 + loss is 0.14514844119548798 +409 +accuracy is 0.964824120603015 + loss is 0.14500506222248077 +410 +accuracy is 0.964824120603015 + loss is 0.14486216008663177 +411 +accuracy is 0.964824120603015 + loss is 0.14471971988677979 +412 +accuracy is 0.964824120603015 + loss is 0.14457790553569794 +413 +accuracy is 0.964824120603015 + loss is 0.1444365382194519 +414 +accuracy is 0.964824120603015 + loss is 0.14429566264152527 +415 +accuracy is 0.964824120603015 + loss is 0.14415541291236877 +416 +accuracy is 0.964824120603015 + loss is 0.14401574432849884 +417 +accuracy is 0.964824120603015 + loss is 0.14387641847133636 +418 +accuracy is 0.964824120603015 + loss is 0.14373770356178284 +419 +accuracy is 0.964824120603015 + loss is 0.14359940588474274 +420 +accuracy is 0.964824120603015 + loss is 0.14346162974834442 +421 +accuracy is 0.964824120603015 + loss is 0.14332441985607147 +422 +accuracy is 0.964824120603015 + loss is 0.14318768680095673 +423 +accuracy is 0.964824120603015 + loss is 0.14305143058300018 +424 +accuracy is 0.964824120603015 + loss is 0.14291563630104065 +425 +accuracy is 0.964824120603015 + loss is 
0.1427803784608841 +426 +accuracy is 0.964824120603015 + loss is 0.14264564216136932 +427 +accuracy is 0.964824120603015 + loss is 0.1425112783908844 +428 +accuracy is 0.964824120603015 + loss is 0.14237742125988007 +429 +accuracy is 0.964824120603015 + loss is 0.1422441601753235 +430 +accuracy is 0.964824120603015 + loss is 0.14211124181747437 +431 +accuracy is 0.964824120603015 + loss is 0.14197883009910583 +432 +accuracy is 0.964824120603015 + loss is 0.14184686541557312 +433 +accuracy is 0.9673366834170855 + loss is 0.14171551167964935 +434 +accuracy is 0.9673366834170855 + loss is 0.14158447086811066 +435 +accuracy is 0.9673366834170855 + loss is 0.1414538025856018 +436 +accuracy is 0.9673366834170855 + loss is 0.14132380485534668 +437 +accuracy is 0.9673366834170855 + loss is 0.141194149851799 +438 +accuracy is 0.9673366834170855 + loss is 0.14106492698192596 +439 +accuracy is 0.9673366834170855 + loss is 0.1409362256526947 +440 +accuracy is 0.9673366834170855 + loss is 0.1408078521490097 +441 +accuracy is 0.9673366834170855 + loss is 0.14068011939525604 +442 +accuracy is 0.9673366834170855 + loss is 0.14055266976356506 +443 +accuracy is 0.9673366834170855 + loss is 0.14042560756206512 +444 +accuracy is 0.9673366834170855 + loss is 0.14029909670352936 +445 +accuracy is 0.9673366834170855 + loss is 0.14017309248447418 +446 +accuracy is 0.9673366834170855 + loss is 0.14004738628864288 +447 +accuracy is 0.9673366834170855 + loss is 0.13992218673229218 +448 +accuracy is 0.9673366834170855 + loss is 0.13979732990264893 +449 +accuracy is 0.9673366834170855 + loss is 0.1396729052066803 +450 +accuracy is 0.9673366834170855 + loss is 0.13954895734786987 +451 +accuracy is 0.9673366834170855 + loss is 0.13942542672157288 +452 +accuracy is 0.9673366834170855 + loss is 0.1393023282289505 +453 +accuracy is 0.9673366834170855 + loss is 0.1391795575618744 +454 +accuracy is 0.9673366834170855 + loss is 0.1390572041273117 +455 +accuracy is 0.9673366834170855 + loss is 
0.13893529772758484 +456 +accuracy is 0.9673366834170855 + loss is 0.1388138383626938 +457 +accuracy is 0.9673366834170855 + loss is 0.138692706823349 +458 +accuracy is 0.9673366834170855 + loss is 0.13857205212116241 +459 +accuracy is 0.9673366834170855 + loss is 0.13845181465148926 +460 +accuracy is 0.9673366834170855 + loss is 0.13833187520503998 +461 +accuracy is 0.9673366834170855 + loss is 0.1382123827934265 +462 +accuracy is 0.9673366834170855 + loss is 0.13809333741664886 +463 +accuracy is 0.9673366834170855 + loss is 0.1379746049642563 +464 +accuracy is 0.9673366834170855 + loss is 0.13785627484321594 +465 +accuracy is 0.9673366834170855 + loss is 0.13773836195468903 +466 +accuracy is 0.9673366834170855 + loss is 0.1376207321882248 +467 +accuracy is 0.9673366834170855 + loss is 0.13750368356704712 +468 +accuracy is 0.9673366834170855 + loss is 0.13738684356212616 +469 +accuracy is 0.9673366834170855 + loss is 0.13727042078971863 +470 +accuracy is 0.9673366834170855 + loss is 0.1371544450521469 +471 +accuracy is 0.9673366834170855 + loss is 0.13703885674476624 +472 +accuracy is 0.9673366834170855 + loss is 0.13692353665828705 +473 +accuracy is 0.9673366834170855 + loss is 0.1368086040019989 +474 +accuracy is 0.9673366834170855 + loss is 0.1366940438747406 +475 +accuracy is 0.9673366834170855 + loss is 0.13657985627651215 +476 +accuracy is 0.9673366834170855 + loss is 0.13646604120731354 +477 +accuracy is 0.9673366834170855 + loss is 0.13635258376598358 +478 +accuracy is 0.9673366834170855 + loss is 0.13623955845832825 +479 +accuracy is 0.9673366834170855 + loss is 0.1361268162727356 +480 +accuracy is 0.9673366834170855 + loss is 0.13601452112197876 +481 +accuracy is 0.9673366834170855 + loss is 0.13590234518051147 +482 +accuracy is 0.9673366834170855 + loss is 0.13579075038433075 +483 +accuracy is 0.9673366834170855 + loss is 0.13567937910556793 +484 +accuracy is 0.9673366834170855 + loss is 0.13556843996047974 +485 +accuracy is 0.9673366834170855 + loss is 
0.1354578584432602 +486 +accuracy is 0.9673366834170855 + loss is 0.1353476345539093 +487 +accuracy is 0.9673366834170855 + loss is 0.1352376937866211 +488 +accuracy is 0.9673366834170855 + loss is 0.13512811064720154 +489 +accuracy is 0.9673366834170855 + loss is 0.13501881062984467 +490 +accuracy is 0.9673366834170855 + loss is 0.1349099576473236 +491 +accuracy is 0.9673366834170855 + loss is 0.13480144739151 +492 +accuracy is 0.9673366834170855 + loss is 0.13469316065311432 +493 +accuracy is 0.9673366834170855 + loss is 0.13458526134490967 +494 +accuracy is 0.9673366834170855 + loss is 0.13447770476341248 +495 +accuracy is 0.9673366834170855 + loss is 0.13437040150165558 +496 +accuracy is 0.9673366834170855 + loss is 0.13426348567008972 +497 +accuracy is 0.9673366834170855 + loss is 0.13415686786174774 +498 +accuracy is 0.9673366834170855 + loss is 0.13405071198940277 +499 +accuracy is 0.9673366834170855 + loss is 0.13394472002983093 +500 +accuracy is 0.9673366834170855 + loss is 0.13383913040161133 +501 +accuracy is 0.9673366834170855 + loss is 0.13373379409313202 +502 +accuracy is 0.9673366834170855 + loss is 0.13362883031368256 +503 +accuracy is 0.9673366834170855 + loss is 0.13352414965629578 +504 +accuracy is 0.9673366834170855 + loss is 0.13341979682445526 +505 +accuracy is 0.9673366834170855 + loss is 0.13331586122512817 +506 +accuracy is 0.9673366834170855 + loss is 0.1332121193408966 +507 +accuracy is 0.9673366834170855 + loss is 0.13310855627059937 +508 +accuracy is 0.9673366834170855 + loss is 0.1330055445432663 +509 +accuracy is 0.9673366834170855 + loss is 0.13290272653102875 +510 +accuracy is 0.9673366834170855 + loss is 0.13280020654201508 +511 +accuracy is 0.9673366834170855 + loss is 0.13269805908203125 +512 +accuracy is 0.9673366834170855 + loss is 0.13259625434875488 +513 +accuracy is 0.9673366834170855 + loss is 0.13249456882476807 +514 +accuracy is 0.9673366834170855 + loss is 0.13239328563213348 +515 +accuracy is 0.9673366834170855 + loss 
is 0.13229234516620636 +516 +accuracy is 0.9673366834170855 + loss is 0.13219162821769714 +517 +accuracy is 0.9673366834170855 + loss is 0.1320912390947342 +518 +accuracy is 0.9698492462311558 + loss is 0.1319911628961563 +519 +accuracy is 0.9698492462311558 + loss is 0.13189132511615753 +520 +accuracy is 0.9698492462311558 + loss is 0.13179181516170502 +521 +accuracy is 0.9698492462311558 + loss is 0.13169263303279877 +522 +accuracy is 0.9698492462311558 + loss is 0.13159362971782684 +523 +accuracy is 0.9698492462311558 + loss is 0.13149504363536835 +524 +accuracy is 0.9698492462311558 + loss is 0.13139663636684418 +525 +accuracy is 0.9698492462311558 + loss is 0.13129852712154388 +526 +accuracy is 0.9698492462311558 + loss is 0.13120073080062866 +527 +accuracy is 0.9698492462311558 + loss is 0.13110314309597015 +528 +accuracy is 0.9698492462311558 + loss is 0.13100597262382507 +529 +accuracy is 0.9698492462311558 + loss is 0.1309089958667755 +530 +accuracy is 0.9698492462311558 + loss is 0.13081228733062744 +531 +accuracy is 0.9698492462311558 + loss is 0.13071584701538086 +532 +accuracy is 0.9698492462311558 + loss is 0.13061967492103577 +533 +accuracy is 0.9698492462311558 + loss is 0.1305238902568817 +534 +accuracy is 0.9698492462311558 + loss is 0.1304282695055008 +535 +accuracy is 0.9698492462311558 + loss is 0.13033287227153778 +536 +accuracy is 0.9698492462311558 + loss is 0.130237877368927 +537 +accuracy is 0.9698492462311558 + loss is 0.13014300167560577 +538 +accuracy is 0.9698492462311558 + loss is 0.13004854321479797 +539 +accuracy is 0.9698492462311558 + loss is 0.1299542337656021 +540 +accuracy is 0.9698492462311558 + loss is 0.1298602819442749 +541 +accuracy is 0.9698492462311558 + loss is 0.12976647913455963 +542 +accuracy is 0.9698492462311558 + loss is 0.1296730935573578 +543 +accuracy is 0.9698492462311558 + loss is 0.1295798271894455 +544 +accuracy is 0.9698492462311558 + loss is 0.12948690354824066 +545 +accuracy is 0.9698492462311558 + loss 
is 0.12939421832561493 +546 +accuracy is 0.9698492462311558 + loss is 0.1293017566204071 +547 +accuracy is 0.9698492462311558 + loss is 0.12920959293842316 +548 +accuracy is 0.9698492462311558 + loss is 0.12911765277385712 +549 +accuracy is 0.9698492462311558 + loss is 0.12902593612670898 +550 +accuracy is 0.9698492462311558 + loss is 0.12893454730510712 +551 +accuracy is 0.9698492462311558 + loss is 0.12884336709976196 +552 +accuracy is 0.9698492462311558 + loss is 0.1287524700164795 +553 +accuracy is 0.9698492462311558 + loss is 0.12866179645061493 +554 +accuracy is 0.9698492462311558 + loss is 0.1285712718963623 +555 +accuracy is 0.9698492462311558 + loss is 0.12848113477230072 +556 +accuracy is 0.9698492462311558 + loss is 0.12839122116565704 +557 +accuracy is 0.9698492462311558 + loss is 0.12830153107643127 +558 +accuracy is 0.9698492462311558 + loss is 0.1282120943069458 +559 +accuracy is 0.9698492462311558 + loss is 0.12812282145023346 +560 +accuracy is 0.9698492462311558 + loss is 0.1280338317155838 +561 +accuracy is 0.9698492462311558 + loss is 0.1279451549053192 +562 +accuracy is 0.9698492462311558 + loss is 0.12785661220550537 +563 +accuracy is 0.9698492462311558 + loss is 0.1277683526277542 +564 +accuracy is 0.9698492462311558 + loss is 0.12768031656742096 +565 +accuracy is 0.9698492462311558 + loss is 0.12759257853031158 +566 +accuracy is 0.9698492462311558 + loss is 0.12750498950481415 +567 +accuracy is 0.9698492462311558 + loss is 0.1274176836013794 +568 +accuracy is 0.9698492462311558 + loss is 0.12733058631420135 +569 +accuracy is 0.9698492462311558 + loss is 0.12724368274211884 +570 +accuracy is 0.9698492462311558 + loss is 0.1271570920944214 +571 +accuracy is 0.9698492462311558 + loss is 0.12707071006298065 +572 +accuracy is 0.9698492462311558 + loss is 0.12698453664779663 +573 +accuracy is 0.9698492462311558 + loss is 0.12689855694770813 +574 +accuracy is 0.9698492462311558 + loss is 0.12681284546852112 +575 +accuracy is 0.9698492462311558 + 
loss is 0.12672731280326843 +576 +accuracy is 0.9698492462311558 + loss is 0.1266421228647232 +577 +accuracy is 0.9698492462311558 + loss is 0.12655700743198395 +578 +accuracy is 0.9698492462311558 + loss is 0.12647226452827454 +579 +accuracy is 0.9698492462311558 + loss is 0.12638764083385468 +580 +accuracy is 0.9698492462311558 + loss is 0.1263032704591751 +581 +accuracy is 0.9698492462311558 + loss is 0.1262189745903015 +582 +accuracy is 0.9698492462311558 + loss is 0.12613509595394135 +583 +accuracy is 0.9698492462311558 + loss is 0.1260514259338379 +584 +accuracy is 0.9698492462311558 + loss is 0.125967875123024 +585 +accuracy is 0.9698492462311558 + loss is 0.12588448822498322 +586 +accuracy is 0.9698492462311558 + loss is 0.12580148875713348 +587 +accuracy is 0.9698492462311558 + loss is 0.1257186233997345 +588 +accuracy is 0.9698492462311558 + loss is 0.12563590705394745 +589 +accuracy is 0.9698492462311558 + loss is 0.12555348873138428 +590 +accuracy is 0.9698492462311558 + loss is 0.12547123432159424 +591 +accuracy is 0.9698492462311558 + loss is 0.1253892481327057 +592 +accuracy is 0.9698492462311558 + loss is 0.12530742585659027 +593 +accuracy is 0.9698492462311558 + loss is 0.12522576749324799 +594 +accuracy is 0.9698492462311558 + loss is 0.1251443475484848 +595 +accuracy is 0.9698492462311558 + loss is 0.12506318092346191 +596 +accuracy is 0.9698492462311558 + loss is 0.12498220801353455 +597 +accuracy is 0.9698492462311558 + loss is 0.12490137666463852 +598 +accuracy is 0.9698492462311558 + loss is 0.12482079863548279 +599 +accuracy is 0.9698492462311558 + loss is 0.12474039942026138 +600 +accuracy is 0.9698492462311558 + loss is 0.1246602013707161 +601 +accuracy is 0.9698492462311558 + loss is 0.12458017468452454 +602 +accuracy is 0.9698492462311558 + loss is 0.12450040131807327 +603 +accuracy is 0.9698492462311558 + loss is 0.12442076206207275 +604 +accuracy is 0.9698492462311558 + loss is 0.12434142827987671 +605 +accuracy is 0.9698492462311558 + 
loss is 0.12426222860813141 +606 +accuracy is 0.9698492462311558 + loss is 0.12418323010206223 +607 +accuracy is 0.9698492462311558 + loss is 0.12410441040992737 +608 +accuracy is 0.9698492462311558 + loss is 0.12402581423521042 +609 +accuracy is 0.9698492462311558 + loss is 0.12394740432500839 +610 +accuracy is 0.9698492462311558 + loss is 0.1238691657781601 +611 +accuracy is 0.9698492462311558 + loss is 0.12379120290279388 +612 +accuracy is 0.9698492462311558 + loss is 0.12371331453323364 +613 +accuracy is 0.9698492462311558 + loss is 0.1236356645822525 +614 +accuracy is 0.9698492462311558 + loss is 0.12355820834636688 +615 +accuracy is 0.9698492462311558 + loss is 0.12348096817731857 +616 +accuracy is 0.9698492462311558 + loss is 0.12340383976697922 +617 +accuracy is 0.9698492462311558 + loss is 0.12332698702812195 +618 +accuracy is 0.9698492462311558 + loss is 0.1232503205537796 +619 +accuracy is 0.9698492462311558 + loss is 0.1231737956404686 +620 +accuracy is 0.9698492462311558 + loss is 0.12309743463993073 +621 +accuracy is 0.9698492462311558 + loss is 0.12302126735448837 +622 +accuracy is 0.9698492462311558 + loss is 0.12294532358646393 +623 +accuracy is 0.9698492462311558 + loss is 0.1228695809841156 +624 +accuracy is 0.9698492462311558 + loss is 0.12279403209686279 +625 +accuracy is 0.9698492462311558 + loss is 0.12271860986948013 +626 +accuracy is 0.9698492462311558 + loss is 0.12264329940080643 +627 +accuracy is 0.9698492462311558 + loss is 0.12256830185651779 +628 +accuracy is 0.9698492462311558 + loss is 0.12249340862035751 +629 +accuracy is 0.9698492462311558 + loss is 0.1224185973405838 +630 +accuracy is 0.9698492462311558 + loss is 0.12234416604042053 +631 +accuracy is 0.9698492462311558 + loss is 0.12226982414722443 +632 +accuracy is 0.9698492462311558 + loss is 0.12219566106796265 +633 +accuracy is 0.9698492462311558 + loss is 0.12212167680263519 +634 +accuracy is 0.9698492462311558 + loss is 0.12204783409833908 +635 +accuracy is 
0.9698492462311558 + loss is 0.12197420001029968 +636 +accuracy is 0.9698492462311558 + loss is 0.12190067023038864 +637 +accuracy is 0.9698492462311558 + loss is 0.12182741612195969 +638 +accuracy is 0.9698492462311558 + loss is 0.12175436317920685 +639 +accuracy is 0.9698492462311558 + loss is 0.12168138474225998 +640 +accuracy is 0.9698492462311558 + loss is 0.12160864472389221 +641 +accuracy is 0.9698492462311558 + loss is 0.12153596431016922 +642 +accuracy is 0.9698492462311558 + loss is 0.12146352976560593 +643 +accuracy is 0.9698492462311558 + loss is 0.12139122188091278 +644 +accuracy is 0.9698492462311558 + loss is 0.12131915241479874 +645 +accuracy is 0.9698492462311558 + loss is 0.12124726921319962 +646 +accuracy is 0.9698492462311558 + loss is 0.12117549777030945 +647 +accuracy is 0.9698492462311558 + loss is 0.1211039274930954 +648 +accuracy is 0.9698492462311558 + loss is 0.12103242427110672 +649 +accuracy is 0.9698492462311558 + loss is 0.12096113711595535 +650 +accuracy is 0.9698492462311558 + loss is 0.12089002877473831 +651 +accuracy is 0.9698492462311558 + loss is 0.12081915885210037 +652 +accuracy is 0.9698492462311558 + loss is 0.12074829638004303 +653 +accuracy is 0.9698492462311558 + loss is 0.12067772448062897 +654 +accuracy is 0.9698492462311558 + loss is 0.12060726433992386 +655 +accuracy is 0.9698492462311558 + loss is 0.12053694576025009 +656 +accuracy is 0.9698492462311558 + loss is 0.12046684324741364 +657 +accuracy is 0.9698492462311558 + loss is 0.12039679288864136 +658 +accuracy is 0.9698492462311558 + loss is 0.12032703310251236 +659 +accuracy is 0.9698492462311558 + loss is 0.1202574223279953 +660 +accuracy is 0.9698492462311558 + loss is 0.12018789350986481 +661 +accuracy is 0.9698492462311558 + loss is 0.12011856585741043 +662 +accuracy is 0.9698492462311558 + loss is 0.1200493648648262 +663 +accuracy is 0.9698492462311558 + loss is 0.1199803277850151 +664 +accuracy is 0.9698492462311558 + loss is 0.11991148442029953 +665 
+accuracy is 0.9698492462311558 + loss is 0.11984270066022873 +666 +accuracy is 0.9698492462311558 + loss is 0.11977420747280121 +667 +accuracy is 0.9698492462311558 + loss is 0.11970576643943787 +668 +accuracy is 0.9698492462311558 + loss is 0.11963754147291183 +669 +accuracy is 0.9698492462311558 + loss is 0.11956940591335297 +670 +accuracy is 0.9698492462311558 + loss is 0.1195015236735344 +671 +accuracy is 0.9698492462311558 + loss is 0.11943361163139343 +672 +accuracy is 0.9698492462311558 + loss is 0.11936604976654053 +673 +accuracy is 0.9698492462311558 + loss is 0.11929856985807419 +674 +accuracy is 0.9698492462311558 + loss is 0.11923111975193024 +675 +accuracy is 0.9698492462311558 + loss is 0.11916399002075195 +676 +accuracy is 0.9698492462311558 + loss is 0.11909692734479904 +677 +accuracy is 0.9698492462311558 + loss is 0.11903003603219986 +678 +accuracy is 0.9698492462311558 + loss is 0.11896328628063202 +679 +accuracy is 0.9698492462311558 + loss is 0.11889664828777313 +680 +accuracy is 0.9698492462311558 + loss is 0.11883022636175156 +681 +accuracy is 0.9698492462311558 + loss is 0.11876386404037476 +682 +accuracy is 0.9723618090452262 + loss is 0.11869768053293228 +683 +accuracy is 0.9723618090452262 + loss is 0.11863170564174652 +684 +accuracy is 0.9723618090452262 + loss is 0.1185658797621727 +685 +accuracy is 0.9723618090452262 + loss is 0.11850006133317947 +686 +accuracy is 0.9723618090452262 + loss is 0.11843448132276535 +687 +accuracy is 0.9723618090452262 + loss is 0.11836906522512436 +688 +accuracy is 0.9723618090452262 + loss is 0.11830373108386993 +689 +accuracy is 0.9723618090452262 + loss is 0.11823860555887222 +690 +accuracy is 0.9723618090452262 + loss is 0.1181735098361969 +691 +accuracy is 0.9723618090452262 + loss is 0.11810871958732605 +692 +accuracy is 0.9723618090452262 + loss is 0.1180439293384552 +693 +accuracy is 0.9723618090452262 + loss is 0.11797928065061569 +694 +accuracy is 0.9723618090452262 + loss is 0.1179148480296135 
+695 +accuracy is 0.9723618090452262 + loss is 0.11785054951906204 +696 +accuracy is 0.9723618090452262 + loss is 0.11778631806373596 +697 +accuracy is 0.9723618090452262 + loss is 0.11772225052118301 +698 +accuracy is 0.9723618090452262 + loss is 0.1176583468914032 +699 +accuracy is 0.9723618090452262 + loss is 0.11759456247091293 +700 +accuracy is 0.9723618090452262 + loss is 0.11753091216087341 +701 +accuracy is 0.9723618090452262 + loss is 0.11746744811534882 +702 +accuracy is 0.9723618090452262 + loss is 0.117404043674469 +703 +accuracy is 0.9723618090452262 + loss is 0.1173408254981041 +704 +accuracy is 0.9723618090452262 + loss is 0.11727778613567352 +705 +accuracy is 0.9723618090452262 + loss is 0.11721474677324295 +706 +accuracy is 0.9723618090452262 + loss is 0.11715193092823029 +707 +accuracy is 0.9723618090452262 + loss is 0.11708923429250717 +708 +accuracy is 0.9723618090452262 + loss is 0.1170266792178154 +709 +accuracy is 0.9723618090452262 + loss is 0.11696428060531616 +710 +accuracy is 0.9723618090452262 + loss is 0.11690200865268707 +711 +accuracy is 0.9723618090452262 + loss is 0.11683971434831619 +712 +accuracy is 0.9723618090452262 + loss is 0.11677771061658859 +713 +accuracy is 0.9723618090452262 + loss is 0.11671584099531174 +714 +accuracy is 0.9723618090452262 + loss is 0.11665398627519608 +715 +accuracy is 0.9723618090452262 + loss is 0.1165924072265625 +716 +accuracy is 0.9723618090452262 + loss is 0.11653079837560654 +717 +accuracy is 0.9723618090452262 + loss is 0.11646940559148788 +718 +accuracy is 0.9723618090452262 + loss is 0.11640819162130356 +719 +accuracy is 0.9723618090452262 + loss is 0.11634699255228043 +720 +accuracy is 0.9723618090452262 + loss is 0.1162860244512558 +721 +accuracy is 0.9723618090452262 + loss is 0.11622513830661774 +722 +accuracy is 0.9723618090452262 + loss is 0.11616429686546326 +723 +accuracy is 0.9723618090452262 + loss is 0.11610368639230728 +724 +accuracy is 0.9723618090452262 + loss is 
0.11604318022727966 +725 +accuracy is 0.9723618090452262 + loss is 0.11598269641399384 +726 +accuracy is 0.9723618090452262 + loss is 0.1159224808216095 +727 +accuracy is 0.9723618090452262 + loss is 0.1158623918890953 +728 +accuracy is 0.9723618090452262 + loss is 0.1158023551106453 +729 +accuracy is 0.9723618090452262 + loss is 0.1157425120472908 +730 +accuracy is 0.9723618090452262 + loss is 0.11568277329206467 +731 +accuracy is 0.9723618090452262 + loss is 0.11562300473451614 +732 +accuracy is 0.9723618090452262 + loss is 0.11556348204612732 +733 +accuracy is 0.9723618090452262 + loss is 0.11550411581993103 +734 +accuracy is 0.9723618090452262 + loss is 0.11544478684663773 +735 +accuracy is 0.9723618090452262 + loss is 0.11538561433553696 +736 +accuracy is 0.9723618090452262 + loss is 0.11532657593488693 +737 +accuracy is 0.9723618090452262 + loss is 0.11526764184236526 +738 +accuracy is 0.9723618090452262 + loss is 0.11520879715681076 +739 +accuracy is 0.9723618090452262 + loss is 0.11515018343925476 +740 +accuracy is 0.9723618090452262 + loss is 0.11509158462285995 +741 +accuracy is 0.9723618090452262 + loss is 0.11503313481807709 +742 +accuracy is 0.9723618090452262 + loss is 0.11497475206851959 +743 +accuracy is 0.9723618090452262 + loss is 0.11491648852825165 +744 +accuracy is 0.9723618090452262 + loss is 0.11485845595598221 +745 +accuracy is 0.9723618090452262 + loss is 0.11480040848255157 +746 +accuracy is 0.9723618090452262 + loss is 0.11474255472421646 +747 +accuracy is 0.9723618090452262 + loss is 0.11468484997749329 +748 +accuracy is 0.9723618090452262 + loss is 0.11462710797786713 +749 +accuracy is 0.9723618090452262 + loss is 0.11456958204507828 +750 +accuracy is 0.9723618090452262 + loss is 0.11451223492622375 +751 +accuracy is 0.9723618090452262 + loss is 0.11445494741201401 +752 +accuracy is 0.9723618090452262 + loss is 0.11439777910709381 +753 +accuracy is 0.9723618090452262 + loss is 0.11434073001146317 +754 +accuracy is 0.9723618090452262 + 
loss is 0.11428374797105789 +755 +accuracy is 0.9723618090452262 + loss is 0.11422695964574814 +756 +accuracy is 0.9723618090452262 + loss is 0.1141701191663742 +757 +accuracy is 0.9723618090452262 + loss is 0.11411347985267639 +758 +accuracy is 0.9723618090452262 + loss is 0.11405696719884872 +759 +accuracy is 0.9723618090452262 + loss is 0.11400061845779419 +760 +accuracy is 0.9723618090452262 + loss is 0.11394426971673965 +761 +accuracy is 0.9723618090452262 + loss is 0.11388812959194183 +762 +accuracy is 0.9723618090452262 + loss is 0.11383196711540222 +763 +accuracy is 0.9723618090452262 + loss is 0.11377602815628052 +764 +accuracy is 0.9723618090452262 + loss is 0.11372021585702896 +765 +accuracy is 0.9723618090452262 + loss is 0.11366453766822815 +766 +accuracy is 0.9723618090452262 + loss is 0.11360883712768555 +767 +accuracy is 0.9723618090452262 + loss is 0.11355330049991608 +768 +accuracy is 0.9723618090452262 + loss is 0.11349791288375854 +769 +accuracy is 0.9723618090452262 + loss is 0.1134425699710846 +770 +accuracy is 0.9723618090452262 + loss is 0.11338736861944199 +771 +accuracy is 0.9723618090452262 + loss is 0.11333221942186356 +772 +accuracy is 0.9723618090452262 + loss is 0.11327730864286423 +773 +accuracy is 0.9723618090452262 + loss is 0.11322236806154251 +774 +accuracy is 0.9723618090452262 + loss is 0.11316761374473572 +775 +accuracy is 0.9723618090452262 + loss is 0.11311297118663788 +776 +accuracy is 0.9723618090452262 + loss is 0.11305832862854004 +777 +accuracy is 0.9723618090452262 + loss is 0.11300384253263474 +778 +accuracy is 0.9723618090452262 + loss is 0.11294957995414734 +779 +accuracy is 0.9723618090452262 + loss is 0.11289530247449875 +780 +accuracy is 0.9723618090452262 + loss is 0.11284111440181732 +781 +accuracy is 0.9723618090452262 + loss is 0.11278704553842545 +782 +accuracy is 0.9723618090452262 + loss is 0.11273312568664551 +783 +accuracy is 0.9723618090452262 + loss is 0.11267925798892975 +784 +accuracy is 
0.9723618090452262 + loss is 0.11262549459934235 +785 +accuracy is 0.9723618090452262 + loss is 0.11257191747426987 +786 +accuracy is 0.9723618090452262 + loss is 0.1125183179974556 +787 +accuracy is 0.9723618090452262 + loss is 0.11246489733457565 +788 +accuracy is 0.9723618090452262 + loss is 0.11241159588098526 +789 +accuracy is 0.9723618090452262 + loss is 0.11235832422971725 +790 +accuracy is 0.9723618090452262 + loss is 0.11230521649122238 +791 +accuracy is 0.9723618090452262 + loss is 0.11225215345621109 +792 +accuracy is 0.9723618090452262 + loss is 0.11219917982816696 +793 +accuracy is 0.9723618090452262 + loss is 0.11214635521173477 +794 +accuracy is 0.9723618090452262 + loss is 0.11209366470575333 +795 +accuracy is 0.9723618090452262 + loss is 0.11204096674919128 +796 +accuracy is 0.9723618090452262 + loss is 0.11198841035366058 +797 +accuracy is 0.9723618090452262 + loss is 0.11193600296974182 +798 +accuracy is 0.9723618090452262 + loss is 0.11188364028930664 +799 +accuracy is 0.9723618090452262 + loss is 0.11183135211467743 +800 +accuracy is 0.9723618090452262 + loss is 0.11177925020456314 +801 +accuracy is 0.9723618090452262 + loss is 0.11172712594270706 +802 +accuracy is 0.9723618090452262 + loss is 0.1116752177476883 +803 +accuracy is 0.9723618090452262 + loss is 0.11162333190441132 +804 +accuracy is 0.9723618090452262 + loss is 0.11157159507274628 +805 +accuracy is 0.9723618090452262 + loss is 0.11151988804340363 +806 +accuracy is 0.9723618090452262 + loss is 0.11146825551986694 +807 +accuracy is 0.9723618090452262 + loss is 0.11141689866781235 +808 +accuracy is 0.9723618090452262 + loss is 0.11136544495820999 +809 +accuracy is 0.9723618090452262 + loss is 0.11131420731544495 +810 +accuracy is 0.9723618090452262 + loss is 0.1112629622220993 +811 +accuracy is 0.9723618090452262 + loss is 0.11121183633804321 +812 +accuracy is 0.9748743718592965 + loss is 0.11116085201501846 +813 +accuracy is 0.9748743718592965 + loss is 0.1111099123954773 +814 
+accuracy is 0.9748743718592965 + loss is 0.11105909943580627 +815 +accuracy is 0.9773869346733668 + loss is 0.11100836098194122 +816 +accuracy is 0.9773869346733668 + loss is 0.11095775663852692 +817 +accuracy is 0.9773869346733668 + loss is 0.11090720444917679 +818 +accuracy is 0.9773869346733668 + loss is 0.11085674911737442 +819 +accuracy is 0.9773869346733668 + loss is 0.11080635339021683 +820 +accuracy is 0.9773869346733668 + loss is 0.110756054520607 +821 +accuracy is 0.9773869346733668 + loss is 0.11070585250854492 +822 +accuracy is 0.9773869346733668 + loss is 0.11065574735403061 +823 +accuracy is 0.9773869346733668 + loss is 0.11060582101345062 +824 +accuracy is 0.9773869346733668 + loss is 0.11055589467287064 +825 +accuracy is 0.9773869346733668 + loss is 0.11050605773925781 +826 +accuracy is 0.9773869346733668 + loss is 0.11045632511377335 +827 +accuracy is 0.9773869346733668 + loss is 0.11040666699409485 +828 +accuracy is 0.9773869346733668 + loss is 0.11035710573196411 +829 +accuracy is 0.9773869346733668 + loss is 0.11030760407447815 +830 +accuracy is 0.9773869346733668 + loss is 0.11025829613208771 +831 +accuracy is 0.9773869346733668 + loss is 0.11020907014608383 +832 +accuracy is 0.9773869346733668 + loss is 0.1101597249507904 +833 +accuracy is 0.9773869346733668 + loss is 0.11011062562465668 +834 +accuracy is 0.9773869346733668 + loss is 0.11006167531013489 +835 +accuracy is 0.9773869346733668 + loss is 0.1100127175450325 +836 +accuracy is 0.9773869346733668 + loss is 0.10996387153863907 +837 +accuracy is 0.9773869346733668 + loss is 0.10991509258747101 +838 +accuracy is 0.9773869346733668 + loss is 0.1098664179444313 +839 +accuracy is 0.9798994974874372 + loss is 0.10981786251068115 +840 +accuracy is 0.9798994974874372 + loss is 0.1097693219780922 +841 +accuracy is 0.9798994974874372 + loss is 0.1097208559513092 +842 +accuracy is 0.9798994974874372 + loss is 0.10967253893613815 +843 +accuracy is 0.9798994974874372 + loss is 0.10962425172328949 
+844 +accuracy is 0.9798994974874372 + loss is 0.10957607626914978 +845 +accuracy is 0.9798994974874372 + loss is 0.1095280647277832 +846 +accuracy is 0.9798994974874372 + loss is 0.10947999358177185 +847 +accuracy is 0.9798994974874372 + loss is 0.10943213105201721 +848 +accuracy is 0.9798994974874372 + loss is 0.10938424617052078 +849 +accuracy is 0.9798994974874372 + loss is 0.10933652520179749 +850 +accuracy is 0.9798994974874372 + loss is 0.10928886383771896 +851 +accuracy is 0.9798994974874372 + loss is 0.10924132913351059 +852 +accuracy is 0.9798994974874372 + loss is 0.10919379442930222 +853 +accuracy is 0.9798994974874372 + loss is 0.1091463565826416 +854 +accuracy is 0.9798994974874372 + loss is 0.10909901559352875 +855 +accuracy is 0.9798994974874372 + loss is 0.10905178636312485 +856 +accuracy is 0.9798994974874372 + loss is 0.1090046688914299 +857 +accuracy is 0.9798994974874372 + loss is 0.10895756632089615 +858 +accuracy is 0.9798994974874372 + loss is 0.10891057550907135 +859 +accuracy is 0.9798994974874372 + loss is 0.10886362940073013 +860 +accuracy is 0.9798994974874372 + loss is 0.10881678760051727 +861 +accuracy is 0.9798994974874372 + loss is 0.10877002775669098 +862 +accuracy is 0.9798994974874372 + loss is 0.10872326046228409 +863 +accuracy is 0.9798994974874372 + loss is 0.10867670178413391 +864 +accuracy is 0.9798994974874372 + loss is 0.10863018780946732 +865 +accuracy is 0.9798994974874372 + loss is 0.10858379304409027 +866 +accuracy is 0.9798994974874372 + loss is 0.10853744298219681 +867 +accuracy is 0.9798994974874372 + loss is 0.10849110037088394 +868 +accuracy is 0.9798994974874372 + loss is 0.10844490677118301 +869 +accuracy is 0.9798994974874372 + loss is 0.10839878022670746 +870 +accuracy is 0.9798994974874372 + loss is 0.10835272818803787 +871 +accuracy is 0.9798994974874372 + loss is 0.10830678045749664 +872 +accuracy is 0.9798994974874372 + loss is 0.1082608699798584 +873 +accuracy is 0.9798994974874372 + loss is 
0.10821506381034851 +874 +accuracy is 0.9798994974874372 + loss is 0.10816933214664459 +875 +accuracy is 0.9798994974874372 + loss is 0.10812366753816605 +876 +accuracy is 0.9798994974874372 + loss is 0.10807809233665466 +877 +accuracy is 0.9798994974874372 + loss is 0.10803256928920746 +878 +accuracy is 0.9798994974874372 + loss is 0.10798719525337219 +879 +accuracy is 0.9798994974874372 + loss is 0.10794181376695633 +880 +accuracy is 0.9798994974874372 + loss is 0.1078965812921524 +881 +accuracy is 0.9798994974874372 + loss is 0.10785139352083206 +882 +accuracy is 0.9798994974874372 + loss is 0.10780622810125351 +883 +accuracy is 0.9798994974874372 + loss is 0.10776124894618988 +884 +accuracy is 0.9798994974874372 + loss is 0.10771623998880386 +885 +accuracy is 0.9798994974874372 + loss is 0.10767129063606262 +886 +accuracy is 0.9798994974874372 + loss is 0.10762650519609451 +887 +accuracy is 0.9798994974874372 + loss is 0.1075817346572876 +888 +accuracy is 0.9798994974874372 + loss is 0.10753709822893143 +889 +accuracy is 0.9798994974874372 + loss is 0.10749246925115585 +890 +accuracy is 0.9798994974874372 + loss is 0.10744800418615341 +891 +accuracy is 0.9798994974874372 + loss is 0.10740350186824799 +892 +accuracy is 0.9798994974874372 + loss is 0.1073591336607933 +893 +accuracy is 0.9798994974874372 + loss is 0.10731477290391922 +894 +accuracy is 0.9798994974874372 + loss is 0.10727061331272125 +895 +accuracy is 0.9798994974874372 + loss is 0.10722651332616806 +896 +accuracy is 0.9824120603015075 + loss is 0.10718236118555069 +897 +accuracy is 0.9824120603015075 + loss is 0.10713835805654526 +898 +accuracy is 0.9824120603015075 + loss is 0.10709444433450699 +899 +accuracy is 0.9824120603015075 + loss is 0.1070505902171135 +900 +accuracy is 0.9824120603015075 + loss is 0.10700670629739761 +901 +accuracy is 0.9824120603015075 + loss is 0.10696308314800262 +902 +accuracy is 0.9824120603015075 + loss is 0.10691945999860764 +903 +accuracy is 0.9824120603015075 + 
loss is 0.10687589645385742 +904 +accuracy is 0.9824120603015075 + loss is 0.10683237016201019 +905 +accuracy is 0.9824120603015075 + loss is 0.10678889602422714 +906 +accuracy is 0.9824120603015075 + loss is 0.10674550384283066 +907 +accuracy is 0.9824120603015075 + loss is 0.10670226067304611 +908 +accuracy is 0.9824120603015075 + loss is 0.10665903240442276 +909 +accuracy is 0.9824120603015075 + loss is 0.10661587864160538 +910 +accuracy is 0.9824120603015075 + loss is 0.10657274723052979 +911 +accuracy is 0.9824120603015075 + loss is 0.10652974247932434 +912 +accuracy is 0.9824120603015075 + loss is 0.10648681223392487 +913 +accuracy is 0.9824120603015075 + loss is 0.10644397884607315 +914 +accuracy is 0.9824120603015075 + loss is 0.10640111565589905 +915 +accuracy is 0.9824120603015075 + loss is 0.10635844618082047 +916 +accuracy is 0.9824120603015075 + loss is 0.10631577670574188 +917 +accuracy is 0.9824120603015075 + loss is 0.10627322643995285 +918 +accuracy is 0.9824120603015075 + loss is 0.10623067617416382 +919 +accuracy is 0.9824120603015075 + loss is 0.10618821531534195 +920 +accuracy is 0.9824120603015075 + loss is 0.10614582896232605 +921 +accuracy is 0.9824120603015075 + loss is 0.10610352456569672 +922 +accuracy is 0.9824120603015075 + loss is 0.10606129467487335 +923 +accuracy is 0.9824120603015075 + loss is 0.10601905733346939 +924 +accuracy is 0.9824120603015075 + loss is 0.10597697645425797 +925 +accuracy is 0.9824120603015075 + loss is 0.10593487322330475 +926 +accuracy is 0.9824120603015075 + loss is 0.10589288920164108 +927 +accuracy is 0.9824120603015075 + loss is 0.10585096478462219 +928 +accuracy is 0.9824120603015075 + loss is 0.10580913722515106 +929 +accuracy is 0.9824120603015075 + loss is 0.1057673916220665 +930 +accuracy is 0.9824120603015075 + loss is 0.10572564601898193 +931 +accuracy is 0.9824120603015075 + loss is 0.10568401217460632 +932 +accuracy is 0.9824120603015075 + loss is 0.1056424230337143 +933 +accuracy is 
0.9824120603015075 + loss is 0.10560093075037003 +934 +accuracy is 0.9824120603015075 + loss is 0.10555942356586456 +935 +accuracy is 0.9824120603015075 + loss is 0.10551806539297104 +936 +accuracy is 0.9824120603015075 + loss is 0.10547672212123871 +937 +accuracy is 0.9824120603015075 + loss is 0.10543544590473175 +938 +accuracy is 0.9824120603015075 + loss is 0.10539426654577255 +939 +accuracy is 0.9824120603015075 + loss is 0.10535316169261932 +940 +accuracy is 0.9824120603015075 + loss is 0.1053120493888855 +941 +accuracy is 0.9824120603015075 + loss is 0.10527116805315018 +942 +accuracy is 0.9824120603015075 + loss is 0.10523015260696411 +943 +accuracy is 0.9824120603015075 + loss is 0.10518927127122879 +944 +accuracy is 0.9824120603015075 + loss is 0.10514849424362183 +945 +accuracy is 0.9824120603015075 + loss is 0.10510778427124023 +946 +accuracy is 0.9824120603015075 + loss is 0.10506709665060043 +947 +accuracy is 0.9824120603015075 + loss is 0.10502640157938004 +948 +accuracy is 0.9824120603015075 + loss is 0.10498592257499695 +949 +accuracy is 0.9824120603015075 + loss is 0.10494548827409744 +950 +accuracy is 0.9824120603015075 + loss is 0.10490500181913376 +951 +accuracy is 0.9824120603015075 + loss is 0.10486461222171783 +952 +accuracy is 0.9824120603015075 + loss is 0.10482435673475266 +953 +accuracy is 0.9824120603015075 + loss is 0.10478407144546509 +954 +accuracy is 0.9824120603015075 + loss is 0.10474389791488647 +955 +accuracy is 0.9849246231155779 + loss is 0.10470381379127502 +956 +accuracy is 0.9849246231155779 + loss is 0.10466381162405014 +957 +accuracy is 0.9849246231155779 + loss is 0.10462374985218048 +958 +accuracy is 0.9849246231155779 + loss is 0.10458379983901978 +959 +accuracy is 0.9849246231155779 + loss is 0.10454406589269638 +960 +accuracy is 0.9849246231155779 + loss is 0.10450422763824463 +961 +accuracy is 0.9849246231155779 + loss is 0.10446444898843765 +962 +accuracy is 0.9849246231155779 + loss is 0.10442478209733963 +963 
+accuracy is 0.9849246231155779 + loss is 0.1043851301074028 +964 +accuracy is 0.9849246231155779 + loss is 0.10434561222791672 +965 +accuracy is 0.9849246231155779 + loss is 0.10430610179901123 +966 +accuracy is 0.9849246231155779 + loss is 0.10426667332649231 +967 +accuracy is 0.9849246231155779 + loss is 0.10422725975513458 +968 +accuracy is 0.9849246231155779 + loss is 0.10418791323900223 +969 +accuracy is 0.9849246231155779 + loss is 0.104148730635643 +970 +accuracy is 0.9849246231155779 + loss is 0.10410948097705841 +971 +accuracy is 0.9849246231155779 + loss is 0.10407035797834396 +972 +accuracy is 0.9849246231155779 + loss is 0.1040312796831131 +973 +accuracy is 0.9849246231155779 + loss is 0.1039922833442688 +974 +accuracy is 0.9849246231155779 + loss is 0.1039532795548439 +975 +accuracy is 0.9849246231155779 + loss is 0.10391440242528915 +976 +accuracy is 0.9849246231155779 + loss is 0.1038755550980568 +977 +accuracy is 0.9849246231155779 + loss is 0.10383675992488861 +978 +accuracy is 0.9849246231155779 + loss is 0.10379806905984879 +979 +accuracy is 0.9849246231155779 + loss is 0.10375937074422836 +980 +accuracy is 0.9849246231155779 + loss is 0.1037207767367363 +981 +accuracy is 0.9849246231155779 + loss is 0.1036822572350502 +982 +accuracy is 0.9849246231155779 + loss is 0.10364372283220291 +983 +accuracy is 0.9849246231155779 + loss is 0.10360532999038696 +984 +accuracy is 0.9849246231155779 + loss is 0.10356692224740982 +985 +accuracy is 0.9849246231155779 + loss is 0.10352863371372223 +986 +accuracy is 0.9849246231155779 + loss is 0.10349032282829285 +987 +accuracy is 0.9849246231155779 + loss is 0.10345213860273361 +988 +accuracy is 0.9849246231155779 + loss is 0.10341402888298035 +989 +accuracy is 0.9849246231155779 + loss is 0.10337592661380768 +990 +accuracy is 0.9849246231155779 + loss is 0.10333790630102158 +991 +accuracy is 0.9849246231155779 + loss is 0.10329990088939667 +992 +accuracy is 0.9849246231155779 + loss is 0.10326196998357773 
# -*- coding: utf-8 -*-
"""Count the words in a Markdown file (unit2 exercise)."""

import os.path


def count_word(file):
    """Count whitespace-separated words in a text file.

    Markdown heading markers ('#') are replaced by spaces before counting,
    so '#Title' still contributes the word 'Title' but the marker itself
    is never counted as a word.

    Parameters:
        file: path to the text file to count.

    Returns:
        int: total number of words in the file.
    """
    total = 0
    # Explicit UTF-8 so the count does not depend on the platform's
    # default locale encoding.
    with open(file, 'r', encoding='utf-8') as f:
        for line in f:
            # str.split() with no argument already splits on any run of
            # whitespace (including '\n'), so only '#' needs stripping.
            total += len(line.replace('#', ' ').split())
    return total


file = './readme.md'

if os.path.isfile(file):
    words = count_word(file)
    print('{} words in file'.format(words))
else:
    print('it is not a file')
def count_char(fn):
    """Return the total number of characters in the file at path *fn*.

    Newline characters are included in the count (each line's full length
    is summed).

    Parameters:
        fn: path to the file to measure.

    Returns:
        int: character count when *fn* is a regular file;
        None: when *fn* is not a regular file (explicit now — the original
        fell off the end of the function and returned None implicitly).
    """
    import os.path
    if os.path.isfile(fn):
        # Explicit UTF-8 so the character count is independent of the
        # platform's default locale encoding.
        with open(fn, 'r', encoding='utf-8') as fh:
            return sum(len(line) for line in fh)
    return None
+ +Video 1: Introduction to Scrum + +https://www.acfun.cn/v/ac13145403 + +Video 2: The Sprint in Scrum + +https://www.acfun.cn/v/ac13145466 + +Video 3: Roles, Meetings, and Documents in Scrum + +https://www.acfun.cn/v/ac13145616 + +Slide: English-Redistributable-Intro-Scrum.pdf + +## Introduction to Python + +Install Python using miniconda + +https://docs.conda.io/en/latest/miniconda.html + +Video 1: Variables + +https://www.acfun.cn/v/ac13164403_1 + +Video 2: Control flow, branching + +https://www.acfun.cn/v/ac13164403_2 + +video 3: List, tuple, and dict + +https://www.acfun.cn/v/ac13164403_3 + +Video 4: Control flow, loop + +https://www.acfun.cn/v/ac13164403_4 + +Video 5: Function and Module + +https://www.acfun.cn/v/ac13164403_5 + + +### Exercise + +1. Write a Python code to count the words in this readme. +2. Use Git to commit your code in unit2 directory and push to your repository. Also comment on the issue with the link to your code. + +## Reference + +1. Kenneth S. Rubin, Essential Scrum: A Practical Guide to the Most Popular Agile Process, 2012 +2. Eric Matthes, Python Crash Course, 2nd Edition: A Hands-On, Project-Based Introduction to Programming (2nd Edition), 2019 +3. Mark Pilgrim, Dive Into Python 3, 2009 + +## Notes + +1. The same set of video could also be found on bilibili.com. +2. Students without computers at your disposal are suggested to read till chapter 7 of "Dive Into Python 3", which is available online at https://diveintopython3.problemsolving.io, or section 1, 2, 3, 4, 5, 9 of Official Python tutorial at https://docs.python.org/3/tutorial/. +3. This unit doesn't require students to team up. 
# -*- coding: utf-8 -*-
"""Greedy packing of m circles in the square [-1, 1] x [-1, 1].

Approach: circles are placed one at a time; each new circle takes the
largest radius feasible in the remaining free area.  The feasible radius
at a candidate centre is the minimum of the distances to the four walls
and to the boundaries of the circles already placed.  scipy's SLSQP
optimizer searches the square for the centre maximizing that radius.

Observed results (sum of r^2): m=10 -> 1.14, m=50 -> 1.21, m=100 -> 1.23.
Saved figures live in unit3.
"""

import random

import numpy as np
from scipy.optimize import minimize


class circle:
    """A circle with a radius and a centre (x, y)."""

    def __init__(self, radius=0, x=0, y=0):
        self.radius = radius
        self.x = x
        self.y = y

    def print_circle(self):
        """Print the circle's radius and centre."""
        print('radius={}, coordinate=({},{})'.format(self.radius, self.x, self.y))

    def distance(self, c2):
        """Return the Euclidean distance between the centres of self and c2."""
        return ((self.x - c2.x) ** 2 + (self.y - c2.y) ** 2) ** 0.5

    def ifcross(self, c_list):
        """Return 0 if self overlaps any circle in c_list, else 1."""
        for c2 in c_list:
            if self.distance(c2) < self.radius + c2.radius:
                return 0
        return 1

    def ifexcess(self):
        """Return 0 if self sticks out of the square [-1, 1]^2, else 1."""
        r, x, y = self.radius, self.x, self.y
        if x + r > 1 or x - r < -1 or y + r > 1 or y - r < -1:
            return 0
        return 1


def MaxR(c1, c_list):
    """Largest feasible radius for a circle centred at c1's centre.

    The feasible radius is the minimum of the distances from the centre to
    the four walls of the square and to the boundary of every existing
    circle in c_list.  c1's own radius is ignored — the radius is fully
    determined by the centre.
    """
    x, y = c1.x, c1.y
    candidates = [1 - x, 1 + x, 1 - y, 1 + y]
    candidates.extend(c1.distance(c2) - c2.radius for c2 in c_list)
    return min(candidates)


def func(c_list):
    """Return the objective for the optimizer: 1 - feasible radius at p.

    The returned callable takes the 2-vector p = (x, y).  The original
    version also carried a radius variable in the parameter vector, but
    MaxR never reads it, so that dead dimension has been dropped.
    """
    return lambda p: 1 - MaxR(circle(0, p[0], p[1]), c_list)


def opt_center(c, c_list):
    """Move c's centre to (locally) maximize the feasible radius.

    Runs SLSQP from c's current centre, constrained to the square, then
    sets c.radius to the feasible radius at the optimized centre.
    Returns the mutated circle c.
    """
    res = minimize(func(c_list), [c.x, c.y], method='SLSQP',
                   bounds=((-1, 1), (-1, 1)))
    c.x, c.y = res.x[0], res.x[1]
    c.radius = MaxR(c, c_list)
    return c


def FindMaxCircuit(m):
    """Greedily place m circles, each with a locally maximal radius.

    For every circle a centre is sampled uniformly until it lies outside
    all circles placed so far, then the centre is locally optimized.
    Returns the list of placed circles.  Results are random (no seed).
    """
    c_list = []
    for _ in range(m):
        # Resample until the candidate point is not inside an existing
        # circle (radius 0, so ifcross tests point containment).
        c = circle(0, random.uniform(-1, 1), random.uniform(-1, 1))
        while not c.ifcross(c_list):
            c = circle(0, random.uniform(-1, 1), random.uniform(-1, 1))
        c_list.append(opt_center(c, c_list))
    return c_list


def plot(c_list):
    """Draw every circle in c_list inside the unit square.

    matplotlib is imported lazily so the rest of the module stays usable
    on headless machines without a plotting backend.
    """
    import matplotlib.pyplot as plt
    plt.figure()
    # plt.axes() re-use for the current axes is deprecated; use gca().
    plt.gca().set_aspect('equal')
    plt.xlim([-1, 1])
    plt.ylim([-1, 1])
    theta = np.linspace(0, 2 * np.pi, 50)
    for c in c_list:
        plt.plot(c.x + c.radius * np.cos(theta),
                 c.y + c.radius * np.sin(theta), 'b')
    plt.show()


if __name__ == "__main__":
    m = 10
    c_list = FindMaxCircuit(m)
    RR = 0
    for c in c_list:
        RR += c.radius ** 2
        c.print_circle()
    print('for {} circles, the maximize sum of r^2 = {}'.format(m, RR))

    plot(c_list)