티스토리 뷰

04_Deep_Learning_02_MNIST_TF_2.1_Dropout_1019
In [3]:
# Adam optimizer: generally performs a bit better than plain SGD
import tensorflow as tf
print(tf.__version__)

# kernel_initializer -> initializes the weight matrix W
# he_uniform, he_normal (normal-distribution variant) — He init, suited to ReLU

# Multinomial classification of digits 0-9 -> 10 logistic units, 10 outputs
# sparse_categorical_crossentropy => the categorical loss for multinomial targets
# sparse : integer labels, so no one-hot encoding is required
# verbose = 1 : show per-epoch progress output during fit
2.1.0
In [5]:
import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler        # normalization
from sklearn.model_selection import train_test_split  # train / test split
from sklearn.metrics import classification_report
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.optimizers import Adam  # Adam optimizer: a bit better than SGD

## Raw data loading
df = pd.read_csv('./data/mnist/train.csv')
# display(df.head(), df.shape)  # (42000, 785)

##### No missing values or outliers in this dataset #####

## Data split
# test_size=0.3   : hold out 30% of the rows as the test set
# random_state=0  : fixes the shuffle so the split is reproducible (seed)
features = df.drop(columns='label')
labels = df['label']
x_data_train, x_data_test, t_data_train, t_data_test = train_test_split(
    features, labels, test_size=0.3, random_state=0)

# Min-Max normalization: fit the scaler on the training set only,
# then apply the same transform to both splits.
scaler = MinMaxScaler()  # alternative: StandardScaler()
x_data_train_norm = scaler.fit_transform(x_data_train)
x_data_test_norm = scaler.transform(x_data_test)

# Free the unscaled frames — only the normalized arrays are used below.
del x_data_train, x_data_test
In [6]:
x_data_train_norm.shape
Out[6]:
(29400, 784)
In [7]:
## TF 2.1 implementation

# kernel_initializer -> initializes the weight matrix W.
# he_uniform / he_normal (normal-distribution variant): He init, suited to ReLU.
# Dropout(0.3) after every hidden layer to reduce overfitting.
model = Sequential([
    Dense(256,
          activation='relu',
          kernel_initializer='he_uniform',
          input_shape=(x_data_train_norm.shape[1],)),  # 784 input pixels per image
    Dropout(0.3),
    Dense(128,
          activation='relu',
          kernel_initializer='he_uniform'),
    Dropout(0.3),
    Dense(128,
          activation='relu',
          kernel_initializer='he_uniform'),
    Dropout(0.3),
    # Multinomial classification of digits 0-9 -> 10 softmax output units
    Dense(10,
          activation='softmax',
          kernel_initializer='he_uniform'),
])

# sparse_categorical_crossentropy => the categorical loss for multinomial targets;
# 'sparse' means the integer labels do not need one-hot encoding.
model.compile(optimizer=Adam(learning_rate=1e-3),
              loss='sparse_categorical_crossentropy',
              metrics=['sparse_categorical_accuracy'])

# verbose=1 : print a progress bar per epoch
# validation_split=0.3 : hold out 30% of the training data for validation
history = model.fit(x_data_train_norm,
                    t_data_train,
                    epochs=100,
                    verbose=1,
                    batch_size=128,
                    validation_split=0.3)
Train on 20580 samples, validate on 8820 samples
Epoch 1/100
20580/20580 [==============================] - 2s 114us/sample - loss: 0.8043 - sparse_categorical_accuracy: 0.7350 - val_loss: 0.2798 - val_sparse_categorical_accuracy: 0.9133
Epoch 2/100
20580/20580 [==============================] - 1s 57us/sample - loss: 0.3231 - sparse_categorical_accuracy: 0.9020 - val_loss: 0.1962 - val_sparse_categorical_accuracy: 0.9416
Epoch 3/100
20580/20580 [==============================] - 1s 57us/sample - loss: 0.2255 - sparse_categorical_accuracy: 0.9328 - val_loss: 0.1758 - val_sparse_categorical_accuracy: 0.9461
Epoch 4/100
20580/20580 [==============================] - 1s 53us/sample - loss: 0.1811 - sparse_categorical_accuracy: 0.9463 - val_loss: 0.1543 - val_sparse_categorical_accuracy: 0.9544
Epoch 5/100
20580/20580 [==============================] - 1s 54us/sample - loss: 0.1498 - sparse_categorical_accuracy: 0.9559 - val_loss: 0.1478 - val_sparse_categorical_accuracy: 0.9598
Epoch 6/100
20580/20580 [==============================] - 1s 53us/sample - loss: 0.1266 - sparse_categorical_accuracy: 0.9637 - val_loss: 0.1330 - val_sparse_categorical_accuracy: 0.9629
Epoch 7/100
20580/20580 [==============================] - 1s 52us/sample - loss: 0.1080 - sparse_categorical_accuracy: 0.9672 - val_loss: 0.1403 - val_sparse_categorical_accuracy: 0.9627
Epoch 8/100
20580/20580 [==============================] - 1s 52us/sample - loss: 0.0905 - sparse_categorical_accuracy: 0.9717 - val_loss: 0.1365 - val_sparse_categorical_accuracy: 0.9646
Epoch 9/100
20580/20580 [==============================] - 1s 53us/sample - loss: 0.0884 - sparse_categorical_accuracy: 0.9733 - val_loss: 0.1221 - val_sparse_categorical_accuracy: 0.9685
Epoch 10/100
20580/20580 [==============================] - 1s 51us/sample - loss: 0.0820 - sparse_categorical_accuracy: 0.9745 - val_loss: 0.1288 - val_sparse_categorical_accuracy: 0.9678
Epoch 11/100
20580/20580 [==============================] - 1s 50us/sample - loss: 0.0722 - sparse_categorical_accuracy: 0.9771 - val_loss: 0.1236 - val_sparse_categorical_accuracy: 0.9695
Epoch 12/100
20580/20580 [==============================] - 1s 51us/sample - loss: 0.0650 - sparse_categorical_accuracy: 0.9803 - val_loss: 0.1271 - val_sparse_categorical_accuracy: 0.9698
Epoch 13/100
20580/20580 [==============================] - 1s 50us/sample - loss: 0.0704 - sparse_categorical_accuracy: 0.9768 - val_loss: 0.1343 - val_sparse_categorical_accuracy: 0.9677
Epoch 14/100
20580/20580 [==============================] - 1s 51us/sample - loss: 0.0559 - sparse_categorical_accuracy: 0.9823 - val_loss: 0.1385 - val_sparse_categorical_accuracy: 0.9688
Epoch 15/100
20580/20580 [==============================] - 1s 51us/sample - loss: 0.0564 - sparse_categorical_accuracy: 0.9824 - val_loss: 0.1327 - val_sparse_categorical_accuracy: 0.9703
Epoch 16/100
20580/20580 [==============================] - 1s 50us/sample - loss: 0.0507 - sparse_categorical_accuracy: 0.9836 - val_loss: 0.1345 - val_sparse_categorical_accuracy: 0.9693
Epoch 17/100
20580/20580 [==============================] - 1s 49us/sample - loss: 0.0434 - sparse_categorical_accuracy: 0.9868 - val_loss: 0.1277 - val_sparse_categorical_accuracy: 0.9709
Epoch 18/100
20580/20580 [==============================] - 1s 51us/sample - loss: 0.0468 - sparse_categorical_accuracy: 0.9856 - val_loss: 0.1397 - val_sparse_categorical_accuracy: 0.9692
Epoch 19/100
20580/20580 [==============================] - 1s 49us/sample - loss: 0.0406 - sparse_categorical_accuracy: 0.9868 - val_loss: 0.1300 - val_sparse_categorical_accuracy: 0.9709
Epoch 20/100
20580/20580 [==============================] - 1s 49us/sample - loss: 0.0411 - sparse_categorical_accuracy: 0.9875 - val_loss: 0.1326 - val_sparse_categorical_accuracy: 0.9711
Epoch 21/100
20580/20580 [==============================] - 1s 49us/sample - loss: 0.0412 - sparse_categorical_accuracy: 0.9875 - val_loss: 0.1391 - val_sparse_categorical_accuracy: 0.9702
Epoch 22/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0381 - sparse_categorical_accuracy: 0.9872 - val_loss: 0.1394 - val_sparse_categorical_accuracy: 0.9709
Epoch 23/100
20580/20580 [==============================] - 1s 49us/sample - loss: 0.0352 - sparse_categorical_accuracy: 0.9893 - val_loss: 0.1309 - val_sparse_categorical_accuracy: 0.9729
Epoch 24/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0331 - sparse_categorical_accuracy: 0.9898 - val_loss: 0.1452 - val_sparse_categorical_accuracy: 0.9701
Epoch 25/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0329 - sparse_categorical_accuracy: 0.9897 - val_loss: 0.1328 - val_sparse_categorical_accuracy: 0.9729
Epoch 26/100
20580/20580 [==============================] - 1s 47us/sample - loss: 0.0279 - sparse_categorical_accuracy: 0.9921 - val_loss: 0.1447 - val_sparse_categorical_accuracy: 0.9715
Epoch 27/100
20580/20580 [==============================] - 1s 47us/sample - loss: 0.0285 - sparse_categorical_accuracy: 0.9912 - val_loss: 0.1482 - val_sparse_categorical_accuracy: 0.9703
Epoch 28/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0323 - sparse_categorical_accuracy: 0.9898 - val_loss: 0.1829 - val_sparse_categorical_accuracy: 0.9651
Epoch 29/100
20580/20580 [==============================] - 1s 52us/sample - loss: 0.0346 - sparse_categorical_accuracy: 0.9897 - val_loss: 0.1406 - val_sparse_categorical_accuracy: 0.9697
Epoch 30/100
20580/20580 [==============================] - 1s 53us/sample - loss: 0.0310 - sparse_categorical_accuracy: 0.9911 - val_loss: 0.1383 - val_sparse_categorical_accuracy: 0.9715
Epoch 31/100
20580/20580 [==============================] - 1s 50us/sample - loss: 0.0320 - sparse_categorical_accuracy: 0.9900 - val_loss: 0.1470 - val_sparse_categorical_accuracy: 0.9696
Epoch 32/100
20580/20580 [==============================] - 1s 49us/sample - loss: 0.0293 - sparse_categorical_accuracy: 0.9907 - val_loss: 0.1379 - val_sparse_categorical_accuracy: 0.9721
Epoch 33/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0340 - sparse_categorical_accuracy: 0.9903 - val_loss: 0.1425 - val_sparse_categorical_accuracy: 0.9719
Epoch 34/100
20580/20580 [==============================] - 1s 46us/sample - loss: 0.0229 - sparse_categorical_accuracy: 0.9927 - val_loss: 0.1395 - val_sparse_categorical_accuracy: 0.9730
Epoch 35/100
20580/20580 [==============================] - 1s 46us/sample - loss: 0.0258 - sparse_categorical_accuracy: 0.9926 - val_loss: 0.1377 - val_sparse_categorical_accuracy: 0.9722
Epoch 36/100
20580/20580 [==============================] - 1s 46us/sample - loss: 0.0253 - sparse_categorical_accuracy: 0.9922 - val_loss: 0.1382 - val_sparse_categorical_accuracy: 0.9718
Epoch 37/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0237 - sparse_categorical_accuracy: 0.9927 - val_loss: 0.1448 - val_sparse_categorical_accuracy: 0.9713
Epoch 38/100
20580/20580 [==============================] - 1s 46us/sample - loss: 0.0212 - sparse_categorical_accuracy: 0.9925 - val_loss: 0.1601 - val_sparse_categorical_accuracy: 0.9710
Epoch 39/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0220 - sparse_categorical_accuracy: 0.9928 - val_loss: 0.1557 - val_sparse_categorical_accuracy: 0.9728
Epoch 40/100
20580/20580 [==============================] - 1s 46us/sample - loss: 0.0251 - sparse_categorical_accuracy: 0.9919 - val_loss: 0.1395 - val_sparse_categorical_accuracy: 0.9740
Epoch 41/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0234 - sparse_categorical_accuracy: 0.9929 - val_loss: 0.1464 - val_sparse_categorical_accuracy: 0.9711
Epoch 42/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0261 - sparse_categorical_accuracy: 0.9917 - val_loss: 0.1515 - val_sparse_categorical_accuracy: 0.9707
Epoch 43/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0264 - sparse_categorical_accuracy: 0.9911 - val_loss: 0.1406 - val_sparse_categorical_accuracy: 0.9723
Epoch 44/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0225 - sparse_categorical_accuracy: 0.9933 - val_loss: 0.1580 - val_sparse_categorical_accuracy: 0.9713
Epoch 45/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0242 - sparse_categorical_accuracy: 0.9925 - val_loss: 0.1430 - val_sparse_categorical_accuracy: 0.9735
Epoch 46/100
20580/20580 [==============================] - 1s 44us/sample - loss: 0.0214 - sparse_categorical_accuracy: 0.9932 - val_loss: 0.1406 - val_sparse_categorical_accuracy: 0.9718
Epoch 47/100
20580/20580 [==============================] - 1s 44us/sample - loss: 0.0197 - sparse_categorical_accuracy: 0.9936 - val_loss: 0.1513 - val_sparse_categorical_accuracy: 0.9724
Epoch 48/100
20580/20580 [==============================] - 1s 44us/sample - loss: 0.0223 - sparse_categorical_accuracy: 0.9931 - val_loss: 0.1483 - val_sparse_categorical_accuracy: 0.9726
Epoch 49/100
20580/20580 [==============================] - 1s 46us/sample - loss: 0.0196 - sparse_categorical_accuracy: 0.9944 - val_loss: 0.1437 - val_sparse_categorical_accuracy: 0.9735
Epoch 50/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0218 - sparse_categorical_accuracy: 0.9929 - val_loss: 0.1533 - val_sparse_categorical_accuracy: 0.9734
Epoch 51/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0217 - sparse_categorical_accuracy: 0.9929 - val_loss: 0.1468 - val_sparse_categorical_accuracy: 0.9726
Epoch 52/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0182 - sparse_categorical_accuracy: 0.9947 - val_loss: 0.1456 - val_sparse_categorical_accuracy: 0.9740
Epoch 53/100
20580/20580 [==============================] - 1s 46us/sample - loss: 0.0221 - sparse_categorical_accuracy: 0.9930 - val_loss: 0.1557 - val_sparse_categorical_accuracy: 0.9702
Epoch 54/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0170 - sparse_categorical_accuracy: 0.9953 - val_loss: 0.1523 - val_sparse_categorical_accuracy: 0.9719
Epoch 55/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0195 - sparse_categorical_accuracy: 0.9941 - val_loss: 0.1641 - val_sparse_categorical_accuracy: 0.9713
Epoch 56/100
20580/20580 [==============================] - 1s 45us/sample - loss: 0.0167 - sparse_categorical_accuracy: 0.9948 - val_loss: 0.1731 - val_sparse_categorical_accuracy: 0.9724
Epoch 57/100
20580/20580 [==============================] - 1s 47us/sample - loss: 0.0228 - sparse_categorical_accuracy: 0.9930 - val_loss: 0.1537 - val_sparse_categorical_accuracy: 0.9727
Epoch 58/100
20580/20580 [==============================] - 1s 49us/sample - loss: 0.0156 - sparse_categorical_accuracy: 0.9950 - val_loss: 0.1714 - val_sparse_categorical_accuracy: 0.9724
Epoch 59/100
20580/20580 [==============================] - 1s 47us/sample - loss: 0.0135 - sparse_categorical_accuracy: 0.9960 - val_loss: 0.1669 - val_sparse_categorical_accuracy: 0.9717
Epoch 60/100
20580/20580 [==============================] - 1s 46us/sample - loss: 0.0173 - sparse_categorical_accuracy: 0.9939 - val_loss: 0.1686 - val_sparse_categorical_accuracy: 0.9730
Epoch 61/100
20580/20580 [==============================] - 1s 46us/sample - loss: 0.0185 - sparse_categorical_accuracy: 0.9942 - val_loss: 0.1637 - val_sparse_categorical_accuracy: 0.9719
Epoch 62/100
20580/20580 [==============================] - 1s 46us/sample - loss: 0.0183 - sparse_categorical_accuracy: 0.9943 - val_loss: 0.1662 - val_sparse_categorical_accuracy: 0.9720
Epoch 63/100
20580/20580 [==============================] - 1s 47us/sample - loss: 0.0177 - sparse_categorical_accuracy: 0.9946 - val_loss: 0.1762 - val_sparse_categorical_accuracy: 0.9715
Epoch 64/100
20580/20580 [==============================] - 1s 47us/sample - loss: 0.0184 - sparse_categorical_accuracy: 0.9946 - val_loss: 0.1486 - val_sparse_categorical_accuracy: 0.9730
Epoch 65/100
20580/20580 [==============================] - 1s 47us/sample - loss: 0.0189 - sparse_categorical_accuracy: 0.9939 - val_loss: 0.1640 - val_sparse_categorical_accuracy: 0.9711
Epoch 66/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0188 - sparse_categorical_accuracy: 0.9940 - val_loss: 0.1803 - val_sparse_categorical_accuracy: 0.9702
Epoch 67/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0130 - sparse_categorical_accuracy: 0.9959 - val_loss: 0.1694 - val_sparse_categorical_accuracy: 0.9706
Epoch 68/100
20580/20580 [==============================] - 1s 47us/sample - loss: 0.0129 - sparse_categorical_accuracy: 0.9964 - val_loss: 0.1629 - val_sparse_categorical_accuracy: 0.9726
Epoch 69/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0148 - sparse_categorical_accuracy: 0.9951 - val_loss: 0.1736 - val_sparse_categorical_accuracy: 0.9719
Epoch 70/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0167 - sparse_categorical_accuracy: 0.9947 - val_loss: 0.1755 - val_sparse_categorical_accuracy: 0.9724
Epoch 71/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0159 - sparse_categorical_accuracy: 0.9957 - val_loss: 0.1616 - val_sparse_categorical_accuracy: 0.9730
Epoch 72/100
20580/20580 [==============================] - 1s 49us/sample - loss: 0.0174 - sparse_categorical_accuracy: 0.9951 - val_loss: 0.1599 - val_sparse_categorical_accuracy: 0.9723
Epoch 73/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0150 - sparse_categorical_accuracy: 0.9954 - val_loss: 0.1656 - val_sparse_categorical_accuracy: 0.9723
Epoch 74/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0168 - sparse_categorical_accuracy: 0.9948 - val_loss: 0.1734 - val_sparse_categorical_accuracy: 0.9718
Epoch 75/100
20580/20580 [==============================] - 1s 50us/sample - loss: 0.0146 - sparse_categorical_accuracy: 0.9952 - val_loss: 0.1757 - val_sparse_categorical_accuracy: 0.9713
Epoch 76/100
20580/20580 [==============================] - 1s 49us/sample - loss: 0.0187 - sparse_categorical_accuracy: 0.9943 - val_loss: 0.1796 - val_sparse_categorical_accuracy: 0.9714
Epoch 77/100
20580/20580 [==============================] - 1s 51us/sample - loss: 0.0162 - sparse_categorical_accuracy: 0.9952 - val_loss: 0.1581 - val_sparse_categorical_accuracy: 0.9717
Epoch 78/100
20580/20580 [==============================] - 1s 51us/sample - loss: 0.0134 - sparse_categorical_accuracy: 0.9960 - val_loss: 0.1798 - val_sparse_categorical_accuracy: 0.9705
Epoch 79/100
20580/20580 [==============================] - 1s 50us/sample - loss: 0.0117 - sparse_categorical_accuracy: 0.9963 - val_loss: 0.1882 - val_sparse_categorical_accuracy: 0.9722
Epoch 80/100
20580/20580 [==============================] - 1s 52us/sample - loss: 0.0161 - sparse_categorical_accuracy: 0.9948 - val_loss: 0.1701 - val_sparse_categorical_accuracy: 0.9737
Epoch 81/100
20580/20580 [==============================] - 1s 51us/sample - loss: 0.0163 - sparse_categorical_accuracy: 0.9952 - val_loss: 0.1726 - val_sparse_categorical_accuracy: 0.9729
Epoch 82/100
20580/20580 [==============================] - 1s 51us/sample - loss: 0.0144 - sparse_categorical_accuracy: 0.9957 - val_loss: 0.1596 - val_sparse_categorical_accuracy: 0.9735
Epoch 83/100
20580/20580 [==============================] - 1s 51us/sample - loss: 0.0155 - sparse_categorical_accuracy: 0.9955 - val_loss: 0.1620 - val_sparse_categorical_accuracy: 0.9715
Epoch 84/100
20580/20580 [==============================] - 1s 52us/sample - loss: 0.0105 - sparse_categorical_accuracy: 0.9965 - val_loss: 0.1663 - val_sparse_categorical_accuracy: 0.9727
Epoch 85/100
20580/20580 [==============================] - 1s 52us/sample - loss: 0.0147 - sparse_categorical_accuracy: 0.9960 - val_loss: 0.1575 - val_sparse_categorical_accuracy: 0.9747
Epoch 86/100
20580/20580 [==============================] - 1s 52us/sample - loss: 0.0160 - sparse_categorical_accuracy: 0.9955 - val_loss: 0.1576 - val_sparse_categorical_accuracy: 0.9751
Epoch 87/100
20580/20580 [==============================] - 1s 52us/sample - loss: 0.0131 - sparse_categorical_accuracy: 0.9959 - val_loss: 0.1602 - val_sparse_categorical_accuracy: 0.9737
Epoch 88/100
20580/20580 [==============================] - 1s 52us/sample - loss: 0.0103 - sparse_categorical_accuracy: 0.9969 - val_loss: 0.1725 - val_sparse_categorical_accuracy: 0.9740
Epoch 89/100
20580/20580 [==============================] - 1s 52us/sample - loss: 0.0137 - sparse_categorical_accuracy: 0.9958 - val_loss: 0.1740 - val_sparse_categorical_accuracy: 0.9728
Epoch 90/100
20580/20580 [==============================] - 1s 48us/sample - loss: 0.0117 - sparse_categorical_accuracy: 0.9967 - val_loss: 0.1619 - val_sparse_categorical_accuracy: 0.9745
Epoch 91/100
20580/20580 [==============================] - 1s 47us/sample - loss: 0.0109 - sparse_categorical_accuracy: 0.9974 - val_loss: 0.1810 - val_sparse_categorical_accuracy: 0.9738
Epoch 92/100
20580/20580 [==============================] - 1s 47us/sample - loss: 0.0113 - sparse_categorical_accuracy: 0.9963 - val_loss: 0.2123 - val_sparse_categorical_accuracy: 0.9698
Epoch 93/100
20580/20580 [==============================] - 1s 54us/sample - loss: 0.0164 - sparse_categorical_accuracy: 0.9951 - val_loss: 0.1853 - val_sparse_categorical_accuracy: 0.9718
Epoch 94/100
20580/20580 [==============================] - 1s 55us/sample - loss: 0.0153 - sparse_categorical_accuracy: 0.9949 - val_loss: 0.1841 - val_sparse_categorical_accuracy: 0.9735
Epoch 95/100
20580/20580 [==============================] - 1s 56us/sample - loss: 0.0165 - sparse_categorical_accuracy: 0.9954 - val_loss: 0.1544 - val_sparse_categorical_accuracy: 0.9731
Epoch 96/100
20580/20580 [==============================] - 1s 56us/sample - loss: 0.0116 - sparse_categorical_accuracy: 0.9967 - val_loss: 0.1668 - val_sparse_categorical_accuracy: 0.9738
Epoch 97/100
20580/20580 [==============================] - 1s 69us/sample - loss: 0.0120 - sparse_categorical_accuracy: 0.9966 - val_loss: 0.1724 - val_sparse_categorical_accuracy: 0.9731
Epoch 98/100
20580/20580 [==============================] - 1s 64us/sample - loss: 0.0104 - sparse_categorical_accuracy: 0.9971 - val_loss: 0.1833 - val_sparse_categorical_accuracy: 0.9726
Epoch 99/100
20580/20580 [==============================] - 1s 63us/sample - loss: 0.0123 - sparse_categorical_accuracy: 0.9962 - val_loss: 0.1728 - val_sparse_categorical_accuracy: 0.9728
Epoch 100/100
20580/20580 [==============================] - 1s 60us/sample - loss: 0.0138 - sparse_categorical_accuracy: 0.9959 - val_loss: 0.1857 - val_sparse_categorical_accuracy: 0.9734
In [8]:
import matplotlib.pyplot as plt

# Show which metrics were recorded during training.
print(history.history.keys())

# Training accuracy (red) vs. validation accuracy (blue) per epoch.
fig, ax = plt.subplots()
ax.plot(history.history['sparse_categorical_accuracy'], color='r')
ax.plot(history.history['val_sparse_categorical_accuracy'], color='b')
plt.show()
dict_keys(['loss', 'sparse_categorical_accuracy', 'val_loss', 'val_sparse_categorical_accuracy'])
댓글
공지사항
최근에 올라온 글
최근에 달린 댓글
Total
Today
Yesterday
«   2024/09   »
1 2 3 4 5 6 7
8 9 10 11 12 13 14
15 16 17 18 19 20 21
22 23 24 25 26 27 28
29 30
글 보관함