
import tensorflow as tf
(X_train, y_train), (X_test, y_test) = tf.keras.datasets.mnist.load_data()
tf.nn.sigmoid_cross_entropy_with_logits # a loss op that consumes logits directly

# logit: the raw output value before the activation is applied
tf.keras.losses.binary_crossentropy # function: the functional paradigm
tf.keras.losses.BinaryCrossentropy # class: object-oriented (with the functional paradigm hidden inside)
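The two styles are interchangeable in practice. A minimal side-by-side sketch (the label and logit tensors here are made up for illustration):

y_true = tf.constant([0., 1., 1.])
logits = tf.constant([-2.0, 1.5, 0.3]) # raw outputs, before sigmoid

# function style: call it directly, passing options as arguments
f_loss = tf.keras.losses.binary_crossentropy(y_true, logits, from_logits=True)

# class style: fix the options at construction time, then call the instance
bce = tf.keras.losses.BinaryCrossentropy(from_logits=True)
c_loss = bce(y_true, logits) # same value, reduced to a scalar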
model = tf.keras.models.Sequential([
  tf.keras.layers.Flatten(input_shape=(28, 28)),
  tf.keras.layers.Dense(128, activation='relu'),
  tf.keras.layers.Dense(10) # no softmax here: softmax on its own does not guarantee numerical stability.
                            # Leaving it out means we take the values before softmax, i.e. the logits,
                            # so this layer acts as a readout, not a nonlinearity.
                            # Losses that accept logits (e.g. tf.nn.softmax_cross_entropy_with_logits)
                            # fold the softmax into the loss, which does guarantee numerical stability.
])
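To see the stability difference concretely, compare a hand-rolled log(softmax(x)) against the fused logits-aware op on extreme logits (a minimal sketch with made-up values):

logits = tf.constant([[1000.0, 0.0, -1000.0]]) # extreme logits
labels = tf.constant([2])                      # true class has a very negative logit

# naive two-step: softmax underflows to 0.0 for class 2, so -log(0.0) blows up
naive = -tf.math.log(tf.nn.softmax(logits))[0, 2]
print(naive.numpy())  # inf

# fused op applies the log-sum-exp trick internally and stays finite
stable = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits)
print(stable.numpy()) # [2000.]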
input_ = tf.keras.Input((28,28))
x = tf.keras.layers.Flatten()(input_)
x = tf.keras.layers.Dense(128, activation='relu')(x)
x = tf.keras.layers.Dense(10)(x)
model = tf.keras.Model(input_, x)
model.summary()
Model: "model"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 input_1 (InputLayer)        [(None, 28, 28)]          0         
                                                                 
 flatten (Flatten)           (None, 784)               0         
                                                                 
 dense (Dense)               (None, 128)               100480    
                                                                 
 dense_1 (Dense)             (None, 10)                1290      
                                                                 
=================================================================
Total params: 101,770
Trainable params: 101,770
Non-trainable params: 0
_________________________________________________________________
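The parameter counts follow from the dense-layer formula params = in_features × units + units (the bias term):

print(784 * 128 + 128) # 100480, the flatten output feeding dense
print(128 * 10 + 10)   # 1290, dense feeding dense_1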
# 1. to change an argument: instantiate the class with it
loss1 = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
# 2. bare function, no (): to change an argument you need either
# 2-1. the partial technique, or
# 2-2. a nested function (a closure)
loss = tf.keras.losses.sparse_categorical_crossentropy # the bare function itself, defaults only
from functools import partial
loss2 = partial(tf.keras.losses.sparse_categorical_crossentropy, from_logits=True)
def loss3(from_logits=True):
    def y(y_true, y_pred):
        return tf.keras.losses.sparse_categorical_crossentropy(y_true, y_pred, from_logits=from_logits)
    return y
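All three variants behave the same when called; a quick sketch reusing loss1, loss2, and loss3 from above (the sample tensors are made up):

y_true = tf.constant([1, 2])
logits = tf.constant([[0.1, 2.0, -1.0], [0.5, 0.3, 3.0]])

a = loss1(y_true, logits)   # class instance: reduced to a scalar mean
b = loss2(y_true, logits)   # partial: per-sample losses, shape (2,)
c = loss3()(y_true, logits) # closure: the extra () builds the inner function
# tf.reduce_mean(b) == tf.reduce_mean(c) == a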
# 3. a plain string: defaults only, so from_logits cannot be changed this way
model.compile(loss='sparse_categorical_crossentropy')
loss4 = tf.nn.softmax_cross_entropy_with_logits # assumes one-hot encoded labels
# equivalent to tf.keras.losses.CategoricalCrossentropy(from_logits=True)
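Since tf.nn.softmax_cross_entropy_with_logits expects one-hot labels, y_train would need converting first; a sketch of the conversion (not applied in the runs below, which is why loss4 misbehaves there):

y_onehot = tf.one_hot(y_train, depth=10) # integer labels -> one-hot rows
# model.compile(loss=loss4)
# model.fit(X_train, y_onehot) # now the label format matches the loss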
model.compile(loss=loss1)
model.fit(X_train, y_train)
1875/1875 [==============================] - 14s 6ms/step - loss: 496111.7812
<keras.callbacks.History at 0x1cd09de8850>
model.compile(loss=loss2)
model.fit(X_train, y_train)
1875/1875 [==============================] - 11s 6ms/step - loss: 2.7764
<keras.callbacks.History at 0x1cd17ed9d60>
model.compile(loss=loss3())
model.fit(X_train, y_train)
1875/1875 [==============================] - 11s 6ms/step - loss: 2.6502
<keras.callbacks.History at 0x1cc97e62730>
model.compile(loss=loss4)
model.fit(X_train, y_train) # loss4 expects one-hot labels but y_train is integer-encoded, hence the meaningless loss below
1875/1875 [==============================] - 12s 6ms/step - loss: 733138.9375
<keras.callbacks.History at 0x1cc97f27910>
loss5 = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False) # the model has no softmax, so from_logits=False trains poorly; compare loss1 below
model.compile(loss=loss5)
model.fit(X_train, y_train, epochs=5)
Epoch 1/5
1875/1875 [==============================] - 12s 6ms/step - loss: 3.8974
Epoch 2/5
1875/1875 [==============================] - 11s 6ms/step - loss: 2.3095
Epoch 3/5
1875/1875 [==============================] - 11s 6ms/step - loss: 2.3037
Epoch 4/5
1875/1875 [==============================] - 11s 6ms/step - loss: 2.3026
Epoch 5/5
1875/1875 [==============================] - 12s 6ms/step - loss: 2.3026
<keras.callbacks.History at 0x1cd11c5cdc0>
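The plateau at 2.3026 is telling: it equals ln(10), the cross-entropy of assigning a uniform 1/10 probability to every class, i.e. the model is doing no better than random guessing.

import math
print(math.log(10)) # 2.302585..., matching the stuck loss above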
loss1 = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
model.compile(loss=loss1)
model.fit(X_train, y_train, epochs=5)
Epoch 1/5
1875/1875 [==============================] - 12s 6ms/step - loss: 3.0797
Epoch 2/5
1875/1875 [==============================] - 11s 6ms/step - loss: 0.5858
Epoch 3/5
1875/1875 [==============================] - 11s 6ms/step - loss: 0.4708
Epoch 4/5
1875/1875 [==============================] - 10s 5ms/step - loss: 0.4371
Epoch 5/5
1875/1875 [==============================] - 10s 5ms/step - loss: 0.3952
<keras.callbacks.History at 0x1cd225b9c70>
X_train = X_train / 255.0 # with min-max normalization the first-epoch loss starts near 0.25, versus 3.07 without it (above)
loss1 = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
model.compile(loss=loss1)
model.fit(X_train, y_train, epochs=5)
Epoch 1/5
1875/1875 [==============================] - 12s 6ms/step - loss: 0.2545
Epoch 2/5
1875/1875 [==============================] - 11s 6ms/step - loss: 0.1194
Epoch 3/5
1875/1875 [==============================] - 10s 5ms/step - loss: 0.0877
Epoch 4/5
1875/1875 [==============================] - 9s 5ms/step - loss: 0.0717
Epoch 5/5
1875/1875 [==============================] - 9s 5ms/step - loss: 0.0611
<keras.callbacks.History at 0x1cd263b7cd0>
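One caveat, as an assumption about the eventual evaluation (not shown in the runs above): the identical scaling has to be applied to the test split, or the train and test distributions won't match.

X_test = X_test / 255.0 # same min-max scaling as X_train
# model.evaluate(X_test, y_test)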