<aside> 🖥️

layer_output *= np.random.randint(0, high=2, size=layer_output.shape)  # at training time, zero out 50% of the unit outputs
layer_output /= 0.5  # divide by the dropout rate to scale the outputs back up

</aside>
# Dropout-regularized binary classifier: two 16-unit ReLU hidden layers,
# each followed by 50% dropout, and a single sigmoid output unit.
model = models.Sequential()
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dropout(0.5))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dropout(0.5))
model.add(layers.Dense(1, activation='sigmoid'))

# Standard configuration for a binary-classification problem.
model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])

# Train for 20 epochs, holding out 40% of the training data for validation.
history_dropout = model.fit(
    train_data, train_labels,
    epochs=20, batch_size=512, validation_split=0.4)
import matplotlib.pyplot as plt

# Unwrap the History objects into their per-epoch metric dicts.
# NOTE(review): this rebinds history_original to the dict, shadowing the
# original History object — presumably intentional in this one-shot script.
history_original = history_original.history
history_dr = history_dropout.history

val_loss_ori = history_original["val_loss"]
val_loss_dr = history_dr["val_loss"]
epochs = range(1, len(val_loss_ori) + 1)

# Both curves in blue: dashed for the original model, solid for dropout.
for curve, fmt, lbl in (
    (val_loss_ori, "b--", "Validation loss of original model"),
    (val_loss_dr, "b", "Validation loss of dropout-regularized model"),
):
    plt.plot(epochs, curve, fmt, label=lbl)
plt.xlabel("Epochs")
plt.ylabel("Loss")
plt.legend()
plt.show()