import matplotlib.pyplot as plt
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.layers import Dense, Dropout, BatchNormalization
from tensorflow.keras.models import Sequential
# Sanity-check the training data dimensions before building the model.
# NOTE(review): x_train / y_train are assumed to be defined earlier
# (e.g. loaded in a previous notebook cell) — confirm before running.
print(x_train.shape, y_train.shape)

# Binary classifier: two small ReLU hidden layers, each followed by
# batch normalization and 30% dropout, then a single sigmoid output unit.
model = Sequential()
model.add(Dense(4, activation='relu', input_shape=(80,)))  # expects 80 input features
model.add(BatchNormalization())
model.add(Dropout(0.3))
model.add(Dense(3, activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(0.3))
model.add(Dense(1, activation='sigmoid'))  # probability of the positive class

# Compile for binary classification.
# (The stray bare `loss` / `metrics` expressions that were here would
# raise NameError at runtime and have been removed.)
model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['accuracy'])
model.summary()
# Callbacks: stop training once val_loss has not improved for 3 epochs,
# and keep only the best weights seen so far on disk.
# (The stray `.h5` / `.ckpt` fragments that were here were syntax errors
# and have been removed; the duplicate EarlyStopping/ModelCheckpoint
# import is already provided at the top of the file.)
es = EarlyStopping(monitor='val_loss', min_delta=0, patience=3, mode='min', verbose=1)  # EarlyStopping
mc = ModelCheckpoint('my_checkpoint.h5', monitor='val_loss', save_best_only=True, verbose=1)  # ModelCheckpoint: save the model

# Train. NOTE(review): the original passed `X_train` (capital X) here,
# while every other reference uses lowercase `x_train`; fixed to match.
history = model.fit(x_train, y_train,
                    validation_data=(x_test, y_test),
                    epochs=20,
                    callbacks=[es, mc],
                    batch_size=32,
                    verbose=1)
# Persist the trained model and demonstrate the reload round-trips.
model.save("my_model.h5")                       # full model: architecture + weights
model = keras.models.load_model("my_model.h5")  # restore the full model
model.save_weights("my_modelweight.ckpt")       # weights only, TF checkpoint format
model.load_weights("my_modelweight.ckpt")
# Restore the best weights kept by ModelCheckpoint. The callback wrote
# 'my_checkpoint.h5'; the original loaded 'my_checkpoint.ckpt', a file
# that is never created, which would fail here.
model.load_weights("my_checkpoint.h5")
model.save("my_model.h5")                       # re-save with the best weights
# Plot the training curves recorded by fit(). Uses the `history` object
# returned by model.fit directly: the original read `model.history.history`,
# but `model` was rebound by load_model above — a freshly loaded model never
# ran fit(), so its `.history` attribute is not reliable.
performance = pd.DataFrame(history.history)  # one column per tracked metric
plt.plot(performance[['loss', 'val_loss', 'accuracy', 'val_accuracy']])
plt.legend(['Loss', 'Validation Loss', 'Accuracy', 'Validation Accuracy'])
plt.xlabel('Epochs')
plt.show()