Keras

# using Keras

from keras.models import Sequential

# Fixed typo: "imprt" -> "import".
from keras.layers import Dense, Activation

# using tf.keras

import tensorflow as tf

# NOTE: these tf.keras imports shadow the standalone-keras names above —
# in a real project pick ONE of the two import styles, not both.
from tensorflow.keras.models import Sequential

from tensorflow.keras.layers import Dense, Activation

Preprocessing

# Sequence Padding

from keras.preprocessing import sequence

# Left-pad (or truncate) every sequence to exactly 80 timesteps so all
# samples share one fixed length — required to batch them as a dense tensor.
x_train = sequence.pad_sequences(x_train, maxlen=80)

x_test = sequence.pad_sequences(x_test, maxlen=80)

# One-Hot Encoding

from keras.utils import to_categorical

# Convert integer class labels into one-hot vectors of width `num_classes`
# (the target format expected by categorical_crossentropy).
Y_train = to_categorical(y_train, num_classes)

Y_test = to_categorical(y_test, num_classes)

# Train and Test Sets

from sklearn.model_selection import train_test_split

# Hold out 30% of the data for testing; fixed random_state makes the split reproducible.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

# Standardization/Normalization

from sklearn.preprocessing import StandardScaler

# Fit mean/std on the TRAINING data only, then apply that same transform to
# both sets — fitting on test data would leak test statistics into preprocessing.
scaler = StandardScaler().fit(x_train)

standardized_X = scaler.transform(x_train)

standardized_X_test = scaler.transform(x_test)

Model Architecture

# Linear stack of layers; layers are appended in order with model.add().
model = Sequential()

### Multilayer Perceptron (MLP) ###

# Binary Classification

# 8 input features -> 12 -> 8 -> 1 units.
model.add(Dense(12, input_dim=8, kernel_initializer='uniform', activation='relu'))

model.add(Dense(8, kernel_initializer='uniform', activation='relu'))

# Single sigmoid unit outputs P(class=1); pair with binary_crossentropy.
model.add(Dense(1, kernel_initializer='uniform', activation='sigmoid'))

# Multi-Class Classification

from keras.layers import Dropout

# 784-dim input (e.g. a flattened 28x28 image) -> 512 -> 512 -> 10 classes.
model.add(Dense(512, activation='relu', input_shape=(784,)))

# model.add(Dense(512, activation='relu', input_dim=784))

# Dropout randomly zeroes 20% of activations during training (regularization).
model.add(Dropout(0.2))

model.add(Dense(512, activation='relu'))

model.add(Dropout(0.2))

# Softmax over 10 classes; pair with categorical_crossentropy.
model.add(Dense(10, activation='softmax'))

# Regression

# Regression: input width taken from the data; single linear output unit
# (no activation) — pair with an 'mse' loss.
model.add(Dense(64, activation='relu', input_dim=train_data.shape[1]))

model.add(Dense(1))

### Convolutional Neural Network (CNN) ###

from keras.layers import Activation, Conv2D, MaxPooling2D, Flatten

# Conv block 1: two 3x3 convs with 32 filters, then 2x2 max-pool + dropout.
# padding='same' keeps the spatial size; the second conv uses the default
# 'valid' padding and shrinks each spatial dimension by 2.
model.add(Conv2D(32, (3,3), padding='same', input_shape=x_train.shape[1:]))

model.add(Activation('relu'))

model.add(Conv2D(32, (3,3)))

model.add(Activation('relu'))

model.add(MaxPooling2D(pool_size=(2,2)))

model.add(Dropout(0.25))

# Conv block 2: same structure with 64 filters.
model.add(Conv2D(64, (3,3), padding='same'))

model.add(Activation('relu'))

model.add(Conv2D(64, (3, 3)))

model.add(Activation('relu'))

model.add(MaxPooling2D(pool_size=(2,2)))

model.add(Dropout(0.25))

# Classifier head: flatten the feature maps, dense 512, softmax over classes.
model.add(Flatten())

model.add(Dense(512))

model.add(Activation('relu'))

model.add(Dropout(0.5))

model.add(Dense(num_classes))

model.add(Activation('softmax'))

### Recurrent Neural Network (RNN) ###

# Fixed typo: "keras.klayers" -> "keras.layers".
from keras.layers import Embedding, LSTM

# Map a 20000-word vocabulary to 128-dim embedding vectors.
model.add(Embedding(20000, 128))

# Single LSTM layer with dropout on inputs and recurrent connections.
model.add(LSTM(128, dropout=0.2, recurrent_dropout=0.2))

# Sigmoid output for binary classification (e.g. sentiment).
model.add(Dense(1, activation='sigmoid'))

別の書き方

# Same pattern written differently: units passed by keyword, and each
# activation added as a separate Activation layer instead of an argument.
model.add(Dense(units=64, input_dim=100))

model.add(Activation("relu"))

model.add(Dense(units=10))

model.add(Activation("softmax"))

Inspect Model

# Shape of the tensor produced by the last layer added so far.
model.output_shape

# Per-layer table of output shapes and parameter counts.
model.summary()

# Architecture as a Python dict (the model can be rebuilt from it).
model.get_config()

# All weight arrays in the model.
model.get_weights()

Compile Model

# Binary Classification

# Sigmoid single-probability output: binary cross-entropy loss.
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

# Multi-Class Classification

# One-hot targets: categorical cross-entropy (use sparse_* for integer labels).
model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])

# Regression

# Mean squared error loss; track mean absolute error as the metric.
model.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])

# RNN

model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

別の書き方

# Object-based API: construct optimizer/loss/metric instances explicitly,
# which lets you set hyperparameters such as the learning rate.
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),

loss=tf.keras.losses.SparseCategoricalCrossentropy(),

metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])

或いは

# Callable-based API: pass the loss/metric functions directly; the optimizer
# is constructed with its default hyperparameters.
model.compile(optimizer=tf.keras.optimizers.Adam(),

loss=tf.keras.losses.sparse_categorical_crossentropy,

metrics=[tf.keras.metrics.sparse_categorical_accuracy])

Model Training

# Train for 15 epochs in mini-batches of 32, printing progress (verbose=1)
# and evaluating on (x_test, y_test) after every epoch.
model.fit(x_train, y_train, batch_size=32, epochs=15, verbose=1, validation_data=(x_test,y_test))

Evaluate Model

# Returns [loss, *metrics] averaged over the test set.
score = model.evaluate(x_test, y_test, batch_size=32)

Prediction

# Predicted probabilities (one row per sample).
classes = model.predict(x_test, batch_size=32)

# Hard class labels. Sequential.predict_classes() was removed in TF 2.6;
# the documented replacement is argmax over the predicted probabilities
# (for a single sigmoid output use `(probs > 0.5).astype("int32")` instead).
classes = model.predict(x_test, batch_size=32).argmax(axis=-1)

Save / Reload Models

from keras.models import load_model

# Persist architecture + weights + optimizer state to one HDF5 file.
model.save('model_file.h5')

# Reload the same file that was just saved. (The original loaded a
# mismatched filename, 'my_model.h5', which fails unless that file
# happens to exist already.)
my_model = load_model('model_file.h5')

Model Fine-tuning

# Optimization Parameters

from keras.optimizers import RMSprop

# `lr` was renamed to `learning_rate`; the `lr` alias is deprecated and
# rejected by recent Keras releases.
# NOTE(review): the `decay` argument was also removed from optimizer
# constructors in TF >= 2.11 — use a LearningRateSchedule there; confirm
# the target Keras version before relying on it.
opt = RMSprop(learning_rate=0.0001, decay=1e-6)

model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])

# Early Stopping

from keras.callbacks import EarlyStopping, ModelCheckpoint

# Stop training once the monitored metric fails to improve for 2 epochs.
early_stopping = EarlyStopping(patience=2)

model.fit(x_train, y_train, batch_size=32, epochs=15, validation_data=(x_test,y_test), callbacks=[early_stopping])

# restore_best_weights rolls the model back to the best epoch's weights when
# stopping; ModelCheckpoint writes one file per improvement (the filename
# embeds epoch number and val_loss), and save_best_only skips non-improvements.
callbacks = [EarlyStopping(patience=2, restore_best_weights=True),

ModelCheckpoint(

'../data/temp/mnist_sequential_{epoch:03d}_{val_loss:.4f}.h5',

save_best_only=True

)]

# Carve 20% off the training data for validation instead of a separate test set.
history = model.fit(x_train, y_train, batch_size=128, epochs=20,

validation_split=0.2, callbacks=callbacks)

活性化関数: tf.keras.activations

レイヤー: tf.keras.layers

最適化アルゴリズム: tf.keras.optimizers

損失関数: tf.keras.losses

二値分類 binary_crossentropy

多クラス単一ラベル分類 categorical_crossentropy

多クラス多ラベル分類 binary_crossentropy

回帰問題(任意の値) mse

回帰問題(0~1の値) mse / binary_crossentropy

評価関数: tf.keras.metrics