This tutorial shows hyperparameter tuning with Keras Tuner on the MNIST dataset.
If you have difficulty with the following material, the Keras tutorials are a helpful starting point.
First, we define the input shape of our model.
inputs = tf$keras$Input(shape=list(28L, 28L, 1L))
The next step is to place the tunable parameters inside a loop, for (i in 1:n) {}, and define the layers.
x = inputs
for (i in 1:3) {
  x = tf$keras$layers$Conv2D(filters = hp$Int(paste('filters_', i, sep = ''), 4L, 32L, step = 4L, default = 8L),
                             kernel_size = hp$Int(paste('kernel_size_', i, sep = ''), 3L, 5L),
                             activation = 'relu',
                             padding = 'same')(x)
}
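Integer ranges are tuned with hp$Int; categorical options are tuned with hp$Choice. For instance, the full script below uses hp$Choice to select the pooling type of each block:

# choose max or average pooling per block with a categorical hyperparameter
if (hp$Choice(paste('pooling', i, sep = ''), c('max', 'avg')) == 'max') {
  x = tf$keras$layers$MaxPooling2D()(x)
} else {
  x = tf$keras$layers$AveragePooling2D()(x)
}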
Afterwards, we chain the layers together and create a model via the Functional API. Please refer to the Keras Functional API guide if you want more details about it.
model = tf$keras$Model(inputs, outputs)
Finally, we choose the optimizer. Keras Tuner even lets us tune the optimizer itself.
optimizer = hp$Choice('optimizer', c('adam', 'sgd'))
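The string returned by hp$Choice is then passed directly to compile(), as in the full script below:

model %>% compile(optimizer, loss = 'sparse_categorical_crossentropy', metrics = 'accuracy')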
Once the build(hp)
function is ready, we create a Hyperband tuner object and start searching for the best hyperparameters.
Below is the full version of the tuning process for the MNIST dataset.
library(tensorflow)
library(kerastuneR)

conv_build_model = function(hp) {
  # builds a convolutional model with a tunable number of blocks,
  # filter counts, kernel sizes, pooling types and optimizer
  inputs = tf$keras$Input(shape = list(28L, 28L, 1L))
  x = inputs

  for (i in 1:hp$Int('conv_layers', 1L, 3L, default = 3L)) {
    x = tf$keras$layers$Conv2D(filters = hp$Int(paste('filters_', i, sep = ''),
                                                4L, 32L, step = 4L, default = 8L),
                               kernel_size = hp$Int(paste('kernel_size_', i, sep = ''), 3L, 5L),
                               activation = 'relu',
                               padding = 'same')(x)

    if (hp$Choice(paste('pooling', i, sep = ''), c('max', 'avg')) == 'max') {
      x = tf$keras$layers$MaxPooling2D()(x)
    } else {
      x = tf$keras$layers$AveragePooling2D()(x)
    }

    x = tf$keras$layers$BatchNormalization()(x)
    x = tf$keras$layers$ReLU()(x)
  }

  if (hp$Choice('global_pooling', c('max', 'avg')) == 'max') {
    x = tf$keras$layers$GlobalMaxPool2D()(x)
  } else {
    x = tf$keras$layers$GlobalAveragePooling2D()(x)
  }

  outputs = tf$keras$layers$Dense(10L, activation = 'softmax')(x)
  model = tf$keras$Model(inputs, outputs)

  optimizer = hp$Choice('optimizer', c('adam', 'sgd'))
  model %>% compile(optimizer, loss = 'sparse_categorical_crossentropy', metrics = 'accuracy')
  return(model)
}

main = function() {
  tuner = Hyperband(
    hypermodel = conv_build_model,
    objective = 'val_accuracy',
    max_epochs = 8,
    factor = 2,
    hyperband_iterations = 3,
    directory = 'results_dir',
    project_name = 'mnist')

  # use the keras package to download the dataset;
  # dataset_fashion_mnist() has the same shape and label range as MNIST
  library(keras)
  mnist_data = dataset_fashion_mnist()
  c(mnist_train, mnist_test) %<-% mnist_data
  rm(mnist_data)

  # reshape data to (samples, 28, 28, 1)
  mnist_train$x = keras::k_reshape(mnist_train$x, shape = c(6e4, 28, 28, 1))
  mnist_test$x = keras::k_reshape(mnist_test$x, shape = c(1e4, 28, 28, 1))

  # use tfdatasets to slice, shuffle and batch the data;
  # cast the features (not the labels) to float32 and scale to [0, 1]
  library(tfdatasets)
  mnist_train = tensor_slices_dataset(list(tf$dtypes$cast(
    mnist_train$x, 'float32') / 255., mnist_train$y)) %>%
    dataset_shuffle(1e3) %>% dataset_batch(1e2) %>% dataset_repeat()
  mnist_test = tensor_slices_dataset(list(tf$dtypes$cast(
    mnist_test$x, 'float32') / 255., mnist_test$y)) %>%
    dataset_batch(1e2)

  # finally, start the search
  tuner %>% fit_tuner(x = mnist_train,
                      steps_per_epoch = 600,
                      validation_data = mnist_test,
                      validation_steps = 100,
                      epochs = 2,
                      callbacks = c(tf$keras$callbacks$EarlyStopping('val_accuracy')))
}

main()
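When the search finishes, the results can be inspected. A minimal sketch, assuming the object returned by Hyperband() exposes the underlying Python Keras Tuner methods through reticulate's $ accessor:

# print a summary of the best trials found during the search
tuner$results_summary()

# get_best_models() returns a list of models; take the best one
best_model = tuner$get_best_models(1L)[[1]]
best_model$summary()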