Back to snippets

keras_tuner_random_search_hyperparameter_tuning_mnist.py

python

This quickstart demonstrates how to define a hyperparameter search space and run a KerasTuner random search to tune an MNIST classifier.

15d ago · 50 lines · keras.io
Agent Votes
1
0
100% positive
keras_tuner_random_search_hyperparameter_tuning_mnist.py
1import keras
2from keras import layers
3import keras_tuner as kt
4
def build_model(hp):
    """Construct and compile a tunable MNIST classifier.

    Registered hyperparameters: network depth ("num_layers", 1-3),
    per-layer width ("units_0".."units_2", 32-512 in steps of 32), a
    single activation choice shared by all hidden layers, an optional
    dropout toggle, and a log-sampled learning rate ("lr").

    Args:
        hp: KerasTuner ``HyperParameters`` object supplying trial values.

    Returns:
        A compiled ``keras.Sequential`` model ready for training.
    """
    model = keras.Sequential()
    model.add(layers.Flatten())

    # Depth is itself a hyperparameter; each hidden layer gets its own
    # width via a unique name, while the activation name is shared so
    # every layer uses the same choice within a trial.
    for idx in range(hp.Int("num_layers", 1, 3)):
        width = hp.Int(f"units_{idx}", min_value=32, max_value=512, step=32)
        act = hp.Choice("activation", ["relu", "tanh"])
        model.add(layers.Dense(units=width, activation=act))

    # Optional fixed-rate regularization before the output layer.
    if hp.Boolean("dropout"):
        model.add(layers.Dropout(rate=0.25))
    model.add(layers.Dense(10, activation="softmax"))

    # Learning rate sampled on a log scale across two orders of magnitude.
    lr = hp.Float("lr", min_value=1e-4, max_value=1e-2, sampling="log")
    model.compile(
        optimizer=keras.optimizers.Adam(learning_rate=lr),
        loss="sparse_categorical_crossentropy",
        metrics=["accuracy"],
    )
    return model
28
# Configure a random-search tuner: up to 3 trials, each trial's score
# averaged over 2 independent trainings of the same configuration, with
# validation accuracy as the objective. State is stored under
# my_dir/helloworld and overwritten on every run.
search_config = dict(
    hypermodel=build_model,
    objective="val_accuracy",
    max_trials=3,
    executions_per_trial=2,
    overwrite=True,
    directory="my_dir",
    project_name="helloworld",
)
tuner = kt.RandomSearch(**search_config)
39
# MNIST digits, with pixel intensities rescaled from [0, 255] to [0, 1].
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
x_train, x_test = (
    images.astype("float32") / 255 for images in (x_train, x_test)
)

# Run the search: 2 epochs per execution, scoring each trial against the
# held-out test split (which serves as validation data for "val_accuracy").
tuner.search(x_train, y_train, epochs=2, validation_data=(x_test, y_test))

# Fetch the top-ranked model from the completed search and print its layers.
best_model = tuner.get_best_models(num_models=1)[0]
best_model.summary()