
I am running hyperparameter tuning with Hyperband and Keras Tuner, and I have the following network:

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, BatchNormalization
from tensorflow.keras.callbacks import EarlyStopping
from kerastuner import HyperParameters

# Hyperparameter container (the tuner passes its own HyperParameters object to build_model)
hp = HyperParameters()



# Define the build_model function to create the CNN architecture
def build_model(hp):
    
    learning_rate = hp.Choice('learning_rate', [1e-2, 1e-3, 1e-4])
    num_filters = hp.Int('num_filters', min_value=32, max_value=128, step=32)
    num_dense = hp.Int('num_dense', min_value=64, max_value=256, step=64)
    activation = hp.Choice('activation', ['relu', 'sigmoid', 'tanh'])
    
    
    model = Sequential()

    model.add(Conv2D(num_filters, (3, 3), activation='relu', input_shape=(img_width, img_height, 3)))
    model.add(BatchNormalization())
    model.add(Conv2D(num_filters, (3, 3), activation='relu', padding='same'))
    model.add(MaxPooling2D(pool_size=(2, 2)))

    model.add(Conv2D(2 * num_filters, (3, 3), activation='relu', padding='same'))
    model.add(BatchNormalization())
    model.add(Conv2D(2 * num_filters, (3, 3), activation='relu', padding='same'))
    model.add(MaxPooling2D(pool_size=(2, 2)))

    model.add(Flatten())
    model.add(Dense(num_dense, activation=activation))
    model.add(Dense(num_dense // 2, activation=activation))
    model.add(Dense(num_dense // 4, activation=activation))
    model.add(Dense(num_classes, activation='softmax'))

    
    # Compile the model
    optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
    model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])
    
    return model


# Create the Hyperband tuner
from kerastuner.tuners import Hyperband

tuner = Hyperband(
    build_model,
    objective='val_accuracy',
    max_epochs=20,
    factor=3,
    hyperband_iterations=2,
    directory='.',
    project_name='Custom5classesGrey'
)
tuner.search_space_summary()

tuner.search(train_data, train_labels, validation_data=(test_data, test_labels), verbose=2)

best_model = tuner.get_best_models(num_models=1)[0]

best_model.save("C:\\Custom5classesGrey.h5") 

# Get the best hyperparameters
best_hp = tuner.get_best_hyperparameters(1)[0]

# Print the best hyperparameters
print("Best hyperparameters:")
print(best_hp.values)

# Build the best model with the best hyperparameters
best_model = tuner.hypermodel.build(best_hp)

callback = EarlyStopping(monitor='val_loss', patience=20)
# Train the best model with the training data
history = best_model.fit(train_data, train_labels, epochs=epochs, validation_data=(test_data, test_labels), callbacks=[callback])

For some reason I get the following warning: "WARNING:tensorflow:Callback method on_train_batch_end is slow compared to the batch time (batch time: 0.0261s vs on_train_batch_end time: 0.0511s). Check your callbacks." And when I check the files the callback wrote, I get this error: "Error! C:\Custom5classesGrey\trial_001\checkpoint.data-00000 is not UTF-8 encoded. Saving disabled. check your console for more info". Can someone explain what is happening and how to fix it?
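For reference, a minimal sketch of how I could inspect that checkpoint programmatically rather than by opening the file directly (the path prefix is taken from the error message above, and I am assuming the matching .index file sits in the same trial folder):

import tensorflow as tf

# Prefix of the checkpoint named in the error message; the .data-* shard and
# its .index file are binary TensorFlow checkpoint files, not text.
ckpt_prefix = "C:\\Custom5classesGrey\\trial_001\\checkpoint"

# List the variable names and shapes stored in the checkpoint.
for name, shape in tf.train.list_variables(ckpt_prefix):
    print(name, shape)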

Stetco Oana