I want to optimize the F1-score for a binary image classification model using keras-tuner. I know the default F1 score metric was removed from Keras, so I tried using TensorFlow Addons' F1Score() class both as a compile metric and as the tuning objective, but it raises a KeyError; from what I understand, keras-tuner does not recognize the F1-score as a metric.
import tensorflow as tf
import keras
import tensorflow_addons as tfa
from keras_tuner import RandomSearch, Objective

def model_builder(hp):
    model = tf.keras.Sequential()
    model.add(tf.keras.layers.Rescaling(scale=255))
    # `net` is defined earlier in my code (not shown here)
    model.add(tf.keras.layers.TimeDistributed(net))
    model.add(tf.keras.layers.Dense(units=hp.Int(
        'units', min_value=32, max_value=512, step=32), activation='relu'))
    model.add(tf.keras.layers.GlobalAveragePooling3D())
    model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
    custom_optimizer = keras.optimizers.Adam(
        learning_rate=hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4]),
        beta_1=hp.Choice('beta_1', values=[0.9, 0.99, 0.999]),
        beta_2=hp.Choice('beta_2', values=[0.999, 0.9999]),
        epsilon=hp.Float('epsilon', min_value=1e-10, max_value=1e-7)
    )

    # Define metrics
    # metrics = [tf.keras.metrics.AUC(), tf.keras.metrics.Recall(), tf.keras.metrics.Precision(),
    #            tf.keras.metrics.BinaryAccuracy(), tf.keras.metrics.TruePositives(),
    #            tf.keras.metrics.TrueNegatives(), tf.keras.metrics.FalseNegatives(),
    #            tf.keras.metrics.FalsePositives()]

    # Running with SGD optimizer
    model.compile(optimizer='sgd',
                  loss=keras.losses.binary_crossentropy,
                  metrics=[tfa.metrics.F1Score(num_classes=1, average='macro', threshold=0.5)])
    return model

# Early stopping callback passed to the tuner
combined = [tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=5)]

# Initialize the tuner
tuner = RandomSearch(
    model_builder,
    # I understand 'objective' should be converted to binary
    objective=Objective(tfa.metrics.F1Score(num_classes=1, average='macro', threshold=0.5),
                        direction=max),
    max_trials=10,  # Adjust the number of trials as needed
    directory='test_directory/logs'
)

# Start the tuning process
tuner.search(train_ds, epochs=10, validation_data=val_ds, callbacks=combined)
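While debugging, I also printed the name the TFA metric registers under, since I suspect that string (probably with a "val_" prefix for the validation copy, if I understand Keras' logging correctly) is what the tuner's objective has to match:

# Checking what name the metric is logged under
f1 = tfa.metrics.F1Score(num_classes=1, average='macro', threshold=0.5)
print(f1.name)  # prints 'f1_score' for me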
This is the error that my code is outputting:
RuntimeError Traceback (most recent call last)
3 combined = [tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=5)]
5 # Start the tuning process
----> 6 tuner.search(train_ds, epochs=10, validation_data=(
7 val_ds), callbacks=combined)
File ~\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\LocalCache\local-packages\Python311\site-packages\keras_tuner\src\engine\base_tuner.py:234, in BaseTuner.search(self, *fit_args, **fit_kwargs)
232 self.on_trial_begin(trial)
233 self._try_run_and_update_trial(trial, *fit_args, **fit_kwargs)
--> 234 self.on_trial_end(trial)
235 self.on_search_end()
File ~\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\LocalCache\local-packages\Python311\site-packages\keras_tuner\src\engine\base_tuner.py:338, in BaseTuner.on_trial_end(self, trial)
332 def on_trial_end(self, trial):
333 """Called at the end of a trial.
334
335 Args:
336 trial: A `Trial` instance.
337 """
--> 338 self.oracle.end_trial(trial)
339 self.save()
File ~\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\LocalCache\local-packages\Python311\site-packages\keras_tuner\src\engine\oracle.py:108, in synchronized.<locals>.wrapped_func(*args, **kwargs)
...
File "C:\Users\name_here\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\LocalCache\local-packages\Python311\site-packages\keras_tuner\src\engine\objective.py", line 59, in get_value
return logs[self.name]
~~~~^^^^^^^^^^^
KeyError: <tensorflow_addons.metrics.f_scores.F1Score object at 0x000001C8709D6710>
I am wondering if there is a workaround to get the F1-score working as the objective for keras-tuner's Tuner class.
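For reference, this is the rough direction I am guessing at but have not managed to verify: keep the F1 metric in compile(metrics=[...]) as above, and point the tuner at the metric's logged string name instead of at the metric object. The 'val_f1_score' name below is my assumption about how Keras prefixes validation metrics, not something I have confirmed with keras-tuner:

# Untested sketch, reusing the imports and model_builder from above
tuner = RandomSearch(
    model_builder,
    # 'val_f1_score' is my guess at the logged name; direction given as the string 'max'
    objective=Objective('val_f1_score', direction='max'),
    max_trials=10,
    directory='test_directory/logs'
)
tuner.search(train_ds, epochs=10, validation_data=val_ds, callbacks=combined)

Any pointers would be appreciated. Thank you.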