I'm working from some GitHub code that trains a model to recognize laughter. This particular bit of code is giving me problems:
from keras.models import Sequential
from keras.layers import Dense, BatchNormalization, Flatten
from keras_tqdm import TQDMNotebookCallback

lr_model = Sequential()
# lr_model.add(keras.Input((None, 128)))
lr_model.add(BatchNormalization(input_shape=(10, 128)))
lr_model.add(Flatten())
lr_model.add(Dense(1, activation='sigmoid'))

# try using different optimizers and different optimizer configs
lr_model.compile(loss='binary_crossentropy',
                 optimizer='adam',
                 metrics=['accuracy'])

batch_size = 32
CV_frac = 0.1

# data_generator is defined earlier in the notebook
train_gen = data_generator(batch_size, '../Data/bal_laugh_speech_subset.tfrecord', 0, 1 - CV_frac)
val_gen = data_generator(128, '../Data/bal_laugh_speech_subset.tfrecord', 1 - CV_frac, 1)
rec_len = 18768

lr_h = lr_model.fit_generator(train_gen,
                              steps_per_epoch=int(rec_len * (1 - CV_frac)) // batch_size,
                              epochs=100,
                              validation_data=val_gen,
                              validation_steps=int(rec_len * CV_frac) // 128,
                              verbose=0,
                              callbacks=[TQDMNotebookCallback()])
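In case it helps, here is a stripped-down sketch of the same setup that doesn't depend on my data files: dummy_generator is just something I made up for this post to stand in for my real data_generator (which reads the tfrecord above); the model and the fit_generator call are otherwise the same, only with fewer steps.

import numpy as np
from keras.models import Sequential
from keras.layers import Dense, BatchNormalization, Flatten
from keras_tqdm import TQDMNotebookCallback

def dummy_generator(batch_size=32):
    # stand-in for data_generator: yields random (10, 128) "embeddings"
    # and binary labels forever
    while True:
        x = np.random.rand(batch_size, 10, 128).astype('float32')
        y = np.random.randint(0, 2, size=(batch_size, 1)).astype('float32')
        yield x, y

lr_model = Sequential()
lr_model.add(BatchNormalization(input_shape=(10, 128)))
lr_model.add(Flatten())
lr_model.add(Dense(1, activation='sigmoid'))
lr_model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# same style of call as above, just with the dummy generator
lr_model.fit_generator(dummy_generator(),
                       steps_per_epoch=10,
                       epochs=1,
                       verbose=0,
                       callbacks=[TQDMNotebookCallback()])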
I get the following error:
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
<ipython-input-15-9eced074de11> in <module>
      7 rec_len = 18768
      8 
----> 9 lr_h = lr_model.fit_generator(train_gen,steps_per_epoch=int(rec_len*(1-CV_frac))//batch_size, epochs=100,
     10                               validation_data=val_gen, validation_steps=int(rec_len*CV_frac)//128,
     11                               verbose=0, callbacks=[TQDMNotebookCallback()])

~/.local/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py in fit_generator(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, validation_freq, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)
   1845         'will be removed in a future version. '
   1846         'Please use `Model.fit`, which supports generators.')
-> 1847     return self.fit(
   1848         generator,
   1849         steps_per_epoch=steps_per_epoch,

~/.local/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
   1103               logs = tmp_logs  # No error, now safe to assign to logs.
   1104               end_step = step + data_handler.step_increment
-> 1105               callbacks.on_train_batch_end(end_step, logs)
   1106               if self.stop_training:
   1107                 break

~/.local/lib/python3.8/site-packages/tensorflow/python/keras/callbacks.py in on_train_batch_end(self, batch, logs)
    452     """
    453     if self._should_call_train_batch_hooks:
--> 454       self._call_batch_hook(ModeKeys.TRAIN, 'end', batch, logs=logs)
    455 
    456   def on_test_batch_begin(self, batch, logs=None):

~/.local/lib/python3.8/site-packages/tensorflow/python/keras/callbacks.py in _call_batch_hook(self, mode, hook, batch, logs)
    294       self._call_batch_begin_hook(mode, batch, logs)
    295     elif hook == 'end':
--> 296       self._call_batch_end_hook(mode, batch, logs)
    297     else:
    298       raise ValueError('Unrecognized hook: {}'.format(hook))

~/.local/lib/python3.8/site-packages/tensorflow/python/keras/callbacks.py in _call_batch_end_hook(self, mode, batch, logs)
    314       self._batch_times.append(batch_time)
    315 
--> 316     self._call_batch_hook_helper(hook_name, batch, logs)
    317 
    318     if len(self._batch_times) >= self._num_batches_for_timing_check:

~/.local/lib/python3.8/site-packages/tensorflow/python/keras/callbacks.py in _call_batch_hook_helper(self, hook_name, batch, logs)
    358         if numpy_logs is None:  # Only convert once.
    359           numpy_logs = tf_utils.to_numpy_or_python_type(logs)
--> 360         hook(batch, numpy_logs)
    361 
    362     if self._check_timing:

~/.local/lib/python3.8/site-packages/tensorflow/python/keras/callbacks.py in on_train_batch_end(self, batch, logs)
    708     """
    709     # For backwards compatibility.
--> 710     self.on_batch_end(batch, logs=logs)
    711 
    712   @doc_controls.for_subclass_implementers

~/env/py385/lib/python3.8/site-packages/keras_tqdm/tqdm_callback.py in on_batch_end(self, batch, logs)
    115         self.inner_count += update
    116         if self.inner_count < self.inner_total:
--> 117             self.append_logs(logs)
    118             metrics = self.format_metrics(self.running_logs)
    119             desc = self.inner_description_update.format(epoch=self.epoch, metrics=metrics)

~/env/py385/lib/python3.8/site-packages/keras_tqdm/tqdm_callback.py in append_logs(self, logs)
    134 
    135     def append_logs(self, logs):
--> 136         metrics = self.params['metrics']
    137         for metric, value in six.iteritems(logs):
    138             if metric in metrics:

KeyError: 'metrics'
All the other KeyError: 'metrics' problems I've found while googling involve a different library, livelossplot, so they don't seem to apply here. Any help would be much appreciated!