Improvise_a_Jazz_Solo_with_an_LSTM_Network_v4 issue with model.fit

I was able to pass the assignment test for the djmodel function implementation, but the following cell throws an error during execution:

history = model.fit([X, a0, c0], list(Y), epochs=100, verbose = 0)

Traceback

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-39-f10a44fe26fa> in <module>
----> 1 history = model.fit([X, a0, c0], list(Y), epochs=100)

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in _method_wrapper(self, *args, **kwargs)
    106   def _method_wrapper(self, *args, **kwargs):
    107     if not self._in_multi_worker_mode():  # pylint: disable=protected-access
--> 108       return method(self, *args, **kwargs)
    109 
    110     # Running inside `run_distribute_coordinator` already.

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
   1061           use_multiprocessing=use_multiprocessing,
   1062           model=self,
-> 1063           steps_per_execution=self._steps_per_execution)
   1064 
   1065       # Container that configures and calls `tf.keras.Callback`s.

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/data_adapter.py in __init__(self, x, y, sample_weight, batch_size, steps_per_epoch, initial_epoch, epochs, shuffle, class_weight, max_queue_size, workers, use_multiprocessing, model, steps_per_execution)
   1115         use_multiprocessing=use_multiprocessing,
   1116         distribution_strategy=ds_context.get_strategy(),
-> 1117         model=model)
   1118 
   1119     strategy = ds_context.get_strategy()

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/data_adapter.py in __init__(self, x, y, sample_weights, sample_weight_modes, batch_size, epochs, steps, shuffle, **kwargs)
    273     inputs = pack_x_y_sample_weight(x, y, sample_weights)
    274 
--> 275     num_samples = set(int(i.shape[0]) for i in nest.flatten(inputs))
    276     if len(num_samples) > 1:
    277       msg = "Data cardinality is ambiguous:\n"

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/data_adapter.py in <genexpr>(.0)
    273     inputs = pack_x_y_sample_weight(x, y, sample_weights)
    274 
--> 275     num_samples = set(int(i.shape[0]) for i in nest.flatten(inputs))
    276     if len(num_samples) > 1:
    277       msg = "Data cardinality is ambiguous:\n"

TypeError: int() argument must be a string, a bytes-like object or a number, not 'NoneType'

I can’t figure out what the issue is — please help.

Hi jijo,

Sorry for the late reply. Did you solve the issue? If so, could you share how you solved it?

Many thanks and happy learning,

Rosa