I am getting this error and have been stuck on it for 2 days now. I know it is some error with the input being passed to the LSTM cell, but I am not able to figure out what it is.
ValueError Traceback (most recent call last)
in
1 ### YOU CANNOT EDIT THIS CELL
----> 2 inference_model = music_inference_model(LSTM_cell, densor, Ty = 50)
in music_inference_model(LSTM_cell, densor, Ty)
38 for t in range(Ty):
39 # Step 2.A: Perform one step of LSTM_cell. Use "x", not "x0" (≈1 line)
----> 40 _, a, c = LSTM_cell(x, initial_state=(a, c))
41
42 # Step 2.B: Apply Dense layer to the hidden state output of the LSTM_cell (≈1 line)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/layers/recurrent.py in call(self, inputs, initial_state, constants, **kwargs)
707 # Perform the call with temporarily replaced input_spec
708 self.input_spec = full_input_spec
--> 709 output = super(RNN, self).call(full_input, **kwargs)
710 # Remove the additional_specs from input spec and keep the rest. It is
711 # important to keep since the input spec was populated by build(), and
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py in call(self, *args, **kwargs)
924 if _in_functional_construction_mode(self, inputs, args, kwargs, input_list):
925 return self._functional_construction_call(inputs, args, kwargs,
--> 926 input_list)
927
928 # Maintains info about the Layer.call stack.
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py in _functional_construction_call(self, inputs, args, kwargs, input_list)
1090 # TODO(reedwm): We should assert input compatibility after the inputs
1091 # are casted, not before.
--> 1092 input_spec.assert_input_compatibility(self.input_spec, inputs, self.name)
1093 graph = backend.get_graph()
1094 # Use self._name_scope() to avoid auto-incrementing the name.
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/input_spec.py in assert_input_compatibility(input_spec, inputs, layer_name)
156 str(len(input_spec)) + ' inputs, '
157 'but it received ' + str(len(inputs)) +
--> 158 ' input tensors. Inputs received: ' + str(inputs))
159 for input_index, (x, spec) in enumerate(zip(inputs, input_spec)):
160 if spec is None:
ValueError: Layer lstm expects 39 inputs, but it received 3 input tensors. Inputs received: [<tf.Tensor 'input_21:0' shape=(None, 1, 90) dtype=float32>, <tf.Tensor 'a0_20:0' shape=(None, 64) dtype=float32>, <tf.Tensor 'c0_20:0' shape=(None, 64) dtype=float32>]
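To show the pattern I am trying to use, here is a stripped-down, self-contained sketch of the step-by-step LSTM loop (not the actual assignment code; the sizes n_values=90, n_a=64, Ty=3 and the fixed x input are just placeholders I picked for illustration):

import tensorflow as tf
from tensorflow.keras.layers import Input, LSTM, Dense
from tensorflow.keras.models import Model

n_values, n_a, Ty = 90, 64, 3

LSTM_cell = LSTM(n_a, return_state=True)          # one shared layer, reused every step
densor = Dense(n_values, activation="softmax")    # shared Dense applied to the hidden state

x = Input(shape=(1, n_values))   # one time step of input
a0 = Input(shape=(n_a,))         # initial hidden state
c0 = Input(shape=(n_a,))         # initial cell state

a, c, outputs = a0, c0, []
for t in range(Ty):
    # one step of the shared LSTM; the returned states are fed back in next iteration
    # (in the real inference model x would be replaced by the previous prediction here)
    _, a, c = LSTM_cell(x, initial_state=[a, c])
    outputs.append(densor(a))

model = Model(inputs=[x, a0, c0], outputs=outputs)
model.summary()

This toy version builds fine for me, so I suspect the problem is with the state of my actual LSTM_cell / inputs rather than the loop itself. Any pointers would be appreciated.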