C5 W4 A1 exercise 7

Hi,
I get the following error but can't seem to pinpoint where the mistake is.


InvalidArgumentError Traceback (most recent call last)
in
1 # UNIT TEST
----> 2 Decoder_test(Decoder, create_look_ahead_mask, create_padding_mask)

~/work/W4A1/public_tests.py in Decoder_test(target, create_look_ahead_mask, create_padding_mask)
218 target_vocab_size,
219 maximum_position_encoding)
--> 220 outd, att_weights = decoderk(x, encoderq_output, False, look_ahead_mask, None)
221 assert tf.is_tensor(outd), "Wrong type for outd. It must be a dict"
222 assert np.allclose(tf.shape(outd), tf.shape(encoderq_output)), f"Wrong shape. We expected { tf.shape(encoderq_output)}"

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py in call(self, *args, **kwargs)
1010 with autocast_variable.enable_auto_cast_variables(
1011 self._compute_dtype_object):
--> 1012 outputs = call_fn(inputs, *args, **kwargs)
1013
1014 if self._activity_regularizer:

in call(self, x, enc_output, training, look_ahead_mask, padding_mask)
65 # of block 1 and 2 (~1 line)
66 x, block1, block2 = self.dec_layers[i](x, x, x,
--> 67 enc_output, enc_output)
68
69 #update attention_weights dictionary with the attention weights of block 1 and block 2

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py in call(self, *args, **kwargs)
1010 with autocast_variable.enable_auto_cast_variables(
1011 self._compute_dtype_object):
--> 1012 outputs = call_fn(inputs, *args, **kwargs)
1013
1014 if self._activity_regularizer:

in call(self, x, enc_output, training, look_ahead_mask, padding_mask)
51 # calculate self-attention and return attention scores as attn_weights_block1.
52 # Dropout will be applied during training (~1 line).
--> 53 mult_attn_out1, attn_weights_block1 = self.mha1(x, x, x, look_ahead_mask, return_attention_scores=True) # (batch_size, target_seq_len, d_model)
54
55 # apply layer normalization (layernorm1) to the sum of the attention output and the input (~1 line)

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py in call(self, *args, **kwargs)
1010 with autocast_variable.enable_auto_cast_variables(
1011 self._compute_dtype_object):
--> 1012 outputs = call_fn(inputs, *args, **kwargs)
1013
1014 if self._activity_regularizer:

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/layers/multi_head_attention.py in call(self, query, value, key, attention_mask, return_attention_scores, training)
472
473 attention_output, attention_scores = self._compute_attention(
--> 474 query, key, value, attention_mask, training)
475 attention_output = self._output_dense(attention_output)
476

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/layers/multi_head_attention.py in _compute_attention(self, query, key, value, attention_mask, training)
436 query)
437
--> 438 attention_scores = self._masked_softmax(attention_scores, attention_mask)
439
440 # This is actually dropping out entire tokens to attend to, which might

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/layers/multi_head_attention.py in _masked_softmax(self, attention_scores, attention_mask)
399 attention_mask = array_ops.expand_dims(
400 attention_mask, axis=mask_expansion_axes)
--> 401 return self._softmax(attention_scores, attention_mask)
402
403 def _compute_attention(self,

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py in call(self, *args, **kwargs)
1010 with autocast_variable.enable_auto_cast_variables(
1011 self._compute_dtype_object):
--> 1012 outputs = call_fn(inputs, *args, **kwargs)
1013
1014 if self._activity_regularizer:

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/layers/advanced_activations.py in call(self, inputs, mask)
326 # Since we are adding it to the raw scores before the softmax, this is
327 # effectively the same as removing these entirely.
--> 328 inputs += adder
329 if isinstance(self.axis, (tuple, list)):
330 if len(self.axis) > 1:

/opt/conda/lib/python3.7/site-packages/tensorflow/python/ops/math_ops.py in binary_op_wrapper(x, y)
1162 with ops.name_scope(None, op_name, [x, y]) as name:
1163 try:
--> 1164 return func(x, y, name=name)
1165 except (TypeError, ValueError) as e:
1166 # Even if dispatching the op failed, the RHS may be a tensor aware

/opt/conda/lib/python3.7/site-packages/tensorflow/python/util/dispatch.py in wrapper(*args, **kwargs)
199 β€œβ€β€œCall target, and fall back on dispatchers if there is a TypeError.”""
200 try:
--> 201 return target(*args, **kwargs)
202 except (TypeError, ValueError):
203 # Note: convert_to_eager_tensor currently raises a ValueError, not a

/opt/conda/lib/python3.7/site-packages/tensorflow/python/ops/math_ops.py in _add_dispatch(x, y, name)
1484 return gen_math_ops.add(x, y, name=name)
1485 else:
--> 1486 return gen_math_ops.add_v2(x, y, name=name)
1487
1488

/opt/conda/lib/python3.7/site-packages/tensorflow/python/ops/gen_math_ops.py in add_v2(x, y, name)
470 return _result
471 except _core._NotOkStatusException as e:
--> 472 _ops.raise_from_not_ok_status(e, name)
473 except _core._FallbackException:
474 pass

/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/ops.py in raise_from_not_ok_status(e, name)
6860 message = e.message + (" name: " + name if name is not None else "")
6861 # pylint: disable=protected-access
--> 6862 six.raise_from(core._status_to_exception(e.code, message), None)
6863 # pylint: enable=protected-access
6864

/opt/conda/lib/python3.7/site-packages/six.py in raise_from(value, from_value)

InvalidArgumentError: Incompatible shapes: [2,3,3,3] vs. [2,1,3,4] [Op:AddV2]

Thanks in advance for the help,

Carlos.

Your code isn't passing the correct arguments to self.dec_layers[i](...).
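For reference, the DecoderLayer.call signature shown in your own traceback is call(self, x, enc_output, training, look_ahead_mask, padding_mask), so the layer should receive the encoder output, the training flag, and the two masks in that order. A minimal sketch of the call, assuming the variable names used in the notebook's Decoder.call (not the official solution, just the argument order that signature implies):

    # pass the decoder input, encoder output, training flag, and both masks,
    # matching DecoderLayer.call(self, x, enc_output, training, look_ahead_mask, padding_mask)
    x, block1, block2 = self.dec_layers[i](x, enc_output, training,
                                            look_ahead_mask, padding_mask)

The "Incompatible shapes: [2,3,3,3] vs. [2,1,3,4]" error is consistent with this: with your current call, enc_output lands in the mask argument, so the masked softmax tries to add a (batch, seq_len, d_model) tensor to the attention scores.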
