C5W4A1 Exercise 5: Encoder

I am getting the error:


```
AttributeError                            Traceback (most recent call last)
<ipython-input-...> in <module>
      1 # UNIT TEST
----> 2 Encoder_test(Encoder)

~/work/W4A1/public_tests.py in Encoder_test(target)
    114     x = np.array([[2, 1, 3], [1, 2, 0]])
    115
--> 116     encoderq_output = encoderq(x, True, None)
    117
    118     assert tf.is_tensor(encoderq_output), "Wrong type. Output must be a tensor"

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py in __call__(self, *args, **kwargs)
   1010         with autocast_variable.enable_auto_cast_variables(
   1011             self._compute_dtype_object):
-> 1012           outputs = call_fn(inputs, *args, **kwargs)
   1013
   1014         if self._activity_regularizer:

<ipython-input-...> in call(self, x, training, mask)
     56         # Pass the output through the stack of encoding layers
     57         for i in range(self.num_layers):
---> 58             x = self.enc_layers[i](x,training, mask)
     59         # END CODE HERE
     60

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py in __call__(self, *args, **kwargs)
   1010         with autocast_variable.enable_auto_cast_variables(
   1011             self._compute_dtype_object):
-> 1012           outputs = call_fn(inputs, *args, **kwargs)
   1013
   1014         if self._activity_regularizer:

<ipython-input-...> in call(self, x, training, mask)
     39         # START CODE HERE
     40         # calculate self-attention using mha(~1 line). Dropout will be applied during training
---> 41         attn_output = self.mha(x,x,x,mask)  # Self attention (batch_size, input_seq_len, fully_connected_dim)
     42
     43         # apply layer normalization on sum of the input and the attention output to get the

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py in __call__(self, *args, **kwargs)
   1010         with autocast_variable.enable_auto_cast_variables(
   1011             self._compute_dtype_object):
-> 1012           outputs = call_fn(inputs, *args, **kwargs)
   1013
   1014         if self._activity_regularizer:

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/layers/multi_head_attention.py in call(self, query, value, key, attention_mask, return_attention_scores, training)
    456                training=None):
    457     if not self._built_from_signature:
--> 458       self._build_from_signature(query=query, value=value, key=key)
    459     if key is None:
    460       key = value

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/layers/multi_head_attention.py in _build_from_signature(self, query, value, key)
    319     # operations.
    320     with tf_utils.maybe_init_scope(self):
--> 321       free_dims = query_shape.rank - 1
    322       einsum_equation, bias_axes, output_rank = _build_proj_equation(
    323           free_dims, bound_dims=1, output_dims=2)

AttributeError: 'Dropout' object has no attribute 'rank'
```

Please help.

Never mind… I was using `tf.keras.layers.Dropout` instead of the `self.dropout` layer created in `__init__`. I'll leave this here for anyone who runs into the same problem.
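For anyone tracing the same error: when `MultiHeadAttention` first builds its projection weights, it needs the rank of the query's shape. If a `Dropout` *layer object* (rather than a tensor) is bound to `x` and flows into `self.mha`, the build step has no rank to read, hence the message. Here is a minimal standalone sketch, not the assignment code (layer sizes and names here are made up), that reproduces the mistake and the fix:

```python
import tensorflow as tf

mha = tf.keras.layers.MultiHeadAttention(num_heads=2, key_dim=4)
dropout = tf.keras.layers.Dropout(0.1)  # stand-in for the layer built in __init__
x = tf.random.uniform((2, 3, 4))        # (batch_size, seq_len, embedding_dim)

# Buggy pattern: this binds a brand-new Dropout layer object to x instead
# of a tensor, so the layer instance reaches the attention call and
# triggers "'Dropout' object has no attribute 'rank'".
# x = tf.keras.layers.Dropout(0.1)
# out = mha(x, x, x)  # AttributeError

# Correct pattern: call the existing layer, which returns a tensor,
# then pass that tensor to self-attention.
x = dropout(x, training=True)
out = mha(x, x, x)
print(out.shape)  # (2, 3, 4)
```

In the notebook itself the fix is the same idea: call the already-instantiated layer, e.g. `x = self.dropout(x, training=training)`, rather than constructing a new `Dropout` inside `call`.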