Hello, I am getting the following error as I try to complete Exercise 2 on the Functional API. Here is the error message I get:
OperatorNotAllowedInGraphError Traceback (most recent call last)
in
----> 1 conv_model = convolutional_model((64, 64, 3))
      2 conv_model.compile(optimizer='adam',
      3                    loss='categorical_crossentropy',
      4                    metrics=['accuracy'])
      5 conv_model.summary()
in convolutional_model(input_shape)
     28 # YOUR CODE STARTS HERE
     29 Z1 = tf.keras.layers.Conv2D(filters= 8 , kernel_size= (4,4) , padding='same')(input_img)
---> 30 A1 = tf.keras.layers.ReLU(Z1)
     31 P1 = tf.keras.layers.MaxPool2D(pool_size=(8, 8), strides=(8, 8), padding='same')(A1)
     32 Z2 = tf.keras.layers.Conv2D(filters= 16 , kernel_size= (2,2) , padding='same')(P1)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/layers/advanced_activations.py in __init__(self, max_value, negative_slope, threshold, **kwargs)
    344 def __init__(self, max_value=None, negative_slope=0, threshold=0, **kwargs):
    345     super(ReLU, self).__init__(**kwargs)
--> 346     if max_value is not None and max_value < 0.:
    347         raise ValueError('max_value of Relu layer '
    348                          'cannot be negative value: ' + str(max_value))
/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/ops.py in __bool__(self)
    875     TypeError.
    876     """
--> 877     self._disallow_bool_casting()
    878
    879 def __nonzero__(self):
/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/ops.py in _disallow_bool_casting(self)
    488 else:
    489     # Default: V1-style Graph execution.
--> 490     self._disallow_in_graph_mode("using a `tf.Tensor` as a Python `bool`")
    491
    492 def _disallow_iteration(self):
/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/ops.py in _disallow_in_graph_mode(self, task)
    477 raise errors.OperatorNotAllowedInGraphError(
    478     "{} is not allowed in Graph execution. Use Eager execution or decorate"
--> 479     " this function with @tf.function.".format(task))
    480
    481 def _disallow_bool_casting(self):
OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed in Graph execution. Use Eager execution or decorate this function with @tf.function.
I'd be glad if you could take a look at this and let me know what my mistake is.