I also have a problem with Exercise 6. This one throws an error seemingly before any student code is invoked, i.e. before execution reaches train_step. Any guidance from the instructor/moderators would be appreciated:
ValueError: No gradients provided for any variable: (['Variable:0'],). Provided `grads_and_vars` is ((None, <tf.Variable 'Variable:0' shape=(1, 400, 400, 3) dtype=float32>),).
The entire traceback is:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Input In [30], in <cell line: 7>()
1 ### you cannot edit this cell
2
3 # You always must run the last cell before this one. You will get an error if not.
5 generated_image = tf.Variable(generated_image)
----> 7 train_step_test(train_step, generated_image)
File /tf/W4A2/public_tests.py:86, in train_step_test(target, generated_image)
82 def train_step_test(target, generated_image):
83 generated_image = tf.Variable(generated_image)
---> 86 J1 = target(generated_image)
87 print(J1)
88 assert type(J1) == EagerTensor, f"Wrong type {type(J1)} != {EagerTensor}"
File /usr/local/lib/python3.8/dist-packages/tensorflow/python/util/traceback_utils.py:153, in filter_traceback.<locals>.error_handler(*args, **kwargs)
151 except Exception as e:
152 filtered_tb = _process_traceback_frames(e.__traceback__)
--> 153 raise e.with_traceback(filtered_tb) from None
154 finally:
155 del filtered_tb
File /tmp/__autograph_generated_file4vjy1r38.py:16, in outer_factory.<locals>.inner_factory.<locals>.tf__train_step(generated_image)
14 J = ag__.converted_call(ag__.ld(total_cost), (ag__.ld(J_content), ag__.ld(J_style)), dict(alpha=10, beta=40), fscope)
15 grad = ag__.converted_call(ag__.ld(tape).gradient, (ag__.ld(J), ag__.ld(generated_image)), None, fscope)
---> 16 ag__.converted_call(ag__.ld(optimizer).apply_gradients, ([(ag__.ld(grad), ag__.ld(generated_image))],), None, fscope)
17 ag__.converted_call(ag__.ld(generated_image).assign, (ag__.converted_call(ag__.ld(clip_0_1), (ag__.ld(generated_image),), None, fscope),), None, fscope)
18 try:
File /usr/local/lib/python3.8/dist-packages/keras/optimizers/optimizer_v2/optimizer_v2.py:640, in OptimizerV2.apply_gradients(self, grads_and_vars, name, experimental_aggregate_gradients)
599 def apply_gradients(self,
600 grads_and_vars,
601 name=None,
602 experimental_aggregate_gradients=True):
603 """Apply gradients to variables.
604
605 This is the second part of `minimize()`. It returns an `Operation` that
(...)
638 RuntimeError: If called in a cross-replica context.
639 """
--> 640 grads_and_vars = optimizer_utils.filter_empty_gradients(grads_and_vars)
641 var_list = [v for (_, v) in grads_and_vars]
643 with tf.name_scope(self._name):
644 # Create iteration if necessary.
File /usr/local/lib/python3.8/dist-packages/keras/optimizers/optimizer_v2/utils.py:73, in filter_empty_gradients(grads_and_vars)
71 if not filtered:
72 variable = ([v.name for _, v in grads_and_vars],)
---> 73 raise ValueError(f"No gradients provided for any variable: {variable}. "
74 f"Provided `grads_and_vars` is {grads_and_vars}.")
75 if vars_with_empty_grads:
76 logging.warning(
77 ("Gradients do not exist for variables %s when minimizing the loss. "
78 "If you're using `model.compile()`, did you forget to provide a `loss`"
79 "argument?"),
80 ([v.name for v in vars_with_empty_grads]))
ValueError: in user code:
File "<ipython-input-29-76de60c7f5da>", line 32, in train_step *
optimizer.apply_gradients([(grad, generated_image)])
File "/usr/local/lib/python3.8/dist-packages/keras/optimizers/optimizer_v2/optimizer_v2.py", line 640, in apply_gradients **
grads_and_vars = optimizer_utils.filter_empty_gradients(grads_and_vars)
File "/usr/local/lib/python3.8/dist-packages/keras/optimizers/optimizer_v2/utils.py", line 73, in filter_empty_gradients
raise ValueError(f"No gradients provided for any variable: {variable}. "