I got stuck on this exercise, and my code raised the following error:
ValueError                                Traceback (most recent call last)
<ipython-input-...> in <module>
4 # print((generated_image) )
5
----> 6 J1 = train_step(generated_image)
7 print(J1)
8 assert type(J1) == EagerTensor, f"Wrong type {type(J1)} != {EagerTensor}"
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
812 # In this case we have not created variables on the first call. So we can
813 # run the first trace but we should fail if variables are created.
--> 814 results = self._stateful_fn(*args, **kwds)
815 if self._created_variables:
816 raise ValueError("Creating variables on a non-first call to a function"
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in __call__(self, *args, **kwargs)
2826 """Calls a graph function specialized to the inputs."""
2827 with self._lock:
--> 2828 graph_function, args, kwargs = self._maybe_define_function(args, kwargs)
2829 return graph_function._filtered_call(args, kwargs) # pylint: disable=protected-access
2830
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
--> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3073 arg_names=arg_names,
3074 override_flat_arg_shapes=override_flat_arg_shapes,
--> 3075 capture_by_value=self._capture_by_value),
3076 self._function_attributes,
3077 function_spec=self.function_spec,
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: func_outputs contains only Tensors, CompositeTensors,
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
ValueError: in user code:
<ipython-input-27-d6307d663a2a>:26 train_step *
J_style = compute_style_cost(a_S, generated_image, STYLE_LAYERS)
<ipython-input-14-e737f0082e76>:28 compute_style_cost *
J_style_layer = compute_layer_style_cost(a_S[i], a_G[i])
/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/dispatch.py:201 wrapper
return target(*args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/array_ops.py:1024 _slice_helper
name=name)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/dispatch.py:201 wrapper
return target(*args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/array_ops.py:1196 strided_slice
shrink_axis_mask=shrink_axis_mask)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/gen_array_ops.py:10352 strided_slice
shrink_axis_mask=shrink_axis_mask, name=name)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:744 _apply_op_helper
attrs=attr_protos, op_def=op_def)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py:593 _create_op_internal
compute_device)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py:3485 _create_op_internal
op_def=op_def)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py:1975 __init__
control_input_ops, op_def)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py:1815 _create_c_op
raise ValueError(str(e))
ValueError: slice index 0 of dimension 0 out of bounds. for '{{node strided_slice_1}} = StridedSlice[Index=DT_INT32, T=DT_FLOAT, begin_mask=0, ellipsis_mask=0, end_mask=0, new_axis_mask=0, shrink_axis_mask=1](strided_slice, strided_slice_1/stack, strided_slice_1/stack_1, strided_slice_1/stack_2)' with input shapes: [0,400,400,3], [1], [1], [1] and with computed input tensors: input[1] = <0>, input[2] = <1>, input[3] = <1>.
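From what I can tell, the final message says the tensor being sliced inside compute_style_cost has shape [0, 400, 400, 3], i.e. its first (batch) dimension is empty, so selecting index 0 along that dimension fails. Here is a minimal sketch (with a made-up tensor, not the assignment variables) that reproduces the same kind of message:

```python
import tensorflow as tf

# Made-up tensor whose first dimension has size 0, matching the
# [0,400,400,3] shape reported in the traceback above.
empty_batch = tf.zeros([0, 400, 400, 3])

try:
    # The same kind of StridedSlice that the a_S[i] / a_G[i] indexing
    # triggers: index 0 along a dimension of size 0 is out of bounds.
    first_item = empty_batch[0]
except Exception as e:
    print(type(e).__name__, e)
```

So it looks like one of the activation tensors that a_S[i] or a_G[i] indexes into has an empty batch dimension at that layer, though I'm not sure where that comes from in my code.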
Also, here is my code for UNQ_C3:
{moderator edit - solution code removed}