C2W3 - getting error in Exercise 11: Transform

Hello Learners,

I am trying to perform the transform operation by passing all the correct arguments:
transform = Transform(
    examples=example_gen.outputs['examples'],
    schema=user_schema_importer.outputs['result'],
    module_file=os.path.abspath(_cover_transform_module_file)
)
but I am getting this error:
TypeError: Expected Tensor, SparseTensor, RaggedTensor or Operation got None of type <class 'NoneType'>

Pasting the trace for reference:
TypeError Traceback (most recent call last)
<ipython-input-...> in <module>
     10
     11 # Run the component
---> 12 context.run(transform, enable_cache=False)

/opt/conda/lib/python3.8/site-packages/tfx/orchestration/experimental/interactive/interactive_context.py in run_if_ipython(*args, **kwargs)
     65 # __IPYTHON__ variable is set by IPython, see
     66 # IPython reference - IPython 0.10.2 documentation.
---> 67 return fn(*args, **kwargs)
     68 else:
     69 absl.logging.warning(

/opt/conda/lib/python3.8/site-packages/tfx/orchestration/experimental/interactive/interactive_context.py in run(self, component, enable_cache, beam_pipeline_args)
    180 telemetry_utils.LABEL_TFX_RUNNER: runner_label,
    181 }):
--> 182 execution_id = launcher.launch().execution_id
    183
    184 return execution_result.ExecutionResult(

/opt/conda/lib/python3.8/site-packages/tfx/orchestration/launcher/base_component_launcher.py in launch(self)
    200 absl.logging.info('Running executor for %s',
    201 self._component_info.component_id)
--> 202 self._run_executor(execution_decision.execution_id,
    203 execution_decision.input_dict,
    204 execution_decision.output_dict,

/opt/conda/lib/python3.8/site-packages/tfx/orchestration/launcher/in_process_component_launcher.py in _run_executor(self, execution_id, input_dict, output_dict, exec_properties)
     65 executor_context)  # type: ignore
     66
---> 67 executor.Do(input_dict, output_dict, exec_properties)

/opt/conda/lib/python3.8/site-packages/tfx/components/transform/executor.py in Do(self, input_dict, output_dict, exec_properties)
    415 label_outputs[labels.CACHE_OUTPUT_PATH_LABEL] = cache_output
    416 status_file = 'status_file'  # Unused
--> 417 self.Transform(label_inputs, label_outputs, status_file)
    418 absl.logging.debug('Cleaning up temp path %s on executor success',
    419 temp_path)

/opt/conda/lib/python3.8/site-packages/tfx/components/transform/executor.py in Transform(failed resolving arguments)
    933 materialization_format = (
    934 transform_paths_file_formats[-1] if materialize_output_paths else None)
--> 935 self._RunBeamImpl(analyze_data_list, transform_data_list,
    936 preprocessing_fn, input_dataset_metadata,
    937 transform_output_path, raw_examples_data_format,

/opt/conda/lib/python3.8/site-packages/tfx/components/transform/executor.py in _RunBeamImpl(self, analyze_data_list, transform_data_list, preprocessing_fn, input_dataset_metadata, transform_output_path, raw_examples_data_format, temp_path, input_cache_dir, output_cache_dir, compute_statistics, per_set_stats_output_paths, materialization_format, analyze_paths_count)
    980 analyze_input_columns = tft.get_analyze_input_columns(
    981 preprocessing_fn, unprojected_typespecs)
--> 982 transform_input_columns = tft.get_transform_input_columns(
    983 preprocessing_fn, unprojected_typespecs)
    984 # Use the same dataset (same columns) for AnalyzeDataset and computing

/opt/conda/lib/python3.8/site-packages/tensorflow_transform/inspect_preprocessing_fn.py in get_transform_input_columns(preprocessing_fn, specs)
     83 specs)
     84 output_signature = preprocessing_fn(input_signature.copy())
---> 85 transform_input_tensors = graph_tools.get_dependent_inputs(
     86 graph, input_signature, output_signature)
     87 return list(transform_input_tensors.keys())

/opt/conda/lib/python3.8/site-packages/tensorflow_transform/graph_tools.py in get_dependent_inputs(graph, input_tensors, output_tensors)
    775 dependent_inputs = {}
    776 for output_tensor in output_iterator:
--> 777 dependent_inputs.update(graph_analyzer.get_dependent_inputs(output_tensor))
    778 return {
    779 name: tensor

/opt/conda/lib/python3.8/site-packages/tensorflow_transform/graph_tools.py in wrapper(self, tensor_or_op)
    166 """Wrapper when calling func to re-raise exceptions."""
    167 try:
--> 168 return func(self, tensor_or_op)
    169 except _UnexpectedPlaceholderError as e:
    170 if e.func_graph_name:

/opt/conda/lib/python3.8/site-packages/tensorflow_transform/graph_tools.py in get_dependent_inputs(self, tensor_or_op)
    722 tensor_or_op,
    723 (tf.Tensor, tf.SparseTensor, tf.RaggedTensor, tf.Operation)):
--> 724 raise TypeError(
    725 'Expected Tensor, SparseTensor, RaggedTensor or Operation got {} of '
    726 'type {}'.format(tensor_or_op, type(tensor_or_op)))

TypeError: Expected Tensor, SparseTensor, RaggedTensor or Operation got None of type <class 'NoneType'>

I checked the types of example_gen.outputs['examples'] and user_schema_importer.outputs['result'] and they seem fine. I am not sure how to proceed, please help.

Thanks,
Mithun

Hi Mithun! Welcome to Discourse! The traceback points to your _cover_transform_module_file rather than to the component's arguments: tft.get_transform_input_columns runs your preprocessing_fn and inspects its outputs, and a None value in the dictionary it returns raises exactly this TypeError. Please review that file and make sure that none of your features_dict values is None. Hope this helps!
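
For reference, here is a minimal sketch of the pattern to check for in that module file. The feature names below ('Elevation', 'Cover_Type') are placeholders for illustration only, not necessarily the exercise's schema; the point is that every value in the dictionary returned by preprocessing_fn must be an actual tensor:

import tensorflow_transform as tft

# Hypothetical feature names, for illustration only.
_SCALE_KEY = 'Elevation'
_LABEL_KEY = 'Cover_Type'

def preprocessing_fn(inputs):
    """Maps raw feature tensors to transformed feature tensors."""
    features_dict = {}

    # Every entry must be a Tensor, SparseTensor or RaggedTensor.
    features_dict[_SCALE_KEY] = tft.scale_to_0_1(inputs[_SCALE_KEY])
    features_dict[_LABEL_KEY] = inputs[_LABEL_KEY]

    # A common way to hit the error in this thread is to call a tft op without
    # assigning its result, which leaves the dictionary entry as None. Transform
    # then fails inside tft.get_transform_input_columns with "Expected Tensor,
    # SparseTensor, RaggedTensor or Operation got None".

    return features_dict

A quick check before re-running the component is to print the dictionary your preprocessing_fn returns, or assert that none of its values is None.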