Lab 2 Python import error

When executing the Jupyter notebook for Week 2/Lab 2, I encounter the following error when executing the "Import necessary components ..." cell:

Error message:


---------------------------------------------------------------------------
ModuleNotFoundError                       Traceback (most recent call last)
Cell In[1], line 2
      1 from datasets import load_dataset
----> 2 from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, GenerationConfig, TrainingArguments, Trainer
      3 import torch
      4 import time

File /opt/conda/lib/python3.12/site-packages/transformers/__init__.py:26
     23 from typing import TYPE_CHECKING
     25 # Check the dependencies satisfy the minimal versions required.
---> 26 from . import dependency_versions_check
     27 from .utils import (
     28     OptionalDependencyNotAvailable,
     29     _LazyModule,
   (...)
     47     logging,
     48 )
     51 logger = logging.get_logger(__name__)  # pylint: disable=invalid-name

File /opt/conda/lib/python3.12/site-packages/transformers/dependency_versions_check.py:16
      1 # Copyright 2020 The HuggingFace Team. All rights reserved.
      2 #
      3 # Licensed under the Apache License, Version 2.0 (the "License");
   (...)
     12 # See the License for the specific language governing permissions and
     13 # limitations under the License.
     15 from .dependency_versions_table import deps
---> 16 from .utils.versions import require_version, require_version_core
     19 # define which module versions we always want to check at run time
     20 # (usually the ones defined in `install_requires` in setup.py)
     21 #
     22 # order specific notes:
     23 # - tqdm must be checked before tokenizers
     25 pkgs_to_check_at_runtime = [
     26     "python",
     27     "tqdm",
   (...)
     37     "pyyaml",
     38 ]

File /opt/conda/lib/python3.12/site-packages/transformers/utils/__init__.py:33
     24 from .constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, IMAGENET_STANDARD_MEAN, IMAGENET_STANDARD_STD
     25 from .doc import (
     26     add_code_sample_docstrings,
     27     add_end_docstrings,
   (...)
     31     replace_return_docstrings,
     32 )
---> 33 from .generic import (
     34     ContextManagers,
     35     ExplicitEnum,
     36     ModelOutput,
     37     PaddingStrategy,
     38     TensorType,
     39     add_model_info_to_auto_map,
     40     cached_property,
     41     can_return_loss,
     42     expand_dims,
     43     find_labels,
     44     flatten_dict,
     45     infer_framework,
     46     is_jax_tensor,
     47     is_numpy_array,
     48     is_tensor,
     49     is_tf_symbolic_tensor,
     50     is_tf_tensor,
     51     is_torch_device,
     52     is_torch_dtype,
     53     is_torch_tensor,
     54     reshape,
     55     squeeze,
     56     strtobool,
     57     tensor_size,
     58     to_numpy,
     59     to_py_obj,
     60     transpose,
     61     working_or_temp_dir,
     62 )
     63 from .hub import (
     64     CLOUDFRONT_DISTRIB_PREFIX,
     65     HF_MODULES_CACHE,
   (...)
     91     try_to_load_from_cache,
     92 )
     93 from .import_utils import (
     94     ACCELERATE_MIN_VERSION,
     95     ENV_VARS_TRUE_AND_AUTO_VALUES,
   (...)
    200     torch_only_method,
    201 )

File /opt/conda/lib/python3.12/site-packages/transformers/utils/generic.py:442
    438         return tuple(self[k] for k in self.keys())
    441 if is_torch_available():
--> 442     import torch.utils._pytree as _torch_pytree
    444     def _model_output_flatten(output: ModelOutput) -> Tuple[List[Any], "_torch_pytree.Context"]:
    445         return list(output.values()), list(output.keys())

File /opt/conda/lib/python3.12/site-packages/torch/utils/__init__.py:8
      5 import weakref
      7 import torch
----> 8 from torch.utils import (
      9     backcompat as backcompat,
     10     collect_env as collect_env,
     11     data as data,
     12     deterministic as deterministic,
     13     hooks as hooks,
     14 )
     15 from torch.utils.backend_registration import (
     16     generate_methods_for_privateuse1_backend,
     17     rename_privateuse1_backend,
     18 )
     19 from torch.utils.cpp_backtrace import get_cpp_backtrace

File /opt/conda/lib/python3.12/site-packages/torch/utils/backcompat/__init__.py:2
      1 # mypy: allow-untyped-defs
----> 2 from torch._C import _set_backcompat_broadcast_warn
      3 from torch._C import _get_backcompat_broadcast_warn
      4 from torch._C import _set_backcompat_keepdim_warn

ModuleNotFoundError: No module named 'torch._C'
15 from .dependency_versions_table import deps
---> 16 from .utils.versions import require_version, require_version_core
19 # define which module versions we always want to check at run time
20 # (usually the ones defined in install_requires in setup.py)
21 #
22 # order specific notes:
23 # - tqdm must be checked before tokenizers
25 pkgs_to_check_at_runtime = [
26     "python",
27     "tqdm",
(...)
37     "pyyaml",
38 ]

File /opt/conda/lib/python3.12/site-packages/transformers/utils/__init__.py:33
24 from .constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, IMAGENET_STANDARD_MEAN, IMAGENET_STANDARD_STD
25 from .doc import (
26     add_code_sample_docstrings,
27     add_end_docstrings,
(...)
31     replace_return_docstrings,
32 )
---> 33 from .generic import (
34     ContextManagers,
35     ExplicitEnum,
36     ModelOutput,
37     PaddingStrategy,
38     TensorType,
39     add_model_info_to_auto_map,
40     cached_property,
41     can_return_loss,
42     expand_dims,
43     find_labels,
44     flatten_dict,
45     infer_framework,
46     is_jax_tensor,
47     is_numpy_array,
48     is_tensor,
49     is_tf_symbolic_tensor,
50     is_tf_tensor,
51     is_torch_device,
52     is_torch_dtype,
53     is_torch_tensor,
54     reshape,
55     squeeze,
56     strtobool,
57     tensor_size,
58     to_numpy,
59     to_py_obj,
60     transpose,
61     working_or_temp_dir,
62 )
63 from .hub import (
64     CLOUDFRONT_DISTRIB_PREFIX,
65     HF_MODULES_CACHE,
(...)
91     try_to_load_from_cache,
92 )
93 from .import_utils import (
94     ACCELERATE_MIN_VERSION,
95     ENV_VARS_TRUE_AND_AUTO_VALUES,
(...)
200     torch_only_method,
201 )

File /opt/conda/lib/python3.12/site-packages/transformers/utils/generic.py:442
438         return tuple(self[k] for k in self.keys())
441 if is_torch_available():
--> 442     import torch.utils._pytree as _torch_pytree
444     def _model_output_flatten(output: ModelOutput) -> Tuple[List[Any], "_torch_pytree.Context"]:
445         return list(output.values()), list(output.keys())

File /opt/conda/lib/python3.12/site-packages/torch/utils/__init__.py:8
5 import weakref
7 import torch
----> 8 from torch.utils import (
9     backcompat as backcompat,
10     collect_env as collect_env,
11     data as data,
12     deterministic as deterministic,
13     hooks as hooks,
14 )
15 from torch.utils.backend_registration import (
16     generate_methods_for_privateuse1_backend,
17     rename_privateuse1_backend,
18 )
19 from torch.utils.cpp_backtrace import get_cpp_backtrace

File /opt/conda/lib/python3.12/site-packages/torch/utils/backcompat/__init__.py:2
1 # mypy: allow-untyped-defs
----> 2 from torch._C import _set_backcompat_broadcast_warn
3 from torch._C import _get_backcompat_broadcast_warn
4 from torch._C import _set_backcompat_keepdim_warn

ModuleNotFoundError: No module named 'torch._C'

Thanks for any help to get this to work!

hello @maqong

May I ask whether you are running the code locally or on the course-provided platform?

On the course provided platform.

Can you refresh and try again? I didn't have this issue.

Also remember some of Coursera labs may not work as they are being upgraded as per a recent notification by Coursera.