[Lab 2]ModuleNotFoundError: No module named 'torch._C'

from datasets import load_dataset
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, GenerationConfig, TrainingArguments, Trainer
import torch
import time
import evaluate
import pandas as pd
import numpy as np

---------------------------------------------------------------------------
ModuleNotFoundError                       Traceback (most recent call last)
Cell In[3], line 2
      1 from datasets import load_dataset
----> 2 from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, GenerationConfig, TrainingArguments, Trainer
      3 import torch
      4 import time

File /opt/conda/lib/python3.12/site-packages/transformers/__init__.py:26
     23 from typing import TYPE_CHECKING
     25 # Check the dependencies satisfy the minimal versions required.
---> 26 from . import dependency_versions_check
     27 from .utils import (
     28     OptionalDependencyNotAvailable,
     29     _LazyModule,
   (...)
     47     logging,
     48 )
     51 logger = logging.get_logger(__name__)  # pylint: disable=invalid-name

File /opt/conda/lib/python3.12/site-packages/transformers/dependency_versions_check.py:16
      1 # Copyright 2020 The HuggingFace Team. All rights reserved.
      2 #
      3 # Licensed under the Apache License, Version 2.0 (the "License");
   (...)
     12 # See the License for the specific language governing permissions and
     13 # limitations under the License.
     15 from .dependency_versions_table import deps
---> 16 from .utils.versions import require_version, require_version_core
     19 # define which module versions we always want to check at run time
     20 # (usually the ones defined in `install_requires` in setup.py)
     21 #
     22 # order specific notes:
     23 # - tqdm must be checked before tokenizers
     25 pkgs_to_check_at_runtime = [
     26     "python",
     27     "tqdm",
   (...)
     37     "pyyaml",
     38 ]

File /opt/conda/lib/python3.12/site-packages/transformers/utils/__init__.py:33
     24 from .constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, IMAGENET_STANDARD_MEAN, IMAGENET_STANDARD_STD
     25 from .doc import (
     26     add_code_sample_docstrings,
     27     add_end_docstrings,
   (...)
     31     replace_return_docstrings,
     32 )
---> 33 from .generic import (
     34     ContextManagers,
     35     ExplicitEnum,
     36     ModelOutput,
     37     PaddingStrategy,
     38     TensorType,
     39     add_model_info_to_auto_map,
     40     cached_property,
     41     can_return_loss,
     42     expand_dims,
     43     find_labels,
     44     flatten_dict,
     45     infer_framework,
     46     is_jax_tensor,
     47     is_numpy_array,
     48     is_tensor,
     49     is_tf_symbolic_tensor,
     50     is_tf_tensor,
     51     is_torch_device,
     52     is_torch_dtype,
     53     is_torch_tensor,
     54     reshape,
     55     squeeze,
     56     strtobool,
     57     tensor_size,
     58     to_numpy,
     59     to_py_obj,
     60     transpose,
     61     working_or_temp_dir,
     62 )
     63 from .hub import (
     64     CLOUDFRONT_DISTRIB_PREFIX,
     65     HF_MODULES_CACHE,
   (...)
     91     try_to_load_from_cache,
     92 )
     93 from .import_utils import (
     94     ACCELERATE_MIN_VERSION,
     95     ENV_VARS_TRUE_AND_AUTO_VALUES,
   (...)
    200     torch_only_method,
    201 )

File /opt/conda/lib/python3.12/site-packages/transformers/utils/generic.py:442
    438         return tuple(self[k] for k in self.keys())
    441 if is_torch_available():
--> 442     import torch.utils._pytree as _torch_pytree
    444     def _model_output_flatten(output: ModelOutput) -> Tuple[List[Any], "_torch_pytree.Context"]:
    445         return list(output.values()), list(output.keys())

File /opt/conda/lib/python3.12/site-packages/torch/utils/__init__.py:8
      5 import weakref
      7 import torch
----> 8 from torch.utils import (
      9     backcompat as backcompat,
     10     collect_env as collect_env,
     11     data as data,
     12     deterministic as deterministic,
     13     hooks as hooks,
     14 )
     15 from torch.utils.backend_registration import (
     16     generate_methods_for_privateuse1_backend,
     17     rename_privateuse1_backend,
     18 )
     19 from torch.utils.cpp_backtrace import get_cpp_backtrace

File /opt/conda/lib/python3.12/site-packages/torch/utils/backcompat/__init__.py:2
      1 # mypy: allow-untyped-defs
----> 2 from torch._C import _set_backcompat_broadcast_warn
      3 from torch._C import _get_backcompat_broadcast_warn
      4 from torch._C import _set_backcompat_keepdim_warn

ModuleNotFoundError: No module named 'torch._C'

Hi, and thank you for reporting. We’ll look into this and patch the lab if needed. Please give us the next 24 hours.

Hi. Unfortunately, I can’t reproduce the issue. All required packages were installed and imported successfully. Your case might have been a glitch. Please retry and make sure to follow the instructions in Vocareum (i.e. the page that opens when you click Launch Lab in Coursera) carefully, particularly in selecting the right kernel. Also make sure to run the pip installs at the top of the notebook. You might see some errors about dependency conflicts, but those packages will not be needed to complete the lab (e.g. autogluon, jupyter-ai, etc.). Hope this helps.

1 Like

Delete the current lab file, open the terminal again, and enter the lab command found on the Vocareum page. Then try to run the lab again.

I am also facing the same issue. I deleted the lab and redownloaded it, but that didn’t help. Was anyone else able to resolve this?

Hi. Can you post a screenshot of the entire window right from the top of the notebook, up to the module not found error? It might not fit in one image and you might need to scroll a few times. You can upload multiple images here.

As mentioned here, I’m not able to reproduce the issue so I’d like to see if there are any clues in the notebook on why it is failing for some. Thanks.