Hi,
There is an issue with the JupyterLab notebook provided for the Week 1 assignment. The following
import statement:
from transformers import AutoModelForSeq2SeqLM
fails with the error trace below.
---------------------------------------------------------------------------
ModuleNotFoundError Traceback (most recent call last)
Cell In[1], line 2
1 from datasets import load_dataset
----> 2 from transformers import AutoModelForSeq2SeqLM
3 from transformers import AutoTokenizer
4 from transformers import GenerationConfig
File /opt/conda/lib/python3.12/site-packages/transformers/__init__.py:27
24 from typing import TYPE_CHECKING
26 # Check the dependencies satisfy the minimal versions required.
---> 27 from . import dependency_versions_check
28 from .utils import (
29 OptionalDependencyNotAvailable,
30 _LazyModule,
(...)
49 logging,
50 )
51 from .utils.import_utils import define_import_structure
File /opt/conda/lib/python3.12/site-packages/transformers/dependency_versions_check.py:16
1 # Copyright 2020 The HuggingFace Team. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
(...)
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
15 from .dependency_versions_table import deps
---> 16 from .utils.versions import require_version, require_version_core
19 # define which module versions we always want to check at run time
20 # (usually the ones defined in `install_requires` in setup.py)
21 #
22 # order specific notes:
23 # - tqdm must be checked before tokenizers
25 pkgs_to_check_at_runtime = [
26 "python",
27 "tqdm",
(...)
37 "pyyaml",
38 ]
File /opt/conda/lib/python3.12/site-packages/transformers/utils/__init__.py:24
21 from packaging import version
23 from .. import __version__
---> 24 from .args_doc import (
25 ClassAttrs,
26 ClassDocstring,
27 ImageProcessorArgs,
28 ModelArgs,
29 ModelOutputArgs,
30 auto_class_docstring,
31 auto_docstring,
32 get_args_doc_from_source,
33 parse_docstring,
34 set_min_indent,
35 )
36 from .backbone_utils import BackboneConfigMixin, BackboneMixin
37 from .chat_template_utils import DocstringParsingException, TypeHintParsingException, get_json_schema
File /opt/conda/lib/python3.12/site-packages/transformers/utils/args_doc.py:30
22 import regex as re
24 from .doc import (
25 MODELS_TO_PIPELINE,
26 PIPELINE_TASKS_TO_SAMPLE_DOCSTRINGS,
27 PT_SAMPLE_DOCSTRINGS,
28 _prepare_output_docstrings,
29 )
---> 30 from .generic import ModelOutput
33 PATH_TO_TRANSFORMERS = Path("src").resolve() / "transformers"
36 AUTODOC_FILES = [
37 "configuration_*.py",
38 "modeling_*.py",
(...)
43 "feature_extractor_*.py",
44 ]
File /opt/conda/lib/python3.12/site-packages/transformers/utils/generic.py:480
476 return tuple(self[k] for k in self.keys())
479 if is_torch_available():
--> 480 import torch.utils._pytree as _torch_pytree
482 def _model_output_flatten(output: ModelOutput) -> tuple[list[Any], "_torch_pytree.Context"]:
483 return list(output.values()), list(output.keys())
File /opt/conda/lib/python3.12/site-packages/torch/utils/__init__.py:8
5 import weakref
7 import torch
----> 8 from torch.utils import (
9 backcompat as backcompat,
10 collect_env as collect_env,
11 data as data,
12 deterministic as deterministic,
13 hooks as hooks,
14 )
15 from torch.utils.backend_registration import (
16 generate_methods_for_privateuse1_backend,
17 rename_privateuse1_backend,
18 )
19 from torch.utils.cpp_backtrace import get_cpp_backtrace
File /opt/conda/lib/python3.12/site-packages/torch/utils/backcompat/__init__.py:2
1 # mypy: allow-untyped-defs
----> 2 from torch._C import _set_backcompat_broadcast_warn
3 from torch._C import _get_backcompat_broadcast_warn
4 from torch._C import _set_backcompat_keepdim_warn
ModuleNotFoundError: No module named 'torch._C'
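For context, torch._C is PyTorch's compiled C extension, so the failure appears to come from a broken or mismatched torch install rather than from the notebook code itself. A quick way to see which versions are actually present, without importing the broken package (this cell is only my own diagnostic suggestion, not part of the assignment), would be:
!pip show torch transformers datasets | grep -E '^(Name|Version)'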
This is the pip install statement that currently exists in the notebook:
%pip install -U datasets==2.17.0 transformers==4.38.2 evaluate==0.4.0 rouge_score==0.1.2 peft==0.3.0 --quiet
It seems the right version of the transformers module isn't specified in this cell.
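As a possible workaround until the notebook is updated (this is only a guess on my part; the course materials don't specify a torch version), force-reinstalling torch and then restarting the kernel might get past the missing torch._C module:
%pip install --force-reinstall torch --quiet
After that, restart the kernel (Kernel -> Restart Kernel) before re-running the imports.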