C2-W1 - Practical Data Science on the AWS Cloud - "Unable to move forward - regarding Python and PyTorch version"

I am facing issues with the Python and PyTorch versions. I have switched my environment from Data Science 3.0 to Data Science 2.0, but it still gives an error for my code:
import boto3
import sagemaker
import botocore

config = botocore.config.Config(user_agent_extra='dlai-pds/c2/w1')

# low-level service client of the boto3 session
sm = boto3.client(service_name='sagemaker',
                  config=config)

featurestore_runtime = boto3.client(service_name='sagemaker-featurestore-runtime',
                                    config=config)

sess = sagemaker.Session(sagemaker_client=sm,
                         sagemaker_featurestore_runtime_client=featurestore_runtime)

bucket = sess.default_bucket()
role = sagemaker.get_execution_role()
region = sess.boto_region_name
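
For reference, a quick way to check which versions are actually installed in the kernel (a diagnostic sketch; importlib.metadata is in the standard library on Python 3.8+, and it avoids importing sagemaker itself, which is the import that fails):

import sys
from importlib.metadata import version

print(sys.version)            # Python interpreter version
print(version("sagemaker"))   # SageMaker Python SDK version
print(version("protobuf"))    # protobuf runtime version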

The error is:

TypeError Traceback (most recent call last)
Cell In[4], line 2
1 import boto3
----> 2 import sagemaker
3 import botocore
5 config = botocore.config.Config(user_agent_extra='dlai-pds/c2/w1')

File /opt/conda/lib/python3.8/site-packages/sagemaker/__init__.py:18
14 from __future__ import absolute_import
16 import importlib_metadata
---> 18 from sagemaker import estimator, parameter, tuner # noqa: F401
19 from sagemaker.amazon.kmeans import KMeans, KMeansModel, KMeansPredictor # noqa: F401
20 from sagemaker.amazon.pca import PCA, PCAModel, PCAPredictor # noqa: F401

File /opt/conda/lib/python3.8/site-packages/sagemaker/estimator.py:28
26 from six.moves.urllib.parse import urlparse
27 import sagemaker
---> 28 from sagemaker import git_utils, image_uris
29 from sagemaker.analytics import TrainingJobAnalytics
30 from sagemaker.debugger import TensorBoardOutputConfig # noqa: F401 # pylint: disable=unused-import

File /opt/conda/lib/python3.8/site-packages/sagemaker/image_uris.py:22
19 import re
21 from sagemaker import utils
---> 22 from sagemaker.spark import defaults
24 logger = logging.getLogger(__name__)
26 ECR_URI_TEMPLATE = "{registry}.dkr.{hostname}/{repository}"

File /opt/conda/lib/python3.8/site-packages/sagemaker/spark/__init__.py:16
13 """Placeholder docstring"""
14 from __future__ import absolute_import
---> 16 from sagemaker.spark.processing import PySparkProcessor, SparkJarProcessor # noqa: F401

File /opt/conda/lib/python3.8/site-packages/sagemaker/spark/processing.py:35
32 from urllib.parse import urlparse
34 from sagemaker import image_uris
---> 35 from sagemaker.local.image import _ecr_login_if_needed, _pull_image
36 from sagemaker.processing import ProcessingInput, ProcessingOutput, ScriptProcessor
37 from sagemaker.s3 import S3Uploader

File /opt/conda/lib/python3.8/site-packages/sagemaker/local/__init__.py:16
13 """Placeholder docstring"""
14 from __future__ import absolute_import
---> 16 from .local_session import ( # noqa: F401
17 file_input,
18 LocalSagemakerClient,
19 LocalSagemakerRuntimeClient,
20 LocalSession,
21 )

File /opt/conda/lib/python3.8/site-packages/sagemaker/local/local_session.py:23
20 import boto3
21 from botocore.exceptions import ClientError
---> 23 from sagemaker.local.image import _SageMakerContainer
24 from sagemaker.local.entities import (
25 _LocalEndpointConfig,
26 _LocalEndpoint,
(...)
30 _LocalTransformJob,
31 )
32 from sagemaker.session import Session

File /opt/conda/lib/python3.8/site-packages/sagemaker/local/image.py:38
35 from six.moves.urllib.parse import urlparse
37 import sagemaker
---> 38 import sagemaker.local.data
39 import sagemaker.local.utils
40 import sagemaker.utils

File /opt/conda/lib/python3.8/site-packages/sagemaker/local/data.py:26
22 from six import with_metaclass
24 from six.moves.urllib.parse import urlparse
---> 26 import sagemaker.amazon.common
27 import sagemaker.local.utils
28 import sagemaker.utils

File /opt/conda/lib/python3.8/site-packages/sagemaker/amazon/common.py:23
19 import sys
21 import numpy as np
---> 23 from sagemaker.amazon.record_pb2 import Record
24 from sagemaker.deprecations import deprecated_class
25 from sagemaker.deserializers import SimpleBaseDeserializer

File /opt/conda/lib/python3.8/site-packages/sagemaker/amazon/record_pb2.py:36
16 _sym_db = _symbol_database.Default()
19 DESCRIPTOR = _descriptor.FileDescriptor(
20 name="record.proto",
21 package="aialgs.data",
(...)
25 ),
26 )
29 _FLOAT32TENSOR = _descriptor.Descriptor(
30 name="Float32Tensor",
31 full_name="aialgs.data.Float32Tensor",
32 filename=None,
33 file=DESCRIPTOR,
34 containing_type=None,
35 fields=[
---> 36 _descriptor.FieldDescriptor(
37 name="values",
38 full_name="aialgs.data.Float32Tensor.values",
39 index=0,
40 number=1,
41 type=2,
42 cpp_type=6,
43 label=3,
44 has_default_value=False,
45 default_value=[],
46 message_type=None,
47 enum_type=None,
48 containing_type=None,
49 is_extension=False,
50 extension_scope=None,
51 options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b("\020\001")),
52 file=DESCRIPTOR,
53 ),
54 _descriptor.FieldDescriptor(
55 name="keys",
56 full_name="aialgs.data.Float32Tensor.keys",
57 index=1,
58 number=2,
59 type=4,
60 cpp_type=4,
61 label=3,
62 has_default_value=False,
63 default_value=[],
64 message_type=None,
65 enum_type=None,
66 containing_type=None,
67 is_extension=False,
68 extension_scope=None,
69 options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b("\020\001")),
70 file=DESCRIPTOR,
71 ),
72 _descriptor.FieldDescriptor(
73 name="shape",
74 full_name="aialgs.data.Float32Tensor.shape",
75 index=2,
76 number=3,
77 type=4,
78 cpp_type=4,
79 label=3,
80 has_default_value=False,
81 default_value=[],
82 message_type=None,
83 enum_type=None,
84 containing_type=None,
85 is_extension=False,
86 extension_scope=None,
87 options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b("\020\001")),
88 file=DESCRIPTOR,
89 ),
90 ],
91 extensions=[],
92 nested_types=[],
93 enum_types=[],
94 options=None,
95 is_extendable=False,
96 syntax="proto2",
97 extension_ranges=[],
98 oneofs=[],
99 serialized_start=29,
100 serialized_end=101,
101 )
104 _FLOAT64TENSOR = _descriptor.Descriptor(
105 name="Float64Tensor",
106 full_name="aialgs.data.Float64Tensor",
(...)
175 serialized_end=175,
176 )
179 _INT32TENSOR = _descriptor.Descriptor(
180 name="Int32Tensor",
181 full_name="aialgs.data.Int32Tensor",
(...)
250 serialized_end=247,
251 )

File /opt/conda/lib/python3.8/site-packages/google/protobuf/descriptor.py:553, in FieldDescriptor.__new__(cls, name, full_name, index, number, type, cpp_type, label, default_value, message_type, enum_type, containing_type, is_extension, extension_scope, options, serialized_options, has_default_value, containing_oneof, json_name, file, create_key)
547 def __new__(cls, name, full_name, index, number, type, cpp_type, label,
548 default_value, message_type, enum_type, containing_type,
549 is_extension, extension_scope, options=None,
550 serialized_options=None,
551 has_default_value=True, containing_oneof=None, json_name=None,
552 file=None, create_key=None): # pylint: disable=redefined-builtin
--> 553 _message.Message._CheckCalledFromGeneratedFile()
554 if is_extension:
555 return _message.default_pool.FindExtensionByName(full_name)

TypeError: Descriptors cannot be created directly.
If this call came from a _pb2.py file, your generated code is out of date and must be regenerated with protoc >= 3.19.0.
If you cannot immediately regenerate your protos, some other possible workarounds are:

  1. Downgrade the protobuf package to 3.20.x or lower.
  2. Set PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python (but this will use pure-Python parsing and will be much slower).

More information: https://developers.google.com/protocol-buffers/docs/news/2022-05-06#python-updates
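
If it helps: the traceback shows the installed protobuf runtime is newer than the generated record_pb2.py bundled with this sagemaker version, so workaround 1 from the message (pinning protobuf to 3.20.x or lower) is the one usually applied in a notebook. A minimal sketch, assuming a Studio notebook cell and that a 3.20.x pin is compatible with the installed sagemaker:

# Workaround 1: pin the protobuf runtime below 4.x, then restart the
# kernel so the downgraded version is loaded before sagemaker is imported.
!pip install "protobuf==3.20.*"

Alternatively, workaround 2 can be tried by setting the environment variable before anything imports protobuf (e.g., in the first cell after a kernel restart):

import os

# Workaround 2: force the pure-Python protobuf parser (slower, but it
# skips the generated-code version check). Must run before importing sagemaker.
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"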