Practical Data Science - C2W2 - train.py file

train.py (16.3 KB)
I am encountering an error on Exercise 4 after updating "train.py".
Attached is the train.py file… can anyone help?

William

File /opt/conda/lib/python3.8/json/encoder.py:376, in _make_iterencode.<locals>._iterencode_dict(dct, _current_indent_level)
    374         continue
    375     else:
--> 376         raise TypeError(f'keys must be str, int, float, bool or None, '
    377                         f'not {key.__class__.__name__}')
    378     if first:
    379         first = False

TypeError: keys must be str, int, float, bool or None, not ellipsis


Adding a screenshot from train.py.
Here is the original error message:


TypeError Traceback (most recent call last)
Cell In[22], line 11
      8 importlib.reload(train)
     10 # Ignore warnings below
---> 11 config = train.configure_model()
     13 label_0 = config.id2label[0]
     14 label_1 = config.id2label[1]

File ~/src/train.py:176, in configure_model()
    173 def configure_model():
    174     classes = [-1, 0, 1]
--> 176     config = RobertaConfig.from_pretrained(
    177         PRE_TRAINED_MODEL_NAME,
    178         num_labels=len(classes),
    179         id2label={
    180             ### BEGIN SOLUTION - DO NOT delete this comment for grading purposes
    181             ...: ...,
    182             ...: ...,
    183             ...: ...,
    184             ### END SOLUTION - DO NOT delete this comment for grading purposes
    185         },
    186         label2id={
    187             -1: 0,
    188             0: 1,
    189             1: 2,
    190         }
    191     )
    193     config.output_attentions=True
    195     return config

File /opt/conda/lib/python3.8/site-packages/transformers/configuration_utils.py:348, in PretrainedConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
    286 r"""
    287 Instantiate a :class:`~transformers.PretrainedConfig` (or a derived class) from a pretrained model
    288 configuration.
   (...)
    345
    346 """
    347 config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
--> 348 return cls.from_dict(config_dict, **kwargs)

File /opt/conda/lib/python3.8/site-packages/transformers/configuration_utils.py:449, in PretrainedConfig.from_dict(cls, config_dict, **kwargs)
    446 for key in to_remove:
    447     kwargs.pop(key, None)
--> 449 logger.info("Model config %s", str(config))
    450 if return_unused_kwargs:
    451     return config, kwargs

File /opt/conda/lib/python3.8/site-packages/transformers/configuration_utils.py:481, in PretrainedConfig.__repr__(self)
    480 def __repr__(self):
--> 481     return "{} {}".format(self.__class__.__name__, self.to_json_string())

File /opt/conda/lib/python3.8/site-packages/transformers/configuration_utils.py:540, in PretrainedConfig.to_json_string(self, use_diff)
    538 else:
    539     config_dict = self.to_dict()
--> 540 return json.dumps(config_dict, indent=2, sort_keys=True) + "\n"

File /opt/conda/lib/python3.8/json/__init__.py:234, in dumps(obj, skipkeys, ensure_ascii, check_circular, allow_nan, cls, indent, separators, default, sort_keys, **kw)
    232 if cls is None:
    233     cls = JSONEncoder
--> 234 return cls(
    235     skipkeys=skipkeys, ensure_ascii=ensure_ascii,
    236     check_circular=check_circular, allow_nan=allow_nan, indent=indent,
    237     separators=separators, default=default, sort_keys=sort_keys,
    238     **kw).encode(obj)

File /opt/conda/lib/python3.8/json/encoder.py:201, in JSONEncoder.encode(self, o)
    199 chunks = self.iterencode(o, _one_shot=True)
    200 if not isinstance(chunks, (list, tuple)):
--> 201     chunks = list(chunks)
    202 return ''.join(chunks)

File /opt/conda/lib/python3.8/json/encoder.py:431, in _make_iterencode.<locals>._iterencode(o, _current_indent_level)
    429     yield from _iterencode_list(o, _current_indent_level)
    430 elif isinstance(o, dict):
--> 431     yield from _iterencode_dict(o, _current_indent_level)
    432 else:
    433     if markers is not None:

File /opt/conda/lib/python3.8/json/encoder.py:405, in _make_iterencode.<locals>._iterencode_dict(dct, _current_indent_level)
    403     else:
    404         chunks = _iterencode(value, _current_indent_level)
--> 405     yield from chunks
    406 if newline_indent is not None:
    407     _current_indent_level -= 1

File /opt/conda/lib/python3.8/json/encoder.py:376, in _make_iterencode.<locals>._iterencode_dict(dct, _current_indent_level)
    374         continue
    375     else:
--> 376         raise TypeError(f'keys must be str, int, float, bool or None, '
    377                         f'not {key.__class__.__name__}')
    378     if first:
    379         first = False

TypeError: keys must be str, int, float, bool or None, not ellipsis
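
The bottom frames explain the cause: the "...: ...," placeholder lines inside the ### BEGIN SOLUTION block of configure_model() were left unfilled. In Python, a bare ... is the built-in Ellipsis object, so id2label gets built as a dict with Ellipsis keys, and when transformers logs the config it calls json.dumps, which only accepts str, int, float, bool or None as dict keys. A minimal reproduction, independent of the assignment (plain Python, nothing course-specific assumed):

import json

# A bare `...` is the Ellipsis singleton, not a placeholder that Python
# fills in for you, so this builds a real dict with an Ellipsis key.
d = {...: ...}

# json.dumps rejects Ellipsis keys, reproducing the exact error above:
# TypeError: keys must be str, int, float, bool or None, not ellipsis
print(json.dumps(d, indent=2, sort_keys=True))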

Closing the thread… I have found the answer below.
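
For reference: the placeholders in the solution block have to be replaced with the actual id-to-label mapping. Since the label2id argument in the same call maps -1 -> 0, 0 -> 1 and 1 -> 2, id2label should simply be its inverse. A sketch of the call, assuming only what is visible in the traceback (classes and the mapping come from train.py; 'roberta-base' is my assumption for PRE_TRAINED_MODEL_NAME):

from transformers import RobertaConfig

PRE_TRAINED_MODEL_NAME = 'roberta-base'  # assumption: the notebook's pretrained model
classes = [-1, 0, 1]

config = RobertaConfig.from_pretrained(
    PRE_TRAINED_MODEL_NAME,
    num_labels=len(classes),
    # id2label is the inverse of label2id: model output index -> sentiment class
    id2label={
        0: -1,
        1: 0,
        2: 1,
    },
    label2id={
        -1: 0,
        0: 1,
        1: 2,
    },
)
config.output_attentions = True

In the actual train.py, keep the ### BEGIN SOLUTION / ### END SOLUTION comments around the id2label entries, since the grader looks for them.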