- Week 2
- Link to the classroom item I am referring to:
- Description:
output_dir = f'./dialogue-summary-training-{str(int(time.time()))}'
training_args = TrainingArguments(
    output_dir=output_dir,
    learning_rate=1e-5,
    num_train_epochs=1,
    weight_decay=0.01,
    logging_steps=1,
    max_steps=1,
)
trainer = Trainer(
    model=original_model,
    args=training_args,
    train_dataset=tokenized_datasets['train'],
    eval_dataset=tokenized_datasets['validation']
)
I am getting the following error when running this cell:
TypeError                                 Traceback (most recent call last)
Cell In[21], line 10
      1 output_dir = f'./dialogue-summary-training-{str(int(time.time()))}'
      2 training_args = TrainingArguments(
      3     output_dir=output_dir,
      4     learning_rate=1e-5,
   (...)
      8     max_steps=1,
      9 )
---> 10 trainer = Trainer(
     11     model=original_model,
     12     args=training_args,
     13     train_dataset=tokenized_datasets['train'],
     14     eval_dataset=tokenized_datasets['validation']
     15 )

File ~\AppData\Local\Programs\Python\Python311\Lib\site-packages\transformers\trainer.py:367, in Trainer.__init__(self, model, args, data_collator, train_dataset, eval_dataset, tokenizer, model_init, compute_metrics, callbacks, optimizers, preprocess_logits_for_metrics)
    364 self.deepspeed = None
    365 self.is_in_train = False
--> 367 self.create_accelerator_and_postprocess()
    369 # memory metrics - must set up as early as possible
    370 self._memory_tracker = TrainerMemoryTracker(self.args.skip_memory_metrics)

File ~\AppData\Local\Programs\Python\Python311\Lib\site-packages\transformers\trainer.py:4127, in Trainer.create_accelerator_and_postprocess(self)
   4122 elif isinstance(accelerator_kwargs, dict):
   4123     # Some values may need to go through non-accelerate aligned defaults
   4124     # and we need to run the `__post_init__` to set them
   4125     accelerator_kwargs = AcceleratorConfig(**accelerator_kwargs).to_dict()
-> 4127 self.accelerator = Accelerator(
   4128     deepspeed_plugin=self.args.deepspeed_plugin,
   4129     gradient_accumulation_plugin=gradient_accumulation_plugin,
   4130     **accelerator_kwargs,
   4131 )
   4132 # some Trainer classes need to use `gather` instead of `gather_for_metrics`, thus we store a flag
   4133 self.gather_function = self.accelerator.gather_for_metrics

TypeError: Accelerator.__init__() got an unexpected keyword argument 'dispatch_batches'
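
From the traceback it looks like the Trainer forwards `dispatch_batches` (through `accelerator_kwargs`) to `Accelerator`, which rejects it, so I suspect a mismatch between my installed transformers and accelerate versions, but I am not sure. In case it helps with diagnosis, here is a minimal snippet (assuming both packages import cleanly) to print the versions in use:

# Diagnostic only: print the versions of the two libraries involved in the traceback.
import transformers
import accelerate

print("transformers:", transformers.__version__)
print("accelerate:", accelerate.__version__)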