deepspeed + grad acumm (#9622)

This commit is contained in:
Stas Bekman 2021-01-15 10:12:26 -08:00 committed by GitHub
parent 6d3b688b04
commit c60e0e1ee4
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 8 additions and 1 deletions

View file

@@ -112,6 +112,11 @@ class TestFinetuneTrainer(TestCasePlus):
def test_finetune_trainer_deepspeed(self):
self.finetune_trainer_quick(deepspeed=True)
@require_torch_multi_gpu
@require_deepspeed
def test_finetune_trainer_deepspeed_grad_acum(self):
self.finetune_trainer_quick(deepspeed=True, extra_args_str="--gradient_accumulation_steps 2")
@slow
def test_finetune_trainer_slow(self):
# There is a missing call to __init__process_group somewhere

View file

@@ -931,7 +931,9 @@ class Trainer:
)
# Optimizer step
if is_torch_tpu_available():
if self.deepspeed:
self.deepspeed.step()
elif is_torch_tpu_available():
xm.optimizer_step(self.optimizer)
elif self.use_amp:
self.scaler.step(self.optimizer)