mirror of
https://github.com/saymrwulf/transformers.git
synced 2026-05-14 20:58:08 +00:00
fix: fix gradient accumulation step for learning rate (#27667)
This commit is contained in:
parent
f84d85ba67
commit
0410a29a2d
1 changed file with 1 addition and 1 deletion
|
|
@@ -640,7 +640,7 @@ def main():
|
|||
|
||||
# Create learning rate schedule
|
||||
linear_decay_lr_schedule_fn = create_learning_rate_fn(
|
||||
len(vectorized_datasets["train"]),
|
||||
total_train_steps,
|
||||
training_args.warmup_steps,
|
||||
training_args.learning_rate,
|
||||
)
|
||||
|
|
|
|||
Loading…
Reference in a new issue