Mirror of https://github.com/saymrwulf/transformers.git (synced 2026-05-14 20:58:08 +00:00)
fix no sequence length models error (#27522)
* fix no sequence length models error

* block size check

---------

Co-authored-by: Adam Louly <adamlouly@microsoft.com@orttrainingdev9.d32nl1ml4oruzj4qz3bqlggovf.px.internal.cloudapp.net>
This commit is contained in: parent 4b4b864224, commit 4850aaba6f
1 changed file with 4 additions and 1 deletion
@@ -510,7 +510,10 @@ def main():
                 f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
                 f"Using block_size={min(1024, max_pos_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = min(1024, max_pos_embeddings)
+            if max_pos_embeddings > 0:
+                block_size = min(1024, max_pos_embeddings)
+            else:
+                block_size = 1024
     else:
         if data_args.block_size > tokenizer.model_max_length:
             logger.warning(
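For readers skimming the patch: before this change the script set `block_size = min(1024, max_pos_embeddings)` unconditionally, so a model that reports no sequence length (a non-positive `max_pos_embeddings` sentinel) ended up with a non-positive block size. Below is a minimal standalone sketch of the patched selection logic, assuming a sentinel such as -1 for such models; the function name `pick_block_size` is illustrative and not part of the patch or the script's API.

def pick_block_size(model_max_length: int, max_pos_embeddings: int) -> int:
    # Sketch of the patched default-block-size logic (illustrative, not the script's API).
    block_size = model_max_length
    if block_size > max_pos_embeddings:
        if max_pos_embeddings > 0:
            # Model exposes a real position limit: cap at it (and at the 1024 default).
            block_size = min(1024, max_pos_embeddings)
        else:
            # "No sequence length" model: max_pos_embeddings is a sentinel such as -1,
            # so fall back to the plain 1024 default instead of min(1024, -1) == -1.
            block_size = 1024
    return block_size

# Before the fix this case produced min(1024, -1) == -1; after it, 1024.
assert pick_block_size(model_max_length=10**30, max_pos_embeddings=-1) == 1024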