Mirror of https://github.com/saymrwulf/transformers.git, synced 2026-05-14 20:58:08 +00:00
commit 7deee60a04 ("style")
parent 196cf39446

1 changed file with 2 additions and 2 deletions
@@ -4158,8 +4158,8 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMixin
         elif device_map is not None:
             # Make sure we correctly place the rotary embedding module by default if not provided, as we moved it from
             # inside the Layers to the Model
-            for buffer in {name for name, _ in model.named_buffers()}:
-                rotary_module = None
+            rotary_module = None
+            for buffer, _ in model.named_buffers():
                 if "rotary_emb.inv_freq" in buffer and "layer" not in buffer:
                     rotary_module = buffer.replace(".inv_freq", "")
                     break
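
The change is slightly more than cosmetic: in the deleted version, rotary_module = None sat after the for line, so the name was rebound on the way through the loop and the set comprehension materialized every buffer name before iterating. The new version binds the name once before the loop, so it is defined even when the model exposes no matching buffer, and iterates the (name, tensor) pairs directly. A minimal, self-contained sketch of the resulting pattern follows; TinyModel and its buffer are hypothetical stand-ins for illustration, not code from this commit.

import torch
from torch import nn


class TinyModel(nn.Module):
    """Hypothetical model with a model-level rotary module (not per-layer)."""

    def __init__(self):
        super().__init__()
        # Buffer registered on the Model itself, matching the diff's comment
        # that the rotary embedding moved from inside the Layers to the Model.
        self.rotary_emb = nn.Module()
        self.rotary_emb.register_buffer("inv_freq", torch.ones(4))


model = TinyModel()

# Pattern from the commit: initialize before the loop, then scan buffer names.
rotary_module = None
for buffer, _ in model.named_buffers():
    if "rotary_emb.inv_freq" in buffer and "layer" not in buffer:
        rotary_module = buffer.replace(".inv_freq", "")
        break

print(rotary_module)  # prints: rotary_emb

The "layer" not in buffer guard keeps the scan from matching per-layer copies of inv_freq, so only a rotary module registered at the model level is picked up, and the recovered module name can then be placed explicitly when a device_map is in use.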