Mirror of https://github.com/saymrwulf/transformers.git
Synced 2026-05-14 20:58:08 +00:00
Update test_flash_attn_2_can_dispatch_composite_models (#36050)
* update

* update

* update

---------

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent 8201506d28
commit 4886cb79c1
1 changed file with 6 additions and 1 deletion
@@ -4436,10 +4436,15 @@ class ModelTesterMixin:
                 model.save_pretrained(tmpdirname)
                 model = model_class.from_pretrained(tmpdirname, torch_dtype=torch_dtype)
 
-                supports_fa2_all_modules = all(
+                sub_models_supporting_fa2 = [
                     module._supports_flash_attn_2
                     for name, module in model.named_modules()
                     if isinstance(module, PreTrainedModel) and name != ""
+                ]
+                supports_fa2_all_modules = (
+                    all(sub_models_supporting_fa2)
+                    if len(sub_models_supporting_fa2) > 0
+                    else model._supports_flash_attn_2
                 )
                 if not supports_fa2_all_modules:
                     with self.assertRaises(ValueError):
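Why the fallback is needed: Python's all() is vacuously True on an empty iterable, so the old check reported flash-attention-2 support for any model with no PreTrainedModel submodules, even when the top-level model itself does not support it. The new code falls back to model._supports_flash_attn_2 in that case. Below is a minimal, self-contained sketch of the two behaviors; FakeModel, supports_fa2_old, and supports_fa2_new are names invented for this illustration and are not part of the commit.

# Illustration only, not the transformers test itself.
class FakeModel:
    # Stand-in for a PreTrainedModel carrying the real `_supports_flash_attn_2` flag.
    def __init__(self, supports_fa2):
        self._supports_flash_attn_2 = supports_fa2

def supports_fa2_old(model, sub_models):
    # Old logic: all() over the sub-model flags. all([]) is vacuously True,
    # so a model with no sub-models always "passed" the check.
    return all(m._supports_flash_attn_2 for m in sub_models)

def supports_fa2_new(model, sub_models):
    # New logic: fall back to the top-level flag when there are no sub-models.
    flags = [m._supports_flash_attn_2 for m in sub_models]
    return all(flags) if len(flags) > 0 else model._supports_flash_attn_2

model = FakeModel(supports_fa2=False)  # top-level model does not support FA2
print(supports_fa2_old(model, []))     # True  (wrong: vacuous all())
print(supports_fa2_new(model, []))     # False (correct: uses the model's own flag)

This keeps the behavior unchanged for composite models (the all() over submodule flags still applies) while making the test meaningful for non-composite models, which previously always entered the "supports FA2" branch.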