mirror of
https://github.com/saymrwulf/transformers.git
synced 2026-05-14 20:58:08 +00:00
[tests] use torch_device instead of auto for model testing (#29531)
* use torch_device

* skip for XPU

* Update tests/generation/test_utils.py

Co-authored-by: amyeroberts <22614925+amyeroberts@users.noreply.github.com>

---------

Co-authored-by: amyeroberts <22614925+amyeroberts@users.noreply.github.com>
This commit is contained in:
parent
14536c339a
commit
1ea3ad1aec
1 changed file with 3 additions and 0 deletions
|
|
@@ -1073,6 +1073,9 @@ class GenerationTesterMixin:
|
|||
@require_torch_multi_accelerator
|
||||
def test_model_parallel_beam_search(self):
|
||||
for model_class in self.all_generative_model_classes:
|
||||
if "xpu" in torch_device:
|
||||
return unittest.skip("device_map='auto' does not work with XPU devices")
|
||||
|
||||
if model_class._no_split_modules is None:
|
||||
continue
|
||||
|
||||
|
|
|
|||
Loading…
Reference in a new issue