mirror of
https://github.com/saymrwulf/transformers.git
synced 2026-05-14 20:58:08 +00:00
[docs] use device-agnostic API instead of cuda (#34913)
add device-agnostic API

Signed-off-by: Lin, Fanli <fanli.lin@intel.com>
This commit is contained in:
parent
64b73e61f8
commit
6bc0c219c1
1 changed files with 2 additions and 1 deletions
|
|
@@ -73,8 +73,9 @@ Let's demonstrate this process with GPT-2.
|
|||
|
||||
```python
|
||||
from transformers import GPT2LMHeadModel, GPT2TokenizerFast
|
||||
from accelerate.test_utils.testing import get_backend
|
||||
|
||||
device = "cuda"
|
||||
device, _, _ = get_backend() # automatically detects the underlying device type (CUDA, CPU, XPU, MPS, etc.)
|
||||
model_id = "openai-community/gpt2-large"
|
||||
model = GPT2LMHeadModel.from_pretrained(model_id).to(device)
|
||||
tokenizer = GPT2TokenizerFast.from_pretrained(model_id)
|
||||
|
|
|
|||
Loading…
Reference in a new issue