Fix: move inputs to model device in inference example to avoid "same device" error
#3 · opened by lim4349
README.md CHANGED
```diff
@@ -116,6 +116,7 @@ inputs = processor.apply_chat_template(
     return_dict=True,
     return_tensors="pt"
 )
+inputs = inputs.to(model.device)
 
 # Inference: Generation of the output
 generated_ids = model.generate(**inputs, max_new_tokens=128)
```
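For context, here is a minimal sketch of the corrected README snippet end to end. The checkpoint name, the auto classes, and the example message are placeholders assumed for illustration (the PR only shows the hunk above); the `inputs = inputs.to(model.device)` line is the change this PR makes.

```python
import torch
from transformers import AutoProcessor, AutoModelForImageTextToText

# Placeholder checkpoint name; substitute the model this README documents.
model_id = "org/model"

processor = AutoProcessor.from_pretrained(model_id)
model = AutoModelForImageTextToText.from_pretrained(model_id)
model.to("cuda" if torch.cuda.is_available() else "cpu")  # model may land on GPU

messages = [
    {"role": "user", "content": [{"type": "text", "text": "Describe this model."}]},
]

inputs = processor.apply_chat_template(
    messages,
    add_generation_prompt=True,
    tokenize=True,
    return_dict=True,
    return_tensors="pt",
)
# The fix from this PR: the processor returns CPU tensors, so move them to the
# model's device before generate(), otherwise PyTorch raises a "same device"
# RuntimeError when the model sits on GPU.
inputs = inputs.to(model.device)

# Inference: Generation of the output
generated_ids = model.generate(**inputs, max_new_tokens=128)
print(processor.batch_decode(generated_ids, skip_special_tokens=True)[0])
```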