paddlex/inference/models/doc_vlm/predictor.py (10 changes: 9 additions & 1 deletion)
@@ -27,7 +27,7 @@
 from ....modules.doc_vlm.model_list import MODELS
 from ....utils import logging
 from ....utils.deps import require_genai_client_plugin
-from ....utils.device import TemporaryDeviceChanger
+from ....utils.device import TemporaryDeviceChanger, constr_device
 from ....utils.env import get_device_type
 from ...common.batch_sampler import DocVLMBatchSampler
 from ..base import BasePredictor
@@ -56,6 +56,14 @@ def __init__(self, *args, **kwargs):
         import paddle

         self.device = kwargs.get("device", None)
+        if self.device is None and self.pp_option is not None:
+            if self.pp_option.device_type is not None:
+                device_ids = (
+                    None
+                    if self.pp_option.device_id is None
+                    else [self.pp_option.device_id]
+                )
+                self.device = constr_device(self.pp_option.device_type, device_ids)
         self.dtype = (
             "bfloat16"
             if ("npu" in get_device_type() or paddle.amp.is_bfloat16_supported())
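The added block only fills in self.device when the caller did not pass a device explicitly and the predictor option (pp_option) carries a device type; the single device_id is wrapped in a list before being handed to constr_device. Below is a minimal, self-contained sketch of that fallback, assuming constr_device joins a device type with an optional list of ids into a string such as "gpu:0". The standalone resolve_device helper and the stand-in constr_device are illustrative only, not part of the PR.

```python
from types import SimpleNamespace
from typing import List, Optional


def constr_device(device_type: str, device_ids: Optional[List[int]]) -> str:
    # Hypothetical stand-in for paddlex.utils.device.constr_device:
    # join a device type with optional ids, e.g. ("gpu", [0]) -> "gpu:0".
    if not device_ids:
        return device_type
    return f"{device_type}:{','.join(str(i) for i in device_ids)}"


def resolve_device(device: Optional[str], pp_option) -> Optional[str]:
    # Mirrors the added fallback: only derive the device from pp_option when
    # the caller did not pass one and a device type is available.
    if device is None and pp_option is not None:
        if pp_option.device_type is not None:
            device_ids = (
                None if pp_option.device_id is None else [pp_option.device_id]
            )
            return constr_device(pp_option.device_type, device_ids)
    return device


# Example: an explicit device always wins; otherwise pp_option is used.
opt = SimpleNamespace(device_type="gpu", device_id=0)
print(resolve_device("cpu", opt))  # cpu
print(resolve_device(None, opt))   # gpu:0
```

Because the kwargs value takes precedence, callers that already pass a device see no change in behaviour; the fallback only helps when the device would otherwise stay unset.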