From f2641c5e4f54cd067834829eee56805df20534c7 Mon Sep 17 00:00:00 2001
From: cehongwang
Date: Fri, 22 Aug 2025 20:54:31 +0000
Subject: [PATCH 1/3] Added the dynamic check in the validator

---
 .../dynamo/conversion/aten_ops_converters.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
index 147813d8e0..867082dd36 100644
--- a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
+++ b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
@@ -25,6 +25,7 @@
     get_positive_dim,
     is_only_operator_on_placeholder,
 )
+from torch_tensorrt.dynamo.utils import DYNAMIC_DIM
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -2721,6 +2722,13 @@ def sort_validator(node: Node, settings: Optional[CompilationSettings] = None) -
 
     def topk_sort_validator(k: int) -> bool:
+
+        # topk layer supports dynamic k value but we cannot determine supported dynamic topk value at
+        # compile time.
+        if k == DYNAMIC_DIM:
+            _LOGGER.debug("k value cannot be dynamic!")
+            return False
+
         if k > 3840:
             _LOGGER.debug(
                 f"Currently only topk values up to 3840 are supported, got k={k}."
             )

From 14fdf423189b919fa1266fbe68f54aee25f5ffd6 Mon Sep 17 00:00:00 2001
From: cehongwang
Date: Mon, 25 Aug 2025 18:19:28 +0000
Subject: [PATCH 2/3] rewrote the comment

---
 py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py | 6 ++++--
 py/torch_tensorrt/dynamo/conversion/impl/topk.py           | 4 ----
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
index 867082dd36..b6ce0a92bc 100644
--- a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
+++ b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
@@ -2726,12 +2726,14 @@ def topk_sort_validator(k: int) -> bool:
         # topk layer supports dynamic k value but we cannot determine supported dynamic topk value at
         # compile time.
         if k == DYNAMIC_DIM:
-            _LOGGER.debug("k value cannot be dynamic!")
+            _LOGGER.debug(
+                "[top_k validator] Converter does not support k being a dynamic value. Therefore, aten::topk will run in PyTorch"
+            )
             return False
 
         if k > 3840:
             _LOGGER.debug(
-                f"Currently only topk values up to 3840 are supported, got k={k}."
+                f"[top_k validator] Currently only topk values up to 3840 are supported, got k={k}. Therefore, aten::topk will run in PyTorch"
             )
             return False
         return True
diff --git a/py/torch_tensorrt/dynamo/conversion/impl/topk.py b/py/torch_tensorrt/dynamo/conversion/impl/topk.py
index 053a46ce2b..638cbf599e 100644
--- a/py/torch_tensorrt/dynamo/conversion/impl/topk.py
+++ b/py/torch_tensorrt/dynamo/conversion/impl/topk.py
@@ -209,10 +209,6 @@ def topk(
         get_axes_for_reduce_op(get_positive_dim(dim, len(input.shape))),
     )
 
-    # topk layer supports dynamic k value but we cannot dertermin supported dynamic topk value at
-    # compile time.
-    assert k != DYNAMIC_DIM, "k value cannot be dynamic!"
-
     # TensorRT ITopKLayer does not have a sorted flag, it is always returning the sorted topk elements
     # so here no matter sorted is True or False the returned the topk Tensor object is always sorted
     set_layer_name(topk_layer, target, f"{name}_topk", source_ir)

From 1cabfd49577f9a6c73e847d47a93ec6a74d33f70 Mon Sep 17 00:00:00 2001
From: cehongwang
Date: Mon, 17 Nov 2025 23:58:33 +0000
Subject: [PATCH 3/3] changed the warning message

---
 .../dynamo/conversion/aten_ops_converters.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
index b6ce0a92bc..c314eded3b 100644
--- a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
+++ b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
@@ -2725,14 +2725,14 @@ def topk_sort_validator(k: int) -> bool:
 
         # topk layer supports dynamic k value but we cannot determine supported dynamic topk value at
         # compile time.
-        if k == DYNAMIC_DIM:
-            _LOGGER.debug(
-                "[top_k validator] Converter does not support k being a dynamic value. Therefore, aten::topk will run in PyTorch"
+        if k == DYNAMIC_DIM or not isinstance(k, int):
+            _LOGGER.warning(
+                "[top_k validator] It's not expected for k to be a dynamic or data-dependent value. aten::topk will run in PyTorch"
            )
             return False
 
         if k > 3840:
-            _LOGGER.debug(
+            _LOGGER.warning(
                 f"[top_k validator] Currently only topk values up to 3840 are supported, got k={k}. Therefore, aten::topk will run in PyTorch"
             )
             return False
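
Taken together, the series replaces a hard assert in the TopK converter with a graceful validator-level fallback: a dynamic or data-dependent k no longer crashes conversion, it simply sends aten::topk to PyTorch. The standalone sketch below exercises the final validator logic from PATCH 3/3 in isolation; it is not the library code itself. DYNAMIC_DIM is stubbed here with the -1 sentinel assumed to match torch_tensorrt.dynamo.utils, and the assertions at the end are illustrative expectations, not tests from the patch.

# Standalone sketch of the validator behavior after PATCH 3/3 (assumptions noted above).
import logging

logging.basicConfig(level=logging.WARNING)
_LOGGER = logging.getLogger(__name__)

DYNAMIC_DIM = -1  # assumed sentinel; the real value comes from torch_tensorrt.dynamo.utils


def topk_sort_validator(k) -> bool:
    # TensorRT's ITopKLayer accepts a dynamic k tensor, but the supported range
    # cannot be verified at compile time, so dynamic/data-dependent k falls back.
    if k == DYNAMIC_DIM or not isinstance(k, int):
        _LOGGER.warning(
            "[top_k validator] It's not expected for k to be a dynamic or data-dependent value. aten::topk will run in PyTorch"
        )
        return False
    if k > 3840:
        _LOGGER.warning(
            f"[top_k validator] Currently only topk values up to 3840 are supported, got k={k}. Therefore, aten::topk will run in PyTorch"
        )
        return False
    return True


# Expected partitioning decisions under the patched validator:
assert topk_sort_validator(10)               # small static k -> stays in TensorRT
assert not topk_sort_validator(4096)         # above the 3840 limit -> PyTorch fallback
assert not topk_sort_validator(DYNAMIC_DIM)  # dynamic k -> PyTorch fallback, no assert raised

Because the check now lives in the validator rather than the converter, unsupported k values are caught during partitioning, so the node is excluded from the TensorRT engine up front instead of failing mid-conversion.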