Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 7705ddf

Browse files
Authored May 27, 2025
Minor fix - check for DTensor on igpu platform (#3531)
1 parent 33ba2f2 commit 7705ddf

File tree

2 files changed

+10
-5
lines changed

2 files changed

+10
-5
lines changed
 

‎py/torch_tensorrt/dynamo/backend/backends.py

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@
2121
repair_input_aliasing,
2222
)
2323
from torch_tensorrt.dynamo.utils import (
24+
is_tegra_platform,
2425
parse_dynamo_kwargs,
2526
prepare_inputs,
2627
set_log_level,
@@ -80,10 +81,14 @@ def aot_torch_tensorrt_aten_backend(
8081
fw_compiler=_pretraced_backend_autograd,
8182
decompositions=settings_aot_autograd["decompositions"],
8283
)(gm, sample_inputs)
83-
if any(isinstance(tensor, DTensor) for tensor in sample_inputs):
84-
logger.warning(
85-
"It is recommended to run the model with use_distributed_mode_trace = True since there are distributed tensors in the input which is not supported in aot_export_joint_simple"
86-
)
84+
85+
if is_tegra_platform():
86+
from torch.distributed.tensor import DTensor
87+
88+
if any(isinstance(tensor, DTensor) for tensor in sample_inputs):
89+
logger.warning(
90+
"It is recommended to run the model with use_distributed_mode_trace = True since there are distributed tensors in the input which is not supported in aot_export_joint_simple"
91+
)
8792
return _pretraced_backend(gm, sample_inputs, settings, engine_cache)
8893

8994

‎py/torch_tensorrt/dynamo/conversion/_ConversionContext.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
import numpy as np
44
from torch_tensorrt.dynamo._settings import CompilationSettings
5-
from torch_tensorrt.fx.types import TRTNetwork
5+
from torch_tensorrt.dynamo.types import TRTNetwork
66

77

88
@dataclass

0 commit comments

Comments (0)
Please sign in to comment.