From a96abdd85523871cb66cf6292dc827f8418fa00d Mon Sep 17 00:00:00 2001
From: l30036321
Date: Fri, 13 Oct 2023 11:17:53 +0800
Subject: [PATCH] Adapt to Torch2.1 version

---
 .../src/python/ptdbg_ascend/hook_module/register_hook.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/register_hook.py b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/register_hook.py
index 0d69d465d..da6a30a5b 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/register_hook.py
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/register_hook.py
@@ -25,7 +25,7 @@ from . import wrap_torch, wrap_functional, wrap_tensor, wrap_vf, wrap_distribute
 from .hook_module import HOOKModule
 from .wrap_functional import remove_dropout
 from ..common.utils import check_file_or_directory_path, print_error_log, CompareException, Const, \
-    print_info_log, print_warn_log, get_process_rank
+    print_info_log, print_warn_log, get_process_rank, torch_without_guard_version
 from ..dump.utils import make_dump_dirs, DumpUtil
 from ..overflow_check.utils import OverFlowUtil
 
@@ -61,7 +61,7 @@ def initialize_hook(hook):
         if attr_name.startswith("wrap_"):
             setattr(dist, attr_name[5:], getattr(wrap_distributed.HOOKDistributedOP, attr_name))
             setattr(dist.distributed_c10d, attr_name[5:], getattr(wrap_distributed.HOOKDistributedOP, attr_name))
-            if not is_gpu:
+            if not is_gpu and not torch_without_guard_version:
                 setattr(torch_npu.distributed, attr_name[5:], getattr(wrap_distributed.HOOKDistributedOP, attr_name))
                 setattr(torch_npu.distributed.distributed_c10d, attr_name[5:], getattr(wrap_distributed.HOOKDistributedOP, attr_name))
-- 
Gitee
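
Note on the gating flag: the patch imports torch_without_guard_version from ..common.utils but does not show its definition. Below is a minimal sketch of how such a flag could be derived from the installed Torch version; the variable name comes from the patch, while the version list and helper name are illustrative assumptions, and the real definition in ptdbg_ascend's common/utils.py may differ.

import torch

# Illustrative assumption: Torch releases (here, the 2.1 line) on which
# ptdbg should skip re-binding its distributed hooks onto torch_npu.
_GUARDLESS_TORCH_VERSIONS = ("2.1",)

# True when the running Torch matches one of the listed release lines.
torch_without_guard_version = any(
    torch.__version__.startswith(v) for v in _GUARDLESS_TORCH_VERSIONS
)

With a flag like this set on Torch 2.1, the patched condition `if not is_gpu and not torch_without_guard_version` evaluates to False, so on NPU builds running Torch 2.1 the hooks are still installed on torch.distributed but torch_npu.distributed keeps its original attributes.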