From 8566452cf9c735d8fec36eef3fe3cd7ddcc9d654 Mon Sep 17 00:00:00 2001
From: s30048155
Date: Thu, 28 Mar 2024 19:27:27 +0800
Subject: [PATCH 1/9] update

---
 .../ptdbg_ascend/hook_module/api_registry.py | 15 +++++++++++++--
 .../hook_module/support_wrap_ops.yaml        |  2 ++
 .../ptdbg_ascend/hook_module/wrap_torch.py   | 18 +++++++++++++++---
 3 files changed, 30 insertions(+), 5 deletions(-)

diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/api_registry.py b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/api_registry.py
index 064103aead..439026fd91 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/api_registry.py
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/api_registry.py
@@ -60,12 +60,23 @@ class ApiRegistry:
     @staticmethod
     def store_ori_attr(ori_api_group, api_list, api_ori_attr):
         for api in api_list:
-            api_ori_attr[api] = getattr(ori_api_group, api)
+            parts = api.split('.')
+            if len(parts) > 1:
+                sub_module = getattr(ori_api_group, parts[0])
+                api_ori_attr[api] = getattr(sub_module, parts[1])
+            else:
+                api_ori_attr[api] = getattr(ori_api_group, api)
 
     @staticmethod
     def set_api_attr(api_group, attr_dict):
         for api, api_attr in attr_dict.items():
-            setattr(api_group, api, api_attr)
+            parts = api.split('.')
+            if len(parts) > 1:
+                sub_module = getattr(api_group, parts[0], None)
+                if sub_module is not None:
+                    setattr(sub_module, parts[1], api_attr)
+            else:
+                setattr(api_group, api, api_attr)
 
     def api_modularity(self):
         self.set_api_attr(torch.Tensor, self.tensor_hook_attr)
diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml
index de9fac5dbb..3899279d45 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml
@@ -560,12 +560,14 @@ tensor:
   - xlogy_
 
 torch:
+  - linalg.vector_norm
   - _adaptive_avg_pool2d
   - _add_relu
   - _add_relu_
   - _aminmax
   - _batch_norm_impl_index
   - _convolution
+  - _foreach_norm
   - _softmax_backward_data
   - abs
   - abs_
diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
index 5dcc41b1c8..77a5df3231 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
@@ -33,11 +33,23 @@ with FileOpen(yaml_path, 'r') as f:
 def get_torch_ops():
     global WrapTorchOps
     _torch_ops = dir(torch)
-    return set(WrapTorchOps) & set(_torch_ops)
+    _linalg_ops = ["linalg." + op for op in dir(torch.linalg)]
+    return set(WrapTorchOps) & (set(_torch_ops) | set(_linalg_ops))
+
+
+TorchOps = {}
+for op in get_torch_ops():
+    parts = op.split('.')
+    if len(parts) == 1:
+        if op in dir(torch):
+            TorchOps[op] = getattr(torch, op)
+    else:
+        sub_module_name, sub_op = parts
+        sub_module = getattr(torch, sub_module_name, None)
+        if sub_module and sub_op in dir(sub_module):
+            TorchOps[op] = getattr(sub_module, sub_op)
 
 
-TorchOps = {op: getattr(torch, op) for op in get_torch_ops()}
-
 
 class HOOKTorchOP(object):
     pass
--
Gitee

From 5b68fbd83bcaa34a5c2d0266b00b8100af00f336 Mon Sep 17 00:00:00 2001
From: s30048155
Date: Thu, 28 Mar 2024 19:48:27 +0800
Subject: [PATCH 2/9] update

---
 .../src/python/ptdbg_ascend/hook_module/wrap_torch.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
index 77a5df3231..366e16a020 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
@@ -40,14 +40,14 @@ def get_torch_ops():
 TorchOps = {}
 for op in get_torch_ops():
     parts = op.split('.')
-    if len(parts) == 1:
-        if op in dir(torch):
-            TorchOps[op] = getattr(torch, op)
-    else:
+    if len(parts) > 1:
         sub_module_name, sub_op = parts
         sub_module = getattr(torch, sub_module_name, None)
         if sub_module and sub_op in dir(sub_module):
             TorchOps[op] = getattr(sub_module, sub_op)
+    else:
+        if op in dir(torch):
+            TorchOps[op] = getattr(torch, op)
 
 
 
@@ -59,7 +59,7 @@ class TorchOPTemplate(HOOKModule):
 
     def __init__(self, op_name, hook):
         self.op_name_ = op_name
-        self.prefix_op_name_ = "Torch_" + str(op_name) + "_"
+        self.prefix_op_name_ = "Torch_" + str(op_name.replace(".", "_")) + "_"
         super().__init__(hook)
 
     @torch_device_guard
--
Gitee

From 36388ffddda6b4c27ed042e637c0c4a9cd7d48df Mon Sep 17 00:00:00 2001
From: s30048155
Date: Fri, 29 Mar 2024 10:08:34 +0800
Subject: [PATCH 3/9] update

---
 .../python/ptdbg_ascend/hook_module/wrap_torch.py | 15 ++++++++++++---
 1 file changed, 12 insertions(+), 3 deletions(-)

diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
index 366e16a020..0093006e9a 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
@@ -32,9 +32,18 @@ with FileOpen(yaml_path, 'r') as f:
 
 def get_torch_ops():
     global WrapTorchOps
-    _torch_ops = dir(torch)
-    _linalg_ops = ["linalg." + op for op in dir(torch.linalg)]
-    return set(WrapTorchOps) & (set(_torch_ops) | set(_linalg_ops))
+    _torch_ops = []
+    for op in WrapTorchOps:
+        parts = op.split('.')
+        if len(parts) > 1:
+            sub_module_name, sub_op = parts
+            sub_module = getattr(torch, sub_module_name, None)
+            if sub_module and sub_op in dir(sub_module):
+                _torch_ops.append(op)
+        else:
+            if op in dir(torch):
+                _torch_ops.append(op)
+    return set(WrapTorchOps) & set(_torch_ops)
 
 
 TorchOps = {}
--
Gitee

From 489db4f56f9e489a89d14960df2c2fd4bf7fd081 Mon Sep 17 00:00:00 2001
From: s30048155
Date: Fri, 29 Mar 2024 10:11:33 +0800
Subject: [PATCH 4/9] update

---
 .../src/python/ptdbg_ascend/hook_module/wrap_torch.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
index 0093006e9a..a6ca76e192 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
@@ -43,7 +43,7 @@ def get_torch_ops():
         else:
             if op in dir(torch):
                 _torch_ops.append(op)
-    return set(WrapTorchOps) & set(_torch_ops)
+    return set(_torch_ops)
 
 
 TorchOps = {}
--
Gitee

From 8ac2c62d9da20f05f8f639d82e66bb215a401ac4 Mon Sep 17 00:00:00 2001
From: s30048155
Date: Fri, 29 Mar 2024 10:25:50 +0800
Subject: [PATCH 5/9] clean code

---
 .../ptdbg_ascend/hook_module/wrap_torch.py | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
index a6ca76e192..3bf8fbcaa6 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
@@ -33,16 +33,16 @@ with FileOpen(yaml_path, 'r') as f:
 def get_torch_ops():
     global WrapTorchOps
     _torch_ops = []
-    for op in WrapTorchOps:
-        parts = op.split('.')
-        if len(parts) > 1:
-            sub_module_name, sub_op = parts
-            sub_module = getattr(torch, sub_module_name, None)
-            if sub_module and sub_op in dir(sub_module):
-                _torch_ops.append(op)
+    for operation in WrapTorchOps:
+        operation_parts = operation.split('.')
+        if len(operation_parts) > 1:
+            operation_sub_module_name, operation_sub_op = operation_parts
+            operation_sub_module = getattr(torch, operation_sub_module_name, None)
+            if operation_sub_module and operation_sub_op in dir(operation_sub_module):
+                _torch_ops.append(operation)
         else:
-            if op in dir(torch):
-                _torch_ops.append(op)
+            if operation in dir(torch):
+                _torch_ops.append(operation)
     return set(_torch_ops)
 
 
--
Gitee

From 4a3d1ae2c025611509576c442a9db9c398451bcf Mon Sep 17 00:00:00 2001
From: s30048155
Date: Fri, 29 Mar 2024 18:07:55 +0800
Subject: [PATCH 6/9] update

---
 .../src/python/ptdbg_ascend/hook_module/wrap_torch.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
index 3bf8fbcaa6..ab8238a16d 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
@@ -36,7 +36,7 @@ def get_torch_ops():
     for operation in WrapTorchOps:
         operation_parts = operation.split('.')
         if len(operation_parts) > 1:
-            operation_sub_module_name, operation_sub_op = operation_parts
+            operation_sub_module_name, operation_sub_op = '.'.join(operation_parts[:-1]), operation_parts[-1]
             operation_sub_module = getattr(torch, operation_sub_module_name, None)
             if operation_sub_module and operation_sub_op in dir(operation_sub_module):
                 _torch_ops.append(operation)
@@ -50,7 +50,7 @@ TorchOps = {}
 for op in get_torch_ops():
     parts = op.split('.')
     if len(parts) > 1:
-        sub_module_name, sub_op = parts
+        sub_module_name, sub_op = '.'.join(parts[:-1]), parts[-1]
         sub_module = getattr(torch, sub_module_name, None)
         if sub_module and sub_op in dir(sub_module):
             TorchOps[op] = getattr(sub_module, sub_op)
@@ -68,7 +68,7 @@ class TorchOPTemplate(HOOKModule):
 
     def __init__(self, op_name, hook):
         self.op_name_ = op_name
-        self.prefix_op_name_ = "Torch_" + str(op_name.replace(".", "_")) + "_"
+        self.prefix_op_name_ = "Torch_" + str(op_name) + "_"
         super().__init__(hook)
 
     @torch_device_guard
--
Gitee

From 4fc278a18d446a9380c7209e692c733a84bdc3fe Mon Sep 17 00:00:00 2001
From: s30048155
Date: Sat, 30 Mar 2024 10:46:57 +0800
Subject: [PATCH 7/9] add support

---
 .../hook_module/support_wrap_ops.yaml | 40 +++++++++++++++++++
 1 file changed, 40 insertions(+)

diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml
index 3899279d45..18b4ef5b2c 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml
@@ -560,7 +560,47 @@ tensor:
   - xlogy_
 
 torch:
+  - linalg.norm
   - linalg.vector_norm
+  - linalg.matrix_norm
+  - linalg.diagonal
+  - linalg.det
+  - linalg.slogdet
+  - linalg.cond
+  - linalg.matrix_rank
+  - linalg.cholesky
+  - linalg.qr
+  - linalg.lu
+  - linalg.lu_factor
+  - linalg.eig
+  - linalg.eigvals
+  - linalg.eigh
+  - linalg.eigvalsh
+  - linalg.svd
+  - linalg.svdvals
+  - linalg.solve
+  - linalg.solve_triangular
+  - linalg.lu_solve
+  - linalg.lstsq
+  - linalg.inv
+  - linalg.pinv
+  - linalg.matrix_exp
+  - linalg.matrix_power
+  - linalg.cross
+  - linalg.matmul
+  - linalg.vecdot
+  - linalg.multi_dot
+  - linalg.householder_product
+  - linalg.tensorinv
+  - linalg.tensorsolve
+  - linalg.vander
+  - linalg.cholesky_ex
+  - linalg.inv_ex
+  - linalg.solve_ex
+  - linalg.lu_factor_ex
+  - linalg.ldl_factor
+  - linalg.ldl_factor_ex
+  - linalg.ldl_solve
   - _adaptive_avg_pool2d
   - _add_relu
   - _add_relu_
--
Gitee

From 12903023c977ec4dc84b6a24727c03114e01f426 Mon Sep 17 00:00:00 2001
From: s30048155
Date: Sat, 30 Mar 2024 16:35:10 +0800
Subject: [PATCH 8/9] update

---
 .../ptdbg_ascend/hook_module/api_registry.py | 16 ++++++-------
 .../ptdbg_ascend/hook_module/wrap_torch.py   | 24 ++++++++-----------
 2 files changed, 18 insertions(+), 22 deletions(-)

diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/api_registry.py b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/api_registry.py
index 439026fd91..cf21fe86bb 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/api_registry.py
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/api_registry.py
@@ -60,21 +60,21 @@ class ApiRegistry:
     @staticmethod
     def store_ori_attr(ori_api_group, api_list, api_ori_attr):
         for api in api_list:
-            parts = api.split('.')
-            if len(parts) > 1:
-                sub_module = getattr(ori_api_group, parts[0])
-                api_ori_attr[api] = getattr(sub_module, parts[1])
+            if '.' in api:
+                sub_module_name, sub_op = api.rsplit('.', 1)
+                sub_module = getattr(ori_api_group, sub_module_name)
+                api_ori_attr[api] = getattr(sub_module, sub_op)
             else:
                 api_ori_attr[api] = getattr(ori_api_group, api)
 
     @staticmethod
     def set_api_attr(api_group, attr_dict):
         for api, api_attr in attr_dict.items():
-            parts = api.split('.')
-            if len(parts) > 1:
-                sub_module = getattr(api_group, parts[0], None)
+            if '.' in api:
+                sub_module_name, sub_op = api.rsplit('.', 1)
+                sub_module = getattr(api_group, sub_module_name, None)
                 if sub_module is not None:
-                    setattr(sub_module, parts[1], api_attr)
+                    setattr(sub_module, sub_op, api_attr)
                 else:
                     setattr(api_group, api, api_attr)
 
diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
index ab8238a16d..e3a4af7a85 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/wrap_torch.py
@@ -34,29 +34,25 @@ def get_torch_ops():
     global WrapTorchOps
     _torch_ops = []
     for operation in WrapTorchOps:
-        operation_parts = operation.split('.')
-        if len(operation_parts) > 1:
-            operation_sub_module_name, operation_sub_op = '.'.join(operation_parts[:-1]), operation_parts[-1]
-            operation_sub_module = getattr(torch, operation_sub_module_name, None)
-            if operation_sub_module and operation_sub_op in dir(operation_sub_module):
+        if '.' in operation:
+            operation_sub_module_name, operation_sub_op = operation.rsplit('.', 1)
+            operation_sub_module = getattr(torch, operation_sub_module_name)
+            if operation_sub_op in dir(operation_sub_module):
                 _torch_ops.append(operation)
         else:
-            if operation in dir(torch):
+            if hasattr(torch, operation):
                 _torch_ops.append(operation)
     return set(_torch_ops)
 
 
 TorchOps = {}
 for op in get_torch_ops():
-    parts = op.split('.')
-    if len(parts) > 1:
-        sub_module_name, sub_op = '.'.join(parts[:-1]), parts[-1]
-        sub_module = getattr(torch, sub_module_name, None)
-        if sub_module and sub_op in dir(sub_module):
-            TorchOps[op] = getattr(sub_module, sub_op)
+    if '.' in op:
+        sub_module_name, sub_op = op.rsplit('.', 1)
+        sub_module = getattr(torch, sub_module_name)
+        TorchOps[op] = getattr(sub_module, sub_op)
     else:
-        if op in dir(torch):
-            TorchOps[op] = getattr(torch, op)
+        TorchOps[op] = getattr(torch, op)
 
 
 
--
Gitee

From 9dd085ae9e6e26044833ad615fa7c00836f2850c Mon Sep 17 00:00:00 2001
From: s30048155
Date: Sat, 30 Mar 2024 18:04:01 +0800
Subject: [PATCH 9/9] update

---
 .../ptdbg_ascend/hook_module/support_wrap_ops.yaml | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml
index 18b4ef5b2c..92096fc4bb 100644
--- a/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml
+++ b/debug/accuracy_tools/ptdbg_ascend/src/python/ptdbg_ascend/hook_module/support_wrap_ops.yaml
@@ -568,19 +568,12 @@ torch:
   - linalg.slogdet
   - linalg.cond
   - linalg.matrix_rank
-  - linalg.cholesky
   - linalg.qr
   - linalg.lu
   - linalg.lu_factor
-  - linalg.eig
-  - linalg.eigvals
-  - linalg.eigh
-  - linalg.eigvalsh
   - linalg.svd
   - linalg.svdvals
   - linalg.solve
-  - linalg.solve_triangular
-  - linalg.lu_solve
   - linalg.lstsq
   - linalg.inv
   - linalg.pinv
@@ -591,7 +584,6 @@ torch:
   - linalg.vecdot
   - linalg.multi_dot
   - linalg.householder_product
-  - linalg.tensorinv
   - linalg.tensorsolve
   - linalg.vander
   - linalg.cholesky_ex
@@ -600,7 +592,6 @@ torch:
   - linalg.lu_factor_ex
   - linalg.ldl_factor
   - linalg.ldl_factor_ex
-  - linalg.ldl_solve
   - _adaptive_avg_pool2d
   - _add_relu
   - _add_relu_
--
Gitee
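
Reviewer note (illustrative sketch, not part of the patch series): the series converges on resolving dotted YAML entries such as linalg.norm by splitting on the last '.' and walking one getattr level below torch. A minimal standalone version of that lookup is sketched below; the helper name resolve_torch_api is hypothetical and chosen here only for clarity.

    # Sketch of the dotted-name resolution the final patches settle on:
    # "linalg.norm" -> getattr(getattr(torch, "linalg"), "norm"),
    # plain names such as "abs" -> getattr(torch, "abs").
    import torch

    def resolve_torch_api(api_name):
        """Return the callable behind a 'linalg.norm'-style or plain 'abs'-style name."""
        if '.' in api_name:
            sub_module_name, sub_op = api_name.rsplit('.', 1)
            sub_module = getattr(torch, sub_module_name)   # e.g. torch.linalg
            return getattr(sub_module, sub_op)             # e.g. torch.linalg.norm
        return getattr(torch, api_name)                    # e.g. torch.abs

    # Quick check of both branches.
    assert resolve_torch_api("linalg.norm") is torch.linalg.norm
    assert resolve_torch_api("abs") is torch.abs

Using rsplit('.', 1) keeps single-level submodules such as torch.linalg working while leaving plain operator names untouched, which is why the same pattern appears in both api_registry.py and wrap_torch.py by the end of the series.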