From 58da28b4a29fee9c938519b9a3018b5226e929f5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E7=94=B0=E9=87=8E?=
Date: Tue, 8 Oct 2024 19:28:17 +0800
Subject: [PATCH] cherry pick 2d803ef from https://gitee.com/tianye0806/pytorch/pulls/15199 fix dfx

---
 torch_npu/meta/_meta_registrations.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/torch_npu/meta/_meta_registrations.py b/torch_npu/meta/_meta_registrations.py
index dc245410ff..0fdad4db80 100644
--- a/torch_npu/meta/_meta_registrations.py
+++ b/torch_npu/meta/_meta_registrations.py
@@ -759,9 +759,11 @@ def npu_group_quant_meta(x, scale, group_index, *, offset=None, dst_dtype=None):
         return torch.empty_like(x, dtype=torch.int8)
     elif dst_dtype == torch.quint4x2:
         dim_num = x.dim()
+        if dim_num == 0:
+            raise RuntimeError("Input x can't be scalar" + ops_error(ErrCode.PARAM))
         if x.size(dim_num - 1) % 8:
-            raise RuntimeError("If dst_dtype is quint4x2, last dim must be divisible by 8" +
-                               ops_error(ErrCode.NOT_SUPPORT))
+            raise RuntimeError("If dst_dtype is quint4x2, x last dim must be divisible by 8" +
+                               ops_error(ErrCode.PARAM))
         output_shape = []
         for dim in range(dim_num - 1):
             output_shape.append(x.size(dim))
--
Gitee
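
Below is a minimal standalone sketch (not part of the patch) of the validation this hunk adds. The new dim_num == 0 guard matters because on a 0-dim tensor the later x.size(dim_num - 1) call becomes x.size(-1), which fails with a generic IndexError before the divisibility check is reached; the guard turns that into an explicit parameter error. The helper name check_group_quant_input is hypothetical, and the ops_error(ErrCode.PARAM) suffix is dropped here because its import is not shown in the hunk.

import torch

def check_group_quant_input(x: torch.Tensor, dst_dtype: torch.dtype) -> None:
    # Mirror of the quint4x2 branch's input validation in npu_group_quant_meta.
    if dst_dtype != torch.quint4x2:
        return
    dim_num = x.dim()
    if dim_num == 0:
        # New guard from this patch: a scalar tensor has no last dim to check,
        # so raise a clear parameter error instead of a generic IndexError.
        raise RuntimeError("Input x can't be scalar")
    if x.size(dim_num - 1) % 8:
        # Pre-existing guard; the patch only rewords the message and switches
        # the error code from NOT_SUPPORT to PARAM.
        raise RuntimeError("If dst_dtype is quint4x2, x last dim must be divisible by 8")

check_group_quant_input(torch.empty(4, 16), torch.quint4x2)   # passes
# check_group_quant_input(torch.tensor(1.0), torch.quint4x2)  # raises: scalar input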