From dc1214869ecd028fc4f99e7a38d276e6ab012cf2 Mon Sep 17 00:00:00 2001
From: huangxiaolan
Date: Tue, 23 Sep 2025 16:42:55 +0800
Subject: [PATCH 1/3] fix bool expression

---
 .../gmm_deq_swiglu_quant_gmm_deq_operation.cpp | 4 ++--
 .../mm_deq_swiglu_quant_mm_deq_operation.cpp   | 4 ++--
 .../gmm_deq_swiglu_quant_gmm_deq_operation.cpp | 4 ++--
 .../mm_deq_swiglu_quant_mm_deq_operation.cpp   | 4 ++--
 4 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/kernels/mixkernels/gmm_deq_swiglu_quant_gmm_deq/gmm_deq_swiglu_quant_gmm_deq_operation.cpp b/src/kernels/mixkernels/gmm_deq_swiglu_quant_gmm_deq/gmm_deq_swiglu_quant_gmm_deq_operation.cpp
index c03469b1..1337ebf5 100644
--- a/src/kernels/mixkernels/gmm_deq_swiglu_quant_gmm_deq/gmm_deq_swiglu_quant_gmm_deq_operation.cpp
+++ b/src/kernels/mixkernels/gmm_deq_swiglu_quant_gmm_deq/gmm_deq_swiglu_quant_gmm_deq_operation.cpp
@@ -102,9 +102,9 @@ private:
                   "Param groupListType only support GROUP_LIST_CUM_SUM (0).", return false);
         MKI_CHECK(param.weightUpPermuteType != OpParam::GmmDeqSwigluQuantGmmDeq::PERMUTE_INVALID,
                   "Param weightUpPermuteType has invalid value.", return false);
-        MKI_CHECK(param.transposeWeightUp == false,
+        MKI_CHECK(!param.transposeWeightUp,
                   "Param transposeWeightUp only support false.", return false);
-        MKI_CHECK(param.transposeWeightDown == true,
+        MKI_CHECK(param.transposeWeightDown,
                   "Param transposeWeightDown only support true.", return false);
         return true;
     }
diff --git a/src/kernels/mixkernels/mm_deq_swiglu_quant_mm_deq/mm_deq_swiglu_quant_mm_deq_operation.cpp b/src/kernels/mixkernels/mm_deq_swiglu_quant_mm_deq/mm_deq_swiglu_quant_mm_deq_operation.cpp
index 2a402181..135beb06 100644
--- a/src/kernels/mixkernels/mm_deq_swiglu_quant_mm_deq/mm_deq_swiglu_quant_mm_deq_operation.cpp
+++ b/src/kernels/mixkernels/mm_deq_swiglu_quant_mm_deq/mm_deq_swiglu_quant_mm_deq_operation.cpp
@@ -94,9 +94,9 @@ private:
                   "Param outputType only support OUTPUT_FLOAT16 (0).", return false);
         MKI_CHECK(param.weightUpPermuteType != OpParam::MmDeqSwigluQuantMmDeq::PERMUTE_INVALID,
                   "Param weightUpPermuteType has invalid value.", return false);
-        MKI_CHECK(param.transposeWeightUp == false,
+        MKI_CHECK(!param.transposeWeightUp,
                   "Param transposeWeightUp only support false.", return false);
-        MKI_CHECK(param.transposeWeightDown == true,
+        MKI_CHECK(param.transposeWeightDown,
                   "Param transposeWeightDown only support true.", return false);
         return true;
     }
diff --git a/src/ops_infer/gmm_deq_swiglu_quant_gmm_deq/gmm_deq_swiglu_quant_gmm_deq_operation.cpp b/src/ops_infer/gmm_deq_swiglu_quant_gmm_deq/gmm_deq_swiglu_quant_gmm_deq_operation.cpp
index e63c92e9..de8be698 100644
--- a/src/ops_infer/gmm_deq_swiglu_quant_gmm_deq/gmm_deq_swiglu_quant_gmm_deq_operation.cpp
+++ b/src/ops_infer/gmm_deq_swiglu_quant_gmm_deq/gmm_deq_swiglu_quant_gmm_deq_operation.cpp
@@ -102,12 +102,12 @@ bool ParamCheck(const atb::infer::GmmDeqSwigluQuantGmmDeqParam &opParam)
         return false;
     }
 
-    if (opParam.transposeWeightUp != false) {
+    if (opParam.transposeWeightUp) {
         ATB_LOG(ERROR) << "Param transposeWeightUp only support false.";
         return false;
     }
 
-    if (opParam.transposeWeightDown != true) {
+    if (!opParam.transposeWeightDown) {
         ATB_LOG(ERROR) << "Param transposeWeightDown only support true.";
         return false;
     }
diff --git a/src/ops_infer/mm_deq_swiglu_quant_mm_deq/mm_deq_swiglu_quant_mm_deq_operation.cpp b/src/ops_infer/mm_deq_swiglu_quant_mm_deq/mm_deq_swiglu_quant_mm_deq_operation.cpp
index d96602ba..64cf34fb 100644
--- a/src/ops_infer/mm_deq_swiglu_quant_mm_deq/mm_deq_swiglu_quant_mm_deq_operation.cpp
+++ b/src/ops_infer/mm_deq_swiglu_quant_mm_deq/mm_deq_swiglu_quant_mm_deq_operation.cpp
@@ -87,12 +87,12 @@ bool ParamCheck(const atb::infer::MmDeqSwigluQuantMmDeqParam &opParam)
         return false;
     }
 
-    if (opParam.transposeWeightUp != false) {
+    if (opParam.transposeWeightUp) {
         ATB_LOG(ERROR) << "Param transposeWeightUp only support false.";
         return false;
     }
 
-    if (opParam.transposeWeightDown != true) {
+    if (!opParam.transposeWeightDown) {
         ATB_LOG(ERROR) << "Param transposeWeightDown only support true.";
         return false;
     }
--
Gitee

From 9dc70dec45ace7186f54303064fe0e76a7b6e61e Mon Sep 17 00:00:00 2001
From: huangxiaolan
Date: Tue, 23 Sep 2025 16:46:32 +0800
Subject: [PATCH 2/3] fix variable init

---
 example/multiStream/multiStream_multiGraph_demo.cpp  | 3 +--
 example/multiStream/multiStream_singleGraph_demo.cpp | 3 +--
 src/ops_infer/ring_mla/ring_mla_operation.cpp        | 3 +--
 3 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/example/multiStream/multiStream_multiGraph_demo.cpp b/example/multiStream/multiStream_multiGraph_demo.cpp
index ff6491a0..17a0e039 100644
--- a/example/multiStream/multiStream_multiGraph_demo.cpp
+++ b/example/multiStream/multiStream_multiGraph_demo.cpp
@@ -237,8 +237,7 @@ int main()
     packRW.outTensors.resize(outTensorNum);
     operationWR->InferShape(intensorDescs, outtensorDescs);
 
-    aclError ret;
-    ret = CreateInTensors(packWR.inTensors, intensorDescs);
+    aclError ret = CreateInTensors(packWR.inTensors, intensorDescs);
     if (ret != 0) {
         exit(ret);
     }
diff --git a/example/multiStream/multiStream_singleGraph_demo.cpp b/example/multiStream/multiStream_singleGraph_demo.cpp
index d95873fd..e1eb53af 100644
--- a/example/multiStream/multiStream_singleGraph_demo.cpp
+++ b/example/multiStream/multiStream_singleGraph_demo.cpp
@@ -264,8 +264,7 @@ int main()
     outtensorDescs.resize(outTensorNum);
     pack.outTensors.resize(outTensorNum);
     operation->InferShape(intensorDescs, outtensorDescs);
-    aclError ret;
-    ret = CreateOutTensors(pack.outTensors, outtensorDescs);
+    aclError ret = CreateOutTensors(pack.outTensors, outtensorDescs);
     if (ret != 0) {
         exit(ret);
     }
diff --git a/src/ops_infer/ring_mla/ring_mla_operation.cpp b/src/ops_infer/ring_mla/ring_mla_operation.cpp
index 8106fdd3..0376eabb 100644
--- a/src/ops_infer/ring_mla/ring_mla_operation.cpp
+++ b/src/ops_infer/ring_mla/ring_mla_operation.cpp
@@ -394,8 +394,7 @@ bool RingMLAOperation::InputLseDimCheck(const SVector<TensorDesc> &inTensorDescs
 
 Status RingMLAOperation::InferShapeCheckImpl(const SVector<TensorDesc> &inTensorDescs) const
 {
-    Status st;
-    st = DimCheck(inTensorDescs);
+    Status st = DimCheck(inTensorDescs);
     if (st != NO_ERROR) {
         return st;
     }
--
Gitee

From 61968c9d25d1d79c774982a50e0d5b04d4e9664f Mon Sep 17 00:00:00 2001
From: huangxiaolan
Date: Tue, 23 Sep 2025 16:55:10 +0800
Subject: [PATCH 3/3] add brackets

---
 .../multi_latent_attention/multi_latent_attention_operation.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/ops_infer/multi_latent_attention/multi_latent_attention_operation.cpp b/src/ops_infer/multi_latent_attention/multi_latent_attention_operation.cpp
index 8bdfceaf..4cfbc6cd 100644
--- a/src/ops_infer/multi_latent_attention/multi_latent_attention_operation.cpp
+++ b/src/ops_infer/multi_latent_attention/multi_latent_attention_operation.cpp
@@ -461,7 +461,7 @@ Status MultiLatentAttentionOperation::DimCheckInt8Nz(const SVector<TensorDesc> &
         return ERROR_INVALID_TENSOR_DIM;
     }
     if (inTensorDesc.at(idx + 1).shape.dims[0] != param_.headNum) {
-        ATB_LOG(ERROR) << GetLogPrefix() << "dim 0 of of pvDescale(intensor" << idx + 1
+        ATB_LOG(ERROR) << GetLogPrefix() << "dim 0 of of pvDescale(intensor" << (idx + 1)
                        << ") should be equal to dim0 of headNum";
         return ERROR_INVALID_TENSOR_DIM;
     }
--
Gitee