From 82fdb784a2e43c55de082adce2f85d06492ad537 Mon Sep 17 00:00:00 2001
From: yuhanshi
Date: Thu, 30 Mar 2023 14:45:36 +0800
Subject: [PATCH] Avoid implicit type casting in the project.

Signed-off-by: yuhanshi
---
 frameworks/native/compilation.cpp                         | 6 ++++--
 frameworks/native/nn_tensor.cpp                           | 4 ++--
 frameworks/native/ops/conv2d_builder.cpp                  | 6 +++---
 frameworks/native/ops/conv2d_transpose_builder.cpp        | 8 ++++----
 frameworks/native/ops/depthwise_conv2d_native_builder.cpp | 6 +++---
 frameworks/native/ops/pooling_builder.cpp                 | 4 ++--
 frameworks/native/ops/top_k_builder.h                     | 2 +-
 frameworks/native/validation.h                            | 3 ++-
 8 files changed, 21 insertions(+), 18 deletions(-)

diff --git a/frameworks/native/compilation.cpp b/frameworks/native/compilation.cpp
index ed6e737..0f89fd3 100644
--- a/frameworks/native/compilation.cpp
+++ b/frameworks/native/compilation.cpp
@@ -371,6 +371,7 @@ OH_NN_ReturnCode Compilation::GetCacheFileLength(std::ifstream& ifs, int& fsize)
 OH_NN_ReturnCode Compilation::ReadCacheModelFile(const std::string& file, ModelBuffer& modelBuffer) const
 {
+    // file is validated outside.
     std::ifstream ifs(file.c_str(), std::ios::in | std::ios::binary);
     if (!ifs) {
         LOGE("[Compilation] Fail to open cache file.");
         return OH_NN_INVALID_FILE;
     }
@@ -410,15 +411,16 @@ OH_NN_ReturnCode Compilation::ReadCacheModelFile(const std::string& file, ModelB
     ifs.close();

     modelBuffer.buffer = ptr;
-    modelBuffer.length = fsize;
+    modelBuffer.length = static_cast<size_t>(fsize); // fsize should be non-negative, safe to cast.
     return OH_NN_SUCCESS;
 }

 OH_NN_ReturnCode Compilation::CheckCacheInfo(ModelCacheInfo& modelCacheInfo, const std::string& cacheInfoPath) const
 {
+    // cacheInfoPath is validated outside.
     std::ifstream infoCacheFile(cacheInfoPath.c_str(), std::ios::in | std::ios::binary);
     if (!infoCacheFile) {
-        LOGE("[Compilation] Openning cache info file failed.");
+        LOGE("[Compilation] Opening cache info file failed.");
         return OH_NN_INVALID_FILE;
     }

diff --git a/frameworks/native/nn_tensor.cpp b/frameworks/native/nn_tensor.cpp
index 68f392a..2b6aaf4 100644
--- a/frameworks/native/nn_tensor.cpp
+++ b/frameworks/native/nn_tensor.cpp
@@ -390,8 +390,8 @@ bool NNTensor::CompareAttribute(const NNTensor& tensor) const
         return false;
     }

-    for (auto i = 0; i < dimensions.size(); i++) {
-        if (m_dimensions[i] != -1 && m_dimensions[i] != dimensions[i]) {
+    for (size_t i = 0; i < dimensions.size(); i++) {
+        if ((m_dimensions[i] != -1) && (m_dimensions[i] != dimensions[i])) {
             LOGI("Tensors have different dimension: dimension index: %u, dimension value: %d and %d.",
                 i, m_dimensions[i], dimensions[i]);
             return false;
diff --git a/frameworks/native/ops/conv2d_builder.cpp b/frameworks/native/ops/conv2d_builder.cpp
index 302f1e4..df23946 100644
--- a/frameworks/native/ops/conv2d_builder.cpp
+++ b/frameworks/native/ops/conv2d_builder.cpp
@@ -96,7 +96,7 @@ OH_NN_ReturnCode Conv2DBuilder::SetStrides(std::shared_ptr<NNTensor> tensor)
         return OH_NN_INVALID_PARAMETER;
     }
     const int64_t* pStrides = reinterpret_cast<const int64_t*>(buffer);
-    int stridesSize = tensor->GetElementCount();
+    uint32_t stridesSize = tensor->GetElementCount();
     m_strides.assign(pStrides, pStrides + stridesSize);

     return OH_NN_SUCCESS;
@@ -117,7 +117,7 @@ OH_NN_ReturnCode Conv2DBuilder::SetDilation(std::shared_ptr<NNTensor> tensor)
         return OH_NN_INVALID_PARAMETER;
     }
     const int64_t* pDilation = reinterpret_cast<const int64_t*>(buffer);
-    int dilationSize = tensor->GetElementCount();
+    uint32_t dilationSize = tensor->GetElementCount();
     m_dilation.assign(pDilation, pDilation + dilationSize);

     return OH_NN_SUCCESS;
@@ -161,7 +161,7 @@ OH_NN_ReturnCode Conv2DBuilder::SetPad(std::shared_ptr<NNTensor> tensor)
         }
         int64_t* pPadList = static_cast<int64_t*>(buffer);
-        int padListSize = tensor->GetElementCount();
+        uint32_t padListSize = tensor->GetElementCount();
         m_pad.assign(pPadList, pPadList + padListSize);
     }
diff --git a/frameworks/native/ops/conv2d_transpose_builder.cpp b/frameworks/native/ops/conv2d_transpose_builder.cpp
index 2e7b8b0..9111b55 100644
--- a/frameworks/native/ops/conv2d_transpose_builder.cpp
+++ b/frameworks/native/ops/conv2d_transpose_builder.cpp
@@ -89,7 +89,7 @@ OH_NN_ReturnCode Conv2DTransposeBuilder::SetStrides(std::shared_ptr<NNTensor> te
         return OH_NN_INVALID_PARAMETER;
     }
     const int64_t* pStrides = reinterpret_cast<const int64_t*>(buffer);
-    int elementSize = tensor->GetElementCount();
+    uint32_t elementSize = tensor->GetElementCount();
     m_strides.assign(pStrides, pStrides + elementSize);

     return OH_NN_SUCCESS;
@@ -110,7 +110,7 @@ OH_NN_ReturnCode Conv2DTransposeBuilder::SetDilation(std::shared_ptr<NNTensor> t
         return OH_NN_INVALID_PARAMETER;
     }
     const int64_t* pDilation = reinterpret_cast<const int64_t*>(buffer);
-    int dilationSize = tensor->GetElementCount();
+    uint32_t dilationSize = tensor->GetElementCount();
     m_dilation.assign(pDilation, pDilation + dilationSize);

     return OH_NN_SUCCESS;
@@ -154,7 +154,7 @@ OH_NN_ReturnCode Conv2DTransposeBuilder::SetPad(std::shared_ptr<NNTensor> tensor
         }
         const int64_t* pPadList = reinterpret_cast<const int64_t*>(buffer);
-        int padListPadSize = tensor->GetElementCount();
+        uint32_t padListPadSize = tensor->GetElementCount();
         m_padList.assign(pPadList, pPadList + padListPadSize);
     }
@@ -200,7 +200,7 @@ OH_NN_ReturnCode Conv2DTransposeBuilder::SetOutPadding(std::shared_ptr
         return OH_NN_INVALID_PARAMETER;
     }
     const int64_t* pOutputPadding = reinterpret_cast<const int64_t*>(buffer);
-    int outputPadSize = tensor->GetElementCount();
+    uint32_t outputPadSize = tensor->GetElementCount();
     m_outputPaddings.assign(pOutputPadding, pOutputPadding + outputPadSize);

     return OH_NN_SUCCESS;
diff --git a/frameworks/native/ops/depthwise_conv2d_native_builder.cpp b/frameworks/native/ops/depthwise_conv2d_native_builder.cpp
index 51a2066..d1fbeb8 100644
--- a/frameworks/native/ops/depthwise_conv2d_native_builder.cpp
+++ b/frameworks/native/ops/depthwise_conv2d_native_builder.cpp
@@ -113,7 +113,7 @@ OH_NN_ReturnCode DepthwiseConv2DNativeBuilder::SetStrides(std::shared_ptr
         return OH_NN_INVALID_PARAMETER;
     }
     const int64_t* pStrides = reinterpret_cast<const int64_t*>(buffer);
-    int stridesSize = tensor->GetElementCount();
+    uint32_t stridesSize = tensor->GetElementCount();
     m_strides.assign(pStrides, pStrides + stridesSize);

     return OH_NN_SUCCESS;
@@ -132,7 +132,7 @@ OH_NN_ReturnCode DepthwiseConv2DNativeBuilder::SetDilation(std::shared_ptr
         return OH_NN_INVALID_PARAMETER;
     }
     const int64_t* pDilation = reinterpret_cast<const int64_t*>(buffer);
-    int dilationSize = tensor->GetElementCount();
+    uint32_t dilationSize = tensor->GetElementCount();
     m_dilation.assign(pDilation, pDilation + dilationSize);

     return OH_NN_SUCCESS;
@@ -174,7 +174,7 @@ OH_NN_ReturnCode DepthwiseConv2DNativeBuilder::SetPadModeOrPaddings(
         }
         const int64_t* pPadList = reinterpret_cast<const int64_t*>(buffer);
-        int padListSize = tensor->GetElementCount();
+        uint32_t padListSize = tensor->GetElementCount();
         m_pad.assign(pPadList, pPadList + padListSize);
     }
     return OH_NN_SUCCESS;
diff --git a/frameworks/native/ops/pooling_builder.cpp b/frameworks/native/ops/pooling_builder.cpp
index 9b52c8f..7338ed4 100644
--- a/frameworks/native/ops/pooling_builder.cpp
+++ b/frameworks/native/ops/pooling_builder.cpp
@@ -113,7 +113,7 @@ OH_NN_ReturnCode PoolingBuilder::SetKernel(std::shared_ptr<NNTensor> tensor)
     }

     const int64_t* pKernelSize = reinterpret_cast<const int64_t*>(buffer);
-    int kernelSize = tensor->GetElementCount();
+    uint32_t kernelSize = tensor->GetElementCount();
     m_kernelSize.assign(pKernelSize, pKernelSize + kernelSize);

     return OH_NN_SUCCESS;
@@ -135,7 +135,7 @@ OH_NN_ReturnCode PoolingBuilder::SetStrides(std::shared_ptr<NNTensor> tensor)
     }

     const int64_t* pStrides = reinterpret_cast<const int64_t*>(buffer);
-    int strideslSize = tensor->GetElementCount();
+    uint32_t strideslSize = tensor->GetElementCount();
     m_strides.assign(pStrides, pStrides + strideslSize);

     return OH_NN_SUCCESS;
diff --git a/frameworks/native/ops/top_k_builder.h b/frameworks/native/ops/top_k_builder.h
index dfd4a6a..69d5080 100644
--- a/frameworks/native/ops/top_k_builder.h
+++ b/frameworks/native/ops/top_k_builder.h
@@ -36,7 +36,7 @@ private:
     OH_NN_ReturnCode SetSorted(std::shared_ptr<NNTensor> tensor);

 private:
-    bool m_sorted;
+    bool m_sorted {true}; // true means sorting in descending order.
 };
 } // namespace Ops
 } // namespace NeuralNetworkRuntime
diff --git a/frameworks/native/validation.h b/frameworks/native/validation.h
index 919d4c4..003df7b 100644
--- a/frameworks/native/validation.h
+++ b/frameworks/native/validation.h
@@ -26,7 +26,8 @@ template<typename T>
 OH_NN_ReturnCode ValidateArray(const T* data, size_t size)
 {
     if ((data != nullptr) != (size > 0)) {
-        LOGE("ValidateArray failed, data is %p but the length is %zu", data, size);
+        LOGE("ValidateArray failed, data should be a valid pointer when size is larger than 0, "
+            "otherwise, data should be nullptr when size is 0.");
         return OH_NN_INVALID_PARAMETER;
     }
     return OH_NN_SUCCESS;
--
Gitee
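
The standalone sketch below is not part of the patch; it only illustrates the casting pattern the commit applies. GetElementCount() here is a hypothetical stand-in for NNTensor::GetElementCount(), assumed to return uint32_t, and the size_t target of the file-length cast mirrors the ModelBuffer change above.

// Illustration only: keep unsigned counts unsigned, and make the remaining
// signed-to-unsigned widening explicit instead of implicit.
#include <cstdint>
#include <cstdio>
#include <vector>

static uint32_t GetElementCount() { return 4U; }  // stand-in, not the real API

int main()
{
    // Before: "int count = GetElementCount();" narrows unsigned to signed implicitly.
    // After: keep the unsigned type, so no implicit conversion takes place.
    uint32_t count = GetElementCount();

    const int64_t padList[] = {1, 1, 2, 2};
    std::vector<int64_t> pads;
    pads.assign(padList, padList + count);  // pointer arithmetic with the unsigned count

    // A file size already checked to be non-negative is widened explicitly,
    // as in the modelBuffer.length assignment above.
    int fileSize = 128;
    size_t length = static_cast<size_t>(fileSize);

    printf("pads: %zu elements, cache length: %zu bytes\n", pads.size(), length);
    return 0;
}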