From fbdec5218abdd8bc3249044c381907187cb47153 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E4=B8=9B=E6=99=93=E5=AE=87?= Date: Fri, 9 May 2025 09:30:34 +0000 Subject: [PATCH] =?UTF-8?q?=E4=BD=BF=E7=94=A84=E7=BA=BF=E7=A8=8B=E5=AF=B9?= =?UTF-8?q?=E5=BA=94=E7=94=A8=E5=AE=89=E8=A3=85=E9=AA=8C=E7=AD=BE=E8=BF=9B?= =?UTF-8?q?=E8=A1=8C=E5=B9=B6=E8=A1=8C=E5=8C=96=EF=BC=8C=E5=B7=B2=E9=AA=8C?= =?UTF-8?q?=E8=AF=81top6=E5=BA=94=E7=94=A8=E6=94=B6=E7=9B=8A=E7=BA=A6?= =?UTF-8?q?=E4=B8=BA68%?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: 丛晓宇 --- .../include/util/hap_signing_block_utils.h | 8 +- .../src/util/hap_signing_block_utils.cpp | 131 +++++++++++++----- 2 files changed, 103 insertions(+), 36 deletions(-) diff --git a/interfaces/innerkits/appverify/include/util/hap_signing_block_utils.h b/interfaces/innerkits/appverify/include/util/hap_signing_block_utils.h index 53959ec..3d196d7 100644 --- a/interfaces/innerkits/appverify/include/util/hap_signing_block_utils.h +++ b/interfaces/innerkits/appverify/include/util/hap_signing_block_utils.h @@ -76,6 +76,8 @@ private: static const int32_t ZIP_BLOCKS_NUM_NEED_DIGEST; static const char ZIP_FIRST_LEVEL_CHUNK_PREFIX; static const char ZIP_SECOND_LEVEL_CHUNK_PREFIX; + static const int32_t ZIP_UPDATE_DIGEST_THREADS_NUM; + static const long long SMALL_FILE_SIZE; /* the specifications of hap sign block */ static constexpr long long MAX_HAP_SIGN_BLOCK_SIZE = 1024 * 1024 * 1024LL; // 1024MB static constexpr int32_t MAX_BLOCK_COUNT = 10; @@ -99,7 +101,11 @@ private: const std::vector& optionalBlocks, const HapByteBuffer& chunkDigest, HapByteBuffer& finalDigest); static bool ComputeDigestsForEachChunk(const DigestParameter& digestParam, DataSource* contents[], - int32_t len, HapByteBuffer& result); + int32_t len, HapByteBuffer& result, int32_t& offset); + static bool ComputeDigestsForDataSource(const DigestParameter& digestParam, DataSource* content, + HapByteBuffer& result, int32_t& 
offset); + static bool ComputeDigestsForContentsZip(int32_t nId, RandomAccessFile& hapFile, + int32_t chunkNum, long long fileSize, HapByteBuffer& digestsBuffer); static int32_t GetChunkCount(long long inputSize, long long chunkSize); static bool InitDigestPrefix(const DigestParameter& digestParam, unsigned char (&chunkContentPrefix)[ZIP_CHUNK_DIGEST_PRIFIX_LEN], int32_t chunkLen); diff --git a/interfaces/innerkits/appverify/src/util/hap_signing_block_utils.cpp b/interfaces/innerkits/appverify/src/util/hap_signing_block_utils.cpp index b8b4d8b..e63716c 100644 --- a/interfaces/innerkits/appverify/src/util/hap_signing_block_utils.cpp +++ b/interfaces/innerkits/appverify/src/util/hap_signing_block_utils.cpp @@ -16,6 +16,7 @@ #include "util/hap_signing_block_utils.h" #include +#include <thread> #include "algorithm" @@ -36,6 +37,7 @@ const long long HapSigningBlockUtils::HAP_SIG_BLOCK_MAGIC_HIGH = 449779798307046 /* 1MB = 1024 * 1024 Bytes */ const long long HapSigningBlockUtils::CHUNK_SIZE = 1048576LL; +const long long HapSigningBlockUtils::SMALL_FILE_SIZE = CHUNK_SIZE * 2; const int32_t HapSigningBlockUtils::HAP_SIG_BLOCK_MIN_SIZE = 32; const int32_t HapSigningBlockUtils::ZIP_HEAD_OF_SIGNING_BLOCK_LENGTH = 32; @@ -46,6 +48,7 @@ const int32_t HapSigningBlockUtils::ZIP_EOCD_COMMENT_LENGTH_OFFSET = 20; const int32_t HapSigningBlockUtils::ZIP_CD_OFFSET_IN_EOCD = 16; const int32_t HapSigningBlockUtils::ZIP_CD_SIZE_OFFSET_IN_EOCD = 12; const int32_t HapSigningBlockUtils::ZIP_BLOCKS_NUM_NEED_DIGEST = 3; +const int32_t HapSigningBlockUtils::ZIP_UPDATE_DIGEST_THREADS_NUM = 4; const char HapSigningBlockUtils::ZIP_FIRST_LEVEL_CHUNK_PREFIX = 0x5a; const char HapSigningBlockUtils::ZIP_SECOND_LEVEL_CHUNK_PREFIX = 0xa5; @@ -428,18 +431,47 @@ bool HapSigningBlockUtils::VerifyHapIntegrity( return false; } + long long fileSize = signInfo.hapSigningBlockOffset; long long centralDirSize = signInfo.hapEocdOffset - signInfo.hapCentralDirOffset; - HapFileDataSource contentsZip(hapFile, 0, 
signInfo.hapSigningBlockOffset, 0); + HapFileDataSource contentsZip(hapFile, 0, fileSize, 0); HapFileDataSource centralDir(hapFile, signInfo.hapCentralDirOffset, centralDirSize, 0); HapByteBufferDataSource eocd(signInfo.hapEocd); DataSource* content[ZIP_BLOCKS_NUM_NEED_DIGEST] = { &contentsZip, &centralDir, &eocd }; int32_t nId = HapVerifyOpensslUtils::GetDigestAlgorithmId(digestInfo.digestAlgorithm); DigestParameter digestParam = GetDigestParameter(nId); HapByteBuffer chunkDigest; - if (!ComputeDigestsForEachChunk(digestParam, content, ZIP_BLOCKS_NUM_NEED_DIGEST, chunkDigest)) { - HAPVERIFY_LOG_ERROR("Compute Content Digests failed, alg: %{public}d", nId); + int32_t chunkCount = 0; + int32_t sumOfChunksLen = 0; + if (!GetSumOfChunkDigestLen(content, ZIP_BLOCKS_NUM_NEED_DIGEST, digestParam.digestOutputSizeBytes, + chunkCount, sumOfChunksLen)) { + HAPVERIFY_LOG_ERROR("GetSumOfChunkDigestLen failed"); return false; } + chunkDigest.SetCapacity(sumOfChunksLen); + chunkDigest.PutByte(0, ZIP_FIRST_LEVEL_CHUNK_PREFIX); + chunkDigest.PutInt32(1, chunkCount); + if (fileSize <= SMALL_FILE_SIZE) { + // No parallel for small size <= 2MB. + int32_t offset = ZIP_CHUNK_DIGEST_PRIFIX_LEN; + if (!ComputeDigestsForEachChunk(digestParam, content, ZIP_BLOCKS_NUM_NEED_DIGEST, chunkDigest, offset)) { + HAPVERIFY_LOG_ERROR("Compute Content Digests failed, alg: %{public}d", nId); + return false; + } + } else { + // Compute digests for contents zip in parallel. + int32_t contentsZipChunkCount = GetChunkCount(fileSize, CHUNK_SIZE); + if (!ComputeDigestsForContentsZip(nId, hapFile, contentsZipChunkCount, fileSize, chunkDigest)) { + HAPVERIFY_LOG_ERROR("ComputeDigestsForContentsZip failed, alg: %{public}d", nId); + return false; + } + // Compute digests for other contents. 
+ int32_t offset = ZIP_CHUNK_DIGEST_PRIFIX_LEN + contentsZipChunkCount * digestParam.digestOutputSizeBytes; + if (!ComputeDigestsForEachChunk(digestParam, content + 1, ZIP_BLOCKS_NUM_NEED_DIGEST - 1, chunkDigest, + offset)) { + HAPVERIFY_LOG_ERROR("Compute Content Digests failed, alg: %{public}d", nId); + return false; + } + } HapByteBuffer actualDigest; if (!ComputeDigestsWithOptionalBlock(digestParam, signInfo.optionBlocks, chunkDigest, actualDigest)) { @@ -497,45 +529,74 @@ bool HapSigningBlockUtils::GetSumOfChunkDigestLen(DataSource* contents[], int32_ return true; } -bool HapSigningBlockUtils::ComputeDigestsForEachChunk(const DigestParameter& digestParam, - DataSource* contents[], int32_t len, HapByteBuffer& result) +bool HapSigningBlockUtils::ComputeDigestsForContentsZip(int32_t nId, RandomAccessFile& hapFile, int32_t chunkNum, + long long fileSize, HapByteBuffer& digestsBuffer) { - int32_t chunkCount = 0; - int32_t sumOfChunksLen = 0; - if (!GetSumOfChunkDigestLen(contents, len, digestParam.digestOutputSizeBytes, chunkCount, sumOfChunksLen)) { - HAPVERIFY_LOG_ERROR("GetSumOfChunkDigestLen failed"); - return false; + int32_t chunkNumToUpdate = (chunkNum + ZIP_UPDATE_DIGEST_THREADS_NUM - 1) / ZIP_UPDATE_DIGEST_THREADS_NUM; + int32_t offset = ZIP_CHUNK_DIGEST_PRIFIX_LEN; + std::vector<std::thread> threads; + std::vector<bool> results(ZIP_UPDATE_DIGEST_THREADS_NUM, false); + for (int i = 0; i < ZIP_UPDATE_DIGEST_THREADS_NUM; i++) { + threads.emplace_back([&, i, chunkNumToUpdate, fileSize]() { + long long fileBeginPosition = CHUNK_SIZE * chunkNumToUpdate * i; + long long fileEndPosition = std::min(CHUNK_SIZE * chunkNumToUpdate * (i + 1), fileSize); + HapFileDataSource hapDataChunk(hapFile, fileBeginPosition, fileEndPosition - fileBeginPosition, 0); + DigestParameter digestParam = GetDigestParameter(nId); + int32_t digestOffset = offset + chunkNumToUpdate * digestParam.digestOutputSizeBytes * i; + results[i] = ComputeDigestsForDataSource(digestParam, &hapDataChunk, digestsBuffer, 
digestOffset); + }); + } + + for (auto& thread : threads) { + thread.join(); + } + + for (bool computeDigestResult : results) { + if (!computeDigestResult) { + HAPVERIFY_LOG_ERROR("Compute digests failed"); + return false; + } } - result.SetCapacity(sumOfChunksLen); - result.PutByte(0, ZIP_FIRST_LEVEL_CHUNK_PREFIX); - result.PutInt32(1, chunkCount); - int32_t chunkIndex = 0; + return true; +} + +bool HapSigningBlockUtils::ComputeDigestsForDataSource(const DigestParameter& digestParam, DataSource* content, + HapByteBuffer& result, int32_t& offset) +{ unsigned char out[EVP_MAX_MD_SIZE]; unsigned char chunkContentPrefix[ZIP_CHUNK_DIGEST_PRIFIX_LEN] = {ZIP_SECOND_LEVEL_CHUNK_PREFIX, 0, 0, 0, 0}; - int32_t offset = ZIP_CHUNK_DIGEST_PRIFIX_LEN; - for (int32_t i = 0; i < len; i++) { - while (contents[i]->HasRemaining()) { - int32_t chunkSize = std::min(contents[i]->Remaining(), CHUNK_SIZE); - if (!InitDigestPrefix(digestParam, chunkContentPrefix, chunkSize)) { - HAPVERIFY_LOG_ERROR("InitDigestPrefix failed"); - return false; - } + while (content->HasRemaining()) { + int32_t chunkSize = std::min(content->Remaining(), CHUNK_SIZE); + if (!InitDigestPrefix(digestParam, chunkContentPrefix, chunkSize)) { + HAPVERIFY_LOG_ERROR("InitDigestPrefix failed"); + return false; + } - if (!contents[i]->ReadDataAndDigestUpdate(digestParam, chunkSize)) { - HAPVERIFY_LOG_ERROR("Copy Partial Buffer failed, count: %{public}d", chunkIndex); - return false; - } + if (!content->ReadDataAndDigestUpdate(digestParam, chunkSize)) { + HAPVERIFY_LOG_ERROR("Copy Partial Buffer failed"); + return false; + } - int32_t digestLen = HapVerifyOpensslUtils::GetDigest(digestParam, out); - if (digestLen != digestParam.digestOutputSizeBytes) { - HAPVERIFY_LOG_ERROR("GetDigest failed len: %{public}d digestSizeBytes: %{public}d", - digestLen, digestParam.digestOutputSizeBytes); - return false; - } - result.PutData(offset, reinterpret_cast<char*>(out), digestParam.digestOutputSizeBytes); - offset += digestLen; - 
chunkIndex++; + int32_t digestLen = HapVerifyOpensslUtils::GetDigest(digestParam, out); + if (digestLen != digestParam.digestOutputSizeBytes) { + HAPVERIFY_LOG_ERROR("GetDigest failed len: %{public}d digestSizeBytes: %{public}d", + digestLen, digestParam.digestOutputSizeBytes); + return false; + } + result.PutData(offset, reinterpret_cast<char*>(out), digestParam.digestOutputSizeBytes); + offset += digestLen; + } + return true; +} + +bool HapSigningBlockUtils::ComputeDigestsForEachChunk(const DigestParameter& digestParam, + DataSource* contents[], int32_t len, HapByteBuffer& result, int32_t& offset) +{ + for (int32_t i = 0; i < len; i++) { + if (!ComputeDigestsForDataSource(digestParam, contents[i], result, offset)) { + HAPVERIFY_LOG_ERROR("Compute digest failed"); + return false; } } return true; -- Gitee