From a8ec08c43e8eb92c3c4cac5e78ceb5bae0e6647f Mon Sep 17 00:00:00 2001
From: 丛晓宇
Date: Fri, 9 May 2025 08:52:27 +0000
Subject: [PATCH] Parallelize the application install signature verification
 with 4 threads; verified a gain of about 68% for the top 6 applications
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: 丛晓宇
---
 .../include/util/hap_signing_block_utils.h |   8 +-
 .../src/util/hap_signing_block_utils.cpp   | 171 ++++++++++++------
 2 files changed, 123 insertions(+), 56 deletions(-)

diff --git a/interfaces/innerkits/appverify/include/util/hap_signing_block_utils.h b/interfaces/innerkits/appverify/include/util/hap_signing_block_utils.h
index 53959ec..3d196d7 100644
--- a/interfaces/innerkits/appverify/include/util/hap_signing_block_utils.h
+++ b/interfaces/innerkits/appverify/include/util/hap_signing_block_utils.h
@@ -76,6 +76,8 @@ private:
     static const int32_t ZIP_BLOCKS_NUM_NEED_DIGEST;
     static const char ZIP_FIRST_LEVEL_CHUNK_PREFIX;
     static const char ZIP_SECOND_LEVEL_CHUNK_PREFIX;
+    static const int32_t ZIP_UPDATE_DIGEST_THREADS_NUM;
+    static const long long SMALL_FILE_SIZE;
     /* the specifications of hap sign block */
     static constexpr long long MAX_HAP_SIGN_BLOCK_SIZE = 1024 * 1024 * 1024LL; // 1024MB
     static constexpr int32_t MAX_BLOCK_COUNT = 10;
@@ -99,7 +101,11 @@ private:
         const std::vector<OptionalBlock>& optionalBlocks, const HapByteBuffer& chunkDigest, HapByteBuffer& finalDigest);
     static bool ComputeDigestsForEachChunk(const DigestParameter& digestParam, DataSource* contents[],
-        int32_t len, HapByteBuffer& result);
+        int32_t len, HapByteBuffer& result, int32_t& offset);
+    static bool ComputeDigestsForDataSource(const DigestParameter& digestParam, DataSource* content,
+        HapByteBuffer& result, int32_t& offset);
+    static bool ComputeDigestsForContentsZip(int32_t nId, RandomAccessFile& hapFile,
+        int32_t chunkNum, long long fileSize, HapByteBuffer& digestsBuffer);
     static int32_t GetChunkCount(long long inputSize, long long chunkSize);
     static bool InitDigestPrefix(const DigestParameter& digestParam,
         unsigned char (&chunkContentPrefix)[ZIP_CHUNK_DIGEST_PRIFIX_LEN], int32_t chunkLen);
diff --git a/interfaces/innerkits/appverify/src/util/hap_signing_block_utils.cpp b/interfaces/innerkits/appverify/src/util/hap_signing_block_utils.cpp
index b8b4d8b..78198fd 100644
--- a/interfaces/innerkits/appverify/src/util/hap_signing_block_utils.cpp
+++ b/interfaces/innerkits/appverify/src/util/hap_signing_block_utils.cpp
@@ -16,6 +16,7 @@
 #include "util/hap_signing_block_utils.h"
 
 #include <climits>
+#include <thread>
 #include <vector>
 
 #include "algorithm"
@@ -36,16 +37,18 @@ const long long HapSigningBlockUtils::HAP_SIG_BLOCK_MAGIC_HIGH = 449779798307046
 /* 1MB = 1024 * 1024 Bytes */
 const long long HapSigningBlockUtils::CHUNK_SIZE = 1048576LL;
-
+const long long HapSigningBlockUtils::SMALL_FILE_SIZE = CHUNK_SIZE * 2;
+
 const int32_t HapSigningBlockUtils::HAP_SIG_BLOCK_MIN_SIZE = 32;
 const int32_t HapSigningBlockUtils::ZIP_HEAD_OF_SIGNING_BLOCK_LENGTH = 32;
-
+
 const int32_t HapSigningBlockUtils::ZIP_EOCD_SEG_MIN_SIZE = 22;
 const int32_t HapSigningBlockUtils::ZIP_EOCD_SEGMENT_FLAG = 0x06054b50;
 const int32_t HapSigningBlockUtils::ZIP_EOCD_COMMENT_LENGTH_OFFSET = 20;
 const int32_t HapSigningBlockUtils::ZIP_CD_OFFSET_IN_EOCD = 16;
 const int32_t HapSigningBlockUtils::ZIP_CD_SIZE_OFFSET_IN_EOCD = 12;
 const int32_t HapSigningBlockUtils::ZIP_BLOCKS_NUM_NEED_DIGEST = 3;
+const int32_t HapSigningBlockUtils::ZIP_UPDATE_DIGEST_THREADS_NUM = 4;
 const char HapSigningBlockUtils::ZIP_FIRST_LEVEL_CHUNK_PREFIX = 0x5a;
 const char HapSigningBlockUtils::ZIP_SECOND_LEVEL_CHUNK_PREFIX = 0xa5;
@@ -427,33 +430,62 @@ bool HapSigningBlockUtils::VerifyHapIntegrity(
         HAPVERIFY_LOG_ERROR("Set central dir offset failed");
         return false;
     }
-
+
+    long long fileSize = signInfo.hapSigningBlockOffset;
     long long centralDirSize = signInfo.hapEocdOffset - signInfo.hapCentralDirOffset;
-    HapFileDataSource contentsZip(hapFile, 0, signInfo.hapSigningBlockOffset, 0);
+    HapFileDataSource contentsZip(hapFile, 0, fileSize, 0);
     HapFileDataSource centralDir(hapFile, signInfo.hapCentralDirOffset, centralDirSize, 0);
     HapByteBufferDataSource eocd(signInfo.hapEocd);
     DataSource* content[ZIP_BLOCKS_NUM_NEED_DIGEST] = { &contentsZip, &centralDir, &eocd };
     int32_t nId = HapVerifyOpensslUtils::GetDigestAlgorithmId(digestInfo.digestAlgorithm);
     DigestParameter digestParam = GetDigestParameter(nId);
     HapByteBuffer chunkDigest;
-    if (!ComputeDigestsForEachChunk(digestParam, content, ZIP_BLOCKS_NUM_NEED_DIGEST, chunkDigest)) {
-        HAPVERIFY_LOG_ERROR("Compute Content Digests failed, alg: %{public}d", nId);
+    int32_t chunkCount = 0;
+    int32_t sumOfChunksLen = 0;
+    if (!GetSumOfChunkDigestLen(content, ZIP_BLOCKS_NUM_NEED_DIGEST, digestParam.digestOutputSizeBytes,
+        chunkCount, sumOfChunksLen)) {
+        HAPVERIFY_LOG_ERROR("GetSumOfChunkDigestLen failed");
         return false;
     }
-
+    chunkDigest.SetCapacity(sumOfChunksLen);
+    chunkDigest.PutByte(0, ZIP_FIRST_LEVEL_CHUNK_PREFIX);
+    chunkDigest.PutInt32(1, chunkCount);
+    if (fileSize <= SMALL_FILE_SIZE) {
+        // No parallel for small size <= 2MB.
+        int32_t offset = ZIP_CHUNK_DIGEST_PRIFIX_LEN;
+        if (!ComputeDigestsForEachChunk(digestParam, content, ZIP_BLOCKS_NUM_NEED_DIGEST, chunkDigest, offset)) {
+            HAPVERIFY_LOG_ERROR("Compute Content Digests failed, alg: %{public}d", nId);
+            return false;
+        }
+    } else {
+        // Compute digests for contents zip in parallel.
+        int32_t contentsZipChunkCount = GetChunkCount(fileSize, CHUNK_SIZE);
+        if (!ComputeDigestsForContentsZip(nId, hapFile, contentsZipChunkCount, fileSize, chunkDigest)) {
+            HAPVERIFY_LOG_ERROR("ComputeDigestsForContentsZip failed, alg: %{public}d", nId);
+            return false;
+        }
+        // Compute digests for other contents.
+        int32_t offset = ZIP_CHUNK_DIGEST_PRIFIX_LEN + contentsZipChunkCount * digestParam.digestOutputSizeBytes;
+        if (!ComputeDigestsForEachChunk(digestParam, content + 1, ZIP_BLOCKS_NUM_NEED_DIGEST - 1, chunkDigest,
+            offset)) {
+            HAPVERIFY_LOG_ERROR("Compute Content Digests failed, alg: %{public}d", nId);
+            return false;
+        }
+    }
+
     HapByteBuffer actualDigest;
     if (!ComputeDigestsWithOptionalBlock(digestParam, signInfo.optionBlocks, chunkDigest, actualDigest)) {
         HAPVERIFY_LOG_ERROR("Compute Final Digests failed, alg: %{public}d", nId);
         return false;
     }
-
+
     if (!digestInfo.content.IsEqual(actualDigest)) {
         HAPVERIFY_LOG_ERROR("digest of contents verify failed, alg %{public}d", nId);
         return false;
     }
     return true;
 }
-
+
 bool HapSigningBlockUtils::ComputeDigestsWithOptionalBlock(const DigestParameter& digestParam,
     const std::vector<OptionalBlock>& optionalBlocks, const HapByteBuffer& chunkDigest, HapByteBuffer& finalDigest)
 {
@@ -464,12 +496,12 @@ bool HapSigningBlockUtils::ComputeDigestsWithOptionalBlock(const DigestParameter
             digestLen, digestParam.digestOutputSizeBytes);
         return false;
     }
-
+
     finalDigest.SetCapacity(digestParam.digestOutputSizeBytes);
     finalDigest.PutData(0, reinterpret_cast<char*>(out), digestParam.digestOutputSizeBytes);
     return true;
 }
-
+
 bool HapSigningBlockUtils::GetSumOfChunkDigestLen(DataSource* contents[], int32_t len,
     int32_t chunkDigestLen, int& chunkCount, int& sumOfChunkDigestLen)
 {
@@ -481,66 +513,95 @@ bool HapSigningBlockUtils::GetSumOfChunkDigestLen(DataSource* contents[], int32_
         contents[i]->Reset();
         chunkCount += GetChunkCount(contents[i]->Remaining(), CHUNK_SIZE);
     }
-
+
     if (chunkCount <= 0) {
         HAPVERIFY_LOG_ERROR("no content for digest");
         return false;
     }
-
+
     if (chunkDigestLen < 0 || ((INT_MAX - ZIP_CHUNK_DIGEST_PRIFIX_LEN) / chunkCount) < chunkDigestLen) {
         HAPVERIFY_LOG_ERROR("overflow chunkCount: %{public}d, chunkDigestLen: %{public}d",
             chunkCount, chunkDigestLen);
         return false;
     }
-
+
     sumOfChunkDigestLen = ZIP_CHUNK_DIGEST_PRIFIX_LEN + chunkCount * chunkDigestLen;
     return true;
 }
-
-bool HapSigningBlockUtils::ComputeDigestsForEachChunk(const DigestParameter& digestParam,
-    DataSource* contents[], int32_t len, HapByteBuffer& result)
+
+bool HapSigningBlockUtils::ComputeDigestsForContentsZip(int32_t nId, RandomAccessFile& hapFile, int32_t chunkNum,
+    long long fileSize, HapByteBuffer& digestsBuffer)
 {
-    int32_t chunkCount = 0;
-    int32_t sumOfChunksLen = 0;
-    if (!GetSumOfChunkDigestLen(contents, len, digestParam.digestOutputSizeBytes, chunkCount, sumOfChunksLen)) {
-        HAPVERIFY_LOG_ERROR("GetSumOfChunkDigestLen failed");
-        return false;
+    int32_t chunkNumToUpdate = (chunkNum + ZIP_UPDATE_DIGEST_THREADS_NUM - 1) / ZIP_UPDATE_DIGEST_THREADS_NUM;
+    int32_t offset = ZIP_CHUNK_DIGEST_PRIFIX_LEN;
+    std::vector<std::thread> threads;
+    std::vector<bool> results(ZIP_UPDATE_DIGEST_THREADS_NUM, false);
+    for (int i = 0; i < ZIP_UPDATE_DIGEST_THREADS_NUM; i++) {
+        threads.emplace_back([&, i, chunkNumToUpdate, fileSize]() {
+            long long fileBeginPosition = CHUNK_SIZE * chunkNumToUpdate * i;
+            long long fileEndPosition = std::min(CHUNK_SIZE * chunkNumToUpdate * (i + 1), fileSize);
+            HapFileDataSource hapDataChunk(hapFile, fileBeginPosition, fileEndPosition - fileBeginPosition, 0);
+            DigestParameter digestParam = GetDigestParameter(nId);
+            int32_t digestOffset = offset + chunkNumToUpdate * digestParam.digestOutputSizeBytes * i;
+            results[i] = ComputeDigestsForDataSource(digestParam, &hapDataChunk, digestsBuffer, digestOffset);
+        });
+    }
+
+    for (auto& thread : threads) {
+        thread.join();
+    }
+
+    for (bool computeDigestResult : results) {
+        if (!computeDigestResult) {
+            HAPVERIFY_LOG_ERROR("Compute digests failed");
+            return false;
+        }
     }
-    result.SetCapacity(sumOfChunksLen);
-    result.PutByte(0, ZIP_FIRST_LEVEL_CHUNK_PREFIX);
-    result.PutInt32(1, chunkCount);
-
-    int32_t chunkIndex = 0;
+
+    return true;
+}
+
+bool HapSigningBlockUtils::ComputeDigestsForDataSource(const DigestParameter& digestParam, DataSource* content,
+    HapByteBuffer& result, int32_t& offset)
+{
     unsigned char out[EVP_MAX_MD_SIZE];
     unsigned char chunkContentPrefix[ZIP_CHUNK_DIGEST_PRIFIX_LEN] = {ZIP_SECOND_LEVEL_CHUNK_PREFIX, 0, 0, 0, 0};
-    int32_t offset = ZIP_CHUNK_DIGEST_PRIFIX_LEN;
-    for (int32_t i = 0; i < len; i++) {
-        while (contents[i]->HasRemaining()) {
-            int32_t chunkSize = std::min(contents[i]->Remaining(), CHUNK_SIZE);
-            if (!InitDigestPrefix(digestParam, chunkContentPrefix, chunkSize)) {
-                HAPVERIFY_LOG_ERROR("InitDigestPrefix failed");
-                return false;
-            }
-
-            if (!contents[i]->ReadDataAndDigestUpdate(digestParam, chunkSize)) {
-                HAPVERIFY_LOG_ERROR("Copy Partial Buffer failed, count: %{public}d", chunkIndex);
-                return false;
-            }
-
-            int32_t digestLen = HapVerifyOpensslUtils::GetDigest(digestParam, out);
-            if (digestLen != digestParam.digestOutputSizeBytes) {
-                HAPVERIFY_LOG_ERROR("GetDigest failed len: %{public}d digestSizeBytes: %{public}d",
-                    digestLen, digestParam.digestOutputSizeBytes);
-                return false;
-            }
-            result.PutData(offset, reinterpret_cast<char*>(out), digestParam.digestOutputSizeBytes);
-            offset += digestLen;
-            chunkIndex++;
+    while (content->HasRemaining()) {
+        int32_t chunkSize = std::min(content->Remaining(), CHUNK_SIZE);
+        if (!InitDigestPrefix(digestParam, chunkContentPrefix, chunkSize)) {
+            HAPVERIFY_LOG_ERROR("InitDigestPrefix failed");
+            return false;
         }
+
+        if (!content->ReadDataAndDigestUpdate(digestParam, chunkSize)) {
+            HAPVERIFY_LOG_ERROR("Copy Partial Buffer failed");
+            return false;
+        }
+
+        int32_t digestLen = HapVerifyOpensslUtils::GetDigest(digestParam, out);
+        if (digestLen != digestParam.digestOutputSizeBytes) {
+            HAPVERIFY_LOG_ERROR("GetDigest failed len: %{public}d digestSizeBytes: %{public}d",
+                digestLen, digestParam.digestOutputSizeBytes);
+            return false;
+        }
+        result.PutData(offset, reinterpret_cast<char*>(out), digestParam.digestOutputSizeBytes);
+        offset += digestLen;
     }
     return true;
 }
 
+bool HapSigningBlockUtils::ComputeDigestsForEachChunk(const DigestParameter& digestParam,
+    DataSource* contents[], int32_t len, HapByteBuffer& result, int32_t& offset)
+{
+    for (int32_t i = 0; i < len; i++) {
+        if (!ComputeDigestsForDataSource(digestParam, contents[i], result, offset)) {
+            HAPVERIFY_LOG_ERROR("Compute digest failed");
+            return false;
+        }
+    }
+    return true;
+}
+
 DigestParameter HapSigningBlockUtils::GetDigestParameter(int32_t nId)
 {
     DigestParameter digestParam;
@@ -550,20 +611,20 @@ DigestParameter HapSigningBlockUtils::GetDigestParameter(int32_t nId)
     EVP_MD_CTX_init(digestParam.ptrCtx);
     return digestParam;
 }
-
+
 int32_t HapSigningBlockUtils::GetChunkCount(long long inputSize, long long chunkSize)
 {
     if (chunkSize <= 0 || inputSize > LLONG_MAX - chunkSize) {
         return 0;
     }
-
+
     long long res = (inputSize + chunkSize - 1) / chunkSize;
     if (res > INT_MAX || res < 0) {
         return 0;
     }
     return static_cast<int32_t>(res);
 }
-
+
 bool HapSigningBlockUtils::InitDigestPrefix(const DigestParameter& digestParam,
     unsigned char (&chunkContentPrefix)[ZIP_CHUNK_DIGEST_PRIFIX_LEN], int32_t chunkLen)
 {
@@ -571,12 +632,12 @@ bool HapSigningBlockUtils::InitDigestPrefix(const DigestParameter& digestParam,
         HAPVERIFY_LOG_ERROR("memcpy_s failed");
         return false;
     }
-
+
     if (!HapVerifyOpensslUtils::DigestInit(digestParam)) {
         HAPVERIFY_LOG_ERROR("DigestInit failed");
         return false;
     }
-
+
     if (!HapVerifyOpensslUtils::DigestUpdate(digestParam, chunkContentPrefix, ZIP_CHUNK_DIGEST_PRIFIX_LEN)) {
         HAPVERIFY_LOG_ERROR("DigestUpdate failed");
         return false;
-- 
Gitee
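
For reference, a minimal standalone sketch of the chunk partitioning that ComputeDigestsForContentsZip uses above: the contents-zip section is split into 1 MB chunks, each of the 4 threads gets a contiguous run of chunkNumToUpdate chunks, and every thread writes its chunk digests into a disjoint slice of one shared buffer, so no locking is needed beyond the final join(). Only the partitioning arithmetic is reproduced here with standard C++; CHUNK_SIZE, THREADS_NUM, DIGEST_LEN, main() and the placeholder digest loop are assumptions for this example, not code from the appverify library.

#include <algorithm>
#include <thread>
#include <vector>

constexpr long long CHUNK_SIZE = 1048576LL; // 1 MB chunks, as in the patch
constexpr int THREADS_NUM = 4;              // mirrors ZIP_UPDATE_DIGEST_THREADS_NUM
constexpr int DIGEST_LEN = 32;              // assumed digest size (e.g. SHA-256)

int main()
{
    long long fileSize = 10 * CHUNK_SIZE + 12345; // assumed example input size
    int chunkNum = static_cast<int>((fileSize + CHUNK_SIZE - 1) / CHUNK_SIZE);
    // Ceiling division: each thread handles a contiguous run of chunks.
    int chunkNumToUpdate = (chunkNum + THREADS_NUM - 1) / THREADS_NUM;

    // One shared output buffer; each thread writes only its own disjoint slice.
    std::vector<unsigned char> digests(static_cast<size_t>(chunkNum) * DIGEST_LEN);
    std::vector<std::thread> threads;
    for (int i = 0; i < THREADS_NUM; i++) {
        threads.emplace_back([&, i]() {
            long long begin = CHUNK_SIZE * chunkNumToUpdate * i;
            long long end = std::min(CHUNK_SIZE * chunkNumToUpdate * (i + 1), fileSize);
            if (begin >= end) {
                return; // small inputs: this thread has no chunks
            }
            // Thread i's digests start right after those of threads 0..i-1.
            size_t outPos = static_cast<size_t>(chunkNumToUpdate) * DIGEST_LEN * i;
            for (long long pos = begin; pos < end; pos += CHUNK_SIZE) {
                long long chunkLen = std::min(CHUNK_SIZE, end - pos);
                // Placeholder digest: the real code hashes file bytes [pos, pos + chunkLen).
                for (int b = 0; b < DIGEST_LEN; b++) {
                    digests[outPos + b] = static_cast<unsigned char>((pos + chunkLen + b) & 0xff);
                }
                outPos += DIGEST_LEN;
            }
        });
    }
    for (auto& t : threads) {
        t.join();
    }
    return 0;
}

Because thread i's output starts at the fixed position chunkNumToUpdate * DIGEST_LEN * i, the resulting layout matches the sequential chunk order, which is why the parallel path and the small-file sequential path in the patch can fill the same chunkDigest buffer interchangeably.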