diff --git a/common_components/objects/base_string_table.cpp b/common_components/objects/base_string_table.cpp
index 6c4151f940e5c27e683b2d4c5bdeb7affc9e16f4..ca891525572e0074ed3db04af6e8fd31c72d349d 100644
--- a/common_components/objects/base_string_table.cpp
+++ b/common_components/objects/base_string_table.cpp
@@ -196,17 +196,12 @@ BaseString* BaseStringTableInternal::TryGetInternString(const R
 template
 template >
-void BaseStringTableInternal::SweepWeakRef(const WeakRefFieldVisitor& visitor, uint32_t rootID,
+void BaseStringTableInternal::SweepWeakRef(const WeakRefFieldVisitor& visitor, uint32_t index,
                                            std::vector& waitDeleteEntries)
 {
-    ASSERT(rootID >= 0 && rootID < TrieMapConfig::ROOT_SIZE);
-    auto rootNode = stringTable_.root_[rootID].load(std::memory_order_relaxed);
-    if (rootNode == nullptr) {
-        return;
-    }
-    for (uint32_t index = 0; index < TrieMapConfig::INDIRECT_SIZE; ++index) {
-        stringTable_.ClearNodeFromGC(rootNode, index, visitor, waitDeleteEntries);
-    }
+    ASSERT(index >= 0 && index < TrieMapConfig::ROOT_SIZE);
+    HashTrieMapRoot* rootNode = stringTable_.root_.load(std::memory_order_relaxed);
+    stringTable_.ClearNodeFromGC(rootNode, index, visitor, waitDeleteEntries);
 }
 
 template
@@ -221,14 +216,10 @@ template >
 void BaseStringTableInternal::SweepWeakRef(const WeakRefFieldVisitor& visitor)
 {
     // No need lock here, only shared gc will sweep string table, meanwhile other threads are suspended.
-    for (uint32_t rootID = 0; rootID < TrieMapConfig::ROOT_SIZE; ++rootID) {
-        auto rootNode = stringTable_.root_[rootID].load(std::memory_order_relaxed);
-        if (rootNode == nullptr) {
-            continue;
-        }
-        for (uint32_t index = 0; index < TrieMapConfig::INDIRECT_SIZE; ++index) {
-            stringTable_.ClearNodeFromGC(rootNode, index, visitor);
-        }
+    for (uint32_t index = 0; index < TrieMapConfig::ROOT_SIZE; ++index) {
+        ASSERT(index >= 0 && index < TrieMapConfig::ROOT_SIZE);
+        HashTrieMapRoot* rootNode = stringTable_.root_.load(std::memory_order_relaxed);
+        stringTable_.ClearNodeFromGC(rootNode, index, visitor);
     }
 }
 
diff --git a/common_components/objects/string_table/hashtriemap-inl.h b/common_components/objects/string_table/hashtriemap-inl.h
index 925628b3304268446d04d7af1fbc924695667af3..cd2ee513307be2bb7097cef7b50367cb86d49292 100644
--- a/common_components/objects/string_table/hashtriemap-inl.h
+++ b/common_components/objects/string_table/hashtriemap-inl.h
@@ -30,6 +30,7 @@ template
 typename HashTrieMap::Node* HashTrieMap::Expand(
     Entry* oldEntry, Entry* newEntry, uint32_t oldHash, uint32_t newHash, uint32_t hashShift, Indirect* parent)
 {
+    ASSERT(hashShift > TrieMapConfig::ROOT_CHILDREN_LOG2);
     // Check for hash conflicts.
if (oldHash == newHash) { // Store the old entry in the overflow list of the new entry, and then store @@ -53,10 +54,10 @@ typename HashTrieMap::Node* HashTrieMap> hashShift) & TrieMapConfig::N_CHILDREN_MASK; - uint32_t newIdx = (newHash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK; + uint32_t oldIdx = (oldHash >> hashShift) & TrieMapConfig::INDIRECT_CHILDREN_MASK; + uint32_t newIdx = (newHash >> hashShift) & TrieMapConfig::INDIRECT_CHILDREN_MASK; if (oldIdx != newIdx) { newIndirect->GetChild(oldIdx).store(oldEntry, std::memory_order_release); newIndirect->GetChild(newIdx).store(newEntry, std::memory_order_release); @@ -77,19 +78,35 @@ BaseString* HashTrieMap::Load(ReadBarrier&& re BaseString* value) { uint32_t hash = key; - Indirect* current = GetRootAndProcessHash(hash); - - for (uint32_t hashShift = 0; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += - TrieMapConfig::N_CHILDREN_LOG2) { - size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK; + Root* currentRoot = root_.load(std::memory_order_relaxed); + // When on the first layer + size_t index = (hash >> 0) & TrieMapConfig::ROOT_CHILDREN_MASK; + std::atomic* slot = ¤tRoot->GetChild(index); + Node* node = slot->load(std::memory_order_acquire); + if (node != nullptr) { + if (node->IsEntry()) { + if (node->AsEntry()->Key() == key && + BaseString::StringsAreEqual(std::forward(readBarrier), + node->AsEntry()->Value(), value)) { + return node->AsEntry()->Value(); + } + } + } else { + return nullptr; + } + // When located after the first layer + for (uint32_t hashShift = TrieMapConfig::ROOT_CHILDREN_LOG2; hashShift < TrieMapConfig::TOTAL_HASH_BITS; + hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2) { + Indirect* currentIndirect = node->AsIndirect(); + size_t index = (hash >> hashShift) & TrieMapConfig::INDIRECT_CHILDREN_MASK; - std::atomic* slot = ¤t->GetChild(index); + std::atomic* slot = ¤tIndirect->GetChild(index); Node* node = slot->load(std::memory_order_acquire); if (node == nullptr) { return nullptr; } if (!node->IsEntry()) { - current = node->AsIndirect(); + currentIndirect = node->AsIndirect(); continue; } for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr; @@ -132,13 +149,49 @@ BaseString* HashTrieMap::LoadOrStore(ThreadHol [[maybe_unused]] bool haveInsertPoint = false; ReadOnlyHandle str; bool isStrCreated = false; // Flag to track whether an object has been created - Indirect* current = GetRootAndProcessHash(hash); + Root* currentRoot = root_.load(std::memory_order_acquire); + Indirect* currentIndirect = nullptr; while (true) { haveInsertPoint = false; // find the key or insert the candidate position. 
- for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::N_CHILDREN_LOG2) { - size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK; - slot = ¤t->GetChild(index); + // When on the first layer + if (hashShift == 0) { + size_t index = (hash >> hashShift) & TrieMapConfig::ROOT_CHILDREN_MASK; + slot = ¤tRoot->GetChild(index); + node = slot->load(std::memory_order_acquire); + + if (node == nullptr) { + haveInsertPoint = true; + break; + } + + if (!node->IsEntry()) { + Indirect* currentIndirect = node->AsIndirect(); + hashShift += TrieMapConfig::ROOT_CHILDREN_LOG2; // Move to next level + continue; + } + + for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr; + currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) { + auto oldValue = currentEntry->Value(); + if (IsNull(oldValue)) { + continue; + } + if (std::invoke(std::forward(equalsCallback), oldValue)) { +#if ECMASCRIPT_ENABLE_TRACE_STRING_TABLE + TraceFindSuccessDepth(hashShift); +#endif + return oldValue; + } + } + haveInsertPoint = true; + break; + } + // When located after the first layer + for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2) { + size_t index = (hash >> hashShift) & TrieMapConfig::INDIRECT_CHILDREN_MASK; + Indirect* currentIndirect = node->AsIndirect(); + slot = ¤tIndirect->GetChild(index); node = slot->load(std::memory_order_acquire); if (node == nullptr) { haveInsertPoint = true; @@ -147,7 +200,7 @@ BaseString* HashTrieMap::LoadOrStore(ThreadHol // Entry, Search in overflow if (!node->IsEntry()) { // Indirect, Next level Continue to search - current = node->AsIndirect(); + currentIndirect = node->AsIndirect(); continue; } for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr; @@ -191,8 +244,8 @@ BaseString* HashTrieMap::LoadOrStore(ThreadHol if constexpr (IsLock) { GetMutex().Unlock(); } - current = node->AsIndirect(); - hashShift += TrieMapConfig::N_CHILDREN_LOG2; + currentIndirect = node->AsIndirect(); + hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2; } #if ECMASCRIPT_ENABLE_TRACE_STRING_TABLE @@ -216,6 +269,9 @@ BaseString* HashTrieMap::LoadOrStore(ThreadHol if constexpr (IsLock) { GetMutex().Unlock(); } +#if ECMASCRIPT_ENABLE_TRACE_STRING_TABLE + TraceFindSuccessDepth(hashShift); +#endif return oldValue; } } @@ -233,7 +289,7 @@ BaseString* HashTrieMap::LoadOrStore(ThreadHol // Expand an existing entry to one or more new nodes. // Release the node, which will make both oldEntry and newEntry visible auto expandedNode = Expand(oldEntry, newEntry, - oldHash >> TrieMapConfig::ROOT_BIT, hash, hashShift, current); + oldHash >> TrieMapConfig::ROOT_CHILDREN_LOG2, hash, hashShift, currentIndirect); slot->store(expandedNode, std::memory_order_release); } if constexpr (IsLock) { @@ -259,13 +315,50 @@ BaseString* HashTrieMap::LoadOrStoreForJit(Thr Node* node = nullptr; [[maybe_unused]] bool haveInsertPoint = false; BaseString* value = nullptr; - Indirect* current = GetRootAndProcessHash(hash); + Root* currentRoot = root_.load(std::memory_order_acquire); + Indirect* currentIndirect = nullptr; while (true) { haveInsertPoint = false; // find the key or insert the candidate position. 
- for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::N_CHILDREN_LOG2) { - size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK; - slot = ¤t->GetChild(index); + // When on the first layer + if (hashShift == 0) { + size_t index = (hash >> hashShift) & TrieMapConfig::ROOT_CHILDREN_MASK; + slot = ¤tRoot->GetChild(index); + node = slot->load(std::memory_order_acquire); + + if (node == nullptr) { + haveInsertPoint = true; + break; + } + + if (!node->IsEntry()) { + Indirect* currentIndirect = node->AsIndirect(); + hashShift += TrieMapConfig::ROOT_CHILDREN_LOG2; // Move to next level + continue; + } + + for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr; + currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) { + auto oldValue = currentEntry->Value(); + if (IsNull(oldValue)) { + continue; + } + if (std::invoke(std::forward(equalsCallback), oldValue)) { +#if ECMASCRIPT_ENABLE_TRACE_STRING_TABLE + TraceFindSuccessDepth(hashShift); +#endif + return oldValue; + } + } + haveInsertPoint = true; + break; + } + // When located after the first layer + for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2) { + size_t index = (hash >> hashShift) & TrieMapConfig::INDIRECT_CHILDREN_MASK; + Indirect* currentIndirect = node->AsIndirect(); + slot = ¤tIndirect->GetChild(index); + node = slot->load(std::memory_order_acquire); if (node == nullptr) { haveInsertPoint = true; @@ -274,7 +367,7 @@ BaseString* HashTrieMap::LoadOrStoreForJit(Thr // Entry, Search in overflow if (!node->IsEntry()) { // Indirect, Next level Continue to search - current = node->AsIndirect(); + currentIndirect = node->AsIndirect(); continue; } for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr; @@ -308,8 +401,8 @@ BaseString* HashTrieMap::LoadOrStoreForJit(Thr } GetMutex().Unlock(); - current = node->AsIndirect(); - hashShift += TrieMapConfig::N_CHILDREN_LOG2; + currentIndirect = node->AsIndirect(); + hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2; } Entry* oldEntry = nullptr; @@ -344,7 +437,7 @@ BaseString* HashTrieMap::LoadOrStoreForJit(Thr // Expand an existing entry to one or more new nodes. 
// Release the node, which will make both oldEntry and newEntry visible auto expandedNode = Expand(oldEntry, newEntry, - oldHash >> TrieMapConfig::ROOT_BIT, hash, hashShift, current); + oldHash >> TrieMapConfig::ROOT_CHILDREN_LOG2, hash, hashShift, currentIndirect); slot->store(expandedNode, std::memory_order_release); } GetMutex().Unlock(); @@ -362,26 +455,60 @@ BaseString* HashTrieMap::StoreOrLoad(ThreadHol { HashTrieMapInUseScope mapInUse(this); uint32_t hash = key; - ProcessHash(hash); uint32_t hashShift = loadResult.hashShift; std::atomic* slot = loadResult.slot; Node* node = nullptr; [[maybe_unused]] bool haveInsertPoint = true; - Indirect* current = loadResult.current; + Indirect* currentIndirect = nullptr; ReadOnlyHandle str = std::invoke(std::forward(loaderCallback)); // lock and double-check GetMutex().LockWithThreadState(holder); node = slot->load(std::memory_order_acquire); if (node != nullptr && !node->IsEntry()) { GetMutex().Unlock(); - current = node->AsIndirect(); - hashShift += TrieMapConfig::N_CHILDREN_LOG2; + if (hashShift == 0) { + Root* currentRoot = root_.load(std::memory_order_acquire); + currentRoot = root_.load(std::memory_order_acquire); + size_t index = (hash >> hashShift) & TrieMapConfig::ROOT_CHILDREN_MASK; + slot = ¤tRoot->GetChild(index); + } else { + currentIndirect = node->AsIndirect(); + } while (true) { haveInsertPoint = false; // find the key or insert the candidate position. - for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::N_CHILDREN_LOG2) { - size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK; - slot = ¤t->GetChild(index); + // When on the first layer + if (hashShift == 0) { + node = slot->load(std::memory_order_acquire); + if (node == nullptr) { + haveInsertPoint = true; + break; + } + if (!node->IsEntry()) { + currentIndirect = node->AsIndirect(); + hashShift += TrieMapConfig::ROOT_CHILDREN_LOG2; + continue; + } + for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr; + currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) { + if (currentEntry->Key() == key) { + auto oldValue = currentEntry->Value(); + if (!IsNull(oldValue) && std::invoke(equalsCallback, oldValue)) { + GetMutex().Unlock(); +#if ECMASCRIPT_ENABLE_TRACE_STRING_TABLE + TraceFindSuccessDepth(hashShift); +#endif + return oldValue; + } + } + } + haveInsertPoint = true; + break; + } + // When located after the first layer + for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2) { + size_t index = (hash >> hashShift) & TrieMapConfig::INDIRECT_CHILDREN_MASK; + slot = ¤tIndirect->GetChild(index); node = slot->load(std::memory_order_acquire); if (node == nullptr) { haveInsertPoint = true; @@ -401,7 +528,7 @@ BaseString* HashTrieMap::StoreOrLoad(ThreadHol break; } // Indirect, Next level Continue to search - current = node->AsIndirect(); + currentIndirect = node->AsIndirect(); } #ifndef NDEBUG if (!haveInsertPoint) { @@ -417,8 +544,8 @@ BaseString* HashTrieMap::StoreOrLoad(ThreadHol break; } GetMutex().Unlock(); - current = node->AsIndirect(); - hashShift += TrieMapConfig::N_CHILDREN_LOG2; + currentIndirect = node->AsIndirect(); + hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2; } } Entry* oldEntry = nullptr; @@ -454,7 +581,7 @@ BaseString* HashTrieMap::StoreOrLoad(ThreadHol // Expand an existing entry to one or more new nodes. 
// Release the node, which will make both oldEntry and newEntry visible auto expandedNode = Expand(oldEntry, newEntry, - oldHash >> TrieMapConfig::ROOT_BIT, hash, hashShift, current); + oldHash >> TrieMapConfig::ROOT_CHILDREN_LOG2, hash, hashShift, currentIndirect); slot->store(expandedNode, std::memory_order_release); } @@ -469,15 +596,31 @@ HashTrieMapLoadResult HashTrieMap::Load(ReadBa const uint32_t key, BaseString* value) { uint32_t hash = key; - Indirect* current = GetRootAndProcessHash(hash); - for (uint32_t hashShift = 0; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += - TrieMapConfig::N_CHILDREN_LOG2) { - size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK; - - std::atomic* slot = ¤t->GetChild(index); + Root* currentRoot = root_.load(std::memory_order_relaxed); + // When on the first layer + size_t index = (hash >> 0) & TrieMapConfig::ROOT_CHILDREN_MASK; + std::atomic* slot = ¤tRoot->GetChild(index); + Node* node = slot->load(std::memory_order_acquire); + uint32_t hashShift = TrieMapConfig::ROOT_CHILDREN_LOG2; + if (node != nullptr) { + if (node->IsEntry()) { + if (node->AsEntry()->Key() == key && + BaseString::StringsAreEqual(std::forward(readBarrier), + node->AsEntry()->Value(), value)) { + return {node->AsEntry()->Value(), hashShift, slot}; + } + } + } else { + return {nullptr, hashShift, slot}; + } + // When located after the first layer + for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2) { + Indirect* currentIndirect = node->AsIndirect(); + size_t index = (hash >> hashShift) & TrieMapConfig::INDIRECT_CHILDREN_MASK; + std::atomic* slot = ¤tIndirect->GetChild(index); Node* node = slot->load(std::memory_order_acquire); if (node == nullptr) { - return {nullptr, current, hashShift, slot}; + return {nullptr, hashShift, slot}; } if (node->IsEntry()) { for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr; @@ -487,12 +630,12 @@ HashTrieMapLoadResult HashTrieMap::Load(ReadBa continue; } if (BaseString::StringsAreEqual(std::forward(readBarrier), oldValue, value)) { - return {oldValue, nullptr, hashShift, nullptr}; + return {oldValue, hashShift, nullptr}; } } - return {nullptr, current, hashShift, slot}; + return {nullptr, hashShift, slot}; } - current = node->AsIndirect(); + currentIndirect = node->AsIndirect(); } LOG_COMMON(FATAL) << "StringTable: ran out of hash bits while iterating"; @@ -507,19 +650,36 @@ HashTrieMapLoadResult HashTrieMap::Load(ReadBa uint32_t offset, uint32_t utf8Len) { uint32_t hash = key; - Indirect* current = GetRootAndProcessHash(hash); + Root* currentRoot = root_.load(std::memory_order_relaxed); + // When on the first layer const uint8_t* utf8Data = string->GetDataUtf8() + offset; - for (uint32_t hashShift = 0; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += - TrieMapConfig::N_CHILDREN_LOG2) { - size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK; - - std::atomic* slot = ¤t->GetChild(index); + size_t index = (hash >> 0) & TrieMapConfig::ROOT_CHILDREN_MASK; + std::atomic* slot = ¤tRoot->GetChild(index); + Node* node = slot->load(std::memory_order_acquire); + uint32_t hashShift = TrieMapConfig::ROOT_CHILDREN_LOG2; + if (node != nullptr) { + if (node->IsEntry()) { + if (node->AsEntry()->Key() == key && + BaseString::StringIsEqualUint8Data(std::forward(readBarrier), + node->AsEntry()->Value(), utf8Data, utf8Len, true)) { + return {node->AsEntry()->Value(), hashShift, slot}; + } + } + } else { + return {nullptr, hashShift, slot}; + } + // When 
located after the first layer + for (uint32_t hashShift = TrieMapConfig::ROOT_CHILDREN_LOG2; hashShift < TrieMapConfig::TOTAL_HASH_BITS; + hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2) { + Indirect* currentIndirect = node->AsIndirect(); + size_t index = (hash >> hashShift) & TrieMapConfig::INDIRECT_CHILDREN_MASK; + std::atomic* slot = ¤tIndirect->GetChild(index); Node* node = slot->load(std::memory_order_acquire); if (node == nullptr) { - return {nullptr, current, hashShift, slot}; + return {nullptr, hashShift, slot}; } if (!node->IsEntry()) { - current = node->AsIndirect(); + currentIndirect = node->AsIndirect(); continue; } for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr; @@ -530,10 +690,10 @@ HashTrieMapLoadResult HashTrieMap::Load(ReadBa } if (BaseString::StringIsEqualUint8Data(std::forward(readBarrier), oldValue, utf8Data, utf8Len, true)) { - return {oldValue, nullptr, hashShift, nullptr}; + return {oldValue, hashShift, nullptr}; } } - return {nullptr, current, hashShift, slot}; + return {nullptr, hashShift, slot}; } LOG_COMMON(FATAL) << "StringTable: ran out of hash bits while iterating"; @@ -551,12 +711,11 @@ BaseString* HashTrieMap::StoreOrLoad(ThreadHol { HashTrieMapInUseScope mapInUse(this); uint32_t hash = key; - ProcessHash(hash); uint32_t hashShift = loadResult.hashShift; std::atomic* slot = loadResult.slot; Node* node = nullptr; [[maybe_unused]] bool haveInsertPoint = true; - Indirect* current = loadResult.current; + Indirect* currentIndirect = nullptr; if constexpr (threadState) { GetMutex().LockWithThreadState(holder); } else { @@ -565,13 +724,52 @@ BaseString* HashTrieMap::StoreOrLoad(ThreadHol node = slot->load(std::memory_order_acquire); if (node != nullptr && !node->IsEntry()) { GetMutex().Unlock(); - current = node->AsIndirect(); - hashShift += TrieMapConfig::N_CHILDREN_LOG2; + if (hashShift == 0) { + Root* currentRoot = root_.load(std::memory_order_acquire); + currentRoot = root_.load(std::memory_order_acquire); + size_t index = (hash >> hashShift) & TrieMapConfig::ROOT_CHILDREN_MASK; + slot = ¤tRoot->GetChild(index); + } else { + currentIndirect = node->AsIndirect(); + } while (true) { haveInsertPoint = false; - for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::N_CHILDREN_LOG2) { - size_t index = (hash >> hashShift) & TrieMapConfig::N_CHILDREN_MASK; - slot = ¤t->GetChild(index); + // find the key or insert the candidate position. 
+ // When on the first layer + if (hashShift == 0) { + node = slot->load(std::memory_order_acquire); + if (node == nullptr) { + haveInsertPoint = true; + break; + } + if (!node->IsEntry()) { + currentIndirect = node->AsIndirect(); + hashShift += TrieMapConfig::ROOT_CHILDREN_LOG2; + continue; + } + for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr; + currentEntry = currentEntry->Overflow().load(std::memory_order_acquire)) { + if (currentEntry->Key() != key) { + continue; + } + BaseString* oldValue = currentEntry->Value(); + if (IsNull(oldValue)) { + continue; + } + if (BaseString::StringsAreEqual(std::forward(readBarrier), oldValue, *str)) { +#if ECMASCRIPT_ENABLE_TRACE_STRING_TABLE + TraceFindSuccessDepth(hashShift); +#endif + return oldValue; + } + } + haveInsertPoint = true; + break; + } + // When located after the first layer + for (; hashShift < TrieMapConfig::TOTAL_HASH_BITS; hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2) { + size_t index = (hash >> hashShift) & TrieMapConfig::INDIRECT_CHILDREN_MASK; + slot = ¤tIndirect->GetChild(index); node = slot->load(std::memory_order_acquire); if (node == nullptr) { haveInsertPoint = true; @@ -580,7 +778,7 @@ BaseString* HashTrieMap::StoreOrLoad(ThreadHol // Entry, Search in overflow if (!node->IsEntry()) { // Indirect, Next level Continue to search - current = node->AsIndirect(); + currentIndirect = node->AsIndirect(); continue; } for (Entry* currentEntry = node->AsEntry(); currentEntry != nullptr; @@ -614,8 +812,8 @@ BaseString* HashTrieMap::StoreOrLoad(ThreadHol break; } GetMutex().Unlock(); - current = node->AsIndirect(); - hashShift += TrieMapConfig::N_CHILDREN_LOG2; + currentIndirect = node->AsIndirect(); + hashShift += TrieMapConfig::INDIRECT_CHILDREN_LOG2; } } @@ -652,7 +850,7 @@ BaseString* HashTrieMap::StoreOrLoad(ThreadHol // Expand an existing entry to one or more new nodes. 
// Release the node, which will make both oldEntry and newEntry visible auto expandedNode = Expand(oldEntry, newEntry, - oldHash >> TrieMapConfig::ROOT_BIT, hash, hashShift, current); + oldHash >> TrieMapConfig::ROOT_CHILDREN_LOG2, hash, hashShift, currentIndirect); slot->store(expandedNode, std::memory_order_release); } GetMutex().Unlock(); @@ -673,6 +871,7 @@ bool HashTrieMap::CheckWeakRef(const WeakRootV entry->SetValue(reinterpret_cast(fwd)); LOG_COMMON(VERBOSE) << "StringTable: forward " << std::hex << object << " -> " << fwd; } + // object is still alive and has not been moved return false; } @@ -694,32 +893,52 @@ bool HashTrieMap::CheckValidity(ReadBarrier&& } template -template -void HashTrieMap::Iter(ReadBarrier&& readBarrier, Indirect* node, bool& isValid) +template +void HashTrieMap::IterCommon(ReadBarrier&& readBarrier, + typename std::conditional, Root*, Node*>::type node, bool& isValid) { - if (node == nullptr) - return; - - for (std::atomic& temp : node->children_) { - auto &child = reinterpret_cast&>(temp); - Node* childNode = child.load(std::memory_order_relaxed); - if (childNode == nullptr) - continue; - - if (!(childNode->IsEntry())) { - // Recursive traversal of indirect nodes - Iter(std::forward(readBarrier), childNode->AsIndirect(), isValid); - continue; + auto processNode = [this, &readBarrier, &isValid](Node* childNode) { + if (childNode == nullptr) { + return; } + if (!childNode->IsEntry()) { + IterCommon(std::forward(readBarrier), childNode->AsIndirect(), isValid); + return; + } + for (Entry* e = childNode->AsEntry(); e != nullptr; + e = e->Overflow().load(std::memory_order_relaxed)) { + auto value = e->Value(); + if (!IsNull(value) && + !CheckValidity(std::forward(readBarrier), value, isValid)) { + return; + } + } + }; - for (Entry* e = childNode->AsEntry(); e != nullptr; e = e->Overflow().load(std::memory_order_relaxed)) { - auto value = e->Value(); - if (!IsNull(value) && - !CheckValidity(std::forward(readBarrier), value, isValid)) { + if constexpr (std::is_same_v) { + for (std::atomic& child : node->GetChildren()) { + processNode(child.load(std::memory_order_relaxed)); + if (!isValid) { // Early exit on invalid return; } } - } + } else if constexpr (std::is_same_v) { + for (std::atomic& child : node->children_) { + processNode(child.load(std::memory_order_relaxed)); + if (!isValid) { // Early exit on invalid + return; + } + } + } +} + +template +template +void HashTrieMap::Iter(ReadBarrier&& readBarrier, + typename std::conditional, Root*, Node*>::type node, bool& isValid) +{ + // Adapter Layer, hide template parameter details + IterCommon(std::forward(readBarrier), node, isValid); } template @@ -744,20 +963,21 @@ bool HashTrieMap::CheckWeakRef(const WeakRefFi } template -template > -bool HashTrieMap::ClearNodeFromGC(Indirect* parent, int index, - const WeakRefFieldVisitor& visitor, - std::vector& waitDeleteEntries) +template > +bool HashTrieMap::ClearNodeWithBarrierMarkOnly(NodeType* node, int index, + const WeakRefFieldVisitor& visitor, + std::vector& waitDeleteEntries) { - // load sub-nodes - Node* child = parent->GetChild(index).load(std::memory_order_relaxed); + using TagType = std::conditional_t, RootTag, IndirectTag>; + Node* child = node->GetChild(index).load(std::memory_order_relaxed); if (child == nullptr) return true; if (child->IsEntry()) { // Processing the overflow linked list - for (Entry *prev = nullptr, *current = child->AsEntry(); current != nullptr; current = current-> - Overflow().load(std::memory_order_acquire)) { + for (Entry *prev = 
nullptr, *current = child->AsEntry(); current != nullptr; + current = current->Overflow().load(std::memory_order_acquire)) { if (!CheckWeakRef(visitor, current) && prev != nullptr) { prev->Overflow().store(current->Overflow().load(std::memory_order_acquire), std::memory_order_release); waitDeleteEntries.push_back(current); @@ -767,12 +987,21 @@ bool HashTrieMap::ClearNodeFromGC(Indirect* pa } return false; } else { - // Recursive processing of the Indirect node Indirect* indirect = child->AsIndirect(); uint32_t cleanCount = 0; - for (uint32_t i = 0; i < TrieMapConfig::INDIRECT_SIZE; ++i) { - if (ClearNodeFromGC(indirect, i, visitor, waitDeleteEntries)) { - cleanCount += 1; + if constexpr (std::is_same_v) { + for (uint32_t i = 0; i < TrieMapConfig::ROOT_CHILDREN; ++i) { + if (ClearNodeWithBarrierMarkOnly + (indirect, i, visitor, waitDeleteEntries)) { + cleanCount += 1; + } + } + } else { + for (uint32_t i = 0; i < TrieMapConfig::INDIRECT_SIZE; ++i) { + if (ClearNodeWithBarrierMarkOnly + (indirect, i, visitor, waitDeleteEntries)) { + cleanCount += 1; + } } } return false; @@ -780,15 +1009,28 @@ bool HashTrieMap::ClearNodeFromGC(Indirect* pa } template -template > -bool HashTrieMap::ClearNodeFromGC(Indirect* parent, int index, - const WeakRefFieldVisitor& visitor) +template > +bool HashTrieMap::ClearNodeFromGC(NodeType* parent, int index, + const WeakRefFieldVisitor& visitor, + std::vector& waitDeleteEntries) +{ + return ClearNodeWithBarrierMarkOnly(parent, index, visitor, waitDeleteEntries); +} + +template +template > +bool HashTrieMap::ClearNodeWithoutBarrierImmediate(NodeType* node, int index, + const WeakRefFieldVisitor& visitor) { + using TagType = std::conditional_t, RootTag, IndirectTag>; + Node* child = node->GetChild(index).load(std::memory_order_relaxed);; // load sub-nodes - Node* child = parent->GetChild(index).load(std::memory_order_relaxed); if (child == nullptr) { return true; } + if (child->IsEntry()) { Entry* entry = child->AsEntry(); // Processing the overflow linked list @@ -814,41 +1056,69 @@ bool HashTrieMap::ClearNodeFromGC(Indirect* pa if (e == nullptr) { // Delete the empty Entry node and update the parent reference delete entry; - parent->GetChild(index).store(nullptr, std::memory_order_relaxed); + node->GetChild(index).store(nullptr, std::memory_order_relaxed); return true; } // Delete the Entry node and update the parent reference delete entry; - parent->GetChild(index).store(e, std::memory_order_relaxed); + node->GetChild(index).store(e, std::memory_order_relaxed); } return false; } else { // Recursive processing of the Indirect node Indirect* indirect = child->AsIndirect(); uint32_t cleanCount = 0; - for (uint32_t i = 0; i < TrieMapConfig::INDIRECT_SIZE; ++i) { - if (ClearNodeFromGC(indirect, i, visitor)) { - cleanCount += 1; + if constexpr (std::is_same_v) { + for (uint32_t i = 0; i < TrieMapConfig::ROOT_CHILDREN; ++i) { + if (ClearNodeWithoutBarrierImmediate + (indirect, i, visitor)) { + cleanCount += 1; + } + } + // Check whether the root node is empty + if (cleanCount == TrieMapConfig::ROOT_CHILDREN) { + // Remove the empty Indirect and update the parent reference + delete indirect; + node->GetChild(index).store(nullptr, std::memory_order_relaxed); + return true; + } + } else { + for (uint32_t i = 0; i < TrieMapConfig::INDIRECT_SIZE; ++i) { + if (ClearNodeWithoutBarrierImmediate + (indirect, i, visitor)) { + cleanCount += 1; + } + } + // Check whether the indirect node is empty + if (cleanCount == TrieMapConfig::INDIRECT_SIZE) { + // Remove the empty Indirect 
and update the parent reference + delete indirect; + node->GetChild(index).store(nullptr, std::memory_order_relaxed); + return true; } - } - // Check whether the indirect node is empty - if (cleanCount == TrieMapConfig::INDIRECT_SIZE) { - // Remove the empty Indirect and update the parent reference - delete indirect; - parent->GetChild(index).store(nullptr, std::memory_order_relaxed); - return true; } return false; } } template -template > -bool HashTrieMap::ClearNodeFromGC(Indirect* parent, int index, - const WeakRootVisitor& visitor) +template > +bool HashTrieMap::ClearNodeFromGC(NodeType* node, int index, + const WeakRefFieldVisitor& visitor) { + return ClearNodeWithoutBarrierImmediate(node, index, visitor); +} + +template +template > +bool HashTrieMap::ClearNodeWithoutBarrierWithRootVisitor(NodeType* node, int index, + const WeakRootVisitor& visitor) +{ + using TagType = std::conditional_t, RootTag, IndirectTag>; // load sub-nodes - Node* child = parent->GetChild(index).load(std::memory_order_relaxed); + Node* child = node->GetChild(index).load(std::memory_order_relaxed);; if (child == nullptr) return true; @@ -876,32 +1146,58 @@ bool HashTrieMap::ClearNodeFromGC(Indirect* pa if (e == nullptr) { // Delete the empty Entry node and update the parent reference delete entry; - parent->GetChild(index).store(nullptr, std::memory_order_relaxed); + node->GetChild(index).store(nullptr, std::memory_order_relaxed); return true; } // Delete the Entry node and update the parent reference delete entry; - parent->GetChild(index).store(e, std::memory_order_relaxed); + node->GetChild(index).store(e, std::memory_order_relaxed); } return false; } else { // Recursive processing of the Indirect node Indirect* indirect = child->AsIndirect(); uint32_t cleanCount = 0; - for (uint32_t i = 0; i < TrieMapConfig::INDIRECT_SIZE; ++i) { - if (ClearNodeFromGC(indirect, i, visitor)) { - cleanCount += 1; + if constexpr (std::is_same_v) { + for (uint32_t i = 0; i < TrieMapConfig::ROOT_CHILDREN; ++i) { + if (ClearNodeWithoutBarrierWithRootVisitor + (indirect, i, visitor)) { + cleanCount += 1; + } } - } - // Check whether the indirect node is empty - if (cleanCount == TrieMapConfig::INDIRECT_SIZE && inuseCount_ == 0) { - // Remove the empty Indirect and update the parent reference - delete indirect; - parent->GetChild(index).store(nullptr, std::memory_order_relaxed); - return true; + // Check whether the root node is empty + if (cleanCount == TrieMapConfig::ROOT_CHILDREN) { + // Remove the empty Indirect and update the parent reference + delete indirect; + node->GetChild(index).store(nullptr, std::memory_order_relaxed); + return true; + } + } else { + for (uint32_t i = 0; i < TrieMapConfig::INDIRECT_SIZE; ++i) { + if (ClearNodeWithoutBarrierWithRootVisitor + (indirect, i, visitor)) { + cleanCount += 1; + } + } + // Check whether the indirect node is empty + if (cleanCount == TrieMapConfig::INDIRECT_SIZE && inuseCount_ == 0) { + // Remove the empty Indirect and update the parent reference + delete indirect; + node->GetChild(index).store(nullptr, std::memory_order_relaxed); + return true; + } } return false; } } + +template +template > +bool HashTrieMap::ClearNodeFromGC(NodeType* node, int index, + const WeakRootVisitor& visitor) +{ + return ClearNodeWithoutBarrierWithRootVisitor(node, index, visitor); +} } #endif //COMMON_COMPONENTS_OBJECTS_STRING_TABLE_HASHTRIEMAP_INL_H diff --git a/common_components/objects/string_table/hashtriemap.h b/common_components/objects/string_table/hashtriemap.h index 
bbb26a7e08316e5befaee4125dff2c007d6548d9..80fb65ed4e7366a2d1db81f743dd8ad8d3decef5 100644
--- a/common_components/objects/string_table/hashtriemap.h
+++ b/common_components/objects/string_table/hashtriemap.h
@@ -26,19 +26,25 @@ class TaggedObject;
 }
 namespace common {
+struct RootTag {};
+struct IndirectTag {};
+
+inline constexpr RootTag root_tag{};
+inline constexpr IndirectTag indirect_tag{};
 class TrieMapConfig {
 public:
-    static constexpr uint32_t ROOT_BIT = 11U;
-    static constexpr uint32_t ROOT_SIZE = (1 << ROOT_BIT);
-    static constexpr uint32_t ROOT_BIT_MASK = ROOT_SIZE - 1U;
-
-    static constexpr uint32_t N_CHILDREN_LOG2 = 3U;
-    static constexpr uint32_t TOTAL_HASH_BITS = 32U - ROOT_BIT;
-
-    static constexpr uint32_t N_CHILDREN = 1 << N_CHILDREN_LOG2;
-    static constexpr uint32_t N_CHILDREN_MASK = N_CHILDREN - 1U;
-
-    static constexpr uint32_t INDIRECT_SIZE = 8U; // 8: 2^3
+    static constexpr uint32_t ROOT_CHILDREN_LOG2 = 13U;
+    static constexpr uint32_t INDIRECT_CHILDREN_LOG2 = 3U;
+    static constexpr uint32_t TOTAL_HASH_BITS = 32U;
+
+    static constexpr uint32_t ROOT_CHILDREN = 1 << ROOT_CHILDREN_LOG2; // used for hash indexing and mask operations
+    static constexpr uint32_t ROOT_CHILDREN_MASK = ROOT_CHILDREN - 1;
+    static constexpr uint32_t INDIRECT_CHILDREN = 1 << INDIRECT_CHILDREN_LOG2;
+    static constexpr uint32_t INDIRECT_CHILDREN_MASK = INDIRECT_CHILDREN - 1U;
+
+    static constexpr uint32_t ROOT_SIZE = 8192U; // 8192: 2^13, used for array definition and memory allocation
+    static constexpr uint32_t ROOT_MASK = ROOT_SIZE - 1U;
+    static constexpr uint32_t INDIRECT_SIZE = 8U; // 8: 2^3
     static constexpr uint32_t INDIRECT_MASK = INDIRECT_SIZE - 1U;
 
     enum SlotBarrier {
@@ -67,13 +73,35 @@ public:
         return EntryBit::Decode(bitField);
     }
 
-    HashTrieMapEntry* AsEntry();
-    HashTrieMapIndirect* AsIndirect();
+    HashTrieMapEntry* AsEntry();       // safely converts this node to the entry type
+    HashTrieMapIndirect* AsIndirect(); // safely converts this node to the indirect type
+};
+
+class HashTrieMapRoot {
+public:
+    HashTrieMapRoot() : children_() {} // initializes the array; every element starts out as nullptr
+
+    ~HashTrieMapRoot()
+    {
+        for (auto& child : children_) {
+            HashTrieMapNode* node = child.load(std::memory_order_relaxed); // read the current value of the atomic with relaxed ordering
+            if (node != nullptr) {
+                delete node;
+            }
+        }
+    }
+
+    auto& GetChild(size_t index) {
+        return children_[index];
+    }
+
+private:
+    std::array<std::atomic<HashTrieMapNode*>, TrieMapConfig::ROOT_CHILDREN> children_;
 };
 
 class HashTrieMapEntry final : public HashTrieMapNode {
 public:
-    HashTrieMapEntry(BaseString* v) : overflow_(nullptr)
+    HashTrieMapEntry(BaseString* v) : overflow_(nullptr) // constructs the whole object; other members (such as overflow_) must be set in the initializer list; runs once, when the object is created
     {
         bitField_ = (ENTRY_TAG_MASK | reinterpret_cast(v));
     }
@@ -95,7 +123,7 @@ public:
             *reinterpret_cast*>((void*)(&value))));
     }
 
-    void SetValue(BaseString* v)
+    void SetValue(BaseString* v) // mutates an existing object; not part of construction; may be called many times
     {
         bitField_ = ENTRY_TAG_MASK | reinterpret_cast(v);
     }
@@ -116,20 +144,25 @@ public:
 
     explicit HashTrieMapIndirect() {}
 
+    // When a HashTrieMapIndirect is destroyed, walk all of its children and recursively delete
+    // every HashTrieMapNode they point to (entry or indirect), so that no memory is leaked.
     ~HashTrieMapIndirect()
     {
-        for (std::atomic& temp : children_) {
-            auto &child = reinterpret_cast&>(temp);
-            HashTrieMapNode* node = child.exchange(nullptr, std::memory_order_relaxed);
+        for (std::atomic& temp : children_) { // iterate over every atomic field in the children_ array
+            auto &child = reinterpret_cast&>(temp); // reinterpret it as an atomic node pointer
+            HashTrieMapNode* node = child.exchange(nullptr, std::memory_order_relaxed); // atomically take the current node pointer and reset the slot to nullptr
             if (node == nullptr) {
                 continue;
             }
-            if (!node->IsEntry()) {
+            // If the current child is not an entry (i.e. it is an indirect node),
+            // delete node->AsIndirect() recursively deletes that indirect node and all of its children.
+            if (!node->IsEntry()) {
                 delete node->AsIndirect();
                 continue;
             }
             HashTrieMapEntry* e = node->AsEntry();
             // Clear overflow chain
+            // Take the entry's overflow_ pointer, walk the whole overflow list deleting each node, and finally delete the entry itself.
             for (HashTrieMapEntry* current = e->Overflow().exchange(nullptr, std::memory_order_relaxed);
                  current != nullptr ;) {
@@ -152,7 +185,6 @@ public:
 
 struct HashTrieMapLoadResult {
     BaseString* value;
-    HashTrieMapIndirect* current;
     uint32_t hashShift;
     std::atomic* slot;
 };
@@ -175,16 +207,24 @@ public:
     using WeakRefFieldVisitor = std::function&)>;
     using WeakRootVisitor = std::function;
     using Node = HashTrieMapNode;
+    using Root = HashTrieMapRoot;
     using Indirect = HashTrieMapIndirect;
     using Entry = HashTrieMapEntry;
     using LoadResult = HashTrieMapLoadResult;
-    HashTrieMap() {}
+
+    struct RootTag;
+    struct IndirectTag;
+
+    HashTrieMap()
+    {
+        root_.store(new Root(), std::memory_order_relaxed);
+    }
 
     ~HashTrieMap()
     {
         Clear();
     };
-    
+
 #if ECMASCRIPT_ENABLE_TRACE_STRING_TABLE
     class StringTableTracer {
     public:
@@ -194,10 +234,10 @@ public:
             static StringTableTracer tracer;
             return tracer;
         }
-    
+
         NO_COPY_SEMANTIC_CC(StringTableTracer);
         NO_MOVE_SEMANTIC_CC(StringTableTracer);
-    
+
         void TraceFindSuccess(uint32_t hashShift)
         {
             totalDepth_.fetch_add(hashShift / TrieMapConfig::N_CHILDREN_LOG2 + 1, std::memory_order_relaxed);
@@ -207,33 +247,33 @@ public:
                 DumpWithLock(currentSuccess);
             }
         }
-    
+
        void TraceFindFail()
        {
            totalFailNum_.fetch_add(1, std::memory_order_relaxed);
        }
-    
+
    private:
        StringTableTracer() = default;
-    
+
        void DumpWithLock(uint64_t triggerPoint)
        {
            std::lock_guard lock(mu_);
-    
+
            if (triggerPoint >= lastDumpPoint_.load(std::memory_order_relaxed) + DUMP_THRESHOLD) {
                lastDumpPoint_ = triggerPoint;
                DumpInfo();
            }
        }
-    
+
        void DumpInfo() const
        {
            uint64_t depth = totalDepth_.load(std::memory_order_relaxed);
            uint64_t success = totalSuccessNum_.load(std::memory_order_relaxed);
            uint64_t fail = totalFailNum_.load(std::memory_order_relaxed);
-    
+
            double avgDepth = (static_cast(depth) / success);
-    
+
            LOG_COMMON(INFO) << "------------------------------------------------------------"
                << "---------------------------------------------------------";
            LOG_COMMON(INFO) << "StringTableTotalSuccessFindNum: " << success;
@@ -242,14 +282,14 @@ public:
            LOG_COMMON(INFO) << "------------------------------------------------------------"
                << "---------------------------------------------------------";
        }
-    
+
        std::mutex mu_;
        std::atomic totalDepth_{0};
        std::atomic totalSuccessNum_{0};
        std::atomic totalFailNum_{0};
        std::atomic lastDumpPoint_{0};
    };
-    
+
    void TraceFindSuccessDepth(uint32_t hashShift)
    {
        StringTableTracer::GetInstance().TraceFindSuccess(hashShift);
@@ -260,6 +300,7 @@ public:
        StringTableTracer::GetInstance().TraceFindFail();
    }
#endif
+
    template
    LoadResult Load(ReadBarrier&& readBarrier, const uint32_t key, BaseString* value);
@@ -282,74 +323,55 @@ public:
    template
    BaseString* LoadOrStoreForJit(ThreadHolder* holder, const uint32_t key, LoaderCallback loaderCallback,
                                  EqualsCallback equalsCallback);
-
-    static void ProcessHash(uint32_t &hash)
-    {
-        hash >>= TrieMapConfig::ROOT_BIT;
-    }
-
-    Indirect* GetRootAndProcessHash(uint32_t &hash)
-    {
-        uint32_t rootID = (hash & TrieMapConfig::ROOT_BIT_MASK);
-        hash >>= TrieMapConfig::ROOT_BIT;
-        auto root = root_[rootID].load(std::memory_order_acquire);
-        if (root != nullptr) {
-            return root;
-        } else {
-            Indirect* expected = nullptr;
-            Indirect* newRoot = new Indirect();
-
-            if (root_[rootID].compare_exchange_strong(expected, newRoot,
-                                                      std::memory_order_release,
std::memory_order_acquire)) { - return newRoot; - } else { - delete newRoot; - return expected; - } - } - } // All other threads have stopped due to the gc and Clear phases. // Therefore, the operations related to atoms in ClearNodeFromGc and Clear use std::memory_order_relaxed, // which ensures atomicity but does not guarantee memory order - template = 0> - bool ClearNodeFromGC(Indirect* parent, int index, const WeakRefFieldVisitor& visitor, + template > + bool ClearNodeWithBarrierMarkOnly(NodeType* node, int index, const WeakRefFieldVisitor& visitor, + std::vector& waitDeleteEntries); + template = 0> + bool ClearNodeFromGC(NodeType* node, int index, const WeakRefFieldVisitor& visitor, std::vector& waitDeleteEntries); - template > + bool ClearNodeWithoutBarrierImmediate(NodeType* node, int index, const WeakRefFieldVisitor& visitor); + template = 0> + bool ClearNodeFromGC(NodeType* node, int index, const WeakRefFieldVisitor& visitor); + + template > + bool ClearNodeWithoutBarrierWithRootVisitor(NodeType* node, int index, const WeakRootVisitor& visitor); + template = 0> - bool ClearNodeFromGC(Indirect* parent, int index, const WeakRefFieldVisitor& visitor); + bool ClearNodeFromGC(NodeType* node, int index, const WeakRootVisitor& visitor); - template = 0> - bool ClearNodeFromGC(Indirect* parent, int index, const WeakRootVisitor& visitor); // Iterator template void Range(ReadBarrier&& readBarrier, bool& isValid) { - for (uint32_t i = 0; i < TrieMapConfig::ROOT_SIZE; i++) { - Iter(std::forward(readBarrier), root_[i].load(std::memory_order_relaxed), isValid); - } + // Initiate the process and specify the root node type + Iter(std::forward(readBarrier), root_.load(), isValid); } void Clear() { - for (uint32_t i = 0; i < TrieMapConfig::ROOT_SIZE; i++) { - // The atom replaces the root node with nullptr and obtains the old root node - Indirect* oldRoot = root_[i].exchange(nullptr, std::memory_order_relaxed); - if (oldRoot != nullptr) { - // Clear the entire HashTreeMap based on the Indirect destructor - delete oldRoot; - } + // The atom replaces the root node with nullptr and obtains the old root node + Root* oldRoot = root_.exchange(nullptr, std::memory_order_relaxed); + if (oldRoot != nullptr) { + // Clear the entire HashTreeMap based on the Indirect destructor + delete oldRoot; } } // ut used - const std::atomic& GetRoot(uint32_t index) const + const std::atomic& GetRoot() const { - ASSERT(index < TrieMapConfig::ROOT_SIZE); - return root_[index]; + return root_; } void IncreaseInuseCount() @@ -390,7 +412,7 @@ public: isSweeping = false; GetMutex().Unlock(); } - std::atomic root_[TrieMapConfig::ROOT_SIZE] = {}; + std::atomic root_; private: Mutex mu_; std::vector waitFreeEntries_{}; @@ -399,8 +421,14 @@ private: template Node* Expand(Entry* oldEntry, Entry* newEntry, uint32_t oldHash, uint32_t newHash, uint32_t hashShift, Indirect* parent); - template - void Iter(ReadBarrier&& readBarrier, Indirect* node, bool& isValid); + template + void Iter(ReadBarrier&& readBarrier, + typename std::conditional, Root*, Node*>::type node, + bool& isValid); + template + void IterCommon(ReadBarrier&& readBarrier, + typename std::conditional, Root*, Node*>::type node, + bool& isValid); bool CheckWeakRef(const WeakRefFieldVisitor& visitor, Entry* entry); bool CheckWeakRef(const WeakRootVisitor& visitor, Entry* entry); diff --git a/common_components/objects/string_table_internal.h b/common_components/objects/string_table_internal.h index 
c46fef7b9398abdaddeedc3c43ef56502e922867..cc2f3a62072211c2a465df28ba3ffabbf673ad1f 100644
--- a/common_components/objects/string_table_internal.h
+++ b/common_components/objects/string_table_internal.h
@@ -164,7 +164,7 @@ public:
     }
 
     template = 0>
-    void SweepWeakRef(const WeakRefFieldVisitor& visitor, uint32_t rootID,
+    void SweepWeakRef(const WeakRefFieldVisitor& visitor, uint32_t index,
                       std::vector& waitDeleteEntries);
 
     template = 0>
diff --git a/ecmascript/ecma_string_table_optimization.cpp b/ecmascript/ecma_string_table_optimization.cpp
index eb1c8ea05ad802e34a9101a7e31087bfc5305e29..a761e1f76c200628414e674bd472ddba8f7fed09 100644
--- a/ecmascript/ecma_string_table_optimization.cpp
+++ b/ecmascript/ecma_string_table_optimization.cpp
@@ -13,6 +13,7 @@
  * limitations under the License.
  */
 
+#include "common_components/objects/string_table/hashtriemap.h"
 #include "ecmascript/ecma_string_table.h"
 
 #include "common_components/objects/string_table/hashtriemap-inl.h"
@@ -445,10 +446,10 @@ EcmaString* EcmaStringTableImpl::GetOrInternStringWithoutJSHandleForJit(
 }
 
 template
-void EcmaStringTableImpl::SweepWeakRef(const WeakRootVisitor &visitor, uint32_t rootID)
+void EcmaStringTableImpl::SweepWeakRef(const WeakRootVisitor &visitor, uint32_t index)
 {
-    ASSERT(rootID >= 0 && rootID < common::TrieMapConfig::ROOT_SIZE);
-    auto *root_node = stringTable_.root_[rootID].load(std::memory_order_relaxed);
+    ASSERT(index >= 0 && index < common::TrieMapConfig::ROOT_SIZE);
+    common::HashTrieMapRoot *root_node = stringTable_.root_.load(std::memory_order_relaxed);
     if (root_node == nullptr) {
         return;
     }
@@ -631,10 +632,10 @@ EcmaString *EcmaStringTable::TryGetInternString(JSThread *thread, const JSHandle
     return visitImpl([&](auto &impl) { return impl.TryGetInternString(thread, string); });
 }
 
-void EcmaStringTable::SweepWeakRef(const WeakRootVisitor& visitor, uint32_t rootID)
+void EcmaStringTable::SweepWeakRef(const WeakRootVisitor& visitor, uint32_t index)
 {
     if (std::holds_alternative>(impl_)) {
-        return std::get>(impl_).SweepWeakRef(visitor, rootID);
+        return std::get>(impl_).SweepWeakRef(visitor, index);
     }
     UNREACHABLE();
 }
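
Note (not part of the patch): the old table picked one of 2048 root tries with the low ROOT_BIT = 11 bits of the hash and consumed the remaining 21 bits three bits per Indirect level; the new code keeps a single HashTrieMapRoot whose 8192 children are selected by the low 13 bits, and every further Indirect level consumes 3 more bits until all 32 hash bits are used (at most seven Indirect levels below the root). Below is a minimal standalone sketch of the new index arithmetic, mirroring the shift/mask pattern in Load()/LoadOrStore(); the constants are copied from TrieMapConfig, while DumpPath and the sample hash are illustrative only.

// hashtriemap_path_sketch.cpp — illustrative only, not part of this change.
#include <cstdint>
#include <cstdio>

namespace {
// Values copied from TrieMapConfig as modified by this patch.
constexpr uint32_t ROOT_CHILDREN_LOG2 = 13U;
constexpr uint32_t INDIRECT_CHILDREN_LOG2 = 3U;
constexpr uint32_t TOTAL_HASH_BITS = 32U;
constexpr uint32_t ROOT_CHILDREN_MASK = (1U << ROOT_CHILDREN_LOG2) - 1U;
constexpr uint32_t INDIRECT_CHILDREN_MASK = (1U << INDIRECT_CHILDREN_LOG2) - 1U;

// Prints the child index consumed at each trie level for a given hash,
// following the same shift/mask pattern as Load()/LoadOrStore().
void DumpPath(uint32_t hash)
{
    // Level 0: the single Root node consumes the low 13 bits.
    std::printf("root child: %u\n", static_cast<unsigned>(hash & ROOT_CHILDREN_MASK));
    // Deeper levels: each Indirect node consumes 3 further bits,
    // until the full 32-bit hash has been used.
    for (uint32_t hashShift = ROOT_CHILDREN_LOG2; hashShift < TOTAL_HASH_BITS;
         hashShift += INDIRECT_CHILDREN_LOG2) {
        std::printf("indirect child (shift %2u): %u\n", static_cast<unsigned>(hashShift),
                    static_cast<unsigned>((hash >> hashShift) & INDIRECT_CHILDREN_MASK));
    }
}
}  // namespace

int main()
{
    DumpPath(0x9E3779B9U);  // arbitrary sample hash
    return 0;
}

Because the single Root is allocated once in the HashTrieMap constructor, the lookup fast path no longer has to lazily CAS-allocate a per-slot root trie the way the removed GetRootAndProcessHash() did.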