// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-collections-gen.h"

#include "src/builtins/builtins-constructor-gen.h"
#include "src/builtins/builtins-iterator-gen.h"
#include "src/builtins/builtins-utils-gen.h"
#include "src/codegen/code-stub-assembler.h"
#include "src/heap/factory-inl.h"
#include "src/heap/heap-inl.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/js-collection.h"
#include "src/objects/ordered-hash-table.h"

namespace v8 {
namespace internal {

using compiler::Node;
template <class T>
using TNode = compiler::TNode<T>;
template <class T>
using TVariable = compiler::TypedCodeAssemblerVariable<T>;

class BaseCollectionsAssembler : public CodeStubAssembler {
 public:
  explicit BaseCollectionsAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  virtual ~BaseCollectionsAssembler() = default;

 protected:
  enum Variant { kMap, kSet, kWeakMap, kWeakSet };

  // Adds an entry to a collection. For Maps, properly handles extracting the
  // key and value from the entry (see LoadKeyValue()).
  void AddConstructorEntry(Variant variant, TNode<Context> context,
                           TNode<Object> collection, TNode<Object> add_function,
                           TNode<Object> key_value,
                           Label* if_may_have_side_effects = nullptr,
                           Label* if_exception = nullptr,
                           TVariable<Object>* var_exception = nullptr);

  // Adds constructor entries to a collection. Choosing a fast path when
  // possible.
  void AddConstructorEntries(Variant variant, TNode<Context> context,
                             TNode<Context> native_context,
                             TNode<Object> collection,
                             TNode<Object> initial_entries);

  // Fast path for adding constructor entries. Assumes the entries are a fast
  // JS array (see CodeStubAssembler::BranchIfFastJSArray()).
  void AddConstructorEntriesFromFastJSArray(Variant variant,
                                            TNode<Context> context,
                                            TNode<Context> native_context,
                                            TNode<Object> collection,
                                            TNode<JSArray> fast_jsarray,
                                            Label* if_may_have_side_effects);

  // Adds constructor entries to a collection using the iterator protocol.
  void AddConstructorEntriesFromIterable(Variant variant,
                                         TNode<Context> context,
                                         TNode<Context> native_context,
                                         TNode<Object> collection,
                                         TNode<Object> iterable);

  // Constructs a collection instance. Choosing a fast path when possible.
  TNode<JSObject> AllocateJSCollection(TNode<Context> context,
                                       TNode<JSFunction> constructor,
                                       TNode<JSReceiver> new_target);

  // Fast path for constructing a collection instance if the constructor
  // function has not been modified.
  TNode<JSObject> AllocateJSCollectionFast(TNode<JSFunction> constructor);

  // Fallback for constructing a collection instance if the constructor
  // function has been modified.
  TNode<JSObject> AllocateJSCollectionSlow(TNode<Context> context,
                                           TNode<JSFunction> constructor,
                                           TNode<JSReceiver> new_target);

  // Allocates the backing store for a collection.
  virtual TNode<HeapObject> AllocateTable(
      Variant variant, TNode<Context> context,
      TNode<IntPtrT> at_least_space_for) = 0;

  // Main entry point for a collection constructor builtin.
  void GenerateConstructor(Variant variant,
                           Handle<String> constructor_function_name,
                           TNode<Object> new_target, TNode<IntPtrT> argc,
                           TNode<Context> context);

  // Retrieves the collection function that adds an entry. `set` for Maps and
  // `add` for Sets.
  TNode<Object> GetAddFunction(Variant variant, TNode<Context> context,
                               TNode<Object> collection);

  // Retrieves the collection constructor function.
  TNode<JSFunction> GetConstructor(Variant variant,
                                   TNode<Context> native_context);

  // Retrieves the initial collection function that adds an entry. Should only
  // be called when it is certain that a collection prototype's map hasn't been
  // changed.
  TNode<JSFunction> GetInitialAddFunction(Variant variant,
                                          TNode<Context> native_context);

  // Checks whether {collection}'s initial add/set function has been modified
  // (depending on {variant}, loaded from {native_context}).
  void GotoIfInitialAddFunctionModified(Variant variant,
                                        TNode<NativeContext> native_context,
                                        TNode<HeapObject> collection,
                                        Label* if_modified);

  // Gets root index for the name of the add/set function.
  RootIndex GetAddFunctionNameIndex(Variant variant);

  // Retrieves the offset to access the backing table from the collection.
  int GetTableOffset(Variant variant);

  // Estimates the number of entries the collection will have after adding the
  // entries passed in the constructor. AllocateTable() can use this to avoid
  // the time of growing/rehashing when adding the constructor entries.
  TNode<IntPtrT> EstimatedInitialSize(TNode<Object> initial_entries,
                                      TNode<BoolT> is_fast_jsarray);

  void GotoIfNotJSReceiver(Node* const obj, Label* if_not_receiver);

  // Determines whether the collection's prototype has been modified.
  TNode<BoolT> HasInitialCollectionPrototype(Variant variant,
                                             TNode<Context> native_context,
                                             TNode<Object> collection);

  // Gets the initial prototype map for given collection {variant}.
  TNode<Map> GetInitialCollectionPrototype(Variant variant,
                                           TNode<Context> native_context);

  // Loads an element from a fixed array. If the element is the hole, returns
  // `undefined`.
  TNode<Object> LoadAndNormalizeFixedArrayElement(TNode<FixedArray> elements,
                                                  TNode<IntPtrT> index);

  // Loads an element from a fixed double array. If the element is the hole,
  // returns `undefined`.
  TNode<Object> LoadAndNormalizeFixedDoubleArrayElement(
      TNode<HeapObject> elements, TNode<IntPtrT> index);
};

void BaseCollectionsAssembler::AddConstructorEntry(
    Variant variant, TNode<Context> context, TNode<Object> collection,
    TNode<Object> add_function, TNode<Object> key_value,
    Label* if_may_have_side_effects, Label* if_exception,
    TVariable<Object>* var_exception) {
  compiler::CodeAssemblerScopedExceptionHandler handler(this, if_exception,
                                                        var_exception);
  CSA_ASSERT(this, Word32BinaryNot(IsTheHole(key_value)));
  if (variant == kMap || variant == kWeakMap) {
    TorqueStructKeyValuePair pair =
        if_may_have_side_effects != nullptr
            ?
LoadKeyValuePairNoSideEffects(context, key_value, if_may_have_side_effects) : LoadKeyValuePair(context, key_value); Node* key_n = pair.key; Node* value_n = pair.value; CallJS(CodeFactory::Call(isolate()), context, add_function, collection, key_n, value_n); } else { DCHECK(variant == kSet || variant == kWeakSet); CallJS(CodeFactory::Call(isolate()), context, add_function, collection, key_value); } } void BaseCollectionsAssembler::AddConstructorEntries( Variant variant, TNode context, TNode native_context, TNode collection, TNode initial_entries) { TVARIABLE(BoolT, use_fast_loop, IsFastJSArrayWithNoCustomIteration(context, initial_entries)); TNode at_least_space_for = EstimatedInitialSize(initial_entries, use_fast_loop.value()); Label allocate_table(this, &use_fast_loop), exit(this), fast_loop(this), slow_loop(this, Label::kDeferred); Goto(&allocate_table); BIND(&allocate_table); { TNode table = AllocateTable(variant, context, at_least_space_for); StoreObjectField(collection, GetTableOffset(variant), table); GotoIf(IsNullOrUndefined(initial_entries), &exit); GotoIfInitialAddFunctionModified(variant, CAST(native_context), CAST(collection), &slow_loop); Branch(use_fast_loop.value(), &fast_loop, &slow_loop); } BIND(&fast_loop); { TNode initial_entries_jsarray = UncheckedCast(initial_entries); #if DEBUG CSA_ASSERT(this, IsFastJSArrayWithNoCustomIteration( context, initial_entries_jsarray)); TNode original_initial_entries_map = LoadMap(initial_entries_jsarray); #endif Label if_may_have_side_effects(this, Label::kDeferred); AddConstructorEntriesFromFastJSArray(variant, context, native_context, collection, initial_entries_jsarray, &if_may_have_side_effects); Goto(&exit); if (variant == kMap || variant == kWeakMap) { BIND(&if_may_have_side_effects); #if DEBUG { // Check that add/set function has not been modified. 
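        // No user JavaScript can have run since the earlier check in
        // AddConstructorEntries(): the fast path bails out to
        // if_may_have_side_effects *before* evaluating any user code, so the
        // add/set function cannot have been modified at this point.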
Label if_not_modified(this), if_modified(this); GotoIfInitialAddFunctionModified(variant, CAST(native_context), CAST(collection), &if_modified); Goto(&if_not_modified); BIND(&if_modified); Unreachable(); BIND(&if_not_modified); } CSA_ASSERT(this, TaggedEqual(original_initial_entries_map, LoadMap(initial_entries_jsarray))); #endif use_fast_loop = Int32FalseConstant(); Goto(&allocate_table); } } BIND(&slow_loop); { AddConstructorEntriesFromIterable(variant, context, native_context, collection, initial_entries); Goto(&exit); } BIND(&exit); } void BaseCollectionsAssembler::AddConstructorEntriesFromFastJSArray( Variant variant, TNode context, TNode native_context, TNode collection, TNode fast_jsarray, Label* if_may_have_side_effects) { TNode elements = LoadElements(fast_jsarray); TNode elements_kind = LoadElementsKind(fast_jsarray); TNode add_func = GetInitialAddFunction(variant, native_context); CSA_ASSERT(this, TaggedEqual(GetAddFunction(variant, native_context, collection), add_func)); CSA_ASSERT(this, IsFastJSArrayWithNoCustomIteration(context, fast_jsarray)); TNode length = SmiUntag(LoadFastJSArrayLength(fast_jsarray)); CSA_ASSERT(this, IntPtrGreaterThanOrEqual(length, IntPtrConstant(0))); CSA_ASSERT( this, HasInitialCollectionPrototype(variant, native_context, collection)); #if DEBUG TNode original_collection_map = LoadMap(CAST(collection)); TNode original_fast_js_array_map = LoadMap(fast_jsarray); #endif Label exit(this), if_doubles(this), if_smiorobjects(this); GotoIf(IntPtrEqual(length, IntPtrConstant(0)), &exit); Branch(IsFastSmiOrTaggedElementsKind(elements_kind), &if_smiorobjects, &if_doubles); BIND(&if_smiorobjects); { auto set_entry = [&](Node* index) { TNode element = LoadAndNormalizeFixedArrayElement( CAST(elements), UncheckedCast(index)); AddConstructorEntry(variant, context, collection, add_func, element, if_may_have_side_effects); }; // Instead of using the slower iteration protocol to iterate over the // elements, a fast loop is used. This assumes that adding an element // to the collection does not call user code that could mutate the elements // or collection. BuildFastLoop(IntPtrConstant(0), length, set_entry, 1, ParameterMode::INTPTR_PARAMETERS, IndexAdvanceMode::kPost); Goto(&exit); } BIND(&if_doubles); { // A Map constructor requires entries to be arrays (ex. [key, value]), // so a FixedDoubleArray can never succeed. 
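    // For example, `new Map([1.5, 2.5])` takes this path (the elements are
    // backed by a FixedDoubleArray) and must throw
    // "TypeError: Iterator value 1.5 is not an entry object".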
if (variant == kMap || variant == kWeakMap) { CSA_ASSERT(this, IntPtrGreaterThan(length, IntPtrConstant(0))); TNode element = LoadAndNormalizeFixedDoubleArrayElement(elements, IntPtrConstant(0)); ThrowTypeError(context, MessageTemplate::kIteratorValueNotAnObject, element); } else { DCHECK(variant == kSet || variant == kWeakSet); auto set_entry = [&](Node* index) { TNode entry = LoadAndNormalizeFixedDoubleArrayElement( elements, UncheckedCast(index)); AddConstructorEntry(variant, context, collection, add_func, entry); }; BuildFastLoop(IntPtrConstant(0), length, set_entry, 1, ParameterMode::INTPTR_PARAMETERS, IndexAdvanceMode::kPost); Goto(&exit); } } BIND(&exit); #if DEBUG CSA_ASSERT(this, TaggedEqual(original_collection_map, LoadMap(CAST(collection)))); CSA_ASSERT(this, TaggedEqual(original_fast_js_array_map, LoadMap(fast_jsarray))); #endif } void BaseCollectionsAssembler::AddConstructorEntriesFromIterable( Variant variant, TNode context, TNode native_context, TNode collection, TNode iterable) { Label exit(this), loop(this), if_exception(this, Label::kDeferred); CSA_ASSERT(this, Word32BinaryNot(IsNullOrUndefined(iterable))); TNode add_func = GetAddFunction(variant, context, collection); IteratorBuiltinsAssembler iterator_assembler(this->state()); TorqueStructIteratorRecord iterator = iterator_assembler.GetIterator(context, iterable); CSA_ASSERT(this, Word32BinaryNot(IsUndefined(iterator.object))); TNode fast_iterator_result_map = CAST( LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX)); TVARIABLE(Object, var_exception); Goto(&loop); BIND(&loop); { TNode next = iterator_assembler.IteratorStep( context, iterator, &exit, fast_iterator_result_map); TNode next_value = iterator_assembler.IteratorValue( context, next, fast_iterator_result_map); AddConstructorEntry(variant, context, collection, add_func, next_value, nullptr, &if_exception, &var_exception); Goto(&loop); } BIND(&if_exception); { iterator_assembler.IteratorCloseOnException(context, iterator, var_exception.value()); } BIND(&exit); } RootIndex BaseCollectionsAssembler::GetAddFunctionNameIndex(Variant variant) { switch (variant) { case kMap: case kWeakMap: return RootIndex::kset_string; case kSet: case kWeakSet: return RootIndex::kadd_string; } UNREACHABLE(); } void BaseCollectionsAssembler::GotoIfInitialAddFunctionModified( Variant variant, TNode native_context, TNode collection, Label* if_modified) { STATIC_ASSERT(JSCollection::kAddFunctionDescriptorIndex == JSWeakCollection::kAddFunctionDescriptorIndex); // TODO(jgruber): Investigate if this should also fall back to full prototype // verification. 
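  // The check below leans on property constness tracking: replacing the
  // add/set function on the initial prototype should either change the
  // prototype's map or downgrade the property's constness, both of which the
  // PrototypeCheckAssembler detects.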
static constexpr PrototypeCheckAssembler::Flags flags{ PrototypeCheckAssembler::kCheckPrototypePropertyConstness}; static constexpr int kNoContextIndex = -1; STATIC_ASSERT( (flags & PrototypeCheckAssembler::kCheckPrototypePropertyIdentity) == 0); using DescriptorIndexNameValue = PrototypeCheckAssembler::DescriptorIndexNameValue; DescriptorIndexNameValue property_to_check{ JSCollection::kAddFunctionDescriptorIndex, GetAddFunctionNameIndex(variant), kNoContextIndex}; PrototypeCheckAssembler prototype_check_assembler( state(), flags, native_context, GetInitialCollectionPrototype(variant, native_context), Vector(&property_to_check, 1)); TNode prototype = LoadMapPrototype(LoadMap(collection)); Label if_unmodified(this); prototype_check_assembler.CheckAndBranch(prototype, &if_unmodified, if_modified); BIND(&if_unmodified); } TNode BaseCollectionsAssembler::AllocateJSCollection( TNode context, TNode constructor, TNode new_target) { TNode is_target_unmodified = TaggedEqual(constructor, new_target); return Select( is_target_unmodified, [=] { return AllocateJSCollectionFast(constructor); }, [=] { return AllocateJSCollectionSlow(context, constructor, new_target); }); } TNode BaseCollectionsAssembler::AllocateJSCollectionFast( TNode constructor) { CSA_ASSERT(this, IsConstructorMap(LoadMap(constructor))); TNode initial_map = CAST(LoadJSFunctionPrototypeOrInitialMap(constructor)); return AllocateJSObjectFromMap(initial_map); } TNode BaseCollectionsAssembler::AllocateJSCollectionSlow( TNode context, TNode constructor, TNode new_target) { ConstructorBuiltinsAssembler constructor_assembler(this->state()); return constructor_assembler.EmitFastNewObject(context, constructor, new_target); } void BaseCollectionsAssembler::GenerateConstructor( Variant variant, Handle constructor_function_name, TNode new_target, TNode argc, TNode context) { const int kIterableArg = 0; CodeStubArguments args(this, argc); TNode iterable = args.GetOptionalArgumentValue(kIterableArg); Label if_undefined(this, Label::kDeferred); GotoIf(IsUndefined(new_target), &if_undefined); TNode native_context = LoadNativeContext(context); TNode collection = AllocateJSCollection( context, GetConstructor(variant, native_context), CAST(new_target)); AddConstructorEntries(variant, context, native_context, collection, iterable); Return(collection); BIND(&if_undefined); ThrowTypeError(context, MessageTemplate::kConstructorNotFunction, HeapConstant(constructor_function_name)); } TNode BaseCollectionsAssembler::GetAddFunction( Variant variant, TNode context, TNode collection) { Handle add_func_name = (variant == kMap || variant == kWeakMap) ? 
isolate()->factory()->set_string() : isolate()->factory()->add_string(); TNode add_func = GetProperty(context, collection, add_func_name); Label exit(this), if_notcallable(this, Label::kDeferred); GotoIf(TaggedIsSmi(add_func), &if_notcallable); GotoIfNot(IsCallable(CAST(add_func)), &if_notcallable); Goto(&exit); BIND(&if_notcallable); ThrowTypeError(context, MessageTemplate::kPropertyNotFunction, add_func, HeapConstant(add_func_name), collection); BIND(&exit); return add_func; } TNode BaseCollectionsAssembler::GetConstructor( Variant variant, TNode native_context) { int index; switch (variant) { case kMap: index = Context::JS_MAP_FUN_INDEX; break; case kSet: index = Context::JS_SET_FUN_INDEX; break; case kWeakMap: index = Context::JS_WEAK_MAP_FUN_INDEX; break; case kWeakSet: index = Context::JS_WEAK_SET_FUN_INDEX; break; } return CAST(LoadContextElement(native_context, index)); } TNode BaseCollectionsAssembler::GetInitialAddFunction( Variant variant, TNode native_context) { int index; switch (variant) { case kMap: index = Context::MAP_SET_INDEX; break; case kSet: index = Context::SET_ADD_INDEX; break; case kWeakMap: index = Context::WEAKMAP_SET_INDEX; break; case kWeakSet: index = Context::WEAKSET_ADD_INDEX; break; } return CAST(LoadContextElement(native_context, index)); } int BaseCollectionsAssembler::GetTableOffset(Variant variant) { switch (variant) { case kMap: return JSMap::kTableOffset; case kSet: return JSSet::kTableOffset; case kWeakMap: return JSWeakMap::kTableOffset; case kWeakSet: return JSWeakSet::kTableOffset; } UNREACHABLE(); } TNode BaseCollectionsAssembler::EstimatedInitialSize( TNode initial_entries, TNode is_fast_jsarray) { return Select( is_fast_jsarray, [=] { return SmiUntag(LoadFastJSArrayLength(CAST(initial_entries))); }, [=] { return IntPtrConstant(0); }); } void BaseCollectionsAssembler::GotoIfNotJSReceiver(Node* const obj, Label* if_not_receiver) { GotoIf(TaggedIsSmi(obj), if_not_receiver); GotoIfNot(IsJSReceiver(obj), if_not_receiver); } TNode BaseCollectionsAssembler::GetInitialCollectionPrototype( Variant variant, TNode native_context) { int initial_prototype_index; switch (variant) { case kMap: initial_prototype_index = Context::INITIAL_MAP_PROTOTYPE_MAP_INDEX; break; case kSet: initial_prototype_index = Context::INITIAL_SET_PROTOTYPE_MAP_INDEX; break; case kWeakMap: initial_prototype_index = Context::INITIAL_WEAKMAP_PROTOTYPE_MAP_INDEX; break; case kWeakSet: initial_prototype_index = Context::INITIAL_WEAKSET_PROTOTYPE_MAP_INDEX; break; } return CAST(LoadContextElement(native_context, initial_prototype_index)); } TNode BaseCollectionsAssembler::HasInitialCollectionPrototype( Variant variant, TNode native_context, TNode collection) { TNode collection_proto_map = LoadMap(LoadMapPrototype(LoadMap(CAST(collection)))); return TaggedEqual(collection_proto_map, GetInitialCollectionPrototype(variant, native_context)); } TNode BaseCollectionsAssembler::LoadAndNormalizeFixedArrayElement( TNode elements, TNode index) { TNode element = UnsafeLoadFixedArrayElement(elements, index); return Select(IsTheHole(element), [=] { return UndefinedConstant(); }, [=] { return element; }); } TNode BaseCollectionsAssembler::LoadAndNormalizeFixedDoubleArrayElement( TNode elements, TNode index) { TVARIABLE(Object, entry); Label if_hole(this, Label::kDeferred), next(this); TNode element = LoadFixedDoubleArrayElement(CAST(elements), index, MachineType::Float64(), 0, INTPTR_PARAMETERS, &if_hole); { // not hole entry = AllocateHeapNumberWithValue(element); Goto(&next); } BIND(&if_hole); { 
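    // A hole in a FixedDoubleArray has no tagged representation, so it is
    // normalized to undefined here, matching what iterating the holey array
    // would normally produce for the missing element.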
entry = UndefinedConstant(); Goto(&next); } BIND(&next); return entry.value(); } class CollectionsBuiltinsAssembler : public BaseCollectionsAssembler { public: explicit CollectionsBuiltinsAssembler(compiler::CodeAssemblerState* state) : BaseCollectionsAssembler(state) {} // Check whether |iterable| is a JS_MAP_KEY_ITERATOR_TYPE or // JS_MAP_VALUE_ITERATOR_TYPE object that is not partially consumed and still // has original iteration behavior. void BranchIfIterableWithOriginalKeyOrValueMapIterator(TNode iterable, TNode context, Label* if_true, Label* if_false); // Check whether |iterable| is a JS_SET_TYPE or JS_SET_VALUE_ITERATOR_TYPE // object that still has original iteration behavior. In case of the iterator, // the iterator also must not have been partially consumed. void BranchIfIterableWithOriginalValueSetIterator(TNode iterable, TNode context, Label* if_true, Label* if_false); protected: template Node* AllocateJSCollectionIterator(SloppyTNode context, int map_index, Node* collection); TNode AllocateTable(Variant variant, TNode context, TNode at_least_space_for) override; TNode GetHash(SloppyTNode const key); TNode CallGetHashRaw(SloppyTNode const key); TNode CallGetOrCreateHashRaw(SloppyTNode const key); // Transitions the iterator to the non obsolete backing store. // This is a NOP if the [table] is not obsolete. using UpdateInTransition = std::function; template std::pair, TNode> Transition( TNode const table, TNode const index, UpdateInTransition const& update_in_transition); template std::pair, TNode> TransitionAndUpdate( TNode const iterator); template std::tuple, TNode, TNode> NextSkipHoles( TNode table, TNode index, Label* if_end); // Specialization for Smi. // The {result} variable will contain the entry index if the key was found, // or the hash code otherwise. template void FindOrderedHashTableEntryForSmiKey(Node* table, SloppyTNode key_tagged, Variable* result, Label* entry_found, Label* not_found); void SameValueZeroSmi(SloppyTNode key_smi, SloppyTNode candidate_key, Label* if_same, Label* if_not_same); // Specialization for heap numbers. // The {result} variable will contain the entry index if the key was found, // or the hash code otherwise. void SameValueZeroHeapNumber(SloppyTNode key_float, SloppyTNode candidate_key, Label* if_same, Label* if_not_same); template void FindOrderedHashTableEntryForHeapNumberKey( SloppyTNode context, Node* table, SloppyTNode key_heap_number, Variable* result, Label* entry_found, Label* not_found); // Specialization for bigints. // The {result} variable will contain the entry index if the key was found, // or the hash code otherwise. void SameValueZeroBigInt(Node* key, Node* candidate_key, Label* if_same, Label* if_not_same); template void FindOrderedHashTableEntryForBigIntKey(SloppyTNode context, Node* table, Node* key, Variable* result, Label* entry_found, Label* not_found); // Specialization for string. // The {result} variable will contain the entry index if the key was found, // or the hash code otherwise. template void FindOrderedHashTableEntryForStringKey( SloppyTNode context, Node* table, SloppyTNode key_tagged, Variable* result, Label* entry_found, Label* not_found); TNode ComputeStringHash(TNode context, TNode string_key); void SameValueZeroString(SloppyTNode context, SloppyTNode key_string, SloppyTNode candidate_key, Label* if_same, Label* if_not_same); // Specialization for non-strings, non-numbers. For those we only need // reference equality to compare the keys. 
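  // (Such keys are JSReceivers, Symbols and Oddballs; for them SameValueZero
  // is plain identity, and Oddballs are singletons, so TaggedEqual suffices.)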
// The {result} variable will contain the entry index if the key was found, // or the hash code otherwise. If the hash-code has not been computed, it // should be Smi -1. template void FindOrderedHashTableEntryForOtherKey( SloppyTNode context, Node* table, SloppyTNode key, Variable* result, Label* entry_found, Label* not_found); template void TryLookupOrderedHashTableIndex(Node* const table, Node* const key, Node* const context, Variable* result, Label* if_entry_found, Label* if_not_found); Node* NormalizeNumberKey(Node* key); void StoreOrderedHashMapNewEntry(TNode const table, Node* const key, Node* const value, Node* const hash, Node* const number_of_buckets, Node* const occupancy); void StoreOrderedHashSetNewEntry(TNode const table, Node* const key, Node* const hash, Node* const number_of_buckets, Node* const occupancy); // Create a JSArray with PACKED_ELEMENTS kind from a Map.prototype.keys() or // Map.prototype.values() iterator. The iterator is assumed to satisfy // IterableWithOriginalKeyOrValueMapIterator. This function will skip the // iterator and iterate directly on the underlying hash table. In the end it // will update the state of the iterator to 'exhausted'. TNode MapIteratorToList(TNode context, TNode iterator); // Create a JSArray with PACKED_ELEMENTS kind from a Set.prototype.keys() or // Set.prototype.values() iterator, or a Set. The |iterable| is assumed to // satisfy IterableWithOriginalValueSetIterator. This function will skip the // iterator and iterate directly on the underlying hash table. In the end, if // |iterable| is an iterator, it will update the state of the iterator to // 'exhausted'. TNode SetOrSetIteratorToList(TNode context, TNode iterable); void BranchIfMapIteratorProtectorValid(Label* if_true, Label* if_false); void BranchIfSetIteratorProtectorValid(Label* if_true, Label* if_false); }; template Node* CollectionsBuiltinsAssembler::AllocateJSCollectionIterator( SloppyTNode context, int map_index, Node* collection) { TNode const table = LoadObjectField(collection, JSCollection::kTableOffset); TNode const native_context = LoadNativeContext(context); TNode const iterator_map = LoadContextElement(native_context, map_index); TNode const iterator = AllocateInNewSpace(IteratorType::kSize); StoreMapNoWriteBarrier(iterator, iterator_map); StoreObjectFieldRoot(iterator, IteratorType::kPropertiesOrHashOffset, RootIndex::kEmptyFixedArray); StoreObjectFieldRoot(iterator, IteratorType::kElementsOffset, RootIndex::kEmptyFixedArray); StoreObjectFieldNoWriteBarrier(iterator, IteratorType::kTableOffset, table); StoreObjectFieldNoWriteBarrier(iterator, IteratorType::kIndexOffset, SmiConstant(0)); return iterator; } TNode CollectionsBuiltinsAssembler::AllocateTable( Variant variant, TNode context, TNode at_least_space_for) { return CAST((variant == kMap || variant == kWeakMap) ? 
AllocateOrderedHashTable() : AllocateOrderedHashTable()); } TF_BUILTIN(MapConstructor, CollectionsBuiltinsAssembler) { TNode new_target = CAST(Parameter(Descriptor::kJSNewTarget)); TNode argc = ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount)); TNode context = CAST(Parameter(Descriptor::kContext)); GenerateConstructor(kMap, isolate()->factory()->Map_string(), new_target, argc, context); } TF_BUILTIN(SetConstructor, CollectionsBuiltinsAssembler) { TNode new_target = CAST(Parameter(Descriptor::kJSNewTarget)); TNode argc = ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount)); TNode context = CAST(Parameter(Descriptor::kContext)); GenerateConstructor(kSet, isolate()->factory()->Set_string(), new_target, argc, context); } TNode CollectionsBuiltinsAssembler::CallGetOrCreateHashRaw( SloppyTNode const key) { TNode const function_addr = ExternalConstant(ExternalReference::get_or_create_hash_raw()); TNode const isolate_ptr = ExternalConstant(ExternalReference::isolate_address(isolate())); MachineType type_ptr = MachineType::Pointer(); MachineType type_tagged = MachineType::AnyTagged(); Node* const result = CallCFunction(function_addr, type_tagged, std::make_pair(type_ptr, isolate_ptr), std::make_pair(type_tagged, key)); return CAST(result); } TNode CollectionsBuiltinsAssembler::CallGetHashRaw( SloppyTNode const key) { TNode const function_addr = ExternalConstant(ExternalReference::orderedhashmap_gethash_raw()); TNode const isolate_ptr = ExternalConstant(ExternalReference::isolate_address(isolate())); MachineType type_ptr = MachineType::Pointer(); MachineType type_tagged = MachineType::AnyTagged(); Node* const result = CallCFunction(function_addr, type_tagged, std::make_pair(type_ptr, isolate_ptr), std::make_pair(type_tagged, key)); return SmiUntag(result); } TNode CollectionsBuiltinsAssembler::GetHash( SloppyTNode const key) { TVARIABLE(IntPtrT, var_hash); Label if_receiver(this), if_other(this), done(this); Branch(IsJSReceiver(key), &if_receiver, &if_other); BIND(&if_receiver); { var_hash = LoadJSReceiverIdentityHash(key); Goto(&done); } BIND(&if_other); { var_hash = CallGetHashRaw(key); Goto(&done); } BIND(&done); return var_hash.value(); } void CollectionsBuiltinsAssembler::SameValueZeroSmi( SloppyTNode key_smi, SloppyTNode candidate_key, Label* if_same, Label* if_not_same) { // If the key is the same, we are done. GotoIf(TaggedEqual(candidate_key, key_smi), if_same); // If the candidate key is smi, then it must be different (because // we already checked for equality above). GotoIf(TaggedIsSmi(candidate_key), if_not_same); // If the candidate key is not smi, we still have to check if it is a // heap number with the same value. 
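  // (E.g. the Smi 1 and a heap number holding 1.0 must be treated as the same
  // key, since Map and Set compare keys with SameValueZero.)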
GotoIfNot(IsHeapNumber(CAST(candidate_key)), if_not_same); TNode const candidate_key_number = LoadHeapNumberValue(CAST(candidate_key)); TNode const key_number = SmiToFloat64(key_smi); GotoIf(Float64Equal(candidate_key_number, key_number), if_same); Goto(if_not_same); } void CollectionsBuiltinsAssembler::BranchIfMapIteratorProtectorValid( Label* if_true, Label* if_false) { TNode protector_cell = MapIteratorProtectorConstant(); DCHECK(isolate()->heap()->map_iterator_protector().IsPropertyCell()); Branch( TaggedEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset), SmiConstant(Isolate::kProtectorValid)), if_true, if_false); } void CollectionsBuiltinsAssembler:: BranchIfIterableWithOriginalKeyOrValueMapIterator(TNode iterator, TNode context, Label* if_true, Label* if_false) { Label if_key_or_value_iterator(this), extra_checks(this); // Check if iterator is a keys or values JSMapIterator. GotoIf(TaggedIsSmi(iterator), if_false); TNode iter_map = LoadMap(CAST(iterator)); TNode const instance_type = LoadMapInstanceType(iter_map); GotoIf(InstanceTypeEqual(instance_type, JS_MAP_KEY_ITERATOR_TYPE), &if_key_or_value_iterator); Branch(InstanceTypeEqual(instance_type, JS_MAP_VALUE_ITERATOR_TYPE), &if_key_or_value_iterator, if_false); BIND(&if_key_or_value_iterator); // Check that the iterator is not partially consumed. TNode const index = LoadObjectField(CAST(iterator), JSMapIterator::kIndexOffset); GotoIfNot(TaggedEqual(index, SmiConstant(0)), if_false); BranchIfMapIteratorProtectorValid(&extra_checks, if_false); BIND(&extra_checks); // Check if the iterator object has the original %MapIteratorPrototype%. TNode const native_context = LoadNativeContext(context); TNode const initial_map_iter_proto = LoadContextElement( native_context, Context::INITIAL_MAP_ITERATOR_PROTOTYPE_INDEX); TNode const map_iter_proto = LoadMapPrototype(iter_map); GotoIfNot(TaggedEqual(map_iter_proto, initial_map_iter_proto), if_false); // Check if the original MapIterator prototype has the original // %IteratorPrototype%. 
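  // Otherwise properties installed further up the prototype chain (e.g. a
  // custom next() or Symbol.iterator) could change the observable iteration.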
TNode const initial_iter_proto = LoadContextElement( native_context, Context::INITIAL_ITERATOR_PROTOTYPE_INDEX); TNode const iter_proto = LoadMapPrototype(LoadMap(map_iter_proto)); Branch(TaggedEqual(iter_proto, initial_iter_proto), if_true, if_false); } void BranchIfIterableWithOriginalKeyOrValueMapIterator( compiler::CodeAssemblerState* state, TNode iterable, TNode context, compiler::CodeAssemblerLabel* if_true, compiler::CodeAssemblerLabel* if_false) { CollectionsBuiltinsAssembler assembler(state); assembler.BranchIfIterableWithOriginalKeyOrValueMapIterator( iterable, context, if_true, if_false); } void CollectionsBuiltinsAssembler::BranchIfSetIteratorProtectorValid( Label* if_true, Label* if_false) { TNode const protector_cell = SetIteratorProtectorConstant(); DCHECK(isolate()->heap()->set_iterator_protector().IsPropertyCell()); Branch( TaggedEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset), SmiConstant(Isolate::kProtectorValid)), if_true, if_false); } void CollectionsBuiltinsAssembler::BranchIfIterableWithOriginalValueSetIterator( TNode iterable, TNode context, Label* if_true, Label* if_false) { Label if_set(this), if_value_iterator(this), check_protector(this); TVARIABLE(BoolT, var_result); GotoIf(TaggedIsSmi(iterable), if_false); TNode iterable_map = LoadMap(CAST(iterable)); TNode const instance_type = LoadMapInstanceType(iterable_map); GotoIf(InstanceTypeEqual(instance_type, JS_SET_TYPE), &if_set); Branch(InstanceTypeEqual(instance_type, JS_SET_VALUE_ITERATOR_TYPE), &if_value_iterator, if_false); BIND(&if_set); // Check if the set object has the original Set prototype. TNode const initial_set_proto = LoadContextElement( LoadNativeContext(context), Context::INITIAL_SET_PROTOTYPE_INDEX); TNode const set_proto = LoadMapPrototype(iterable_map); GotoIfNot(TaggedEqual(set_proto, initial_set_proto), if_false); Goto(&check_protector); BIND(&if_value_iterator); // Check that the iterator is not partially consumed. TNode const index = LoadObjectField(CAST(iterable), JSSetIterator::kIndexOffset); GotoIfNot(TaggedEqual(index, SmiConstant(0)), if_false); // Check if the iterator object has the original SetIterator prototype. TNode const native_context = LoadNativeContext(context); TNode const initial_set_iter_proto = LoadContextElement( native_context, Context::INITIAL_SET_ITERATOR_PROTOTYPE_INDEX); TNode const set_iter_proto = LoadMapPrototype(iterable_map); GotoIfNot(TaggedEqual(set_iter_proto, initial_set_iter_proto), if_false); // Check if the original SetIterator prototype has the original // %IteratorPrototype%. TNode const initial_iter_proto = LoadContextElement( native_context, Context::INITIAL_ITERATOR_PROTOTYPE_INDEX); TNode const iter_proto = LoadMapPrototype(LoadMap(set_iter_proto)); GotoIfNot(TaggedEqual(iter_proto, initial_iter_proto), if_false); Goto(&check_protector); BIND(&check_protector); BranchIfSetIteratorProtectorValid(if_true, if_false); } void BranchIfIterableWithOriginalValueSetIterator( compiler::CodeAssemblerState* state, TNode iterable, TNode context, compiler::CodeAssemblerLabel* if_true, compiler::CodeAssemblerLabel* if_false) { CollectionsBuiltinsAssembler assembler(state); assembler.BranchIfIterableWithOriginalValueSetIterator(iterable, context, if_true, if_false); } TNode CollectionsBuiltinsAssembler::MapIteratorToList( TNode context, TNode iterator) { // Transition the {iterator} table if necessary. 
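  // When an OrderedHashMap is grown, shrunk or cleared, the stale table keeps
  // a pointer to its replacement; live iterators are first rebased onto the
  // newest table (and their index healed) before the table is walked directly.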
TNode table; TNode index; std::tie(table, index) = TransitionAndUpdate(iterator); CSA_ASSERT(this, IntPtrEqual(index, IntPtrConstant(0))); TNode size = LoadAndUntagObjectField(table, OrderedHashMap::NumberOfElementsOffset()); const ElementsKind kind = PACKED_ELEMENTS; TNode array_map = LoadJSArrayElementsMap(kind, LoadNativeContext(context)); TNode array = AllocateJSArray(kind, array_map, size, SmiTag(size), nullptr, INTPTR_PARAMETERS, kAllowLargeObjectAllocation); TNode elements = CAST(LoadElements(array)); const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; TNode first_to_element_offset = ElementOffsetFromIndex(IntPtrConstant(0), kind, INTPTR_PARAMETERS, 0); VARIABLE( var_offset, MachineType::PointerRepresentation(), IntPtrAdd(first_to_element_offset, IntPtrConstant(first_element_offset))); TVARIABLE(IntPtrT, var_index, index); VariableList vars({&var_index, &var_offset}, zone()); Label done(this, {&var_index}), loop(this, vars), continue_loop(this, vars), write_key(this, vars), write_value(this, vars); Goto(&loop); BIND(&loop); { // Read the next entry from the {table}, skipping holes. TNode entry_key; TNode entry_start_position; TNode cur_index; std::tie(entry_key, entry_start_position, cur_index) = NextSkipHoles(table, var_index.value(), &done); // Decide to write key or value. Branch( InstanceTypeEqual(LoadInstanceType(iterator), JS_MAP_KEY_ITERATOR_TYPE), &write_key, &write_value); BIND(&write_key); { Store(elements, var_offset.value(), entry_key); Goto(&continue_loop); } BIND(&write_value); { CSA_ASSERT(this, InstanceTypeEqual(LoadInstanceType(iterator), JS_MAP_VALUE_ITERATOR_TYPE)); TNode entry_value = UnsafeLoadFixedArrayElement(table, entry_start_position, (OrderedHashMap::HashTableStartIndex() + OrderedHashMap::kValueOffset) * kTaggedSize); Store(elements, var_offset.value(), entry_value); Goto(&continue_loop); } BIND(&continue_loop); { // Increment the array offset and continue the loop to the next entry. var_index = cur_index; var_offset.Bind( IntPtrAdd(var_offset.value(), IntPtrConstant(kTaggedSize))); Goto(&loop); } } BIND(&done); // Set the {iterator} to exhausted. StoreObjectFieldRoot(iterator, JSMapIterator::kTableOffset, RootIndex::kEmptyOrderedHashMap); StoreObjectFieldNoWriteBarrier(iterator, JSMapIterator::kIndexOffset, SmiTag(var_index.value())); return UncheckedCast(array); } TF_BUILTIN(MapIteratorToList, CollectionsBuiltinsAssembler) { TNode context = CAST(Parameter(Descriptor::kContext)); TNode iterator = CAST(Parameter(Descriptor::kSource)); Return(MapIteratorToList(context, iterator)); } TNode CollectionsBuiltinsAssembler::SetOrSetIteratorToList( TNode context, TNode iterable) { TVARIABLE(OrderedHashSet, var_table); Label if_set(this), if_iterator(this), copy(this); TNode const instance_type = LoadInstanceType(CAST(iterable)); Branch(InstanceTypeEqual(instance_type, JS_SET_TYPE), &if_set, &if_iterator); BIND(&if_set); { // {iterable} is a JSSet. var_table = CAST(LoadObjectField(CAST(iterable), JSSet::kTableOffset)); Goto(©); } BIND(&if_iterator); { // {iterable} is a JSSetIterator. // Transition the {iterable} table if necessary. 
TNode iter_table; TNode iter_index; std::tie(iter_table, iter_index) = TransitionAndUpdate(CAST(iterable)); CSA_ASSERT(this, IntPtrEqual(iter_index, IntPtrConstant(0))); var_table = iter_table; Goto(©); } BIND(©); TNode table = var_table.value(); TNode size = LoadAndUntagObjectField(table, OrderedHashMap::NumberOfElementsOffset()); const ElementsKind kind = PACKED_ELEMENTS; TNode array_map = LoadJSArrayElementsMap(kind, LoadNativeContext(context)); TNode array = AllocateJSArray(kind, array_map, size, SmiTag(size), nullptr, INTPTR_PARAMETERS, kAllowLargeObjectAllocation); TNode elements = CAST(LoadElements(array)); const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; TNode first_to_element_offset = ElementOffsetFromIndex(IntPtrConstant(0), kind, INTPTR_PARAMETERS, 0); VARIABLE( var_offset, MachineType::PointerRepresentation(), IntPtrAdd(first_to_element_offset, IntPtrConstant(first_element_offset))); TVARIABLE(IntPtrT, var_index, IntPtrConstant(0)); Label done(this), finalize(this, {&var_index}), loop(this, {&var_index, &var_offset}); Goto(&loop); BIND(&loop); { // Read the next entry from the {table}, skipping holes. TNode entry_key; TNode entry_start_position; TNode cur_index; std::tie(entry_key, entry_start_position, cur_index) = NextSkipHoles(table, var_index.value(), &finalize); Store(elements, var_offset.value(), entry_key); var_index = cur_index; var_offset.Bind(IntPtrAdd(var_offset.value(), IntPtrConstant(kTaggedSize))); Goto(&loop); } BIND(&finalize); GotoIf(InstanceTypeEqual(instance_type, JS_SET_TYPE), &done); // Set the {iterable} to exhausted if it's an iterator. StoreObjectFieldRoot(iterable, JSSetIterator::kTableOffset, RootIndex::kEmptyOrderedHashSet); StoreObjectFieldNoWriteBarrier(iterable, JSSetIterator::kIndexOffset, SmiTag(var_index.value())); Goto(&done); BIND(&done); return UncheckedCast(array); } TF_BUILTIN(SetOrSetIteratorToList, CollectionsBuiltinsAssembler) { TNode context = CAST(Parameter(Descriptor::kContext)); TNode object = CAST(Parameter(Descriptor::kSource)); Return(SetOrSetIteratorToList(context, object)); } template void CollectionsBuiltinsAssembler::FindOrderedHashTableEntryForSmiKey( Node* table, SloppyTNode smi_key, Variable* result, Label* entry_found, Label* not_found) { TNode const key_untagged = SmiUntag(smi_key); TNode const hash = ChangeInt32ToIntPtr(ComputeUnseededHash(key_untagged)); CSA_ASSERT(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0))); result->Bind(hash); FindOrderedHashTableEntry( table, hash, [&](TNode other_key, Label* if_same, Label* if_not_same) { SameValueZeroSmi(smi_key, other_key, if_same, if_not_same); }, result, entry_found, not_found); } template void CollectionsBuiltinsAssembler::FindOrderedHashTableEntryForStringKey( SloppyTNode context, Node* table, SloppyTNode key_tagged, Variable* result, Label* entry_found, Label* not_found) { TNode const hash = ComputeStringHash(context, key_tagged); CSA_ASSERT(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0))); result->Bind(hash); FindOrderedHashTableEntry( table, hash, [&](TNode other_key, Label* if_same, Label* if_not_same) { SameValueZeroString(context, key_tagged, other_key, if_same, if_not_same); }, result, entry_found, not_found); } template void CollectionsBuiltinsAssembler::FindOrderedHashTableEntryForHeapNumberKey( SloppyTNode context, Node* table, SloppyTNode key_heap_number, Variable* result, Label* entry_found, Label* not_found) { TNode const hash = CallGetHashRaw(key_heap_number); CSA_ASSERT(this, IntPtrGreaterThanOrEqual(hash, 
IntPtrConstant(0))); result->Bind(hash); TNode const key_float = LoadHeapNumberValue(key_heap_number); FindOrderedHashTableEntry( table, hash, [&](TNode other_key, Label* if_same, Label* if_not_same) { SameValueZeroHeapNumber(key_float, other_key, if_same, if_not_same); }, result, entry_found, not_found); } template void CollectionsBuiltinsAssembler::FindOrderedHashTableEntryForBigIntKey( SloppyTNode context, Node* table, Node* key, Variable* result, Label* entry_found, Label* not_found) { TNode const hash = CallGetHashRaw(key); CSA_ASSERT(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0))); result->Bind(hash); FindOrderedHashTableEntry( table, hash, [&](TNode other_key, Label* if_same, Label* if_not_same) { SameValueZeroBigInt(key, other_key, if_same, if_not_same); }, result, entry_found, not_found); } template void CollectionsBuiltinsAssembler::FindOrderedHashTableEntryForOtherKey( SloppyTNode context, Node* table, SloppyTNode key, Variable* result, Label* entry_found, Label* not_found) { TNode const hash = GetHash(key); CSA_ASSERT(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0))); result->Bind(hash); FindOrderedHashTableEntry( table, hash, [&](TNode other_key, Label* if_same, Label* if_not_same) { Branch(TaggedEqual(key, other_key), if_same, if_not_same); }, result, entry_found, not_found); } TNode CollectionsBuiltinsAssembler::ComputeStringHash( TNode context, TNode string_key) { TVARIABLE(IntPtrT, var_result); Label hash_not_computed(this), done(this, &var_result); TNode const hash = ChangeInt32ToIntPtr(LoadNameHash(string_key, &hash_not_computed)); var_result = hash; Goto(&done); BIND(&hash_not_computed); var_result = CallGetHashRaw(string_key); Goto(&done); BIND(&done); return var_result.value(); } void CollectionsBuiltinsAssembler::SameValueZeroString( SloppyTNode context, SloppyTNode key_string, SloppyTNode candidate_key, Label* if_same, Label* if_not_same) { // If the candidate is not a string, the keys are not equal. GotoIf(TaggedIsSmi(candidate_key), if_not_same); GotoIfNot(IsString(CAST(candidate_key)), if_not_same); Branch(TaggedEqual(CallBuiltin(Builtins::kStringEqual, context, key_string, candidate_key), TrueConstant()), if_same, if_not_same); } void CollectionsBuiltinsAssembler::SameValueZeroBigInt(Node* key, Node* candidate_key, Label* if_same, Label* if_not_same) { CSA_ASSERT(this, IsBigInt(key)); GotoIf(TaggedIsSmi(candidate_key), if_not_same); GotoIfNot(IsBigInt(candidate_key), if_not_same); Branch(TaggedEqual(CallRuntime(Runtime::kBigIntEqualToBigInt, NoContextConstant(), key, candidate_key), TrueConstant()), if_same, if_not_same); } void CollectionsBuiltinsAssembler::SameValueZeroHeapNumber( SloppyTNode key_float, SloppyTNode candidate_key, Label* if_same, Label* if_not_same) { Label if_smi(this), if_keyisnan(this); GotoIf(TaggedIsSmi(candidate_key), &if_smi); GotoIfNot(IsHeapNumber(CAST(candidate_key)), if_not_same); { // {candidate_key} is a heap number. TNode const candidate_float = LoadHeapNumberValue(CAST(candidate_key)); GotoIf(Float64Equal(key_float, candidate_float), if_same); // SameValueZero needs to treat NaNs as equal. First check if {key_float} // is NaN. BranchIfFloat64IsNaN(key_float, &if_keyisnan, if_not_same); BIND(&if_keyisnan); { // Return true iff {candidate_key} is NaN. 
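        // (Float64Equal(x, x) is false exactly when x is NaN, so branching
        // with swapped targets implements the NaN check.)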
Branch(Float64Equal(candidate_float, candidate_float), if_not_same, if_same); } } BIND(&if_smi); { TNode const candidate_float = SmiToFloat64(CAST(candidate_key)); Branch(Float64Equal(key_float, candidate_float), if_same, if_not_same); } } TF_BUILTIN(OrderedHashTableHealIndex, CollectionsBuiltinsAssembler) { TNode table = CAST(Parameter(Descriptor::kTable)); TNode index = CAST(Parameter(Descriptor::kIndex)); Label return_index(this), return_zero(this); // Check if we need to update the {index}. GotoIfNot(SmiLessThan(SmiConstant(0), index), &return_zero); // Check if the {table} was cleared. STATIC_ASSERT(OrderedHashMap::NumberOfDeletedElementsOffset() == OrderedHashSet::NumberOfDeletedElementsOffset()); TNode number_of_deleted_elements = LoadAndUntagObjectField( table, OrderedHashMap::NumberOfDeletedElementsOffset()); STATIC_ASSERT(OrderedHashMap::kClearedTableSentinel == OrderedHashSet::kClearedTableSentinel); GotoIf(IntPtrEqual(number_of_deleted_elements, IntPtrConstant(OrderedHashMap::kClearedTableSentinel)), &return_zero); VARIABLE(var_i, MachineType::PointerRepresentation(), IntPtrConstant(0)); VARIABLE(var_index, MachineRepresentation::kTagged, index); Label loop(this, {&var_i, &var_index}); Goto(&loop); BIND(&loop); { Node* i = var_i.value(); GotoIfNot(IntPtrLessThan(i, number_of_deleted_elements), &return_index); STATIC_ASSERT(OrderedHashMap::RemovedHolesIndex() == OrderedHashSet::RemovedHolesIndex()); TNode removed_index = CAST(LoadFixedArrayElement( CAST(table), i, OrderedHashMap::RemovedHolesIndex() * kTaggedSize)); GotoIf(SmiGreaterThanOrEqual(removed_index, index), &return_index); Decrement(&var_index, 1, SMI_PARAMETERS); Increment(&var_i); Goto(&loop); } BIND(&return_index); Return(var_index.value()); BIND(&return_zero); Return(SmiConstant(0)); } template std::pair, TNode> CollectionsBuiltinsAssembler::Transition( TNode const table, TNode const index, UpdateInTransition const& update_in_transition) { TVARIABLE(IntPtrT, var_index, index); TVARIABLE(TableType, var_table, table); Label if_done(this), if_transition(this, Label::kDeferred); Branch(TaggedIsSmi( LoadObjectField(var_table.value(), TableType::NextTableOffset())), &if_done, &if_transition); BIND(&if_transition); { Label loop(this, {&var_table, &var_index}), done_loop(this); Goto(&loop); BIND(&loop); { TNode table = var_table.value(); TNode index = var_index.value(); TNode next_table = LoadObjectField(table, TableType::NextTableOffset()); GotoIf(TaggedIsSmi(next_table), &done_loop); var_table = CAST(next_table); var_index = SmiUntag( CAST(CallBuiltin(Builtins::kOrderedHashTableHealIndex, NoContextConstant(), table, SmiTag(index)))); Goto(&loop); } BIND(&done_loop); // Update with the new {table} and {index}. update_in_transition(var_table.value(), var_index.value()); Goto(&if_done); } BIND(&if_done); return {var_table.value(), var_index.value()}; } template std::pair, TNode> CollectionsBuiltinsAssembler::TransitionAndUpdate( TNode const iterator) { return Transition( CAST(LoadObjectField(iterator, IteratorType::kTableOffset)), LoadAndUntagObjectField(iterator, IteratorType::kIndexOffset), [this, iterator](Node* const table, Node* const index) { // Update the {iterator} with the new state. StoreObjectField(iterator, IteratorType::kTableOffset, table); StoreObjectFieldNoWriteBarrier(iterator, IteratorType::kIndexOffset, SmiTag(index)); }); } template std::tuple, TNode, TNode> CollectionsBuiltinsAssembler::NextSkipHoles(TNode table, TNode index, Label* if_end) { // Compute the used capacity for the {table}. 
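  // Deleted entries are only reclaimed on rehash and still occupy slots, so
  // used_capacity = number_of_elements + number_of_deleted_elements.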
TNode number_of_buckets = LoadAndUntagObjectField(table, TableType::NumberOfBucketsOffset()); TNode number_of_elements = LoadAndUntagObjectField(table, TableType::NumberOfElementsOffset()); TNode number_of_deleted_elements = LoadAndUntagObjectField( table, TableType::NumberOfDeletedElementsOffset()); TNode used_capacity = IntPtrAdd(number_of_elements, number_of_deleted_elements); TNode entry_key; TNode entry_start_position; TVARIABLE(IntPtrT, var_index, index); Label loop(this, &var_index), done_loop(this); Goto(&loop); BIND(&loop); { GotoIfNot(IntPtrLessThan(var_index.value(), used_capacity), if_end); entry_start_position = IntPtrAdd( IntPtrMul(var_index.value(), IntPtrConstant(TableType::kEntrySize)), number_of_buckets); entry_key = UnsafeLoadFixedArrayElement( table, entry_start_position, TableType::HashTableStartIndex() * kTaggedSize); Increment(&var_index); Branch(IsTheHole(entry_key), &loop, &done_loop); } BIND(&done_loop); return std::tuple, TNode, TNode>{ entry_key, entry_start_position, var_index.value()}; } TF_BUILTIN(MapPrototypeGet, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const key = Parameter(Descriptor::kKey); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_MAP_TYPE, "Map.prototype.get"); TNode const table = LoadObjectField(receiver, JSMap::kTableOffset); TNode index = CAST( CallBuiltin(Builtins::kFindOrderedHashMapEntry, context, table, key)); Label if_found(this), if_not_found(this); Branch(SmiGreaterThanOrEqual(index, SmiConstant(0)), &if_found, &if_not_found); BIND(&if_found); Return(LoadFixedArrayElement( CAST(table), SmiUntag(index), (OrderedHashMap::HashTableStartIndex() + OrderedHashMap::kValueOffset) * kTaggedSize)); BIND(&if_not_found); Return(UndefinedConstant()); } TF_BUILTIN(MapPrototypeHas, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const key = Parameter(Descriptor::kKey); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_MAP_TYPE, "Map.prototype.has"); TNode const table = LoadObjectField(receiver, JSMap::kTableOffset); TNode index = CAST( CallBuiltin(Builtins::kFindOrderedHashMapEntry, context, table, key)); Label if_found(this), if_not_found(this); Branch(SmiGreaterThanOrEqual(index, SmiConstant(0)), &if_found, &if_not_found); BIND(&if_found); Return(TrueConstant()); BIND(&if_not_found); Return(FalseConstant()); } Node* CollectionsBuiltinsAssembler::NormalizeNumberKey(Node* const key) { VARIABLE(result, MachineRepresentation::kTagged, key); Label done(this); GotoIf(TaggedIsSmi(key), &done); GotoIfNot(IsHeapNumber(key), &done); TNode const number = LoadHeapNumberValue(key); GotoIfNot(Float64Equal(number, Float64Constant(0.0)), &done); // We know the value is zero, so we take the key to be Smi 0. // Another option would be to normalize to Smi here. 
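  // This also canonicalizes -0 to +0, as required for SameValueZero keys:
  //   const m = new Map(); m.set(-0, 'x'); m.get(0) === 'x'  // true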
result.Bind(SmiConstant(0)); Goto(&done); BIND(&done); return result.value(); } TF_BUILTIN(MapPrototypeSet, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* key = Parameter(Descriptor::kKey); Node* const value = Parameter(Descriptor::kValue); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_MAP_TYPE, "Map.prototype.set"); key = NormalizeNumberKey(key); TNode const table = CAST(LoadObjectField(receiver, JSMap::kTableOffset)); VARIABLE(entry_start_position_or_hash, MachineType::PointerRepresentation(), IntPtrConstant(0)); Label entry_found(this), not_found(this); TryLookupOrderedHashTableIndex(table, key, context, &entry_start_position_or_hash, &entry_found, ¬_found); BIND(&entry_found); // If we found the entry, we just store the value there. StoreFixedArrayElement(table, entry_start_position_or_hash.value(), value, UPDATE_WRITE_BARRIER, kTaggedSize * (OrderedHashMap::HashTableStartIndex() + OrderedHashMap::kValueOffset)); Return(receiver); Label no_hash(this), add_entry(this), store_new_entry(this); BIND(¬_found); { // If we have a hash code, we can start adding the new entry. GotoIf(IntPtrGreaterThan(entry_start_position_or_hash.value(), IntPtrConstant(0)), &add_entry); // Otherwise, go to runtime to compute the hash code. entry_start_position_or_hash.Bind(SmiUntag(CallGetOrCreateHashRaw(key))); Goto(&add_entry); } BIND(&add_entry); VARIABLE(number_of_buckets, MachineType::PointerRepresentation()); VARIABLE(occupancy, MachineType::PointerRepresentation()); TVARIABLE(OrderedHashMap, table_var, table); { // Check we have enough space for the entry. number_of_buckets.Bind(SmiUntag(CAST(UnsafeLoadFixedArrayElement( table, OrderedHashMap::NumberOfBucketsIndex())))); STATIC_ASSERT(OrderedHashMap::kLoadFactor == 2); TNode const capacity = WordShl(number_of_buckets.value(), 1); TNode const number_of_elements = SmiUntag( CAST(LoadObjectField(table, OrderedHashMap::NumberOfElementsOffset()))); TNode const number_of_deleted = SmiUntag(CAST(LoadObjectField( table, OrderedHashMap::NumberOfDeletedElementsOffset()))); occupancy.Bind(IntPtrAdd(number_of_elements, number_of_deleted)); GotoIf(IntPtrLessThan(occupancy.value(), capacity), &store_new_entry); // We do not have enough space, grow the table and reload the relevant // fields. CallRuntime(Runtime::kMapGrow, context, receiver); table_var = CAST(LoadObjectField(receiver, JSMap::kTableOffset)); number_of_buckets.Bind(SmiUntag(CAST(UnsafeLoadFixedArrayElement( table_var.value(), OrderedHashMap::NumberOfBucketsIndex())))); TNode const new_number_of_elements = SmiUntag(CAST(LoadObjectField( table_var.value(), OrderedHashMap::NumberOfElementsOffset()))); TNode const new_number_of_deleted = SmiUntag(CAST(LoadObjectField( table_var.value(), OrderedHashMap::NumberOfDeletedElementsOffset()))); occupancy.Bind(IntPtrAdd(new_number_of_elements, new_number_of_deleted)); Goto(&store_new_entry); } BIND(&store_new_entry); // Store the key, value and connect the element to the bucket chain. 
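  // Each OrderedHashMap entry occupies OrderedHashMap::kEntrySize (= 3)
  // consecutive slots after the bucket heads: [key, value, chain], where chain
  // links to the previous head of the bucket that the hash maps to.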
StoreOrderedHashMapNewEntry(table_var.value(), key, value, entry_start_position_or_hash.value(), number_of_buckets.value(), occupancy.value()); Return(receiver); } void CollectionsBuiltinsAssembler::StoreOrderedHashMapNewEntry( TNode const table, Node* const key, Node* const value, Node* const hash, Node* const number_of_buckets, Node* const occupancy) { TNode const bucket = WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1))); TNode bucket_entry = CAST(UnsafeLoadFixedArrayElement( table, bucket, OrderedHashMap::HashTableStartIndex() * kTaggedSize)); // Store the entry elements. TNode const entry_start = IntPtrAdd( IntPtrMul(occupancy, IntPtrConstant(OrderedHashMap::kEntrySize)), number_of_buckets); UnsafeStoreFixedArrayElement( table, entry_start, key, UPDATE_WRITE_BARRIER, kTaggedSize * OrderedHashMap::HashTableStartIndex()); UnsafeStoreFixedArrayElement( table, entry_start, value, UPDATE_WRITE_BARRIER, kTaggedSize * (OrderedHashMap::HashTableStartIndex() + OrderedHashMap::kValueOffset)); UnsafeStoreFixedArrayElement( table, entry_start, bucket_entry, SKIP_WRITE_BARRIER, kTaggedSize * (OrderedHashMap::HashTableStartIndex() + OrderedHashMap::kChainOffset)); // Update the bucket head. UnsafeStoreFixedArrayElement( table, bucket, SmiTag(occupancy), SKIP_WRITE_BARRIER, OrderedHashMap::HashTableStartIndex() * kTaggedSize); // Bump the elements count. TNode const number_of_elements = CAST(LoadObjectField(table, OrderedHashMap::NumberOfElementsOffset())); StoreObjectFieldNoWriteBarrier(table, OrderedHashMap::NumberOfElementsOffset(), SmiAdd(number_of_elements, SmiConstant(1))); } TF_BUILTIN(MapPrototypeDelete, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* key = Parameter(Descriptor::kKey); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_MAP_TYPE, "Map.prototype.delete"); TNode const table = CAST(LoadObjectField(receiver, JSMap::kTableOffset)); VARIABLE(entry_start_position_or_hash, MachineType::PointerRepresentation(), IntPtrConstant(0)); Label entry_found(this), not_found(this); TryLookupOrderedHashTableIndex(table, key, context, &entry_start_position_or_hash, &entry_found, ¬_found); BIND(¬_found); Return(FalseConstant()); BIND(&entry_found); // If we found the entry, mark the entry as deleted. StoreFixedArrayElement(table, entry_start_position_or_hash.value(), TheHoleConstant(), UPDATE_WRITE_BARRIER, kTaggedSize * OrderedHashMap::HashTableStartIndex()); StoreFixedArrayElement(table, entry_start_position_or_hash.value(), TheHoleConstant(), UPDATE_WRITE_BARRIER, kTaggedSize * (OrderedHashMap::HashTableStartIndex() + OrderedHashMap::kValueOffset)); // Decrement the number of elements, increment the number of deleted elements. TNode const number_of_elements = SmiSub( CAST(LoadObjectField(table, OrderedHashMap::NumberOfElementsOffset())), SmiConstant(1)); StoreObjectFieldNoWriteBarrier( table, OrderedHashMap::NumberOfElementsOffset(), number_of_elements); TNode const number_of_deleted = SmiAdd(CAST(LoadObjectField( table, OrderedHashMap::NumberOfDeletedElementsOffset())), SmiConstant(1)); StoreObjectFieldNoWriteBarrier( table, OrderedHashMap::NumberOfDeletedElementsOffset(), number_of_deleted); TNode const number_of_buckets = CAST( LoadFixedArrayElement(table, OrderedHashMap::NumberOfBucketsIndex())); // If there fewer elements than #buckets / 2, shrink the table. 
Label shrink(this); GotoIf(SmiLessThan(SmiAdd(number_of_elements, number_of_elements), number_of_buckets), &shrink); Return(TrueConstant()); BIND(&shrink); CallRuntime(Runtime::kMapShrink, context, receiver); Return(TrueConstant()); } TF_BUILTIN(SetPrototypeAdd, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* key = Parameter(Descriptor::kKey); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_SET_TYPE, "Set.prototype.add"); key = NormalizeNumberKey(key); TNode const table = CAST(LoadObjectField(receiver, JSMap::kTableOffset)); VARIABLE(entry_start_position_or_hash, MachineType::PointerRepresentation(), IntPtrConstant(0)); Label entry_found(this), not_found(this); TryLookupOrderedHashTableIndex(table, key, context, &entry_start_position_or_hash, &entry_found, ¬_found); BIND(&entry_found); // The entry was found, there is nothing to do. Return(receiver); Label no_hash(this), add_entry(this), store_new_entry(this); BIND(¬_found); { // If we have a hash code, we can start adding the new entry. GotoIf(IntPtrGreaterThan(entry_start_position_or_hash.value(), IntPtrConstant(0)), &add_entry); // Otherwise, go to runtime to compute the hash code. entry_start_position_or_hash.Bind(SmiUntag(CallGetOrCreateHashRaw(key))); Goto(&add_entry); } BIND(&add_entry); VARIABLE(number_of_buckets, MachineType::PointerRepresentation()); VARIABLE(occupancy, MachineType::PointerRepresentation()); TVARIABLE(OrderedHashSet, table_var, table); { // Check we have enough space for the entry. number_of_buckets.Bind(SmiUntag(CAST(UnsafeLoadFixedArrayElement( table, OrderedHashSet::NumberOfBucketsIndex())))); STATIC_ASSERT(OrderedHashSet::kLoadFactor == 2); TNode const capacity = WordShl(number_of_buckets.value(), 1); TNode const number_of_elements = SmiUntag( CAST(LoadObjectField(table, OrderedHashSet::NumberOfElementsOffset()))); TNode const number_of_deleted = SmiUntag(CAST(LoadObjectField( table, OrderedHashSet::NumberOfDeletedElementsOffset()))); occupancy.Bind(IntPtrAdd(number_of_elements, number_of_deleted)); GotoIf(IntPtrLessThan(occupancy.value(), capacity), &store_new_entry); // We do not have enough space, grow the table and reload the relevant // fields. CallRuntime(Runtime::kSetGrow, context, receiver); table_var = CAST(LoadObjectField(receiver, JSMap::kTableOffset)); number_of_buckets.Bind(SmiUntag(CAST(UnsafeLoadFixedArrayElement( table_var.value(), OrderedHashSet::NumberOfBucketsIndex())))); TNode const new_number_of_elements = SmiUntag(CAST(LoadObjectField( table_var.value(), OrderedHashSet::NumberOfElementsOffset()))); TNode const new_number_of_deleted = SmiUntag(CAST(LoadObjectField( table_var.value(), OrderedHashSet::NumberOfDeletedElementsOffset()))); occupancy.Bind(IntPtrAdd(new_number_of_elements, new_number_of_deleted)); Goto(&store_new_entry); } BIND(&store_new_entry); // Store the key, value and connect the element to the bucket chain. StoreOrderedHashSetNewEntry(table_var.value(), key, entry_start_position_or_hash.value(), number_of_buckets.value(), occupancy.value()); Return(receiver); } void CollectionsBuiltinsAssembler::StoreOrderedHashSetNewEntry( TNode const table, Node* const key, Node* const hash, Node* const number_of_buckets, Node* const occupancy) { TNode const bucket = WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1))); TNode bucket_entry = CAST(UnsafeLoadFixedArrayElement( table, bucket, OrderedHashSet::HashTableStartIndex() * kTaggedSize)); // Store the entry elements. 
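  // OrderedHashSet entries are [key, chain] pairs (kEntrySize == 2); there is
  // no value slot, otherwise the layout mirrors OrderedHashMap above.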
TNode const entry_start = IntPtrAdd( IntPtrMul(occupancy, IntPtrConstant(OrderedHashSet::kEntrySize)), number_of_buckets); UnsafeStoreFixedArrayElement( table, entry_start, key, UPDATE_WRITE_BARRIER, kTaggedSize * OrderedHashSet::HashTableStartIndex()); UnsafeStoreFixedArrayElement( table, entry_start, bucket_entry, SKIP_WRITE_BARRIER, kTaggedSize * (OrderedHashSet::HashTableStartIndex() + OrderedHashSet::kChainOffset)); // Update the bucket head. UnsafeStoreFixedArrayElement( table, bucket, SmiTag(occupancy), SKIP_WRITE_BARRIER, OrderedHashSet::HashTableStartIndex() * kTaggedSize); // Bump the elements count. TNode const number_of_elements = CAST(LoadObjectField(table, OrderedHashSet::NumberOfElementsOffset())); StoreObjectFieldNoWriteBarrier(table, OrderedHashSet::NumberOfElementsOffset(), SmiAdd(number_of_elements, SmiConstant(1))); } TF_BUILTIN(SetPrototypeDelete, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* key = Parameter(Descriptor::kKey); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_SET_TYPE, "Set.prototype.delete"); TNode const table = CAST(LoadObjectField(receiver, JSMap::kTableOffset)); VARIABLE(entry_start_position_or_hash, MachineType::PointerRepresentation(), IntPtrConstant(0)); Label entry_found(this), not_found(this); TryLookupOrderedHashTableIndex(table, key, context, &entry_start_position_or_hash, &entry_found, &not_found); BIND(&not_found); Return(FalseConstant()); BIND(&entry_found); // If we found the entry, mark the entry as deleted. StoreFixedArrayElement(table, entry_start_position_or_hash.value(), TheHoleConstant(), UPDATE_WRITE_BARRIER, kTaggedSize * OrderedHashSet::HashTableStartIndex()); // Decrement the number of elements, increment the number of deleted elements. TNode const number_of_elements = SmiSub( CAST(LoadObjectField(table, OrderedHashSet::NumberOfElementsOffset())), SmiConstant(1)); StoreObjectFieldNoWriteBarrier( table, OrderedHashSet::NumberOfElementsOffset(), number_of_elements); TNode const number_of_deleted = SmiAdd(CAST(LoadObjectField( table, OrderedHashSet::NumberOfDeletedElementsOffset())), SmiConstant(1)); StoreObjectFieldNoWriteBarrier( table, OrderedHashSet::NumberOfDeletedElementsOffset(), number_of_deleted); TNode const number_of_buckets = CAST( LoadFixedArrayElement(table, OrderedHashSet::NumberOfBucketsIndex())); // If there are fewer elements than #buckets / 2, shrink the table.
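// Bookkeeping invariant for the ordered tables (see the load-factor check in
// SetPrototypeAdd above): occupancy, i.e. live plus deleted entries, never
// exceeds capacity, which is kLoadFactor (2) times the bucket count. Deletion
// only writes the_hole and adjusts the counters; compaction happens when the
// table grows or when the shrink path below is taken. Sketch of the invariant
// (illustrative only):
//
//   occupancy = number_of_elements + number_of_deleted;
//   capacity  = 2 * number_of_buckets;  // OrderedHashSet::kLoadFactor == 2
//   // grow before an insert if occupancy >= capacity,
//   // shrink after a delete if 2 * number_of_elements < number_of_buckets.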
Label shrink(this); GotoIf(SmiLessThan(SmiAdd(number_of_elements, number_of_elements), number_of_buckets), &shrink); Return(TrueConstant()); BIND(&shrink); CallRuntime(Runtime::kSetShrink, context, receiver); Return(TrueConstant()); } TF_BUILTIN(MapPrototypeEntries, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_MAP_TYPE, "Map.prototype.entries"); Return(AllocateJSCollectionIterator( context, Context::MAP_KEY_VALUE_ITERATOR_MAP_INDEX, receiver)); } TF_BUILTIN(MapPrototypeGetSize, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_MAP_TYPE, "get Map.prototype.size"); TNode const table = CAST(LoadObjectField(receiver, JSMap::kTableOffset)); Return(LoadObjectField(table, OrderedHashMap::NumberOfElementsOffset())); } TF_BUILTIN(MapPrototypeForEach, CollectionsBuiltinsAssembler) { const char* const kMethodName = "Map.prototype.forEach"; Node* const argc = Parameter(Descriptor::kJSActualArgumentsCount); Node* const context = Parameter(Descriptor::kContext); CodeStubArguments args(this, ChangeInt32ToIntPtr(argc)); TNode const receiver = args.GetReceiver(); TNode const callback = args.GetOptionalArgumentValue(0); TNode const this_arg = args.GetOptionalArgumentValue(1); ThrowIfNotInstanceType(context, receiver, JS_MAP_TYPE, kMethodName); // Ensure that {callback} is actually callable. Label callback_not_callable(this, Label::kDeferred); GotoIf(TaggedIsSmi(callback), &callback_not_callable); GotoIfNot(IsCallable(CAST(callback)), &callback_not_callable); TVARIABLE(IntPtrT, var_index, IntPtrConstant(0)); TVARIABLE(OrderedHashMap, var_table, CAST(LoadObjectField(CAST(receiver), JSMap::kTableOffset))); Label loop(this, {&var_index, &var_table}), done_loop(this); Goto(&loop); BIND(&loop); { // Transition {table} and {index} if there was any modification to // the {receiver} while we're iterating. TNode index = var_index.value(); TNode table = var_table.value(); std::tie(table, index) = Transition(table, index, [](Node*, Node*) {}); // Read the next entry from the {table}, skipping holes. TNode entry_key; TNode entry_start_position; std::tie(entry_key, entry_start_position, index) = NextSkipHoles(table, index, &done_loop); // Load the entry value as well. TNode entry_value = LoadFixedArrayElement( table, entry_start_position, (OrderedHashMap::HashTableStartIndex() + OrderedHashMap::kValueOffset) * kTaggedSize); // Invoke the {callback} passing the {entry_key}, {entry_value} and the // {receiver}. CallJS(CodeFactory::Call(isolate()), context, callback, this_arg, entry_value, entry_key, receiver); // Continue with the next entry. 
var_index = index; var_table = table; Goto(&loop); } BIND(&done_loop); args.PopAndReturn(UndefinedConstant()); BIND(&callback_not_callable); { CallRuntime(Runtime::kThrowCalledNonCallable, context, callback); Unreachable(); } } TF_BUILTIN(MapPrototypeKeys, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_MAP_TYPE, "Map.prototype.keys"); Return(AllocateJSCollectionIterator( context, Context::MAP_KEY_ITERATOR_MAP_INDEX, receiver)); } TF_BUILTIN(MapPrototypeValues, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_MAP_TYPE, "Map.prototype.values"); Return(AllocateJSCollectionIterator( context, Context::MAP_VALUE_ITERATOR_MAP_INDEX, receiver)); } TF_BUILTIN(MapIteratorPrototypeNext, CollectionsBuiltinsAssembler) { const char* const kMethodName = "Map Iterator.prototype.next"; Node* const receiver = Parameter(Descriptor::kReceiver); Node* const context = Parameter(Descriptor::kContext); // Ensure that the {receiver} is actually a JSMapIterator. Label if_receiver_valid(this), if_receiver_invalid(this, Label::kDeferred); GotoIf(TaggedIsSmi(receiver), &if_receiver_invalid); TNode const receiver_instance_type = LoadInstanceType(receiver); GotoIf( InstanceTypeEqual(receiver_instance_type, JS_MAP_KEY_VALUE_ITERATOR_TYPE), &if_receiver_valid); GotoIf(InstanceTypeEqual(receiver_instance_type, JS_MAP_KEY_ITERATOR_TYPE), &if_receiver_valid); Branch(InstanceTypeEqual(receiver_instance_type, JS_MAP_VALUE_ITERATOR_TYPE), &if_receiver_valid, &if_receiver_invalid); BIND(&if_receiver_invalid); ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver, StringConstant(kMethodName), receiver); BIND(&if_receiver_valid); // Check if the {receiver} is exhausted. VARIABLE(var_done, MachineRepresentation::kTagged, TrueConstant()); VARIABLE(var_value, MachineRepresentation::kTagged, UndefinedConstant()); Label return_value(this, {&var_done, &var_value}), return_entry(this), return_end(this, Label::kDeferred); // Transition the {receiver} table if necessary. TNode table; TNode index; std::tie(table, index) = TransitionAndUpdate(CAST(receiver)); // Read the next entry from the {table}, skipping holes. TNode entry_key; TNode entry_start_position; std::tie(entry_key, entry_start_position, index) = NextSkipHoles(table, index, &return_end); StoreObjectFieldNoWriteBarrier(receiver, JSMapIterator::kIndexOffset, SmiTag(index)); var_value.Bind(entry_key); var_done.Bind(FalseConstant()); // Check how to return the {key} (depending on {receiver} type). 
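// There are three Map iterator instance types, and they differ only in what
// next() hands back (see the keys()/values()/entries() builtins above):
//   JS_MAP_KEY_ITERATOR_TYPE       -> value is the key
//   JS_MAP_VALUE_ITERATOR_TYPE     -> value is the mapped value
//   JS_MAP_KEY_VALUE_ITERATOR_TYPE -> value is a [key, value] entry array
// The branches below pick the matching result shape for this receiver.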
GotoIf(InstanceTypeEqual(receiver_instance_type, JS_MAP_KEY_ITERATOR_TYPE), &return_value); var_value.Bind(LoadFixedArrayElement( table, entry_start_position, (OrderedHashMap::HashTableStartIndex() + OrderedHashMap::kValueOffset) * kTaggedSize)); Branch(InstanceTypeEqual(receiver_instance_type, JS_MAP_VALUE_ITERATOR_TYPE), &return_value, &return_entry); BIND(&return_entry); { Node* result = AllocateJSIteratorResultForEntry(context, entry_key, var_value.value()); Return(result); } BIND(&return_value); { TNode result = AllocateJSIteratorResult(context, var_value.value(), var_done.value()); Return(result); } BIND(&return_end); { StoreObjectFieldRoot(receiver, JSMapIterator::kTableOffset, RootIndex::kEmptyOrderedHashMap); Goto(&return_value); } } TF_BUILTIN(SetPrototypeHas, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const key = Parameter(Descriptor::kKey); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_SET_TYPE, "Set.prototype.has"); TNode const table = LoadObjectField(receiver, JSMap::kTableOffset); VARIABLE(entry_start_position, MachineType::PointerRepresentation(), IntPtrConstant(0)); VARIABLE(result, MachineRepresentation::kTaggedSigned, IntPtrConstant(0)); Label if_key_smi(this), if_key_string(this), if_key_heap_number(this), if_key_bigint(this), entry_found(this), not_found(this), done(this); GotoIf(TaggedIsSmi(key), &if_key_smi); TNode key_map = LoadMap(key); TNode key_instance_type = LoadMapInstanceType(key_map); GotoIf(IsStringInstanceType(key_instance_type), &if_key_string); GotoIf(IsHeapNumberMap(key_map), &if_key_heap_number); GotoIf(IsBigIntInstanceType(key_instance_type), &if_key_bigint); FindOrderedHashTableEntryForOtherKey( context, table, key, &entry_start_position, &entry_found, &not_found); BIND(&if_key_smi); { FindOrderedHashTableEntryForSmiKey( table, key, &entry_start_position, &entry_found, &not_found); } BIND(&if_key_string); { FindOrderedHashTableEntryForStringKey( context, table, key, &entry_start_position, &entry_found, &not_found); } BIND(&if_key_heap_number); { FindOrderedHashTableEntryForHeapNumberKey( context, table, key, &entry_start_position, &entry_found, &not_found); } BIND(&if_key_bigint); { FindOrderedHashTableEntryForBigIntKey( context, table, key, &entry_start_position, &entry_found, &not_found); } BIND(&entry_found); Return(TrueConstant()); BIND(&not_found); Return(FalseConstant()); } TF_BUILTIN(SetPrototypeEntries, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_SET_TYPE, "Set.prototype.entries"); Return(AllocateJSCollectionIterator( context, Context::SET_KEY_VALUE_ITERATOR_MAP_INDEX, receiver)); } TF_BUILTIN(SetPrototypeGetSize, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_SET_TYPE, "get Set.prototype.size"); TNode const table = CAST(LoadObjectField(receiver, JSSet::kTableOffset)); Return(LoadObjectField(table, OrderedHashSet::NumberOfElementsOffset())); } TF_BUILTIN(SetPrototypeForEach, CollectionsBuiltinsAssembler) { const char* const kMethodName = "Set.prototype.forEach"; Node* const argc = Parameter(Descriptor::kJSActualArgumentsCount); Node* const context = Parameter(Descriptor::kContext); CodeStubArguments args(this, ChangeInt32ToIntPtr(argc)); TNode const receiver =
args.GetReceiver(); TNode const callback = args.GetOptionalArgumentValue(0); TNode const this_arg = args.GetOptionalArgumentValue(1); ThrowIfNotInstanceType(context, receiver, JS_SET_TYPE, kMethodName); // Ensure that {callback} is actually callable. Label callback_not_callable(this, Label::kDeferred); GotoIf(TaggedIsSmi(callback), &callback_not_callable); GotoIfNot(IsCallable(CAST(callback)), &callback_not_callable); TVARIABLE(IntPtrT, var_index, IntPtrConstant(0)); TVARIABLE(OrderedHashSet, var_table, CAST(LoadObjectField(CAST(receiver), JSSet::kTableOffset))); Label loop(this, {&var_index, &var_table}), done_loop(this); Goto(&loop); BIND(&loop); { // Transition {table} and {index} if there was any modification to // the {receiver} while we're iterating. TNode index = var_index.value(); TNode table = var_table.value(); std::tie(table, index) = Transition(table, index, [](Node*, Node*) {}); // Read the next entry from the {table}, skipping holes. Node* entry_key; Node* entry_start_position; std::tie(entry_key, entry_start_position, index) = NextSkipHoles(table, index, &done_loop); // Invoke the {callback} passing the {entry_key} (twice) and the {receiver}. CallJS(CodeFactory::Call(isolate()), context, callback, this_arg, entry_key, entry_key, receiver); // Continue with the next entry. var_index = index; var_table = table; Goto(&loop); } BIND(&done_loop); args.PopAndReturn(UndefinedConstant()); BIND(&callback_not_callable); { CallRuntime(Runtime::kThrowCalledNonCallable, context, callback); Unreachable(); } } TF_BUILTIN(SetPrototypeValues, CollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const context = Parameter(Descriptor::kContext); ThrowIfNotInstanceType(context, receiver, JS_SET_TYPE, "Set.prototype.values"); Return(AllocateJSCollectionIterator( context, Context::SET_VALUE_ITERATOR_MAP_INDEX, receiver)); } TF_BUILTIN(SetIteratorPrototypeNext, CollectionsBuiltinsAssembler) { const char* const kMethodName = "Set Iterator.prototype.next"; Node* const receiver = Parameter(Descriptor::kReceiver); Node* const context = Parameter(Descriptor::kContext); // Ensure that the {receiver} is actually a JSSetIterator. Label if_receiver_valid(this), if_receiver_invalid(this, Label::kDeferred); GotoIf(TaggedIsSmi(receiver), &if_receiver_invalid); TNode const receiver_instance_type = LoadInstanceType(receiver); GotoIf(InstanceTypeEqual(receiver_instance_type, JS_SET_VALUE_ITERATOR_TYPE), &if_receiver_valid); Branch( InstanceTypeEqual(receiver_instance_type, JS_SET_KEY_VALUE_ITERATOR_TYPE), &if_receiver_valid, &if_receiver_invalid); BIND(&if_receiver_invalid); ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver, StringConstant(kMethodName), receiver); BIND(&if_receiver_valid); // Check if the {receiver} is exhausted. VARIABLE(var_done, MachineRepresentation::kTagged, TrueConstant()); VARIABLE(var_value, MachineRepresentation::kTagged, UndefinedConstant()); Label return_value(this, {&var_done, &var_value}), return_entry(this), return_end(this, Label::kDeferred); // Transition the {receiver} table if necessary. TNode table; TNode index; std::tie(table, index) = TransitionAndUpdate(CAST(receiver)); // Read the next entry from the {table}, skipping holes. 
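// NextSkipHoles advances the index past entries whose key slot holds the_hole
// (left behind by delete) and jumps to the given label once the used part of
// the table is exhausted. Rough model of the helper (illustrative sketch
// only, not compiled; the CSA helper returns the index already advanced past
// the entry it hands back):
//
//   while (index < number_of_elements + number_of_deleted) {
//     entry_start = index * kEntrySize + number_of_buckets;
//     key = table[HashTableStartIndex() + entry_start];
//     index++;
//     if (key != the_hole) return {key, entry_start, index};
//   }
//   goto return_end;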
Node* entry_key; Node* entry_start_position; std::tie(entry_key, entry_start_position, index) = NextSkipHoles(table, index, &return_end); StoreObjectFieldNoWriteBarrier(receiver, JSSetIterator::kIndexOffset, SmiTag(index)); var_value.Bind(entry_key); var_done.Bind(FalseConstant()); // Check how to return the {key} (depending on {receiver} type). Branch(InstanceTypeEqual(receiver_instance_type, JS_SET_VALUE_ITERATOR_TYPE), &return_value, &return_entry); BIND(&return_entry); { Node* result = AllocateJSIteratorResultForEntry(context, var_value.value(), var_value.value()); Return(result); } BIND(&return_value); { TNode result = AllocateJSIteratorResult(context, var_value.value(), var_done.value()); Return(result); } BIND(&return_end); { StoreObjectFieldRoot(receiver, JSSetIterator::kTableOffset, RootIndex::kEmptyOrderedHashSet); Goto(&return_value); } } template void CollectionsBuiltinsAssembler::TryLookupOrderedHashTableIndex( Node* const table, Node* const key, Node* const context, Variable* result, Label* if_entry_found, Label* if_not_found) { Label if_key_smi(this), if_key_string(this), if_key_heap_number(this), if_key_bigint(this); GotoIf(TaggedIsSmi(key), &if_key_smi); TNode key_map = LoadMap(key); TNode key_instance_type = LoadMapInstanceType(key_map); GotoIf(IsStringInstanceType(key_instance_type), &if_key_string); GotoIf(IsHeapNumberMap(key_map), &if_key_heap_number); GotoIf(IsBigIntInstanceType(key_instance_type), &if_key_bigint); FindOrderedHashTableEntryForOtherKey( context, table, key, result, if_entry_found, if_not_found); BIND(&if_key_smi); { FindOrderedHashTableEntryForSmiKey( table, key, result, if_entry_found, if_not_found); } BIND(&if_key_string); { FindOrderedHashTableEntryForStringKey( context, table, key, result, if_entry_found, if_not_found); } BIND(&if_key_heap_number); { FindOrderedHashTableEntryForHeapNumberKey( context, table, key, result, if_entry_found, if_not_found); } BIND(&if_key_bigint); { FindOrderedHashTableEntryForBigIntKey( context, table, key, result, if_entry_found, if_not_found); } } TF_BUILTIN(FindOrderedHashMapEntry, CollectionsBuiltinsAssembler) { Node* const table = Parameter(Descriptor::kTable); Node* const key = Parameter(Descriptor::kKey); Node* const context = Parameter(Descriptor::kContext); VARIABLE(entry_start_position, MachineType::PointerRepresentation(), IntPtrConstant(0)); Label entry_found(this), not_found(this); TryLookupOrderedHashTableIndex( table, key, context, &entry_start_position, &entry_found, &not_found); BIND(&entry_found); Return(SmiTag(entry_start_position.value())); BIND(&not_found); Return(SmiConstant(-1)); } class WeakCollectionsBuiltinsAssembler : public BaseCollectionsAssembler { public: explicit WeakCollectionsBuiltinsAssembler(compiler::CodeAssemblerState* state) : BaseCollectionsAssembler(state) {} protected: void AddEntry(TNode table, TNode key_index, TNode key, TNode value, TNode number_of_elements); TNode AllocateTable(Variant variant, TNode context, TNode at_least_space_for) override; // Generates and sets the identity hash for a JSReceiver. TNode CreateIdentityHash(TNode receiver); TNode EntryMask(TNode capacity); // Builds code that finds the EphemeronHashTable entry for a {key} using the // comparison code generated by {key_compare}. The key index is returned if // the {key} is found.
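// FindKeyIndex (below) probes the EphemeronHashTable the same way as
// HashTable::FirstProbe()/NextProbe(): start at hash & (capacity - 1), then
// repeatedly add an incrementing step, masked back into the table.
// Illustrative sketch of the probe order (not compiled; key_compare stands
// for the caller-supplied comparator, key_at for loading the entry's key
// slot):
//
//   int entry = hash & entry_mask;            // FirstProbe
//   for (int count = 1; ; count++) {
//     if (key_compare(key_at(entry))) break;  // jumps to if_found in CSA
//     entry = (entry + count) & entry_mask;   // NextProbe
//   }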
using KeyComparator = std::function<void(TNode entry_key, Label* if_same)>; TNode FindKeyIndex(TNode table, TNode key_hash, TNode entry_mask, const KeyComparator& key_compare); // Builds code that finds an EphemeronHashTable entry available for a new // entry. TNode FindKeyIndexForInsertion(TNode table, TNode key_hash, TNode entry_mask); // Builds code that finds the EphemeronHashTable entry with key that matches // {key} and returns the entry's key index. If {key} cannot be found, jumps to // {if_not_found}. TNode FindKeyIndexForKey(TNode table, TNode key, TNode hash, TNode entry_mask, Label* if_not_found); TNode InsufficientCapacityToAdd(TNode capacity, TNode number_of_elements, TNode number_of_deleted); TNode KeyIndexFromEntry(TNode entry); TNode LoadNumberOfElements(TNode table, int offset); TNode LoadNumberOfDeleted(TNode table, int offset = 0); TNode LoadTable(TNode collection); TNode LoadTableCapacity(TNode table); void RemoveEntry(TNode table, TNode key_index, TNode number_of_elements); TNode ShouldRehash(TNode number_of_elements, TNode number_of_deleted); TNode ShouldShrink(TNode capacity, TNode number_of_elements); TNode ValueIndexFromKeyIndex(TNode key_index); }; void WeakCollectionsBuiltinsAssembler::AddEntry( TNode table, TNode key_index, TNode key, TNode value, TNode number_of_elements) { // See EphemeronHashTable::AddEntry(). TNode value_index = ValueIndexFromKeyIndex(key_index); UnsafeStoreFixedArrayElement(table, key_index, key, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); UnsafeStoreFixedArrayElement(table, value_index, value); // See HashTableBase::ElementAdded(). UnsafeStoreFixedArrayElement( table, EphemeronHashTable::kNumberOfElementsIndex, SmiFromIntPtr(number_of_elements), SKIP_WRITE_BARRIER); } TNode WeakCollectionsBuiltinsAssembler::AllocateTable( Variant variant, TNode context, TNode at_least_space_for) { // See HashTable::New(). CSA_ASSERT(this, IntPtrLessThanOrEqual(IntPtrConstant(0), at_least_space_for)); TNode capacity = HashTableComputeCapacity(at_least_space_for); // See HashTable::NewInternal().
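// The backing store is a plain FixedArray: a small header (element count,
// deleted count, capacity) followed by {capacity} entries of kEntrySize slots
// each. Reusing KeyIndexFromEntry() for the array length below works because
// the key of entry i lives at
//   i * kEntrySize + kElementsStartIndex + kEntryKeyIndex
// so KeyIndexFromEntry(capacity) is one slot past the end of the last entry
// (assuming kEntryKeyIndex is 0, i.e. the key is the first field of an entry,
// as in EphemeronHashTable).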
TNode length = KeyIndexFromEntry(capacity); TNode table = CAST( AllocateFixedArray(HOLEY_ELEMENTS, length, kAllowLargeObjectAllocation)); RootIndex map_root_index = EphemeronHashTableShape::GetMapRootIndex(); StoreMapNoWriteBarrier(table, map_root_index); StoreFixedArrayElement(table, EphemeronHashTable::kNumberOfElementsIndex, SmiConstant(0), SKIP_WRITE_BARRIER); StoreFixedArrayElement(table, EphemeronHashTable::kNumberOfDeletedElementsIndex, SmiConstant(0), SKIP_WRITE_BARRIER); StoreFixedArrayElement(table, EphemeronHashTable::kCapacityIndex, SmiFromIntPtr(capacity), SKIP_WRITE_BARRIER); TNode start = KeyIndexFromEntry(IntPtrConstant(0)); FillFixedArrayWithValue(HOLEY_ELEMENTS, table, start, length, RootIndex::kUndefinedValue); return table; } TNode WeakCollectionsBuiltinsAssembler::CreateIdentityHash( TNode key) { TNode function_addr = ExternalConstant(ExternalReference::jsreceiver_create_identity_hash()); TNode isolate_ptr = ExternalConstant(ExternalReference::isolate_address(isolate())); MachineType type_ptr = MachineType::Pointer(); MachineType type_tagged = MachineType::AnyTagged(); return CAST(CallCFunction(function_addr, type_tagged, std::make_pair(type_ptr, isolate_ptr), std::make_pair(type_tagged, key))); } TNode WeakCollectionsBuiltinsAssembler::EntryMask( TNode capacity) { return IntPtrSub(capacity, IntPtrConstant(1)); } TNode WeakCollectionsBuiltinsAssembler::FindKeyIndex( TNode table, TNode key_hash, TNode entry_mask, const KeyComparator& key_compare) { // See HashTable::FirstProbe(). TVARIABLE(IntPtrT, var_entry, WordAnd(key_hash, entry_mask)); TVARIABLE(IntPtrT, var_count, IntPtrConstant(0)); Variable* loop_vars[] = {&var_count, &var_entry}; Label loop(this, arraysize(loop_vars), loop_vars), if_found(this); Goto(&loop); BIND(&loop); TNode key_index; { key_index = KeyIndexFromEntry(var_entry.value()); TNode entry_key = UnsafeLoadFixedArrayElement(CAST(table), key_index); key_compare(entry_key, &if_found); // See HashTable::NextProbe(). Increment(&var_count); var_entry = WordAnd(IntPtrAdd(var_entry.value(), var_count.value()), entry_mask); Goto(&loop); } BIND(&if_found); return key_index; } TNode WeakCollectionsBuiltinsAssembler::FindKeyIndexForInsertion( TNode table, TNode key_hash, TNode entry_mask) { // See HashTable::FindInsertionEntry(). auto is_not_live = [&](TNode entry_key, Label* if_found) { // This is the negative form of BaseShape::IsLive(). GotoIf(Word32Or(IsTheHole(entry_key), IsUndefined(entry_key)), if_found); }; return FindKeyIndex(table, key_hash, entry_mask, is_not_live); } TNode WeakCollectionsBuiltinsAssembler::FindKeyIndexForKey( TNode table, TNode key, TNode hash, TNode entry_mask, Label* if_not_found) { // See HashTable::FindEntry(). auto match_key_or_exit_on_empty = [&](TNode entry_key, Label* if_same) { GotoIf(IsUndefined(entry_key), if_not_found); GotoIf(TaggedEqual(entry_key, key), if_same); }; return FindKeyIndex(table, hash, entry_mask, match_key_or_exit_on_empty); } TNode WeakCollectionsBuiltinsAssembler::KeyIndexFromEntry( TNode entry) { // See HashTable::KeyAt().
// (entry * kEntrySize) + kElementsStartIndex + kEntryKeyIndex return IntPtrAdd( IntPtrMul(entry, IntPtrConstant(EphemeronHashTable::kEntrySize)), IntPtrConstant(EphemeronHashTable::kElementsStartIndex + EphemeronHashTable::kEntryKeyIndex)); } TNode WeakCollectionsBuiltinsAssembler::LoadNumberOfElements( TNode table, int offset) { TNode number_of_elements = SmiUntag(CAST(UnsafeLoadFixedArrayElement( table, EphemeronHashTable::kNumberOfElementsIndex))); return IntPtrAdd(number_of_elements, IntPtrConstant(offset)); } TNode WeakCollectionsBuiltinsAssembler::LoadNumberOfDeleted( TNode table, int offset) { TNode number_of_deleted = SmiUntag(CAST(UnsafeLoadFixedArrayElement( table, EphemeronHashTable::kNumberOfDeletedElementsIndex))); return IntPtrAdd(number_of_deleted, IntPtrConstant(offset)); } TNode WeakCollectionsBuiltinsAssembler::LoadTable( TNode collection) { return CAST(LoadObjectField(collection, JSWeakCollection::kTableOffset)); } TNode WeakCollectionsBuiltinsAssembler::LoadTableCapacity( TNode table) { return SmiUntag(CAST( UnsafeLoadFixedArrayElement(table, EphemeronHashTable::kCapacityIndex))); } TNode WeakCollectionsBuiltinsAssembler::InsufficientCapacityToAdd( TNode capacity, TNode number_of_elements, TNode number_of_deleted) { // This is the negative form of HashTable::HasSufficientCapacityToAdd(). // Return true if: // - more than 50% of the available space are deleted elements // - less than 50% will be available TNode available = IntPtrSub(capacity, number_of_elements); TNode half_available = WordShr(available, 1); TNode needed_available = WordShr(number_of_elements, 1); return Word32Or( // deleted > half IntPtrGreaterThan(number_of_deleted, half_available), // elements + needed available > capacity IntPtrGreaterThan(IntPtrAdd(number_of_elements, needed_available), capacity)); } void WeakCollectionsBuiltinsAssembler::RemoveEntry( TNode table, TNode key_index, TNode number_of_elements) { // See EphemeronHashTable::RemoveEntry(). TNode value_index = ValueIndexFromKeyIndex(key_index); StoreFixedArrayElement(table, key_index, TheHoleConstant()); StoreFixedArrayElement(table, value_index, TheHoleConstant()); // See HashTableBase::ElementRemoved(). TNode number_of_deleted = LoadNumberOfDeleted(table, 1); StoreFixedArrayElement(table, EphemeronHashTable::kNumberOfElementsIndex, SmiFromIntPtr(number_of_elements), SKIP_WRITE_BARRIER); StoreFixedArrayElement(table, EphemeronHashTable::kNumberOfDeletedElementsIndex, SmiFromIntPtr(number_of_deleted), SKIP_WRITE_BARRIER); } TNode WeakCollectionsBuiltinsAssembler::ShouldRehash( TNode number_of_elements, TNode number_of_deleted) { // Rehash if more than 33% of the entries are deleted. return IntPtrGreaterThanOrEqual(WordShl(number_of_deleted, 1), number_of_elements); } TNode WeakCollectionsBuiltinsAssembler::ShouldShrink( TNode capacity, TNode number_of_elements) { // See HashTable::Shrink(). TNode quarter_capacity = WordShr(capacity, 2); return Word32And( // Shrink to fit the number of elements if only a quarter of the // capacity is filled with elements. IntPtrLessThanOrEqual(number_of_elements, quarter_capacity), // Allocate a new dictionary with room for at least the current // number of elements. The allocation method will make sure that // there is extra room in the dictionary for additions. Don't go // lower than room for 16 elements. 
IntPtrGreaterThanOrEqual(number_of_elements, IntPtrConstant(16))); } TNode WeakCollectionsBuiltinsAssembler::ValueIndexFromKeyIndex( TNode key_index) { return IntPtrAdd(key_index, IntPtrConstant(EphemeronHashTableShape::kEntryValueIndex - EphemeronHashTable::kEntryKeyIndex)); } TF_BUILTIN(WeakMapConstructor, WeakCollectionsBuiltinsAssembler) { TNode new_target = CAST(Parameter(Descriptor::kJSNewTarget)); TNode argc = ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount)); TNode context = CAST(Parameter(Descriptor::kContext)); GenerateConstructor(kWeakMap, isolate()->factory()->WeakMap_string(), new_target, argc, context); } TF_BUILTIN(WeakSetConstructor, WeakCollectionsBuiltinsAssembler) { TNode new_target = CAST(Parameter(Descriptor::kJSNewTarget)); TNode argc = ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount)); TNode context = CAST(Parameter(Descriptor::kContext)); GenerateConstructor(kWeakSet, isolate()->factory()->WeakSet_string(), new_target, argc, context); } TF_BUILTIN(WeakMapLookupHashIndex, WeakCollectionsBuiltinsAssembler) { TNode table = CAST(Parameter(Descriptor::kTable)); TNode key = CAST(Parameter(Descriptor::kKey)); Label if_not_found(this); GotoIfNotJSReceiver(key, &if_not_found); TNode hash = LoadJSReceiverIdentityHash(key, &if_not_found); TNode capacity = LoadTableCapacity(table); TNode key_index = FindKeyIndexForKey(table, key, hash, EntryMask(capacity), &if_not_found); Return(SmiTag(ValueIndexFromKeyIndex(key_index))); BIND(&if_not_found); Return(SmiConstant(-1)); } TF_BUILTIN(WeakMapGet, WeakCollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const key = Parameter(Descriptor::kKey); Node* const context = Parameter(Descriptor::kContext); Label return_undefined(this); ThrowIfNotInstanceType(context, receiver, JS_WEAK_MAP_TYPE, "WeakMap.prototype.get"); TNode const table = LoadTable(CAST(receiver)); TNode const index = CAST(CallBuiltin(Builtins::kWeakMapLookupHashIndex, context, table, key)); GotoIf(TaggedEqual(index, SmiConstant(-1)), &return_undefined); Return(LoadFixedArrayElement(table, SmiUntag(index))); BIND(&return_undefined); Return(UndefinedConstant()); } TF_BUILTIN(WeakMapPrototypeHas, WeakCollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const key = Parameter(Descriptor::kKey); Node* const context = Parameter(Descriptor::kContext); Label return_false(this); ThrowIfNotInstanceType(context, receiver, JS_WEAK_MAP_TYPE, "WeakMap.prototype.has"); TNode const table = LoadTable(CAST(receiver)); TNode const index = CallBuiltin(Builtins::kWeakMapLookupHashIndex, context, table, key); GotoIf(TaggedEqual(index, SmiConstant(-1)), &return_false); Return(TrueConstant()); BIND(&return_false); Return(FalseConstant()); } // Helper that removes the entry with a given key from the backing store // (EphemeronHashTable) of a WeakMap or WeakSet. 
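// The CSA fast path below only handles in-place removal: it looks the key up
// by identity hash, writes the_hole into the key and value slots and fixes up
// the element/deleted counters. When removing the entry would make the table
// sparse enough to shrink, it defers to Runtime::kWeakCollectionDelete, which
// may reallocate the table. Rough outline (illustrative only; names are ad
// hoc):
//
//   if (!IsJSReceiver(key) || !HasIdentityHash(key)) return false;
//   key_index = FindKeyIndexForKey(table, key, hash, mask);  // else: false
//   if (ShouldShrink(capacity, number_of_elements - 1)) return CallRuntime();
//   RemoveEntry(table, key_index, number_of_elements - 1);
//   return true;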
TF_BUILTIN(WeakCollectionDelete, WeakCollectionsBuiltinsAssembler) { TNode context = CAST(Parameter(Descriptor::kContext)); TNode collection = CAST(Parameter(Descriptor::kCollection)); TNode key = CAST(Parameter(Descriptor::kKey)); Label call_runtime(this), if_not_found(this); GotoIfNotJSReceiver(key, &if_not_found); TNode hash = LoadJSReceiverIdentityHash(key, &if_not_found); TNode table = LoadTable(collection); TNode capacity = LoadTableCapacity(table); TNode key_index = FindKeyIndexForKey(table, key, hash, EntryMask(capacity), &if_not_found); TNode number_of_elements = LoadNumberOfElements(table, -1); GotoIf(ShouldShrink(capacity, number_of_elements), &call_runtime); RemoveEntry(table, key_index, number_of_elements); Return(TrueConstant()); BIND(&if_not_found); Return(FalseConstant()); BIND(&call_runtime); Return(CallRuntime(Runtime::kWeakCollectionDelete, context, collection, key, SmiTag(hash))); } // Helper that sets the key and value to the backing store (EphemeronHashTable) // of a WeakMap or WeakSet. TF_BUILTIN(WeakCollectionSet, WeakCollectionsBuiltinsAssembler) { TNode context = CAST(Parameter(Descriptor::kContext)); TNode collection = CAST(Parameter(Descriptor::kCollection)); TNode key = CAST(Parameter(Descriptor::kKey)); TNode value = CAST(Parameter(Descriptor::kValue)); CSA_ASSERT(this, IsJSReceiver(key)); Label call_runtime(this), if_no_hash(this), if_not_found(this); TNode table = LoadTable(collection); TNode capacity = LoadTableCapacity(table); TNode entry_mask = EntryMask(capacity); TVARIABLE(IntPtrT, var_hash, LoadJSReceiverIdentityHash(key, &if_no_hash)); TNode key_index = FindKeyIndexForKey(table, key, var_hash.value(), entry_mask, &if_not_found); StoreFixedArrayElement(table, ValueIndexFromKeyIndex(key_index), value); Return(collection); BIND(&if_no_hash); { var_hash = SmiUntag(CreateIdentityHash(key)); Goto(&if_not_found); } BIND(&if_not_found); { TNode number_of_deleted = LoadNumberOfDeleted(table); TNode number_of_elements = LoadNumberOfElements(table, 1); // TODO(pwong): Port HashTable's Rehash() and EnsureCapacity() to CSA. 
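// A brand-new key stays on the CSA fast path only while the table is healthy:
// ShouldRehash() fires once deleted entries reach roughly a third of the used
// entries, and InsufficientCapacityToAdd() is the negative form of
// HashTable::HasSufficientCapacityToAdd() (note that {number_of_elements}
// above already counts the entry being added). Either condition falls back to
// Runtime::kWeakCollectionSet, which can rehash or grow. Rough model of the
// combined check below (illustrative only):
//
//   bool needs_runtime =
//       2 * number_of_deleted >= number_of_elements ||              // rehash
//       number_of_deleted > (capacity - number_of_elements) / 2 ||  // capacity
//       number_of_elements + number_of_elements / 2 > capacity;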
GotoIf(Word32Or(ShouldRehash(number_of_elements, number_of_deleted), InsufficientCapacityToAdd(capacity, number_of_elements, number_of_deleted)), &call_runtime); TNode insertion_key_index = FindKeyIndexForInsertion(table, var_hash.value(), entry_mask); AddEntry(table, insertion_key_index, key, value, number_of_elements); Return(collection); } BIND(&call_runtime); { CallRuntime(Runtime::kWeakCollectionSet, context, collection, key, value, SmiTag(var_hash.value())); Return(collection); } } TF_BUILTIN(WeakMapPrototypeDelete, CodeStubAssembler) { TNode context = CAST(Parameter(Descriptor::kContext)); TNode receiver = CAST(Parameter(Descriptor::kReceiver)); TNode key = CAST(Parameter(Descriptor::kKey)); ThrowIfNotInstanceType(context, receiver, JS_WEAK_MAP_TYPE, "WeakMap.prototype.delete"); Return(CallBuiltin(Builtins::kWeakCollectionDelete, context, receiver, key)); } TF_BUILTIN(WeakMapPrototypeSet, WeakCollectionsBuiltinsAssembler) { TNode context = CAST(Parameter(Descriptor::kContext)); TNode receiver = CAST(Parameter(Descriptor::kReceiver)); TNode key = CAST(Parameter(Descriptor::kKey)); TNode value = CAST(Parameter(Descriptor::kValue)); ThrowIfNotInstanceType(context, receiver, JS_WEAK_MAP_TYPE, "WeakMap.prototype.set"); Label throw_invalid_key(this); GotoIfNotJSReceiver(key, &throw_invalid_key); Return( CallBuiltin(Builtins::kWeakCollectionSet, context, receiver, key, value)); BIND(&throw_invalid_key); ThrowTypeError(context, MessageTemplate::kInvalidWeakMapKey, key); } TF_BUILTIN(WeakSetPrototypeAdd, WeakCollectionsBuiltinsAssembler) { TNode context = CAST(Parameter(Descriptor::kContext)); TNode receiver = CAST(Parameter(Descriptor::kReceiver)); TNode value = CAST(Parameter(Descriptor::kValue)); ThrowIfNotInstanceType(context, receiver, JS_WEAK_SET_TYPE, "WeakSet.prototype.add"); Label throw_invalid_value(this); GotoIfNotJSReceiver(value, &throw_invalid_value); Return(CallBuiltin(Builtins::kWeakCollectionSet, context, receiver, value, TrueConstant())); BIND(&throw_invalid_value); ThrowTypeError(context, MessageTemplate::kInvalidWeakSetValue, value); } TF_BUILTIN(WeakSetPrototypeDelete, CodeStubAssembler) { TNode context = CAST(Parameter(Descriptor::kContext)); TNode receiver = CAST(Parameter(Descriptor::kReceiver)); TNode value = CAST(Parameter(Descriptor::kValue)); ThrowIfNotInstanceType(context, receiver, JS_WEAK_SET_TYPE, "WeakSet.prototype.delete"); Return( CallBuiltin(Builtins::kWeakCollectionDelete, context, receiver, value)); } TF_BUILTIN(WeakSetPrototypeHas, WeakCollectionsBuiltinsAssembler) { Node* const receiver = Parameter(Descriptor::kReceiver); Node* const key = Parameter(Descriptor::kKey); Node* const context = Parameter(Descriptor::kContext); Label return_false(this); ThrowIfNotInstanceType(context, receiver, JS_WEAK_SET_TYPE, "WeakSet.prototype.has"); TNode const table = LoadTable(CAST(receiver)); TNode const index = CallBuiltin(Builtins::kWeakMapLookupHashIndex, context, table, key); GotoIf(TaggedEqual(index, SmiConstant(-1)), &return_false); Return(TrueConstant()); BIND(&return_false); Return(FalseConstant()); } } // namespace internal } // namespace v8