diff options
Diffstat (limited to 'deps/v8/src/ic/ia32/ic-ia32.cc')
-rw-r--r-- | deps/v8/src/ic/ia32/ic-ia32.cc | 166 |
1 file changed, 66 insertions(+), 100 deletions(-)
diff --git a/deps/v8/src/ic/ia32/ic-ia32.cc b/deps/v8/src/ic/ia32/ic-ia32.cc index 9822f26ced..54fd053eaf 100644 --- a/deps/v8/src/ic/ia32/ic-ia32.cc +++ b/deps/v8/src/ic/ia32/ic-ia32.cc @@ -169,40 +169,65 @@ static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm, // Loads an indexed element from a fast case array. -// If not_fast_array is NULL, doesn't perform the elements map check. static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, Register key, Register scratch, - Register result, Label* not_fast_array, - Label* out_of_range) { + Register scratch2, Register result, + Label* slow) { // Register use: // receiver - holds the receiver and is unchanged. // key - holds the key and is unchanged (must be a smi). // Scratch registers: // scratch - used to hold elements of the receiver and the loaded value. + // scratch2 - holds maps and prototypes during prototype chain check. // result - holds the result on exit if the load succeeds and // we fall through. + Label check_prototypes, check_next_prototype; + Label done, in_bounds, return_undefined; __ mov(scratch, FieldOperand(receiver, JSObject::kElementsOffset)); - if (not_fast_array != NULL) { - // Check that the object is in fast mode and writable. - __ CheckMap(scratch, masm->isolate()->factory()->fixed_array_map(), - not_fast_array, DONT_DO_SMI_CHECK); - } else { - __ AssertFastElements(scratch); - } + __ AssertFastElements(scratch); + // Check that the key (index) is within bounds. __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset)); - __ j(above_equal, out_of_range); + __ j(below, &in_bounds); + // Out-of-bounds. Check the prototype chain to see if we can just return + // 'undefined'. + __ cmp(key, 0); + __ j(less, slow); // Negative keys can't take the fast OOB path. 
+ __ bind(&check_prototypes); + __ mov(scratch2, FieldOperand(receiver, HeapObject::kMapOffset)); + __ bind(&check_next_prototype); + __ mov(scratch2, FieldOperand(scratch2, Map::kPrototypeOffset)); + // scratch2: current prototype + __ cmp(scratch2, masm->isolate()->factory()->null_value()); + __ j(equal, &return_undefined); + __ mov(scratch, FieldOperand(scratch2, JSObject::kElementsOffset)); + __ mov(scratch2, FieldOperand(scratch2, HeapObject::kMapOffset)); + // scratch: elements of current prototype + // scratch2: map of current prototype + __ CmpInstanceType(scratch2, JS_OBJECT_TYPE); + __ j(below, slow); + __ test_b( + FieldOperand(scratch2, Map::kBitFieldOffset), + (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor)); + __ j(not_zero, slow); + __ cmp(scratch, masm->isolate()->factory()->empty_fixed_array()); + __ j(not_equal, slow); + __ jmp(&check_next_prototype); + + __ bind(&return_undefined); + __ mov(result, masm->isolate()->factory()->undefined_value()); + __ jmp(&done); + + __ bind(&in_bounds); // Fast case: Do the load. STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0)); __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize)); __ cmp(scratch, Immediate(masm->isolate()->factory()->the_hole_value())); - // In case the loaded value is the_hole we have to consult GetProperty - // to ensure the prototype chain is searched. - __ j(equal, out_of_range); - if (!result.is(scratch)) { - __ mov(result, scratch); - } + // In case the loaded value is the_hole we have to check the prototype chain. + __ j(equal, &check_prototypes); + __ Move(result, scratch); + __ bind(&done); } @@ -305,7 +330,7 @@ static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm, } -void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { +void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { // The return address is on the stack. 
Label slow, check_name, index_smi, index_name, property_array_property; Label probe_dictionary, check_number_dictionary; @@ -327,7 +352,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { // Check the receiver's map to see if it has fast elements. __ CheckFastElements(eax, &check_number_dictionary); - GenerateFastArrayLoad(masm, receiver, key, eax, eax, NULL, &slow); + GenerateFastArrayLoad(masm, receiver, key, eax, ebx, eax, &slow); Isolate* isolate = masm->isolate(); Counters* counters = isolate->counters(); __ IncrementCounter(counters->keyed_load_generic_smi(), 1); @@ -367,95 +392,36 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { GenerateKeyedLoadReceiverCheck(masm, receiver, eax, Map::kHasNamedInterceptor, &slow); - // If the receiver is a fast-case object, check the keyed lookup - // cache. Otherwise probe the dictionary. + // If the receiver is a fast-case object, check the stub cache. Otherwise + // probe the dictionary. __ mov(ebx, FieldOperand(receiver, JSObject::kPropertiesOffset)); __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), Immediate(isolate->factory()->hash_table_map())); __ j(equal, &probe_dictionary); - // The receiver's map is still in eax, compute the keyed lookup cache hash - // based on 32 bits of the map pointer and the string hash. - if (FLAG_debug_code) { - __ cmp(eax, FieldOperand(receiver, HeapObject::kMapOffset)); - __ Check(equal, kMapIsNoLongerInEax); - } - __ mov(ebx, eax); // Keep the map around for later. - __ shr(eax, KeyedLookupCache::kMapHashShift); - __ mov(edi, FieldOperand(key, String::kHashFieldOffset)); - __ shr(edi, String::kHashShift); - __ xor_(eax, edi); - __ and_(eax, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask); - - // Load the key (consisting of map and internalized string) from the cache and - // check for match. 
- Label load_in_object_property; - static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; - Label hit_on_nth_entry[kEntriesPerBucket]; - ExternalReference cache_keys = - ExternalReference::keyed_lookup_cache_keys(masm->isolate()); - - for (int i = 0; i < kEntriesPerBucket - 1; i++) { - Label try_next_entry; - __ mov(edi, eax); - __ shl(edi, kPointerSizeLog2 + 1); - if (i != 0) { - __ add(edi, Immediate(kPointerSize * i * 2)); - } - __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys)); - __ j(not_equal, &try_next_entry); - __ add(edi, Immediate(kPointerSize)); - __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys)); - __ j(equal, &hit_on_nth_entry[i]); - __ bind(&try_next_entry); + if (FLAG_vector_ics) { + // When vector ics are in use, the handlers in the stub cache expect a + // vector and slot. Since we won't change the IC from any downstream + // misses, a dummy vector can be used. + Handle<TypeFeedbackVector> dummy_vector = Handle<TypeFeedbackVector>::cast( + isolate->factory()->keyed_load_dummy_vector()); + int slot = dummy_vector->GetIndex(FeedbackVectorICSlot(0)); + __ push(Immediate(Smi::FromInt(slot))); + __ push(Immediate(dummy_vector)); } - __ lea(edi, Operand(eax, 1)); - __ shl(edi, kPointerSizeLog2 + 1); - __ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2)); - __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys)); - __ j(not_equal, &slow); - __ add(edi, Immediate(kPointerSize)); - __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys)); - __ j(not_equal, &slow); + Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( + Code::ComputeHandlerFlags(Code::LOAD_IC)); + masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags, + false, receiver, key, ebx, edi); - // Get field offset. - // ebx : receiver's map - // eax : lookup cache index - ExternalReference cache_field_offsets = - ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate()); - - // Hit on nth entry. 
- for (int i = kEntriesPerBucket - 1; i >= 0; i--) { - __ bind(&hit_on_nth_entry[i]); - if (i != 0) { - __ add(eax, Immediate(i)); - } - __ mov(edi, - Operand::StaticArray(eax, times_pointer_size, cache_field_offsets)); - __ movzx_b(eax, FieldOperand(ebx, Map::kInObjectPropertiesOffset)); - __ sub(edi, eax); - __ j(above_equal, &property_array_property); - if (i != 0) { - __ jmp(&load_in_object_property); - } + if (FLAG_vector_ics) { + __ pop(VectorLoadICDescriptor::VectorRegister()); + __ pop(VectorLoadICDescriptor::SlotRegister()); } - // Load in-object property. - __ bind(&load_in_object_property); - __ movzx_b(eax, FieldOperand(ebx, Map::kInstanceSizeOffset)); - __ add(eax, edi); - __ mov(eax, FieldOperand(receiver, eax, times_pointer_size, 0)); - __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); - __ ret(0); - - // Load property array property. - __ bind(&property_array_property); - __ mov(eax, FieldOperand(receiver, JSObject::kPropertiesOffset)); - __ mov(eax, - FieldOperand(eax, edi, times_pointer_size, FixedArray::kHeaderSize)); - __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1); - __ ret(0); + // Cache miss. + GenerateMiss(masm); // Do a quick inline probe of the receiver's dictionary, if it // exists. @@ -646,7 +612,7 @@ static void KeyedStoreGenerateMegamorphicHelper( void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm, - StrictMode strict_mode) { + LanguageMode language_mode) { // Return address is on the stack. Label slow, fast_object, fast_object_grow; Label fast_double, fast_double_grow; @@ -683,7 +649,7 @@ void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm, // Slow case: call runtime. __ bind(&slow); - PropertyICCompiler::GenerateRuntimeSetProperty(masm, strict_mode); + PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode); // Never returns to here. __ bind(&maybe_name_key); |