Diffstat (limited to 'deps/v8/src/arm/full-codegen-arm.cc')
-rw-r--r--  deps/v8/src/arm/full-codegen-arm.cc | 313
1 file changed, 144 insertions, 169 deletions
diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc
index b2f629b26c..3b560fedfa 100644
--- a/deps/v8/src/arm/full-codegen-arm.cc
+++ b/deps/v8/src/arm/full-codegen-arm.cc
@@ -130,7 +130,7 @@ void FullCodeGenerator::Generate() {
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
- Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
+ Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
@@ -164,14 +164,19 @@ void FullCodeGenerator::Generate() {
int locals_count = info->scope()->num_stack_slots();
- __ Push(lr, fp, cp, r1);
- if (locals_count > 0) {
+ info->set_prologue_offset(masm_->pc_offset());
+ {
+ PredictableCodeSizeScope predictible_code_size_scope(
+ masm_, kNoCodeAgeSequenceLength * Assembler::kInstrSize);
+ // The following three instructions must remain together and unmodified
+ // for code aging to work properly.
+ __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
// Load undefined value here, so the value is ready for the loop
// below.
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+ // Adjust FP to point to saved FP.
+ __ add(fp, sp, Operand(2 * kPointerSize));
}
- // Adjust fp to point to caller's fp.
- __ add(fp, sp, Operand(2 * kPointerSize));
{ Comment cmnt(masm_, "[ Allocate locals");
for (int i = 0; i < locals_count; i++) {
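As a reading aid, here is a small self-contained sketch of the frame layout the new prologue establishes (assuming 4-byte pointers on 32-bit ARM; stm db_w writes the lowest-numbered register at the lowest address, and the following add leaves fp pointing at the saved caller fp). The slot labels are illustrative, not V8 declarations:

    #include <cstdio>

    int main() {
      const int kPointerSize = 4;  // 32-bit ARM
      // Slots written by: stm db_w, sp, {r1, cp, fp, lr}
      const char* slots[] = {"r1 (closure)", "cp (context)",
                             "fp (caller's fp)", "lr (return address)"};
      for (int i = 0; i < 4; ++i) {
        std::printf("[sp + %2d] %s\n", i * kPointerSize, slots[i]);
      }
      // add(fp, sp, Operand(2 * kPointerSize)) points fp at the saved fp slot.
      std::printf("fp = sp + %d\n", 2 * kPointerSize);
      return 0;
    }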
@@ -287,6 +292,7 @@ void FullCodeGenerator::Generate() {
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(hs, &ok);
+ PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
StackCheckStub stub;
__ CallStub(&stub);
__ bind(&ok);
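For readers unfamiliar with the scope used above, a rough stand-in model (not the real V8 class; the member names are made up) of the contract PredictableCodeSizeScope enforces: the code emitted while the scope is alive must occupy exactly the number of bytes promised up front, so later passes can patch it without re-measuring:

    #include <cassert>

    // Illustrative stand-in only; the real class lives in the macro assembler.
    class PredictableCodeSizeScopeModel {
     public:
      PredictableCodeSizeScopeModel(int* pc_offset, int expected_size_in_bytes)
          : pc_offset_(pc_offset),
            start_(*pc_offset),
            expected_(expected_size_in_bytes) {}
      // On exit, the emitted size must match what was promised.
      ~PredictableCodeSizeScopeModel() {
        assert(*pc_offset_ - start_ == expected_);
      }
     private:
      int* pc_offset_;
      int start_;
      int expected_;
    };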
@@ -341,41 +347,31 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
}
-void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
- Label* back_edge_target) {
- Comment cmnt(masm_, "[ Stack check");
+void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
+ Label* back_edge_target) {
+ Comment cmnt(masm_, "[ Back edge bookkeeping");
// Block literal pools whilst emitting stack check code.
Assembler::BlockConstPoolScope block_const_pool(masm_);
Label ok;
- if (FLAG_count_based_interrupts) {
- int weight = 1;
- if (FLAG_weighted_back_edges) {
- ASSERT(back_edge_target->is_bound());
- int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
- weight = Min(kMaxBackEdgeWeight,
- Max(1, distance / kBackEdgeDistanceUnit));
- }
- EmitProfilingCounterDecrement(weight);
- __ b(pl, &ok);
- InterruptStub stub;
- __ CallStub(&stub);
- } else {
- __ LoadRoot(ip, Heap::kStackLimitRootIndex);
- __ cmp(sp, Operand(ip));
- __ b(hs, &ok);
- StackCheckStub stub;
- __ CallStub(&stub);
+ int weight = 1;
+ if (FLAG_weighted_back_edges) {
+ ASSERT(back_edge_target->is_bound());
+ int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceUnit));
}
+ EmitProfilingCounterDecrement(weight);
+ __ b(pl, &ok);
+ InterruptStub stub;
+ __ CallStub(&stub);
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
// the deoptimization input data found in the optimized code.
- RecordStackCheck(stmt->OsrEntryId());
+ RecordBackEdge(stmt->OsrEntryId());
- if (FLAG_count_based_interrupts) {
- EmitProfilingCounterReset();
- }
+ EmitProfilingCounterReset();
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
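A minimal sketch of the weight computation performed above, with placeholder constants (the real kMaxBackEdgeWeight and kBackEdgeDistanceUnit values come from full-codegen.h and are not reproduced here):

    #include <algorithm>

    // Placeholder values, for illustration only.
    const int kMaxBackEdgeWeight = 127;
    const int kBackEdgeDistanceUnit = 142;

    // Longer loop bodies get a larger weight, so the profiling counter is
    // decremented roughly in proportion to the amount of code executed per
    // back edge before the InterruptStub is called.
    int BackEdgeWeight(int distance_in_bytes) {
      return std::min(kMaxBackEdgeWeight,
                      std::max(1, distance_in_bytes / kBackEdgeDistanceUnit));
    }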
@@ -437,6 +433,8 @@ void FullCodeGenerator::EmitReturnSequence() {
// tool from instrumenting as we rely on the code size here.
int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
+ // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
+ PredictableCodeSizeScope predictable(masm_, -1);
__ RecordJSReturn();
masm_->mov(sp, fp);
masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
@@ -911,34 +909,33 @@ void FullCodeGenerator::VisitFunctionDeclaration(
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
- VariableProxy* proxy = declaration->proxy();
- Variable* variable = proxy->var();
- Handle<JSModule> instance = declaration->module()->interface()->Instance();
- ASSERT(!instance.is_null());
+ Variable* variable = declaration->proxy()->var();
+ ASSERT(variable->location() == Variable::CONTEXT);
+ ASSERT(variable->interface()->IsFrozen());
- switch (variable->location()) {
- case Variable::UNALLOCATED: {
- Comment cmnt(masm_, "[ ModuleDeclaration");
- globals_->Add(variable->name(), zone());
- globals_->Add(instance, zone());
- Visit(declaration->module());
- break;
- }
+ Comment cmnt(masm_, "[ ModuleDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
- case Variable::CONTEXT: {
- Comment cmnt(masm_, "[ ModuleDeclaration");
- EmitDebugCheckDeclarationContext(variable);
- __ mov(r1, Operand(instance));
- __ str(r1, ContextOperand(cp, variable->index()));
- Visit(declaration->module());
- break;
- }
+ // Load instance object.
+ __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
+ __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
+ __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));
- case Variable::PARAMETER:
- case Variable::LOCAL:
- case Variable::LOOKUP:
- UNREACHABLE();
- }
+ // Assign it.
+ __ str(r1, ContextOperand(cp, variable->index()));
+ // We know that we have written a module, which is not a smi.
+ __ RecordWriteContextSlot(cp,
+ Context::SlotOffset(variable->index()),
+ r1,
+ r3,
+ kLRHasBeenSaved,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
+
+ // Traverse into body.
+ Visit(declaration->module());
}
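A rough run-time model of what the new code path does (the struct below is illustrative and does not mirror V8's actual Context layout): walk up the context chain to the global scope, read the module interface's slot (itself a context), and unwrap its extension object, which is the JSModule instance that is then stored into the declaring context and recorded with the write barrier above.

    struct ContextModel {
      ContextModel* previous;   // enclosing context
      void* extension;          // Context::EXTENSION_INDEX slot
      void* slots[8];           // remaining context slots (illustrative size)
    };

    void* LoadModuleInstance(ContextModel* cp, int chain_length,
                             int interface_index) {
      ContextModel* c = cp;
      for (int i = 0; i < chain_length; ++i) c = c->previous;  // LoadContext
      ContextModel* interface_ctx =
          static_cast<ContextModel*>(c->slots[interface_index]);
      return interface_ctx->extension;  // the JSModule instance
    }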
@@ -981,6 +978,14 @@ void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
}
+void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
+ // Call the runtime to declare the modules.
+ __ Push(descriptions);
+ __ CallRuntime(Runtime::kDeclareModules, 1);
+ // Return value is ignored.
+}
+
+
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
Comment cmnt(masm_, "[ SwitchStatement");
Breakable nested_statement(this, stmt);
@@ -1137,7 +1142,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ cmp(r1, Operand(Smi::FromInt(0)));
__ b(eq, &no_descriptors);
- __ LoadInstanceDescriptors(r0, r2, r4);
+ __ LoadInstanceDescriptors(r0, r2);
__ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
__ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
@@ -1235,7 +1240,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ add(r0, r0, Operand(Smi::FromInt(1)));
__ push(r0);
- EmitStackCheck(stmt, &loop);
+ EmitBackEdgeBookkeeping(stmt, &loop);
__ b(&loop);
// Remove the pointers stored on the stack.
@@ -1388,9 +1393,9 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
} else if (var->mode() == DYNAMIC_LOCAL) {
Variable* local = var->local_if_not_shadowed();
__ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
- if (local->mode() == CONST ||
- local->mode() == CONST_HARMONY ||
- local->mode() == LET) {
+ if (local->mode() == LET ||
+ local->mode() == CONST ||
+ local->mode() == CONST_HARMONY) {
__ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
if (local->mode() == CONST) {
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
@@ -2183,43 +2188,16 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
ASSERT(prop != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- __ ldr(ip, MemOperand(sp, kPointerSize)); // Receiver is now under value.
- __ push(ip);
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
// Record source code position before IC call.
SetSourcePosition(expr->position());
__ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
- // Load receiver to r1. Leave a copy in the stack if needed for turning the
- // receiver into fast case.
- if (expr->ends_initialization_block()) {
- __ ldr(r1, MemOperand(sp));
- } else {
- __ pop(r1);
- }
+ __ pop(r1);
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ push(r0); // Result of assignment, saved even if not needed.
- // Receiver is under the result value.
- __ ldr(ip, MemOperand(sp, kPointerSize));
- __ push(ip);
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(r0);
- __ Drop(1);
- }
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(r0);
}
@@ -2228,44 +2206,16 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
// Assignment to a property, using a keyed store IC.
- // If the assignment starts a block of assignments to the same object,
- // change to slow case to avoid the quadratic behavior of repeatedly
- // adding fast properties.
- if (expr->starts_initialization_block()) {
- __ push(result_register());
- // Receiver is now under the key and value.
- __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
- __ push(ip);
- __ CallRuntime(Runtime::kToSlowProperties, 1);
- __ pop(result_register());
- }
-
// Record source code position before IC call.
SetSourcePosition(expr->position());
__ pop(r1); // Key.
- // Load receiver to r2. Leave a copy in the stack if needed for turning the
- // receiver into fast case.
- if (expr->ends_initialization_block()) {
- __ ldr(r2, MemOperand(sp));
- } else {
- __ pop(r2);
- }
+ __ pop(r2);
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
- // If the assignment ends an initialization block, revert to fast case.
- if (expr->ends_initialization_block()) {
- __ push(r0); // Result of assignment, saved even if not needed.
- // Receiver is under the result value.
- __ ldr(ip, MemOperand(sp, kPointerSize));
- __ push(ip);
- __ CallRuntime(Runtime::kToFastProperties, 1);
- __ pop(r0);
- __ Drop(1);
- }
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(r0);
}
@@ -2294,7 +2244,9 @@ void FullCodeGenerator::CallIC(Handle<Code> code,
RelocInfo::Mode rmode,
TypeFeedbackId ast_id) {
ic_total_count_++;
- __ Call(code, rmode, ast_id);
+ // All calls must have a predictable size in full-codegen code to ensure that
+ // the debugger can patch them correctly.
+ __ Call(code, rmode, ast_id, al, NEVER_INLINE_TARGET_ADDRESS);
}
void FullCodeGenerator::EmitCallWithIC(Call* expr,
@@ -2424,7 +2376,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
VariableProxy* proxy = callee->AsVariableProxy();
Property* property = callee->AsProperty();
- if (proxy != NULL && proxy->var()->is_possibly_eval()) {
+ if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
// In a call to eval, we first call %ResolvePossiblyDirectEval to
// resolve the function we need to call and the receiver of the
// call. Then we call the resolved function using the given
@@ -2714,7 +2666,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
- if (generate_debug_code_) __ AbortIfSmi(r0);
+ __ AssertNotSmi(r0);
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
@@ -2729,26 +2681,31 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ b(eq, if_false);
// Look for valueOf symbol in the descriptor array, and indicate false if
- // found. The type is not checked, so if it is a transition it is a false
- // negative.
- __ LoadInstanceDescriptors(r1, r4, r3);
- __ ldr(r3, FieldMemOperand(r4, FixedArray::kLengthOffset));
- // r4: descriptor array
- // r3: length of descriptor array
- // Calculate the end of the descriptor array.
+ // found. Since we omit an enumeration index check, if it is added via a
+ // transition that shares its descriptor array, this is a false positive.
+ Label entry, loop, done;
+
+ // Skip loop if no descriptors are valid.
+ __ NumberOfOwnDescriptors(r3, r1);
+ __ cmp(r3, Operand(0));
+ __ b(eq, &done);
+
+ __ LoadInstanceDescriptors(r1, r4);
+ // r4: descriptor array.
+ // r3: valid entries in the descriptor array.
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kPointerSize == 4);
- __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
+ __ mul(r3, r3, ip);
+ // Calculate location of the first key name.
+ __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
+ // Calculate the end of the descriptor array.
+ __ mov(r2, r4);
__ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
- // Calculate location of the first key name.
- __ add(r4,
- r4,
- Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
// Loop through all the keys in the descriptor array. If one of these is the
// symbol valueOf the result is false.
- Label entry, loop;
// The use of ip to store the valueOf symbol assumes that it is not otherwise
// used in the loop below.
__ mov(ip, Operand(FACTORY->value_of_symbol()));

@@ -2762,7 +2719,8 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
__ cmp(r4, Operand(r2));
__ b(ne, &loop);
- // If a valueOf property is not found on the object check that it's
+ __ bind(&done);
+ // If a valueOf property is not found on the object check that its
// prototype is the un-modified String prototype. If not result is false.
__ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
__ JumpIfSmi(r2, if_false);
@@ -3173,6 +3131,39 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT_EQ(3, args->length());
+
+ VisitForStackValue(args->at(1)); // index
+ VisitForStackValue(args->at(2)); // value
+ __ pop(r2);
+ __ pop(r1);
+ VisitForAccumulatorValue(args->at(0)); // string
+
+ static const String::Encoding encoding = String::ONE_BYTE_ENCODING;
+ SeqStringSetCharGenerator::Generate(masm_, encoding, r0, r1, r2);
+ context()->Plug(r0);
+}
+
+
+void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT_EQ(3, args->length());
+
+ VisitForStackValue(args->at(1)); // index
+ VisitForStackValue(args->at(2)); // value
+ __ pop(r2);
+ __ pop(r1);
+ VisitForAccumulatorValue(args->at(0)); // string
+
+ static const String::Encoding encoding = String::TWO_BYTE_ENCODING;
+ SeqStringSetCharGenerator::Generate(masm_, encoding, r0, r1, r2);
+ context()->Plug(r0);
+}
+
+
+
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
// Load the arguments on the stack and call the runtime function.
ZoneList<Expression*>* args = expr->arguments();
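For orientation, a minimal C++ model of the store that SeqStringSetCharGenerator is asked to emit for the two helpers above (the function names and the raw-pointer view of the string payload are illustrative assumptions):

    #include <cstdint>

    // One-byte (Latin-1) sequential string: one byte per character.
    void OneByteSetChar(uint8_t* payload, int index, uint16_t value) {
      payload[index] = static_cast<uint8_t>(value);
    }

    // Two-byte (UTF-16) sequential string: two bytes per character.
    void TwoByteSetChar(uint16_t* payload, int index, uint16_t value) {
      payload[index] = value;
    }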
@@ -3583,8 +3574,7 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
ASSERT(args->length() == 1);
VisitForAccumulatorValue(args->at(0));
- __ AbortIfNotString(r0);
-
+ __ AssertString(r0);
__ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
__ IndexFromHash(r0, r0);
@@ -3666,7 +3656,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
__ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
__ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
- __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
+ __ ldr(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
__ add(string_length, string_length, Operand(scratch1), SetCC);
__ b(vs, &bailout);
__ cmp(element, elements_end);
@@ -3695,7 +3685,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// Add (separator length times array_length) - separator length to the
// string_length to get the length of the result string. array_length is not
// smi but the other values are, so the result is a smi
- __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
+ __ ldr(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
__ sub(string_length, string_length, Operand(scratch1));
__ smull(scratch2, ip, array_length, scratch1);
// Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
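A small self-contained model of the overflow condition that comment describes (the comment is cut off by the hunk boundary; presumably it continues "identical"): the 64-bit smull product still fits the smi range only if bits 63..31 are all copies of the sign bit. The function name is illustrative:

    #include <cstdint>

    // Illustrative check: the high word must equal the low word arithmetically
    // shifted right by 31, i.e. the top 33 bits of the product are identical.
    bool SmullResultIsSmi(int32_t lhs, int32_t rhs) {
      int64_t product = static_cast<int64_t>(lhs) * rhs;
      int32_t lo = static_cast<int32_t>(product);
      int32_t hi = static_cast<int32_t>(product >> 32);
      return hi == (lo >> 31);
    }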
@@ -3733,10 +3723,10 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
array_length = no_reg;
__ add(result_pos,
result,
- Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
// Check the length of the separator.
- __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
+ __ ldr(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
__ cmp(scratch1, Operand(Smi::FromInt(1)));
__ b(eq, &one_char_separator);
__ b(gt, &long_separator);
@@ -3752,7 +3742,9 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ ldr(string, MemOperand(element, kPointerSize, PostIndex));
__ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
__ SmiUntag(string_length);
- __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ add(string,
+ string,
+ Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
__ CopyBytes(string, result_pos, string_length, scratch1);
__ cmp(element, elements_end);
__ b(lt, &empty_separator_loop); // End while (element < elements_end).
@@ -3762,7 +3754,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// One-character separator case
__ bind(&one_char_separator);
// Replace separator with its ASCII character value.
- __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
+ __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
// Jump into the loop after the code that copies the separator, so the first
// element is not preceded by a separator
__ jmp(&one_char_separator_loop_entry);
@@ -3782,7 +3774,9 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ ldr(string, MemOperand(element, kPointerSize, PostIndex));
__ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
__ SmiUntag(string_length);
- __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ add(string,
+ string,
+ Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
__ CopyBytes(string, result_pos, string_length, scratch1);
__ cmp(element, elements_end);
__ b(lt, &one_char_separator_loop); // End while (element < elements_end).
@@ -3803,14 +3797,16 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ SmiUntag(string_length);
__ add(string,
separator,
- Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
__ CopyBytes(string, result_pos, string_length, scratch1);
__ bind(&long_separator);
__ ldr(string, MemOperand(element, kPointerSize, PostIndex));
__ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
__ SmiUntag(string_length);
- __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ add(string,
+ string,
+ Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
__ CopyBytes(string, result_pos, string_length, scratch1);
__ cmp(element, elements_end);
__ b(lt, &long_separator_loop); // End while (element < elements_end).
@@ -4115,7 +4111,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Call stub. Undo operation first.
__ sub(r0, r0, Operand(Smi::FromInt(count_value)));
}
- __ mov(r1, Operand(Smi::FromInt(count_value)));
+ __ mov(r1, r0);
+ __ mov(r0, Operand(Smi::FromInt(count_value)));
// Record position before stub call.
SetSourcePosition(expr->position());
@@ -4340,29 +4337,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
default: {
VisitForAccumulatorValue(expr->right());
- Condition cond = eq;
- switch (op) {
- case Token::EQ_STRICT:
- case Token::EQ:
- cond = eq;
- break;
- case Token::LT:
- cond = lt;
- break;
- case Token::GT:
- cond = gt;
- break;
- case Token::LTE:
- cond = le;
- break;
- case Token::GTE:
- cond = ge;
- break;
- case Token::IN:
- case Token::INSTANCEOF:
- default:
- UNREACHABLE();
- }
+ Condition cond = CompareIC::ComputeCondition(op);
__ pop(r1);
bool inline_smi_code = ShouldInlineSmiCase(op);
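The deleted switch above is now centralized in CompareIC::ComputeCondition; a stand-alone sketch of the same mapping follows (the enum names are local stand-ins, not the V8 declarations):

    enum Condition { eq, lt, gt, le, ge };
    enum class Tok { EQ_STRICT, EQ, LT, GT, LTE, GTE };

    Condition ComputeCondition(Tok op) {
      switch (op) {
        case Tok::EQ_STRICT:
        case Tok::EQ:  return eq;
        case Tok::LT:  return lt;
        case Tok::GT:  return gt;
        case Tok::LTE: return le;
        case Tok::GTE: return ge;
      }
      return eq;  // not reached for the operators handled here
    }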