Diffstat (limited to 'deps/v8/src/compiler/memory-optimizer.cc')
 deps/v8/src/compiler/memory-optimizer.cc | 45 ++++++++++++++++++++++++++-------------------
 1 file changed, 26 insertions(+), 19 deletions(-)
diff --git a/deps/v8/src/compiler/memory-optimizer.cc b/deps/v8/src/compiler/memory-optimizer.cc
index 29cbb4d26c..368c060c1d 100644
--- a/deps/v8/src/compiler/memory-optimizer.cc
+++ b/deps/v8/src/compiler/memory-optimizer.cc
@@ -5,6 +5,7 @@
#include "src/compiler/memory-optimizer.h"
#include "src/codegen/interface-descriptors.h"
+#include "src/codegen/tick-counter.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
@@ -20,7 +21,8 @@ namespace compiler {
MemoryOptimizer::MemoryOptimizer(JSGraph* jsgraph, Zone* zone,
PoisoningMitigationLevel poisoning_level,
AllocationFolding allocation_folding,
- const char* function_debug_name)
+ const char* function_debug_name,
+ TickCounter* tick_counter)
: jsgraph_(jsgraph),
empty_state_(AllocationState::Empty(zone)),
pending_(zone),
@@ -29,7 +31,8 @@ MemoryOptimizer::MemoryOptimizer(JSGraph* jsgraph, Zone* zone,
graph_assembler_(jsgraph, nullptr, nullptr, zone),
poisoning_level_(poisoning_level),
allocation_folding_(allocation_folding),
- function_debug_name_(function_debug_name) {}
+ function_debug_name_(function_debug_name),
+ tick_counter_(tick_counter) {}
void MemoryOptimizer::Optimize() {
EnqueueUses(graph()->start(), empty_state());
@@ -99,7 +102,7 @@ bool CanAllocate(const Node* node) {
case IrOpcode::kBitcastTaggedToWord:
case IrOpcode::kBitcastWordToTagged:
case IrOpcode::kComment:
- case IrOpcode::kDebugAbort:
+ case IrOpcode::kAbortCSAAssert:
case IrOpcode::kDebugBreak:
case IrOpcode::kDeoptimizeIf:
case IrOpcode::kDeoptimizeUnless:
@@ -108,6 +111,7 @@ bool CanAllocate(const Node* node) {
case IrOpcode::kLoad:
case IrOpcode::kLoadElement:
case IrOpcode::kLoadField:
+ case IrOpcode::kLoadFromObject:
case IrOpcode::kPoisonedLoad:
case IrOpcode::kProtectedLoad:
case IrOpcode::kProtectedStore:
@@ -118,6 +122,7 @@ bool CanAllocate(const Node* node) {
case IrOpcode::kStore:
case IrOpcode::kStoreElement:
case IrOpcode::kStoreField:
+ case IrOpcode::kStoreToObject:
case IrOpcode::kTaggedPoisonOnSpeculation:
case IrOpcode::kUnalignedLoad:
case IrOpcode::kUnalignedStore:
@@ -214,6 +219,7 @@ Node* EffectPhiForPhi(Node* phi) {
} // namespace
void MemoryOptimizer::VisitNode(Node* node, AllocationState const* state) {
+ tick_counter_->DoTick();
DCHECK(!node->IsDead());
DCHECK_LT(0, node->op()->EffectInputCount());
switch (node->opcode()) {
@@ -296,6 +302,21 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
}
}
+ Node* allocate_builtin;
+ if (allocation_type == AllocationType::kYoung) {
+ if (allocation.allow_large_objects() == AllowLargeObjects::kTrue) {
+ allocate_builtin = __ AllocateInYoungGenerationStubConstant();
+ } else {
+ allocate_builtin = __ AllocateRegularInYoungGenerationStubConstant();
+ }
+ } else {
+ if (allocation.allow_large_objects() == AllowLargeObjects::kTrue) {
+ allocate_builtin = __ AllocateInOldGenerationStubConstant();
+ } else {
+ allocate_builtin = __ AllocateRegularInOldGenerationStubConstant();
+ }
+ }
+
// Determine the top/limit addresses.
Node* top_address = __ ExternalConstant(
allocation_type == AllocationType::kYoung
@@ -371,11 +392,6 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
__ Bind(&call_runtime);
{
- Node* target = allocation_type == AllocationType::kYoung
- ? __
- AllocateInYoungGenerationStubConstant()
- : __
- AllocateInOldGenerationStubConstant();
if (!allocate_operator_.is_set()) {
auto descriptor = AllocateDescriptor{};
auto call_descriptor = Linkage::GetStubCallDescriptor(
@@ -384,7 +400,7 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
allocate_operator_.set(common()->Call(call_descriptor));
}
Node* vfalse = __ BitcastTaggedToWord(
- __ Call(allocate_operator_.get(), target, size));
+ __ Call(allocate_operator_.get(), allocate_builtin, size));
vfalse = __ IntSub(vfalse, __ IntPtrConstant(kHeapObjectTag));
__ Goto(&done, vfalse);
}
@@ -434,11 +450,6 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
__ IntAdd(top, __ IntPtrConstant(kHeapObjectTag))));
__ Bind(&call_runtime);
- Node* target = allocation_type == AllocationType::kYoung
- ? __
- AllocateInYoungGenerationStubConstant()
- : __
- AllocateInOldGenerationStubConstant();
if (!allocate_operator_.is_set()) {
auto descriptor = AllocateDescriptor{};
auto call_descriptor = Linkage::GetStubCallDescriptor(
@@ -446,7 +457,7 @@ void MemoryOptimizer::VisitAllocateRaw(Node* node,
CallDescriptor::kCanUseRoots, Operator::kNoThrow);
allocate_operator_.set(common()->Call(call_descriptor));
}
- __ Goto(&done, __ Call(allocate_operator_.get(), target, size));
+ __ Goto(&done, __ Call(allocate_operator_.get(), allocate_builtin, size));
__ Bind(&done);
value = done.PhiAt(0);
@@ -483,8 +494,6 @@ void MemoryOptimizer::VisitLoadFromObject(Node* node,
AllocationState const* state) {
DCHECK_EQ(IrOpcode::kLoadFromObject, node->opcode());
ObjectAccess const& access = ObjectAccessOf(node->op());
- Node* offset = node->InputAt(1);
- node->ReplaceInput(1, __ IntSub(offset, __ IntPtrConstant(kHeapObjectTag)));
NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
EnqueueUses(node, state);
}
@@ -494,9 +503,7 @@ void MemoryOptimizer::VisitStoreToObject(Node* node,
DCHECK_EQ(IrOpcode::kStoreToObject, node->opcode());
ObjectAccess const& access = ObjectAccessOf(node->op());
Node* object = node->InputAt(0);
- Node* offset = node->InputAt(1);
Node* value = node->InputAt(2);
- node->ReplaceInput(1, __ IntSub(offset, __ IntPtrConstant(kHeapObjectTag)));
WriteBarrierKind write_barrier_kind = ComputeWriteBarrierKind(
node, object, value, state, access.write_barrier_kind);
NodeProperties::ChangeOp(