// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_MEMORY_OPTIMIZER_H_
#define V8_COMPILER_MEMORY_OPTIMIZER_H_

#include "src/compiler/graph-assembler.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {

class TickCounter;

namespace compiler {

// Forward declarations.
class CommonOperatorBuilder;
struct ElementAccess;
class Graph;
class JSGraph;
class MachineOperatorBuilder;
class Node;
class Operator;

// NodeIds are identifying numbers for nodes that can be used to index auxiliary
// out-of-line data associated with each node.
using NodeId = uint32_t;

// Lowers all simplified memory-access and allocation-related nodes (e.g.
// Allocate, LoadField, StoreField and friends) to machine operators.
// Implicitly performs allocation folding and store write barrier
// elimination.
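//
// A minimal usage sketch (illustration only; the surrounding pipeline setup
// and the PoisoningMitigationLevel value are assumptions, not taken from
// this header):
//
//   MemoryOptimizer optimizer(
//       jsgraph, zone, PoisoningMitigationLevel::kDontPoison,
//       MemoryOptimizer::AllocationFolding::kDoAllocationFolding,
//       "my_function", tick_counter);
//   optimizer.Optimize();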
class MemoryOptimizer final {
 public:
  enum class AllocationFolding { kDoAllocationFolding, kDontAllocationFolding };

  MemoryOptimizer(JSGraph* jsgraph, Zone* zone,
                  PoisoningMitigationLevel poisoning_level,
                  AllocationFolding allocation_folding,
                  const char* function_debug_name, TickCounter* tick_counter);
  ~MemoryOptimizer() = default;

  void Optimize();

 private:
  // An allocation group represents a set of allocations that have been folded
  // together.
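  //
  // A sketch of how a group might be built up (hypothetical driver code; the
  // actual logic lives in memory-optimizer.cc):
  //
  //   AllocationGroup* group = new (zone)
  //       AllocationGroup(first_alloc, AllocationType::kYoung, zone);
  //   group->Add(second_alloc);
  //   DCHECK(group->Contains(second_alloc));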
  class AllocationGroup final : public ZoneObject {
   public:
    AllocationGroup(Node* node, AllocationType allocation, Zone* zone);
    AllocationGroup(Node* node, AllocationType allocation, Node* size,
                    Zone* zone);
    ~AllocationGroup() = default;

    void Add(Node* object);
    bool Contains(Node* object) const;
    bool IsYoungGenerationAllocation() const {
      return allocation() == AllocationType::kYoung;
    }

    AllocationType allocation() const { return allocation_; }
    Node* size() const { return size_; }

   private:
    ZoneSet<NodeId> node_ids_;
    AllocationType const allocation_;
    Node* const size_;

    DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationGroup);
  };

  // An allocation state is propagated on the effect paths through the graph.
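  //
  // States are created through the static factories below; a sketch of the
  // three flavors (illustrative only; the exact semantics live in
  // memory-optimizer.cc):
  //
  //   AllocationState const* empty = AllocationState::Empty(zone);
  //   AllocationState const* open =
  //       AllocationState::Open(group, /*size=*/16, top_node, zone);
  //   AllocationState const* closed = AllocationState::Closed(group, zone);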
  class AllocationState final : public ZoneObject {
   public:
    static AllocationState const* Empty(Zone* zone) {
      return new (zone) AllocationState();
    }
    static AllocationState const* Closed(AllocationGroup* group, Zone* zone) {
      return new (zone) AllocationState(group);
    }
    static AllocationState const* Open(AllocationGroup* group, intptr_t size,
                                       Node* top, Zone* zone) {
      return new (zone) AllocationState(group, size, top);
    }

    bool IsYoungGenerationAllocation() const;

    AllocationGroup* group() const { return group_; }
    Node* top() const { return top_; }
    intptr_t size() const { return size_; }

   private:
    AllocationState();
    explicit AllocationState(AllocationGroup* group);
    AllocationState(AllocationGroup* group, intptr_t size, Node* top);

    AllocationGroup* const group_;
    // The upper bound of the combined allocated object size on the current path
    // (max int if allocation folding is impossible on this path).
    intptr_t const size_;
    Node* const top_;

    DISALLOW_COPY_AND_ASSIGN(AllocationState);
  };

  // An array of allocation states used to collect states on merges.
  using AllocationStates = ZoneVector<AllocationState const*>;

  // We thread through tokens to represent the current state on a given effect
  // path through the graph.
  struct Token {
    Node* node;
    AllocationState const* state;
  };
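
  // A sketch of how tokens could drive the walk (hypothetical loop; the real
  // driver lives in memory-optimizer.cc):
  //
  //   tokens_.push({graph()->start(), empty_state()});
  //   while (!tokens_.empty()) {
  //     Token token = tokens_.front();
  //     tokens_.pop();
  //     VisitNode(token.node, token.state);
  //   }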

  void VisitNode(Node*, AllocationState const*);
  void VisitAllocateRaw(Node*, AllocationState const*);
  void VisitCall(Node*, AllocationState const*);
  void VisitLoadFromObject(Node*, AllocationState const*);
  void VisitLoadElement(Node*, AllocationState const*);
  void VisitLoadField(Node*, AllocationState const*);
  void VisitStoreToObject(Node*, AllocationState const*);
  void VisitStoreElement(Node*, AllocationState const*);
  void VisitStoreField(Node*, AllocationState const*);
  void VisitStore(Node*, AllocationState const*);
  void VisitOtherEffect(Node*, AllocationState const*);

  Node* ComputeIndex(ElementAccess const&, Node*);
  WriteBarrierKind ComputeWriteBarrierKind(Node* node, Node* object,
                                           Node* value,
                                           AllocationState const* state,
                                           WriteBarrierKind);

  AllocationState const* MergeStates(AllocationStates const& states);

  void EnqueueMerge(Node*, int, AllocationState const*);
  void EnqueueUses(Node*, AllocationState const*);
  void EnqueueUse(Node*, int, AllocationState const*);

  bool NeedsPoisoning(LoadSensitivity load_sensitivity) const;

  // Returns true if the AllocationType of the current AllocateRaw node that we
  // are visiting needs to be updated to kOld, due to propagation of tenuring
  // from outer to inner allocations.
  bool AllocationTypeNeedsUpdateToOld(Node* const user, const Edge edge);
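
  // Illustrative example (a reading of the comment above, not taken from the
  // implementation):
  //
  //   outer = AllocateRaw[kOld](...)
  //   inner = AllocateRaw[kYoung](...)
  //   StoreField(outer, ..., inner)   // <- user/edge under inspection
  //
  // Here the inner allocation inherits the outer allocation's tenuring and is
  // retagged to kOld.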

  AllocationState const* empty_state() const { return empty_state_; }
  Graph* graph() const;
  Isolate* isolate() const;
  JSGraph* jsgraph() const { return jsgraph_; }
  CommonOperatorBuilder* common() const;
  MachineOperatorBuilder* machine() const;
  Zone* zone() const { return zone_; }
  GraphAssembler* gasm() { return &graph_assembler_; }

  SetOncePointer<const Operator> allocate_operator_;
  JSGraph* const jsgraph_;
  AllocationState const* const empty_state_;
  ZoneMap<NodeId, AllocationStates> pending_;
  ZoneQueue<Token> tokens_;
  Zone* const zone_;
  GraphAssembler graph_assembler_;
  PoisoningMitigationLevel poisoning_level_;
  AllocationFolding allocation_folding_;
  const char* function_debug_name_;
  TickCounter* const tick_counter_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryOptimizer);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_MEMORY_OPTIMIZER_H_