path: deps/v8/src/heap/objects-visiting.h
Diffstat (limited to 'deps/v8/src/heap/objects-visiting.h')
-rw-r--r--  deps/v8/src/heap/objects-visiting.h | 37
1 file changed, 21 insertions(+), 16 deletions(-)
diff --git a/deps/v8/src/heap/objects-visiting.h b/deps/v8/src/heap/objects-visiting.h
index 1b788e893b..5b150cf199 100644
--- a/deps/v8/src/heap/objects-visiting.h
+++ b/deps/v8/src/heap/objects-visiting.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_VISITING_H_
#include "src/allocation.h"
+#include "src/heap/spaces.h"
#include "src/layout-descriptor.h"
// This file provides base classes and auxiliary methods for defining
@@ -29,6 +30,7 @@ class StaticVisitorBase : public AllStatic {
V(SeqTwoByteString) \
V(ShortcutCandidate) \
V(ByteArray) \
+ V(BytecodeArray) \
V(FreeSpace) \
V(FixedArray) \
V(FixedDoubleArray) \
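
Each V(type) entry in this list is expanded by macro into a visitor id and a dispatch-table slot, so adding V(BytecodeArray) is what gives the new VisitBytecodeArray handlers further down something to be registered under. As a rough sketch of the expansion, assuming the usual VISITOR_ID_LIST pattern used by StaticVisitorBase:

    #define VISITOR_ID_ENUM_DECL(id) kVisit##id,
    enum VisitorId {
      VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)  // yields kVisitBytecodeArray etc.
      kVisitorIdCount
    };
    #undef VISITOR_ID_ENUM_DECL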
@@ -109,11 +111,7 @@ class StaticVisitorBase : public AllStatic {
bool has_unboxed_fields);
// Determine which specialized visitor should be used for a given map.
- static VisitorId GetVisitorId(Map* map) {
- return GetVisitorId(
- map->instance_type(), map->instance_size(),
- FLAG_unbox_double_fields && !map->HasFastPointerLayout());
- }
+ static VisitorId GetVisitorId(Map* map);
// For visitors that allow specialization by size, calculate the VisitorId
// based on size, base visitor id and generic visitor id.
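
GetVisitorId(Map*) is out-of-lined above: the declaration stays in the header while the body moves elsewhere, presumably because it now depends on definitions pulled in via the new src/heap/spaces.h include. A minimal sketch of the relocated definition (likely objects-visiting.cc, which this diff does not show), assuming it keeps the deleted inline body verbatim:

    StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
      return GetVisitorId(
          map->instance_type(), map->instance_size(),
          FLAG_unbox_double_fields && !map->HasFastPointerLayout());
    }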
@@ -150,14 +148,12 @@ class VisitorDispatchTable {
}
}
+ inline Callback GetVisitor(Map* map);
+
inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
return reinterpret_cast<Callback>(callbacks_[id]);
}
- inline Callback GetVisitor(Map* map) {
- return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
- }
-
void Register(StaticVisitorBase::VisitorId id, Callback callback) {
DCHECK(id < StaticVisitorBase::kVisitorIdCount); // id is unsigned.
callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
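
GetVisitor(Map*) gets the same treatment: the declaration stays in VisitorDispatchTable, the body leaves the class definition. A sketch of the out-of-lined version, assuming it keeps the deleted body (its new home, presumably objects-visiting-inl.h or the .cc file, is outside this diff):

    template <typename Callback>
    Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {
      return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
    }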
@@ -214,7 +210,7 @@ class BodyVisitorBase : public AllStatic {
private:
INLINE(static void IterateRawPointers(Heap* heap, HeapObject* object,
int start_offset, int end_offset)) {
- StaticVisitor::VisitPointers(heap,
+ StaticVisitor::VisitPointers(heap, object,
HeapObject::RawField(object, start_offset),
HeapObject::RawField(object, end_offset));
}
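
IterateRawPointers now threads the host object through to VisitPointers, so visitors that record slots can attribute each slot to the object containing it. For orientation, HeapObject::RawField, used above for both bounds, has roughly this shape in objects-inl.h (approximate, not part of this diff):

    Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
      return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
    }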
@@ -296,22 +292,23 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
return table_.GetVisitor(map)(map, obj);
}
- INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
+ INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
+ Object** start, Object** end)) {
for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
}
private:
INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
Heap* heap = map->GetHeap();
- VisitPointers(heap,
+ VisitPointers(heap, object,
HeapObject::RawField(object, JSFunction::kPropertiesOffset),
HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
// Don't visit code entry. We are using this visitor only during scavenges.
VisitPointers(
- heap, HeapObject::RawField(object,
- JSFunction::kCodeEntryOffset + kPointerSize),
+ heap, object, HeapObject::RawField(
+ object, JSFunction::kCodeEntryOffset + kPointerSize),
HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
return JSFunction::kSize;
}
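
Spelling out the field ranges the two VisitPointers calls above cover, derived directly from the offsets in the code:

    // [kPropertiesOffset, kCodeEntryOffset)                       visited
    // [kCodeEntryOffset, kCodeEntryOffset + kPointerSize)         skipped: code entry
    // [kCodeEntryOffset + kPointerSize, kNonWeakFieldsEndOffset)  visited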
@@ -320,6 +317,10 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
}
+ INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object)) {
+ return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
+ }
+
INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
return FixedDoubleArray::SizeFor(length);
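
VisitBytecodeArray mirrors VisitByteArray one hunk up. BytecodeArraySize() itself is defined elsewhere in this tree (objects-inl.h); by analogy with ByteArraySize(), a plausible shape is:

    // Assumed shape, not shown in this diff:
    int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }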
@@ -405,7 +406,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
- INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
+ INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
+ Address entry_address));
INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
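
VisitCodeEntry also gains the host object, matching the VisitPointers change above. A hedged sketch of what its definition in objects-visiting-inl.h (not part of this diff) plausibly becomes, assuming the collector's slot-recording API is extended to take the holder as well:

    template <typename StaticVisitor>
    void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
        Heap* heap, HeapObject* object, Address entry_address) {
      Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
      // Assumed: record the code-entry slot against its holder, then mark
      // the code object itself.
      heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                          code);
      StaticVisitor::MarkObject(heap, code);
    }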
@@ -417,7 +419,6 @@ class StaticMarkingVisitor : public StaticVisitorBase {
// Skip the weak next code link in a code object.
INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}
- // TODO(mstarzinger): This should be made protected once refactoring is done.
// Mark non-optimized code for functions inlined into the given optimized
// code. This will prevent it from being flushed.
static void MarkInlinedFunctionsCode(Heap* heap, Code* code);
@@ -440,6 +441,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
static void MarkMapContents(Heap* heap, Map* map);
static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);
+ // Mark pointers in the optimized code map that should act as strong
+ // references, possibly treating some entries as weak.
+ static void MarkOptimizedCodeMap(Heap* heap, FixedArray* code_map);
+
// Code flushing support.
INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));
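
MarkOptimizedCodeMap is new protected machinery alongside MarkMapContents and MarkTransitionArray; its body is not in this diff. A rough illustrative sketch under stated assumptions (that the context-independent entry lives at SharedFunctionInfo::kSharedCodeIndex and that per-context entries are left to the code-flushing machinery):

    template <typename StaticVisitor>
    void StaticMarkingVisitor<StaticVisitor>::MarkOptimizedCodeMap(
        Heap* heap, FixedArray* code_map) {
      // Keep the code map array itself alive, but only process it once.
      if (!StaticVisitor::MarkObjectWithoutPush(heap, code_map)) return;
      // Assumed: the shared (context-independent) code entry acts as a
      // strong reference; the remaining entries stay weak.
      Object* shared = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
      if (shared->IsCode()) {
        StaticVisitor::MarkObject(heap, HeapObject::cast(shared));
      }
    }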