summary refs log tree commit diff
path: root/deps/v8/src/objects/objects-inl.h
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/objects/objects-inl.h')
-rw-r--r-- deps/v8/src/objects/objects-inl.h | 21 ++++++++++++++++++++-
1 file changed, 20 insertions(+), 1 deletion(-)
diff --git a/deps/v8/src/objects/objects-inl.h b/deps/v8/src/objects/objects-inl.h
index cf8c3ffad2..08f4a2b6f0 100644
--- a/deps/v8/src/objects/objects-inl.h
+++ b/deps/v8/src/objects/objects-inl.h
@@ -350,6 +350,13 @@ DEF_GETTER(HeapObject, IsDependentCode, bool) {
return true;
}
+DEF_GETTER(HeapObject, IsOSROptimizedCodeCache, bool) {
+ if (!IsWeakFixedArray(isolate)) return false;
+ // There's actually no way to see the difference between a weak fixed array
+ // and a osr optimized code cache.
+ return true;
+}
+
DEF_GETTER(HeapObject, IsAbstractCode, bool) {
return IsBytecodeArray(isolate) || IsCode(isolate);
}
@@ -411,6 +418,12 @@ DEF_GETTER(HeapObject, IsSmallOrderedHashTable, bool) {
IsSmallOrderedNameDictionary(isolate);
}
+DEF_GETTER(HeapObject, IsWasmExceptionPackage, bool) {
+ // It is not possible to check for the existence of certain properties on the
+ // underlying {JSReceiver} here because that requires calling handlified code.
+ return IsJSReceiver(isolate);
+}
+
bool Object::IsPrimitive() const {
if (IsSmi()) return true;
HeapObject this_heap_object = HeapObject::cast(*this);
@@ -506,7 +519,7 @@ bool Object::IsMinusZero() const {
OBJECT_CONSTRUCTORS_IMPL(RegExpMatchInfo, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(ScopeInfo, FixedArray)
-OBJECT_CONSTRUCTORS_IMPL(BigIntBase, HeapObject)
+OBJECT_CONSTRUCTORS_IMPL(BigIntBase, PrimitiveHeapObject)
OBJECT_CONSTRUCTORS_IMPL(BigInt, BigIntBase)
OBJECT_CONSTRUCTORS_IMPL(FreshlyAllocatedBigInt, BigIntBase)
@@ -756,11 +769,13 @@ void HeapObject::set_map(Map value) {
#endif
}
set_map_word(MapWord::FromMap(value));
+#ifndef V8_DISABLE_WRITE_BARRIERS
if (!value.is_null()) {
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
}
+#endif
}
DEF_GETTER(HeapObject, synchronized_map, Map) {
@@ -774,11 +789,13 @@ void HeapObject::synchronized_set_map(Map value) {
#endif
}
synchronized_set_map_word(MapWord::FromMap(value));
+#ifndef V8_DISABLE_WRITE_BARRIERS
if (!value.is_null()) {
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
}
+#endif
}
// Unsafe accessor omitting write barrier.
@@ -793,12 +810,14 @@ void HeapObject::set_map_no_write_barrier(Map value) {
void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
set_map_word(MapWord::FromMap(value));
+#ifndef V8_DISABLE_WRITE_BARRIERS
if (mode != SKIP_WRITE_BARRIER) {
DCHECK(!value.is_null());
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
}
+#endif
}
ObjectSlot HeapObject::map_slot() const {