about summary refs log tree commit diff
path: root/deps/v8/src/objects/map-inl.h
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/objects/map-inl.h')
-rw-r--r--  deps/v8/src/objects/map-inl.h  99
1 file changed, 45 insertions(+), 54 deletions(-)
diff --git a/deps/v8/src/objects/map-inl.h b/deps/v8/src/objects/map-inl.h
index 338ac78ec4..c91902f130 100644
--- a/deps/v8/src/objects/map-inl.h
+++ b/deps/v8/src/objects/map-inl.h
@@ -8,12 +8,12 @@
#include "src/objects/map.h"
#include "src/field-type.h"
-#include "src/heap/heap-inl.h"
+#include "src/heap/heap-write-barrier-inl.h"
#include "src/layout-descriptor-inl.h"
#include "src/objects-inl.h"
#include "src/objects/api-callbacks-inl.h"
#include "src/objects/cell-inl.h"
-#include "src/objects/descriptor-array.h"
+#include "src/objects/descriptor-array-inl.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/prototype-info-inl.h"
#include "src/objects/shared-function-info.h"
@@ -102,19 +102,9 @@ InterceptorInfo Map::GetIndexedInterceptor() {
return InterceptorInfo::cast(info->GetIndexedPropertyHandler());
}
-bool Map::IsInplaceGeneralizableField(PropertyConstness constness,
- Representation representation,
- FieldType field_type) {
- if (FLAG_track_constant_fields && FLAG_modify_map_inplace &&
- (constness == PropertyConstness::kConst)) {
- // VariableMode::kConst -> PropertyConstness::kMutable field generalization
- // may happen in-place.
- return true;
- }
- if (representation.IsHeapObject() && !field_type->IsAny()) {
- return true;
- }
- return false;
+bool Map::IsMostGeneralFieldType(Representation representation,
+ FieldType field_type) {
+ return !representation.IsHeapObject() || field_type->IsAny();
}
bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
@@ -135,13 +125,7 @@ void Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
// kind transitions because they are inserted into the transition tree
// before field transitions. In order to avoid complexity of handling
// such a case we ensure that all maps with transitionable elements kinds
- // do not have fields that can be generalized in-place (without creation
- // of a new map).
- if (FLAG_track_constant_fields && FLAG_modify_map_inplace) {
- // The constness is either already PropertyConstness::kMutable or should
- // become PropertyConstness::kMutable if it was VariableMode::kConst.
- *constness = PropertyConstness::kMutable;
- }
+ // have the most general field type.
if (representation->IsHeapObject()) {
// The field type is either already Any or should become Any if it was
// something else.
@@ -213,32 +197,33 @@ FixedArrayBase Map::GetInitialElements() const {
} else if (has_fast_sloppy_arguments_elements()) {
result = GetReadOnlyRoots().empty_sloppy_arguments_elements();
} else if (has_fixed_typed_array_elements()) {
- result = GetReadOnlyRoots().EmptyFixedTypedArrayForMap(*this);
+ result =
+ GetReadOnlyRoots().EmptyFixedTypedArrayForTypedArray(elements_kind());
} else if (has_dictionary_elements()) {
result = GetReadOnlyRoots().empty_slow_element_dictionary();
} else {
UNREACHABLE();
}
- DCHECK(!Heap::InNewSpace(result));
+ DCHECK(!ObjectInYoungGeneration(result));
return result;
}
VisitorId Map::visitor_id() const {
return static_cast<VisitorId>(
- RELAXED_READ_BYTE_FIELD(this, kVisitorIdOffset));
+ RELAXED_READ_BYTE_FIELD(*this, kVisitorIdOffset));
}
void Map::set_visitor_id(VisitorId id) {
CHECK_LT(static_cast<unsigned>(id), 256);
- RELAXED_WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
+ RELAXED_WRITE_BYTE_FIELD(*this, kVisitorIdOffset, static_cast<byte>(id));
}
int Map::instance_size_in_words() const {
- return RELAXED_READ_BYTE_FIELD(this, kInstanceSizeInWordsOffset);
+ return RELAXED_READ_BYTE_FIELD(*this, kInstanceSizeInWordsOffset);
}
void Map::set_instance_size_in_words(int value) {
- RELAXED_WRITE_BYTE_FIELD(this, kInstanceSizeInWordsOffset,
+ RELAXED_WRITE_BYTE_FIELD(*this, kInstanceSizeInWordsOffset,
static_cast<byte>(value));
}
@@ -255,14 +240,14 @@ void Map::set_instance_size(int value) {
int Map::inobject_properties_start_or_constructor_function_index() const {
return RELAXED_READ_BYTE_FIELD(
- this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
+ *this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
}
void Map::set_inobject_properties_start_or_constructor_function_index(
int value) {
CHECK_LT(static_cast<unsigned>(value), 256);
RELAXED_WRITE_BYTE_FIELD(
- this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
+ *this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
static_cast<byte>(value));
}
@@ -305,11 +290,11 @@ Handle<Map> Map::AddMissingTransitionsForTesting(
InstanceType Map::instance_type() const {
return static_cast<InstanceType>(
- READ_UINT16_FIELD(this, kInstanceTypeOffset));
+ READ_UINT16_FIELD(*this, kInstanceTypeOffset));
}
void Map::set_instance_type(InstanceType value) {
- WRITE_UINT16_FIELD(this, kInstanceTypeOffset, value);
+ WRITE_UINT16_FIELD(*this, kInstanceTypeOffset, value);
}
int Map::UnusedPropertyFields() const {
@@ -338,12 +323,12 @@ int Map::UnusedInObjectProperties() const {
}
int Map::used_or_unused_instance_size_in_words() const {
- return RELAXED_READ_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset);
+ return RELAXED_READ_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset);
}
void Map::set_used_or_unused_instance_size_in_words(int value) {
CHECK_LE(static_cast<unsigned>(value), 255);
- RELAXED_WRITE_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset,
+ RELAXED_WRITE_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset,
static_cast<byte>(value));
}
@@ -431,16 +416,18 @@ void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}
-byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }
+byte Map::bit_field() const { return READ_BYTE_FIELD(*this, kBitFieldOffset); }
void Map::set_bit_field(byte value) {
- WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
+ WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}
-byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }
+byte Map::bit_field2() const {
+ return READ_BYTE_FIELD(*this, kBitField2Offset);
+}
void Map::set_bit_field2(byte value) {
- WRITE_BYTE_FIELD(this, kBitField2Offset, value);
+ WRITE_BYTE_FIELD(*this, kBitField2Offset, value);
}
bool Map::is_abandoned_prototype_map() const {
@@ -565,7 +552,7 @@ bool Map::IsPrimitiveMap() const {
return instance_type() <= LAST_PRIMITIVE_TYPE;
}
-Object Map::prototype() const { return READ_FIELD(this, kPrototypeOffset); }
+Object Map::prototype() const { return READ_FIELD(*this, kPrototypeOffset); }
void Map::set_prototype(Object value, WriteBarrierMode mode) {
DCHECK(value->IsNull() || value->IsJSReceiver());
@@ -631,14 +618,18 @@ void Map::InitializeDescriptors(Isolate* isolate, DescriptorArray descriptors,
}
void Map::set_bit_field3(uint32_t bits) {
- if (kInt32Size != kTaggedSize) {
- RELAXED_WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
- }
- RELAXED_WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
+ RELAXED_WRITE_UINT32_FIELD(*this, kBitField3Offset, bits);
}
uint32_t Map::bit_field3() const {
- return RELAXED_READ_UINT32_FIELD(this, kBitField3Offset);
+ return RELAXED_READ_UINT32_FIELD(*this, kBitField3Offset);
+}
+
+void Map::clear_padding() {
+ if (FIELD_SIZE(kOptionalPaddingOffset) == 0) return;
+ DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
+ memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
+ FIELD_SIZE(kOptionalPaddingOffset));
}
LayoutDescriptor Map::GetLayoutDescriptor() const {
@@ -694,7 +685,7 @@ Map Map::ElementsTransitionMap() {
Object Map::prototype_info() const {
DCHECK(is_prototype_map());
- return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
+ return READ_FIELD(*this, Map::kTransitionsOrPrototypeInfoOffset);
}
void Map::set_prototype_info(Object value, WriteBarrierMode mode) {
@@ -782,23 +773,23 @@ int Map::SlackForArraySize(int old_size, int size_limit) {
return Min(max_slack, old_size / 4);
}
+int Map::InstanceSizeFromSlack(int slack) const {
+ return instance_size() - slack * kTaggedSize;
+}
+
+OBJECT_CONSTRUCTORS_IMPL(NormalizedMapCache, WeakFixedArray)
+CAST_ACCESSOR(NormalizedMapCache)
NEVER_READ_ONLY_SPACE_IMPL(NormalizedMapCache)
int NormalizedMapCache::GetIndex(Handle<Map> map) {
return map->Hash() % NormalizedMapCache::kEntries;
}
-bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject obj) {
- if (!obj->IsWeakFixedArray()) return false;
- if (WeakFixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
+bool HeapObject::IsNormalizedMapCache() const {
+ if (!IsWeakFixedArray()) return false;
+ if (WeakFixedArray::cast(*this)->length() != NormalizedMapCache::kEntries) {
return false;
}
-#ifdef VERIFY_HEAP
- if (FLAG_verify_heap) {
- NormalizedMapCache cache = NormalizedMapCache::cast(obj);
- cache->NormalizedMapCacheVerify(cache->GetIsolate());
- }
-#endif
return true;
}