Diffstat (limited to 'deps/v8/src/objects-inl.h')
-rw-r--r--  deps/v8/src/objects-inl.h  |  458
1 file changed, 214 insertions(+), 244 deletions(-)
diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h
index 66d258f128..406f49d996 100644
--- a/deps/v8/src/objects-inl.h
+++ b/deps/v8/src/objects-inl.h
@@ -31,13 +31,13 @@
#include "src/lookup-cache-inl.h"
#include "src/lookup.h"
#include "src/objects.h"
-#include "src/objects/hash-table-inl.h"
#include "src/objects/literal-objects.h"
#include "src/objects/module-info.h"
#include "src/objects/regexp-match-info.h"
#include "src/objects/scope-info.h"
#include "src/property.h"
#include "src/prototype.h"
+#include "src/string-hasher-inl.h"
#include "src/transitions-inl.h"
#include "src/v8memory.h"
@@ -140,10 +140,13 @@ int PropertyDetails::field_width_in_words() const {
return map()->instance_type() == instancetype; \
}
+TYPE_CHECKER(BreakPointInfo, TUPLE2_TYPE)
TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
+TYPE_CHECKER(CallHandlerInfo, TUPLE2_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(Code, CODE_TYPE)
+TYPE_CHECKER(ConstantElementsPair, TUPLE2_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(Foreign, FOREIGN_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
@@ -179,8 +182,10 @@ TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
+TYPE_CHECKER(SourcePositionTableWithFrameCache, TUPLE2_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
TYPE_CHECKER(TransitionArray, TRANSITION_ARRAY_TYPE)
+TYPE_CHECKER(TypeFeedbackInfo, TUPLE3_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
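Several of the new checkers above (BreakPointInfo, CallHandlerInfo, ConstantElementsPair, SourcePositionTableWithFrameCache) map distinct C++ types onto the same TUPLE2_TYPE instance type, so the generated predicates cannot tell those structs apart by tag alone. A minimal standalone analogue of the TYPE_CHECKER pattern, with hypothetical names rather than V8's real classes, illustrates why:

    #include <cassert>

    enum InstanceType { TUPLE2_TYPE, BYTE_ARRAY_TYPE };

    struct HeapObjectSketch {
      InstanceType instance_type;
    // Stamps out one Is<Name>() predicate per line, mirroring the macro
    // body shown in the hunk above.
    #define TYPE_CHECKER(Name, TYPE) \
      bool Is##Name() const { return instance_type == TYPE; }
      TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
      TYPE_CHECKER(BreakPointInfo, TUPLE2_TYPE)
      TYPE_CHECKER(CallHandlerInfo, TUPLE2_TYPE)
    #undef TYPE_CHECKER
    };

    int main() {
      HeapObjectSketch o{TUPLE2_TYPE};
      // Both predicates are true: the instance type alone cannot
      // distinguish the two tuple-backed structs.
      assert(o.IsBreakPointInfo() && o.IsCallHandlerInfo());
    }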
@@ -203,6 +208,10 @@ bool HeapObject::IsFixedArray() const {
bool HeapObject::IsSloppyArgumentsElements() const { return IsFixedArray(); }
+bool HeapObject::IsJSSloppyArgumentsObject() const {
+ return IsJSArgumentsObject();
+}
+
bool HeapObject::IsJSGeneratorObject() const {
return map()->instance_type() == JS_GENERATOR_OBJECT_TYPE ||
IsJSAsyncGeneratorObject();
@@ -514,24 +523,6 @@ bool HeapObject::IsNormalizedMapCache() const {
return NormalizedMapCache::IsNormalizedMapCache(this);
}
-int NormalizedMapCache::GetIndex(Handle<Map> map) {
- return map->Hash() % NormalizedMapCache::kEntries;
-}
-
-bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) {
- if (!obj->IsFixedArray()) return false;
- if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
- return false;
- }
-#ifdef VERIFY_HEAP
- if (FLAG_verify_heap) {
- reinterpret_cast<NormalizedMapCache*>(const_cast<HeapObject*>(obj))
- ->NormalizedMapCacheVerify();
- }
-#endif
- return true;
-}
-
bool HeapObject::IsCompilationCacheTable() const { return IsHashTable(); }
bool HeapObject::IsCodeCacheHashTable() const { return IsHashTable(); }
@@ -613,11 +604,14 @@ bool Object::IsMinusZero() const {
CAST_ACCESSOR(AbstractCode)
CAST_ACCESSOR(ArrayList)
CAST_ACCESSOR(BoilerplateDescription)
+CAST_ACCESSOR(BreakPointInfo)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(BytecodeArray)
+CAST_ACCESSOR(CallHandlerInfo)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(ConsString)
+CAST_ACCESSOR(ConstantElementsPair)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
@@ -633,6 +627,7 @@ CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(GlobalDictionary)
CAST_ACCESSOR(HandlerTable)
CAST_ACCESSOR(HeapObject)
+CAST_ACCESSOR(JSArgumentsObject);
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
@@ -656,6 +651,7 @@ CAST_ACCESSOR(JSPromiseCapability)
CAST_ACCESSOR(JSPromise)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
+CAST_ACCESSOR(JSSloppyArgumentsObject)
CAST_ACCESSOR(JSAsyncFromSyncIterator)
CAST_ACCESSOR(JSStringIterator)
CAST_ACCESSOR(JSArrayIterator)
@@ -665,7 +661,6 @@ CAST_ACCESSOR(JSWeakCollection)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
-CAST_ACCESSOR(Map)
CAST_ACCESSOR(ModuleInfo)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(NameDictionary)
@@ -685,7 +680,9 @@ CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
+CAST_ACCESSOR(SourcePositionTableWithFrameCache)
CAST_ACCESSOR(SlicedString)
+CAST_ACCESSOR(SloppyArgumentsElements)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringSet)
@@ -694,11 +691,11 @@ CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(TemplateInfo)
CAST_ACCESSOR(ThinString)
+CAST_ACCESSOR(TypeFeedbackInfo)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakHashTable)
-CAST_ACCESSOR(SloppyArgumentsElements)
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
STRUCT_LIST(MAKE_STRUCT_CAST)
@@ -832,6 +829,11 @@ bool String::HasOnlyOneByteChars() {
IsOneByteRepresentation();
}
+bool StringShape::HasOnlyOneByteChars() {
+ return (type_ & kStringEncodingMask) == kOneByteStringTag ||
+ (type_ & kOneByteDataHintMask) == kOneByteDataHintTag;
+}
+
bool StringShape::IsCons() {
return (type_ & kStringRepresentationMask) == kConsStringTag;
}
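The new StringShape::HasOnlyOneByteChars mirrors the String method above it: a string qualifies if it is one-byte encoded outright, or if a data hint marks its contents as fitting in one byte even though it is stored two-byte. A standalone sketch of the mask-and-tag test, using made-up bit assignments (V8's real constants are defined elsewhere in objects.h):

    #include <cassert>
    #include <cstdint>

    // Hypothetical layout: bit 0 = encoding, bit 1 = one-byte data hint.
    constexpr uint32_t kStringEncodingMask = 1u << 0;
    constexpr uint32_t kOneByteStringTag = 1u << 0;
    constexpr uint32_t kOneByteDataHintMask = 1u << 1;
    constexpr uint32_t kOneByteDataHintTag = 1u << 1;

    bool HasOnlyOneByteChars(uint32_t type) {
      return (type & kStringEncodingMask) == kOneByteStringTag ||
             (type & kOneByteDataHintMask) == kOneByteDataHintTag;
    }

    int main() {
      assert(HasOnlyOneByteChars(kOneByteStringTag));    // one-byte encoded
      assert(HasOnlyOneByteChars(kOneByteDataHintTag));  // two-byte, hinted
      assert(!HasOnlyOneByteChars(0));                   // two-byte, no hint
    }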
@@ -965,6 +967,14 @@ class SeqOneByteSubStringKey : public HashTableKey {
DCHECK(string_->IsSeqOneByteString());
}
+// VS 2017 on official builds gives this spurious warning:
+// warning C4789: buffer 'key' of size 16 bytes will be overrun; 4 bytes will
+// be written starting at offset 16
+// https://bugs.chromium.org/p/v8/issues/detail?id=6068
+#if defined(V8_CC_MSVC)
+#pragma warning(push)
+#pragma warning(disable : 4789)
+#endif
uint32_t Hash() override {
DCHECK(length_ >= 0);
DCHECK(from_ + length_ <= string_->length());
@@ -975,6 +985,9 @@ class SeqOneByteSubStringKey : public HashTableKey {
DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
return result;
}
+#if defined(V8_CC_MSVC)
+#pragma warning(pop)
+#endif
uint32_t HashForObject(Object* other) override {
return String::cast(other)->Hash();
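The push/disable/pop pattern above scopes the suppression to just the offending function, so warning C4789 stays active for the rest of the translation unit. The generic shape, reusable for any spurious MSVC diagnostic (_MSC_VER is the compiler's standard predefine; V8 wraps it as V8_CC_MSVC):

    #if defined(_MSC_VER)
    #pragma warning(push)            // save the current warning state
    #pragma warning(disable : 4789)  // silence the spurious diagnostic
    #endif
    void FunctionThatTripsTheWarning() {
      // ... code the compiler misanalyzes ...
    }
    #if defined(_MSC_VER)
    #pragma warning(pop)             // restore the saved state
    #endif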
@@ -1090,9 +1103,10 @@ bool Object::ToUint32(uint32_t* value) {
// static
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
- Handle<Object> object) {
+ Handle<Object> object,
+ const char* method_name) {
if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
- return ToObject(isolate, object, isolate->native_context());
+ return ToObject(isolate, object, isolate->native_context(), method_name);
}
@@ -1270,8 +1284,15 @@ bool JSObject::PrototypeHasNoElements(Isolate* isolate, JSObject* object) {
reinterpret_cast<Object*>(base::NoBarrier_Load( \
reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
+#ifdef V8_CONCURRENT_MARKING
+#define WRITE_FIELD(p, offset, value) \
+ base::NoBarrier_Store( \
+ reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
+ reinterpret_cast<base::AtomicWord>(value));
+#else
#define WRITE_FIELD(p, offset, value) \
(*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
+#endif
#define RELEASE_WRITE_FIELD(p, offset, value) \
base::Release_Store( \
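WRITE_FIELD now has two shapes: under V8_CONCURRENT_MARKING the plain pointer assignment becomes a relaxed atomic store, so marker threads running concurrently with the mutator never observe a torn pointer. A standalone C++11 sketch of the two variants, where std::atomic stands in for base::NoBarrier_Store (likewise a relaxed store):

    #include <atomic>

    // Plain store: adequate when no other thread can read the slot.
    inline void WriteFieldPlain(void** slot, void* value) { *slot = value; }

    // Relaxed atomic store: imposes no ordering, but the write itself is
    // indivisible, so a concurrent reader sees either the old or the new
    // pointer, never a mix of the two.
    inline void WriteFieldRelaxed(std::atomic<void*>* slot, void* value) {
      slot->store(value, std::memory_order_relaxed);
    }

    int main() {
      int x = 0, y = 0;
      void* plain_slot = &x;
      std::atomic<void*> atomic_slot{&x};
      WriteFieldPlain(&plain_slot, &y);
      WriteFieldRelaxed(&atomic_slot, &y);
    }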
@@ -1445,21 +1466,22 @@ Isolate* HeapObject::GetIsolate() const {
return GetHeap()->isolate();
}
-
Map* HeapObject::map() const {
return map_word().ToMap();
}
void HeapObject::set_map(Map* value) {
+ if (value != nullptr) {
+#ifdef VERIFY_HEAP
+ value->GetHeap()->VerifyObjectLayoutChange(this, value);
+#endif
+ }
set_map_word(MapWord::FromMap(value));
if (value != nullptr) {
// TODO(1600) We are passing NULL as a slot because maps can never be on
// evacuation candidate.
value->GetHeap()->incremental_marking()->RecordWrite(this, nullptr, value);
-#ifdef VERIFY_HEAP
- value->GetHeap()->VerifyObjectLayoutChange(this, value);
-#endif
}
}
@@ -1470,28 +1492,43 @@ Map* HeapObject::synchronized_map() {
void HeapObject::synchronized_set_map(Map* value) {
- synchronized_set_map_word(MapWord::FromMap(value));
if (value != nullptr) {
- // TODO(1600) We are passing NULL as a slot because maps can never be on
- // evacuation candidate.
- value->GetHeap()->incremental_marking()->RecordWrite(this, nullptr, value);
#ifdef VERIFY_HEAP
value->GetHeap()->VerifyObjectLayoutChange(this, value);
#endif
}
-}
-
-
-void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
synchronized_set_map_word(MapWord::FromMap(value));
+ if (value != nullptr) {
+ // TODO(1600) We are passing NULL as a slot because maps can never be on
+ // evacuation candidate.
+ value->GetHeap()->incremental_marking()->RecordWrite(this, nullptr, value);
+ }
}
// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
+ if (value != nullptr) {
+#ifdef VERIFY_HEAP
+ value->GetHeap()->VerifyObjectLayoutChange(this, value);
+#endif
+ }
set_map_word(MapWord::FromMap(value));
}
+void HeapObject::set_map_after_allocation(Map* value, WriteBarrierMode mode) {
+ set_map_word(MapWord::FromMap(value));
+ if (mode != SKIP_WRITE_BARRIER) {
+ DCHECK(value != nullptr);
+ // TODO(1600) We are passing NULL as a slot because maps can never be on
+ // evacuation candidate.
+ value->GetHeap()->incremental_marking()->RecordWrite(this, nullptr, value);
+ }
+}
+
+HeapObject** HeapObject::map_slot() {
+ return reinterpret_cast<HeapObject**>(FIELD_ADDR(this, kMapOffset));
+}
MapWord HeapObject::map_word() const {
return MapWord(
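After this change all three map setters follow the same ordering: verify the layout change first, while the object still carries its old map word, then install the new map, and only then (where applicable) record the incremental-marking write barrier; previously synchronized_set_map stored the map word before verifying. A condensed sketch of that ordering, with hypothetical hooks standing in for VerifyObjectLayoutChange and RecordWrite:

    struct Map {};

    // Hypothetical stand-ins for heap verification and the incremental
    // marker's write barrier.
    void VerifyLayoutChange(Map* new_map) { (void)new_map; }
    void RecordWriteBarrier(Map* new_map) { (void)new_map; }

    struct HeapObjectSketch {
      Map* map_word = nullptr;
      void set_map(Map* value, bool emit_write_barrier) {
        // 1. Verify against the old state, before the map word changes.
        if (value != nullptr) VerifyLayoutChange(value);
        // 2. Install the new map word.
        map_word = value;
        // 3. Barrier last, once the object is fully updated.
        if (value != nullptr && emit_write_barrier) RecordWriteBarrier(value);
      }
    };

    int main() {
      Map m;
      HeapObjectSketch o;
      o.set_map(&m, /*emit_write_barrier=*/true);
    }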
@@ -1952,18 +1989,6 @@ InterceptorInfo* JSObject::GetNamedInterceptor() {
return map()->GetNamedInterceptor();
}
-InterceptorInfo* Map::GetNamedInterceptor() {
- DCHECK(has_named_interceptor());
- FunctionTemplateInfo* info = GetFunctionTemplateInfo();
- return InterceptorInfo::cast(info->named_property_handler());
-}
-
-InterceptorInfo* Map::GetIndexedInterceptor() {
- DCHECK(has_indexed_interceptor());
- FunctionTemplateInfo* info = GetFunctionTemplateInfo();
- return InterceptorInfo::cast(info->indexed_property_handler());
-}
-
double Oddball::to_number_raw() const {
return READ_DOUBLE_FIELD(this, kToNumberRawOffset);
}
@@ -2718,25 +2743,25 @@ inline int DescriptorArray::number_of_entries() {
bool DescriptorArray::HasEnumCache() {
- return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
+ return !IsEmpty() && !get(kEnumCacheBridgeIndex)->IsSmi();
}
void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
- set(kEnumCacheIndex, array->get(kEnumCacheIndex));
+ set(kEnumCacheBridgeIndex, array->get(kEnumCacheBridgeIndex));
}
FixedArray* DescriptorArray::GetEnumCache() {
DCHECK(HasEnumCache());
- FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
+ FixedArray* bridge = FixedArray::cast(get(kEnumCacheBridgeIndex));
return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
}
bool DescriptorArray::HasEnumIndicesCache() {
if (IsEmpty()) return false;
- Object* object = get(kEnumCacheIndex);
+ Object* object = get(kEnumCacheBridgeIndex);
if (object->IsSmi()) return false;
FixedArray* bridge = FixedArray::cast(object);
return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
@@ -2745,17 +2770,11 @@ bool DescriptorArray::HasEnumIndicesCache() {
FixedArray* DescriptorArray::GetEnumIndicesCache() {
DCHECK(HasEnumIndicesCache());
- FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
+ FixedArray* bridge = FixedArray::cast(get(kEnumCacheBridgeIndex));
return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
}
-Object** DescriptorArray::GetEnumCacheSlot() {
- DCHECK(HasEnumCache());
- return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
- kEnumCacheOffset);
-}
-
// Perform a binary search in a fixed array.
template <SearchMode search_mode, typename T>
int BinarySearch(T* array, Name* name, int valid_entries,
@@ -3092,7 +3111,11 @@ void HashTableBase::ElementsRemoved(int n) {
// static
int HashTableBase::ComputeCapacity(int at_least_space_for) {
- int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
+ // Add 50% slack to make slot collisions sufficiently unlikely.
+ // See matching computation in HashTable::HasSufficientCapacityToAdd().
+ // Must be kept in sync with CodeStubAssembler::HashTableComputeCapacity().
+ int raw_cap = at_least_space_for + (at_least_space_for >> 1);
+ int capacity = base::bits::RoundUpToPowerOfTwo32(raw_cap);
return Max(capacity, kMinCapacity);
}
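The new sizing rule adds 50% slack before rounding up to a power of two, so a table asked to hold n entries is never more than two-thirds full. A standalone sketch with a worked example (RoundUpToPowerOfTwo32 reimplemented here with the usual bit trick, and a hypothetical kMinCapacity; V8 defines its own):

    #include <cassert>
    #include <cstdint>

    // Portable round-up-to-power-of-two for 32-bit values, standing in for
    // base::bits::RoundUpToPowerOfTwo32.
    uint32_t RoundUpToPowerOfTwo32(uint32_t v) {
      v--;
      v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
      return v + 1;
    }

    constexpr int kMinCapacity = 4;  // hypothetical floor

    int ComputeCapacity(int at_least_space_for) {
      // 50% slack keeps the load factor low enough that collisions stay rare.
      int raw_cap = at_least_space_for + (at_least_space_for >> 1);
      int capacity = static_cast<int>(RoundUpToPowerOfTwo32(raw_cap));
      return capacity < kMinCapacity ? kMinCapacity : capacity;
    }

    int main() {
      assert(ComputeCapacity(11) == 16);  // 11 + 5 = 16, already a power of 2
      assert(ComputeCapacity(12) == 32);  // 12 + 6 = 18, rounds up to 32
    }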
@@ -4026,13 +4049,20 @@ int BytecodeArray::parameter_count() const {
ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS(BytecodeArray, handler_table, FixedArray, kHandlerTableOffset)
-ACCESSORS(BytecodeArray, source_position_table, ByteArray,
+ACCESSORS(BytecodeArray, source_position_table, Object,
kSourcePositionTableOffset)
Address BytecodeArray::GetFirstBytecodeAddress() {
return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
+ByteArray* BytecodeArray::SourcePositionTable() {
+ Object* maybe_table = source_position_table();
+ if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
+ DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
+ return SourcePositionTableWithFrameCache::cast(maybe_table)
+ ->source_position_table();
+}
int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
@@ -4040,7 +4070,7 @@ int BytecodeArray::SizeIncludingMetadata() {
int size = BytecodeArraySize();
size += constant_pool()->Size();
size += handler_table()->Size();
- size += source_position_table()->Size();
+ size += SourcePositionTable()->Size();
return size;
}
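The source_position_table field is widened from ByteArray to Object so it can hold either the table itself or a SourcePositionTableWithFrameCache wrapper, and the new SourcePositionTable() accessor unwraps whichever form is present. A minimal standalone analogue of such an either-or field, using std::variant and hypothetical types:

    #include <cassert>
    #include <variant>

    struct ByteArray { int size; };
    struct TableWithFrameCache { ByteArray table; /* plus a frame cache */ };

    // The field can hold either form; the accessor always yields the table.
    using MaybeTable = std::variant<ByteArray, TableWithFrameCache>;

    const ByteArray& SourcePositionTable(const MaybeTable& field) {
      if (auto* plain = std::get_if<ByteArray>(&field)) return *plain;
      return std::get<TableWithFrameCache>(field).table;
    }

    int main() {
      MaybeTable plain = ByteArray{8};
      MaybeTable wrapped = TableWithFrameCache{ByteArray{8}};
      assert(SourcePositionTable(plain).size == 8);
      assert(SourcePositionTable(wrapped).size == 8);
    }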
@@ -4152,50 +4182,56 @@ typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
- DCHECK((index >= 0) && (index < this->length()));
+ CHECK((index >= 0) && (index < this->length()));
ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
ptr[index] = value;
}
-
template <class Traits>
-typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
+typename Traits::ElementType FixedTypedArray<Traits>::from(int value) {
return static_cast<ElementType>(value);
}
-
-template <> inline
-uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
+template <>
+inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(int value) {
if (value < 0) return 0;
if (value > 0xFF) return 0xFF;
return static_cast<uint8_t>(value);
}
+template <class Traits>
+typename Traits::ElementType FixedTypedArray<Traits>::from(uint32_t value) {
+ return static_cast<ElementType>(value);
+}
+
+template <>
+inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(uint32_t value) {
+ // We need this special case for Uint32 -> Uint8Clamped, because the highest
+ // Uint32 values will be negative as an int, clamping to 0, rather than 255.
+ if (value > 0xFF) return 0xFF;
+ return static_cast<uint8_t>(value);
+}
template <class Traits>
-typename Traits::ElementType FixedTypedArray<Traits>::from_double(
- double value) {
+typename Traits::ElementType FixedTypedArray<Traits>::from(double value) {
return static_cast<ElementType>(DoubleToInt32(value));
}
-
-template<> inline
-uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
+template <>
+inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(double value) {
// Handle NaNs and less than zero values which clamp to zero.
if (!(value > 0)) return 0;
if (value > 0xFF) return 0xFF;
return static_cast<uint8_t>(lrint(value));
}
-
-template<> inline
-float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
+template <>
+inline float FixedTypedArray<Float32ArrayTraits>::from(double value) {
return static_cast<float>(value);
}
-
-template<> inline
-double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
+template <>
+inline double FixedTypedArray<Float64ArrayTraits>::from(double value) {
return value;
}
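A standalone sketch of the clamped conversions above, including the uint32_t special case the comment calls out: without a dedicated overload, a Uint32 value above INT_MAX reinterpreted as int would be negative and clamp to 0 instead of 255.

    #include <cassert>
    #include <cmath>
    #include <cstdint>

    uint8_t ClampedFromInt(int value) {
      if (value < 0) return 0;
      if (value > 0xFF) return 0xFF;
      return static_cast<uint8_t>(value);
    }

    uint8_t ClampedFromUint(uint32_t value) {
      // No lower-bound check needed: unsigned values cannot be negative.
      if (value > 0xFF) return 0xFF;
      return static_cast<uint8_t>(value);
    }

    uint8_t ClampedFromDouble(double value) {
      if (!(value > 0)) return 0;  // also catches NaN, which compares false
      if (value > 0xFF) return 0xFF;
      return static_cast<uint8_t>(std::lrint(value));  // round half to even
    }

    int main() {
      assert(ClampedFromInt(-7) == 0);
      assert(ClampedFromUint(0xFFFFFFFFu) == 0xFF);  // as int, would give 0
      assert(ClampedFromDouble(127.5) == 128);
      assert(ClampedFromDouble(std::nan("")) == 0);  // NaN clamps to zero
    }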
@@ -4211,10 +4247,10 @@ void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
ElementType cast_value = Traits::defaultValue();
if (value->IsSmi()) {
int int_value = Smi::cast(value)->value();
- cast_value = from_int(int_value);
+ cast_value = from(int_value);
} else if (value->IsHeapNumber()) {
double double_value = HeapNumber::cast(value)->value();
- cast_value = from_double(double_value);
+ cast_value = from(double_value);
} else {
// Clamp undefined to the default value. All other types have been
// converted to a number type further up in the call chain.
@@ -5253,9 +5289,9 @@ int AbstractCode::instruction_size() {
ByteArray* AbstractCode::source_position_table() {
if (IsCode()) {
- return GetCode()->source_position_table();
+ return GetCode()->SourcePositionTable();
} else {
- return GetBytecodeArray()->source_position_table();
+ return GetBytecodeArray()->SourcePositionTable();
}
}
@@ -5267,6 +5303,20 @@ void AbstractCode::set_source_position_table(ByteArray* source_position_table) {
}
}
+Object* AbstractCode::stack_frame_cache() {
+ Object* maybe_table;
+ if (IsCode()) {
+ maybe_table = GetCode()->source_position_table();
+ } else {
+ maybe_table = GetBytecodeArray()->source_position_table();
+ }
+ if (maybe_table->IsSourcePositionTableWithFrameCache()) {
+ return SourcePositionTableWithFrameCache::cast(maybe_table)
+ ->stack_frame_cache();
+ }
+ return Smi::kZero;
+}
+
int AbstractCode::SizeIncludingMetadata() {
if (IsCode()) {
return GetCode()->SizeIncludingMetadata();
@@ -5464,6 +5514,9 @@ void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
set_constructor_or_backpointer(value, mode);
}
+ACCESSORS(JSArgumentsObject, length, Object, kLengthOffset);
+ACCESSORS(JSSloppyArgumentsObject, callee, Object, kCalleeOffset);
+
ACCESSORS(Map, code_cache, FixedArray, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
@@ -5763,7 +5816,7 @@ ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
ACCESSORS_CHECKED(Script, wasm_compiled_module, Object, kEvalFromSharedOffset,
this->type() == TYPE_WASM)
-ACCESSORS(Script, preparsed_scope_data, FixedTypedArrayBase,
+ACCESSORS(Script, preparsed_scope_data, PodArray<uint32_t>,
kPreParsedScopeDataOffset)
Script::CompilationType Script::compilation_type() {
@@ -5838,15 +5891,19 @@ SMI_ACCESSORS(StackFrameInfo, flag, kFlagIndex)
BOOL_ACCESSORS(StackFrameInfo, flag, is_eval, kIsEvalBit)
BOOL_ACCESSORS(StackFrameInfo, flag, is_constructor, kIsConstructorBit)
BOOL_ACCESSORS(StackFrameInfo, flag, is_wasm, kIsWasmBit)
+SMI_ACCESSORS(StackFrameInfo, id, kIdIndex)
+
+ACCESSORS(SourcePositionTableWithFrameCache, source_position_table, ByteArray,
+ kSourcePositionTableIndex)
+ACCESSORS(SourcePositionTableWithFrameCache, stack_frame_cache,
+ UnseededNumberDictionary, kStackFrameCacheIndex)
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
-ACCESSORS(SharedFunctionInfo, optimized_code_map, FixedArray,
- kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_metadata, FeedbackMetadata,
kFeedbackMetadataOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_literal_id, kFunctionLiteralIdOffset)
-#if TRACE_MAPS
+#if V8_SFI_HAS_UNIQUE_ID
SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
#endif
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
@@ -6138,6 +6195,16 @@ bool SharedFunctionInfo::is_compiled() const {
return code() != builtins->builtin(Builtins::kCompileLazy);
}
+int SharedFunctionInfo::GetLength() const {
+ DCHECK(is_compiled());
+ DCHECK(HasLength());
+ return length();
+}
+
+bool SharedFunctionInfo::HasLength() const {
+ DCHECK_IMPLIES(length() < 0, length() == kInvalidLength);
+ return length() != kInvalidLength;
+}
bool SharedFunctionInfo::has_simple_parameters() {
return scope_info()->HasSimpleParameters();
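GetLength/HasLength introduce a sentinel protocol: length() holds kInvalidLength until the function is compiled, and callers must check HasLength() before reading. A minimal sketch of that protocol with a hypothetical sentinel value (the real kInvalidLength is defined in objects.h):

    #include <cassert>

    constexpr int kInvalidLength = -1;  // hypothetical sentinel

    struct FunctionInfoSketch {
      int length = kInvalidLength;
      bool HasLength() const { return length != kInvalidLength; }
      int GetLength() const {
        assert(HasLength());  // mirrors the DCHECKs above
        return length;
      }
    };

    int main() {
      FunctionInfoSketch f;
      assert(!f.HasLength());
      f.length = 2;
      assert(f.GetLength() == 2);
    }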
@@ -6349,10 +6416,6 @@ bool SharedFunctionInfo::IsSubjectToDebugging() {
return IsUserJavaScript() && !HasAsmWasmData();
}
-bool SharedFunctionInfo::OptimizedCodeMapIsCleared() const {
- return optimized_code_map() == GetHeap()->empty_fixed_array();
-}
-
FeedbackVector* JSFunction::feedback_vector() const {
DCHECK(feedback_vector_cell()->value()->IsFeedbackVector());
return FeedbackVector::cast(feedback_vector_cell()->value());
@@ -6436,14 +6499,24 @@ void JSFunction::set_code_no_write_barrier(Code* value) {
WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
+void JSFunction::ClearOptimizedCodeSlot(const char* reason) {
+ if (has_feedback_vector() && feedback_vector()->has_optimized_code()) {
+ if (FLAG_trace_opt) {
+ PrintF("[evicting entry from optimizing code feedback slot (%s) for ",
+ reason);
+ shared()->ShortPrint();
+ PrintF("]\n");
+ }
+ feedback_vector()->ClearOptimizedCode();
+ }
+}
void JSFunction::ReplaceCode(Code* code) {
bool was_optimized = IsOptimized();
bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
if (was_optimized && is_optimized) {
- shared()->EvictFromOptimizedCodeMap(this->code(),
- "Replacing with another optimized code");
+ ClearOptimizedCodeSlot("Replacing with another optimized code");
}
set_code(code);
@@ -6666,7 +6739,7 @@ INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
CODE_ACCESSORS(relocation_info, ByteArray, kRelocationInfoOffset)
CODE_ACCESSORS(handler_table, FixedArray, kHandlerTableOffset)
CODE_ACCESSORS(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
-CODE_ACCESSORS(source_position_table, ByteArray, kSourcePositionTableOffset)
+CODE_ACCESSORS(source_position_table, Object, kSourcePositionTableOffset)
CODE_ACCESSORS(trap_handler_index, Smi, kTrapHandlerIndex)
CODE_ACCESSORS(raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
CODE_ACCESSORS(next_code_link, Object, kNextCodeLinkOffset)
@@ -6699,6 +6772,13 @@ void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
value, mode);
}
+ByteArray* Code::SourcePositionTable() {
+ Object* maybe_table = source_position_table();
+ if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
+ DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
+ return SourcePositionTableWithFrameCache::cast(maybe_table)
+ ->source_position_table();
+}
uint32_t Code::stub_key() {
DCHECK(IsCodeStubOrIC());
@@ -6765,7 +6845,9 @@ int Code::SizeIncludingMetadata() {
size += relocation_info()->Size();
size += deoptimization_data()->Size();
size += handler_table()->Size();
- if (kind() == FUNCTION) size += source_position_table()->Size();
+ if (kind() == FUNCTION) {
+ size += SourcePositionTable()->Size();
+ }
return size;
}
@@ -6822,6 +6904,31 @@ void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
+void* JSArrayBuffer::allocation_base() const {
+ intptr_t ptr = READ_INTPTR_FIELD(this, kAllocationBaseOffset);
+ return reinterpret_cast<void*>(ptr);
+}
+
+void JSArrayBuffer::set_allocation_base(void* value, WriteBarrierMode mode) {
+ intptr_t ptr = reinterpret_cast<intptr_t>(value);
+ WRITE_INTPTR_FIELD(this, kAllocationBaseOffset, ptr);
+}
+
+size_t JSArrayBuffer::allocation_length() const {
+ return *reinterpret_cast<const size_t*>(
+ FIELD_ADDR_CONST(this, kAllocationLengthOffset));
+}
+
+void JSArrayBuffer::set_allocation_length(size_t value) {
+ (*reinterpret_cast<size_t*>(FIELD_ADDR(this, kAllocationLengthOffset))) =
+ value;
+}
+
+ArrayBuffer::Allocator::AllocationMode JSArrayBuffer::allocation_mode() const {
+ using AllocationMode = ArrayBuffer::Allocator::AllocationMode;
+ return has_guard_region() ? AllocationMode::kReservation
+ : AllocationMode::kNormal;
+}
void JSArrayBuffer::set_bit_field(uint32_t bits) {
if (kInt32Size != kPointerSize) {
@@ -6844,7 +6951,6 @@ bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }
void JSArrayBuffer::set_is_external(bool value) {
- DCHECK(!value || !has_guard_region());
set_bit_field(IsExternal::update(bit_field(), value));
}
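allocation_length is stored as a raw size_t at a fixed byte offset and read back by casting the field address, per the FIELD_ADDR_CONST pattern. A standalone sketch of reading such a field; it deliberately swaps in memcpy, which expresses the same type-pun without the alignment and strict-aliasing hazards of a raw cast (the offset and object layout here are hypothetical):

    #include <cassert>
    #include <cstddef>
    #include <cstring>

    // Hypothetical flat object image with a size_t field at a known offset.
    constexpr size_t kAllocationLengthOffset = 8;

    size_t ReadAllocationLength(const unsigned char* object_base) {
      size_t value;
      // memcpy is the well-defined way to pull a typed field out of raw
      // storage.
      std::memcpy(&value, object_base + kAllocationLengthOffset,
                  sizeof(value));
      return value;
    }

    int main() {
      unsigned char image[32] = {};
      size_t length = 4096;
      std::memcpy(image + kAllocationLengthOffset, &length, sizeof(length));
      assert(ReadAllocationLength(image) == 4096);
    }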
@@ -6874,7 +6980,7 @@ void JSArrayBuffer::set_is_shared(bool value) {
set_bit_field(IsShared::update(bit_field(), value));
}
-bool JSArrayBuffer::has_guard_region() {
+bool JSArrayBuffer::has_guard_region() const {
return HasGuardRegion::decode(bit_field());
}
@@ -6954,8 +7060,18 @@ MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
THROW_NEW_ERROR(isolate, NewTypeError(message), JSTypedArray);
}
- // TODO(caitp): throw if array.[[ViewedArrayBuffer]] is neutered (per v8:4648)
- return Handle<JSTypedArray>::cast(receiver);
+ Handle<JSTypedArray> array = Handle<JSTypedArray>::cast(receiver);
+ if (V8_UNLIKELY(array->WasNeutered())) {
+ const MessageTemplate::Template message =
+ MessageTemplate::kDetachedOperation;
+ Handle<String> operation =
+ isolate->factory()->NewStringFromAsciiChecked(method_name);
+ THROW_NEW_ERROR(isolate, NewTypeError(message, operation), JSTypedArray);
+ }
+
+  // The spec says to return `buffer`, but that may disrupt current
+  // implementations, and it is more useful to return the array for now.
+ return array;
}
#ifdef VERIFY_HEAP
@@ -7206,152 +7322,6 @@ bool Name::IsPrivate() {
return this->IsSymbol() && Symbol::cast(this)->is_private();
}
-
-StringHasher::StringHasher(int length, uint32_t seed)
- : length_(length),
- raw_running_hash_(seed),
- array_index_(0),
- is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
- is_first_char_(true) {
- DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
-}
-
-
-bool StringHasher::has_trivial_hash() {
- return length_ > String::kMaxHashCalcLength;
-}
-
-
-uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
- running_hash += c;
- running_hash += (running_hash << 10);
- running_hash ^= (running_hash >> 6);
- return running_hash;
-}
-
-
-uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
- running_hash += (running_hash << 3);
- running_hash ^= (running_hash >> 11);
- running_hash += (running_hash << 15);
- if ((running_hash & String::kHashBitMask) == 0) {
- return kZeroHash;
- }
- return running_hash;
-}
-
-
-uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
- const uc16* chars, int length) {
- DCHECK_NOT_NULL(chars);
- DCHECK(length >= 0);
- for (int i = 0; i < length; ++i) {
- running_hash = AddCharacterCore(running_hash, *chars++);
- }
- return running_hash;
-}
-
-
-uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
- const char* chars,
- int length) {
- DCHECK_NOT_NULL(chars);
- DCHECK(length >= 0);
- for (int i = 0; i < length; ++i) {
- uint16_t c = static_cast<uint16_t>(*chars++);
- running_hash = AddCharacterCore(running_hash, c);
- }
- return running_hash;
-}
-
-
-void StringHasher::AddCharacter(uint16_t c) {
- // Use the Jenkins one-at-a-time hash function to update the hash
- // for the given character.
- raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
-}
-
-
-bool StringHasher::UpdateIndex(uint16_t c) {
- DCHECK(is_array_index_);
- if (c < '0' || c > '9') {
- is_array_index_ = false;
- return false;
- }
- int d = c - '0';
- if (is_first_char_) {
- is_first_char_ = false;
- if (c == '0' && length_ > 1) {
- is_array_index_ = false;
- return false;
- }
- }
- if (array_index_ > 429496729U - ((d + 3) >> 3)) {
- is_array_index_ = false;
- return false;
- }
- array_index_ = array_index_ * 10 + d;
- return true;
-}
-
-
-template<typename Char>
-inline void StringHasher::AddCharacters(const Char* chars, int length) {
- DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
- int i = 0;
- if (is_array_index_) {
- for (; i < length; i++) {
- AddCharacter(chars[i]);
- if (!UpdateIndex(chars[i])) {
- i++;
- break;
- }
- }
- }
- for (; i < length; i++) {
- DCHECK(!is_array_index_);
- AddCharacter(chars[i]);
- }
-}
-
-
-template <typename schar>
-uint32_t StringHasher::HashSequentialString(const schar* chars,
- int length,
- uint32_t seed) {
- StringHasher hasher(length, seed);
- if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
- return hasher.GetHashField();
-}
-
-
-IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
- : StringHasher(len, seed) {}
-
-
-uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
- IteratingStringHasher hasher(string->length(), seed);
- // Nothing to do.
- if (hasher.has_trivial_hash()) return hasher.GetHashField();
- ConsString* cons_string = String::VisitFlat(&hasher, string);
- if (cons_string == nullptr) return hasher.GetHashField();
- hasher.VisitConsString(cons_string);
- return hasher.GetHashField();
-}
-
-
-void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
- int length) {
- AddCharacters(chars, length);
-}
-
-
-void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
- int length) {
- AddCharacters(chars, length);
-}
-
-
bool Name::AsArrayIndex(uint32_t* index) {
return IsString() && String::cast(this)->AsArrayIndex(index);
}
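The large StringHasher/IteratingStringHasher block deleted above was not dropped from the codebase; it apparently moved to src/string-hasher-inl.h, which this diff starts including near the top. Since AddCharacterCore and GetHashCore are shown in full, here is a standalone version of the Jenkins one-at-a-time hash they implement (V8 additionally masks the result and remaps a hash of 0 to a fixed nonzero value; that detail is omitted here):

    #include <cassert>
    #include <cstdint>
    #include <string>

    // Per-character mixing step of the Jenkins one-at-a-time hash.
    uint32_t AddCharacterCore(uint32_t h, uint16_t c) {
      h += c;
      h += h << 10;
      h ^= h >> 6;
      return h;
    }

    // Finalization step that avalanches the accumulated bits.
    uint32_t GetHashCore(uint32_t h) {
      h += h << 3;
      h ^= h >> 11;
      h += h << 15;
      return h;
    }

    uint32_t HashString(const std::string& s, uint32_t seed) {
      uint32_t h = seed;
      for (unsigned char c : s) h = AddCharacterCore(h, c);
      return GetHashCore(h);
    }

    int main() {
      // The same input and seed always hash alike; varying the seed varies
      // the hash, which is what FLAG_randomize_hashes relies on.
      assert(HashString("hello", 7) == HashString("hello", 7));
    }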