author    Fedor Indutny <fedor@indutny.com>  2015-10-27 12:54:42 -0400
committer Fedor Indutny <fedor@indutny.com>  2015-10-27 20:16:57 -0400
commit    32237041b84faa1ddd2caa43c47b8f75135a37f7 (patch)
tree      50676f650b9ca7db538f8fbc88a16fb09aa4a8ea /deps
parent    aaf9b488e28cb904be2ceec032b3fb2dbe532d6d (diff)
deps: backport 8d6a228 from V8 upstream
Original commit message:

  [heap] fix crash during the scavenge of ArrayBuffer

  Scavenger should not attempt to visit ArrayBuffer's storage; it is
  a user-supplied pointer that may have any alignment. Visiting it may
  result in a crash.

  BUG=
  R=jochen

  Review URL: https://codereview.chromium.org/1406133003

  Cr-Commit-Position: refs/heads/master@{#31611}

PR-URL: https://github.com/nodejs/node/pull/3549
Reviewed-By: Trevor Norris <trev.norris@gmail.com>
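For readers unfamiliar with the failure mode: the backing store of an externalized ArrayBuffer is a raw, user-supplied pointer, so interpreting it as a tagged heap pointer during a scavenge is undefined behavior. A minimal sketch of the scenario, assuming the V8 embedder API of this era (v8::ArrayBuffer::New with an external backing store) and an Isolate with an entered Context; the function name is illustrative only:

```cpp
#include <v8.h>

// Hedged sketch, not the committed test: shows how a raw user pointer
// reaches the heap as an ArrayBuffer backing store. See the regression
// test added to test-api.cc below for the committed reproduction.
void ExternalBufferSketch(v8::Isolate* isolate) {
  v8::HandleScope scope(isolate);
  static uint8_t backing[8];  // arbitrary, possibly unaligned user memory
  v8::Local<v8::ArrayBuffer> ab =
      v8::ArrayBuffer::New(isolate, backing, sizeof(backing));
  // Before this patch, a scavenge that promoted `ab` could visit
  // `backing` as if it were a tagged V8 heap pointer and crash.
  (void)ab;
}
```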
Diffstat (limited to 'deps')
 deps/v8/src/heap/heap.cc        | 100
 deps/v8/src/heap/heap.h         |   3
 deps/v8/test/cctest/test-api.cc |  26
 3 files changed, 93 insertions(+), 36 deletions(-)
diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc
index 6c8c3ce34e..edf9dea31d 100644
--- a/deps/v8/src/heap/heap.cc
+++ b/deps/v8/src/heap/heap.cc
@@ -2016,42 +2016,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
// for pointers to from semispace instead of looking for pointers
// to new space.
DCHECK(!target->IsMap());
- Address obj_address = target->address();
-
- // We are not collecting slots on new space objects during mutation
- // thus we have to scan for pointers to evacuation candidates when we
- // promote objects. But we should not record any slots in non-black
- // objects. Grey object's slots would be rescanned.
- // White object might not survive until the end of collection
- // it would be a violation of the invariant to record it's slots.
- bool record_slots = false;
- if (incremental_marking()->IsCompacting()) {
- MarkBit mark_bit = Marking::MarkBitFrom(target);
- record_slots = Marking::IsBlack(mark_bit);
- }
-#if V8_DOUBLE_FIELDS_UNBOXING
- LayoutDescriptorHelper helper(target->map());
- bool has_only_tagged_fields = helper.all_fields_tagged();
-
- if (!has_only_tagged_fields) {
- for (int offset = 0; offset < size;) {
- int end_of_region_offset;
- if (helper.IsTagged(offset, size, &end_of_region_offset)) {
- IterateAndMarkPointersToFromSpace(
- target, obj_address + offset,
- obj_address + end_of_region_offset, record_slots,
- &ScavengeObject);
- }
- offset = end_of_region_offset;
- }
- } else {
-#endif
- IterateAndMarkPointersToFromSpace(target, obj_address,
- obj_address + size, record_slots,
- &ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
- }
-#endif
+
+ IteratePointersToFromSpace(target, size, &ScavengeObject);
}
}
@@ -5184,6 +5150,68 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
}
+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+ ObjectSlotCallback callback) {
+ Address obj_address = target->address();
+
+ // We are not collecting slots on new space objects during mutation,
+ // thus we have to scan for pointers to evacuation candidates when we
+ // promote objects. But we should not record any slots in non-black
+ // objects. A grey object's slots would be rescanned anyway. A white
+ // object might not survive until the end of the collection; it would
+ // be a violation of the invariant to record its slots.
+ bool record_slots = false;
+ if (incremental_marking()->IsCompacting()) {
+ MarkBit mark_bit = Marking::MarkBitFrom(target);
+ record_slots = Marking::IsBlack(mark_bit);
+ }
+
+ // Do not scavenge JSArrayBuffer's contents
+ switch (target->ContentType()) {
+ case HeapObjectContents::kTaggedValues: {
+ IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
+ record_slots, callback);
+ break;
+ }
+ case HeapObjectContents::kMixedValues: {
+ if (target->IsFixedTypedArrayBase()) {
+ IterateAndMarkPointersToFromSpace(
+ target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+ obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
+ callback);
+ } else if (target->IsJSArrayBuffer()) {
+ IterateAndMarkPointersToFromSpace(
+ target, obj_address,
+ obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+ record_slots, callback);
+ IterateAndMarkPointersToFromSpace(
+ target, obj_address + JSArrayBuffer::kSize, obj_address + size,
+ record_slots, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+ } else if (FLAG_unbox_double_fields) {
+ LayoutDescriptorHelper helper(target->map());
+ DCHECK(!helper.all_fields_tagged());
+
+ for (int offset = 0; offset < size;) {
+ int end_of_region_offset;
+ if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+ IterateAndMarkPointersToFromSpace(
+ target, obj_address + offset,
+ obj_address + end_of_region_offset, record_slots, callback);
+ }
+ offset = end_of_region_offset;
+ }
+#endif
+ }
+ break;
+ }
+ case HeapObjectContents::kRawValues: {
+ break;
+ }
+ }
+}
+
+
void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
IterateStrongRoots(v, mode);
IterateWeakRoots(v, mode);
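The JSArrayBuffer branch in the hunk above is the heart of the fix: it scans the tagged header up to and including byte_length, skips the word holding the raw backing-store pointer, and resumes at kSize for any remaining in-object fields. A small standalone model of that range selection, with hypothetical offsets standing in for V8's real layout constants in objects.h:

```cpp
#include <cstdio>

// Hypothetical offsets for illustration; the real values come from
// V8's JSArrayBuffer layout and are not reproduced here.
constexpr int kPointerSize = 8;
constexpr int kByteLengthOffset = 24;  // assumed: after map/properties/elements
constexpr int kBackingStoreOffset = kByteLengthOffset + kPointerSize;
constexpr int kSize = kBackingStoreOffset + 2 * kPointerSize;  // assumed

int main() {
  int object_size = kSize + 2 * kPointerSize;  // pretend in-object fields
  // Range 1: tagged header fields up to and including byte_length.
  std::printf("scan [%3d, %3d)\n", 0, kByteLengthOffset + kPointerSize);
  // Skipped: backing_store (a raw user pointer) and the bit fields.
  std::printf("skip [%3d, %3d)\n", kByteLengthOffset + kPointerSize, kSize);
  // Range 2: anything past the fixed header is tagged again.
  std::printf("scan [%3d, %3d)\n", kSize, object_size);
  return 0;
}
```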
diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h
index 0da6f5e6c0..a33a98226c 100644
--- a/deps/v8/src/heap/heap.h
+++ b/deps/v8/src/heap/heap.h
@@ -961,6 +961,9 @@ class Heap {
// Iterate pointers to from semispace of new space found in memory interval
// from start to end within |object|.
+ void IteratePointersToFromSpace(HeapObject* target, int size,
+ ObjectSlotCallback callback);
+
void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
Address end, bool record_slots,
ObjectSlotCallback callback);
diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc
index d465f2f87a..e0f9d30b7c 100644
--- a/deps/v8/test/cctest/test-api.cc
+++ b/deps/v8/test/cctest/test-api.cc
@@ -14242,6 +14242,32 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
}
+THREADED_TEST(SkipArrayBufferDuringScavenge) {
+ LocalContext env;
+ v8::Isolate* isolate = env->GetIsolate();
+ v8::HandleScope handle_scope(isolate);
+
+ // Make sure the pointer looks like a heap object
+ Local<v8::Object> tmp = v8::Object::New(isolate);
+ uint8_t* store_ptr =
+ reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
+
+ // Make `store_ptr` point to from space
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+
+ // Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
+ Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
+
+ // Should not crash,
+ // i.e. backing store pointer should not be treated as a heap object pointer
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
+
+ // Use `ab` to silence compiler warning
+ CHECK_EQ(ab->GetContents().Data(), store_ptr);
+}
+
+
THREADED_TEST(SharedUint8Array) {
i::FLAG_harmony_sharedarraybuffer = true;
TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::FixedUint8Array,