Diffstat (limited to 'deps/v8/test/cctest/test-weakmaps.cc')
-rw-r--r--  deps/v8/test/cctest/test-weakmaps.cc  80
1 file changed, 76 insertions(+), 4 deletions(-)
diff --git a/deps/v8/test/cctest/test-weakmaps.cc b/deps/v8/test/cctest/test-weakmaps.cc
index 56d593628a..7bba7b6486 100644
--- a/deps/v8/test/cctest/test-weakmaps.cc
+++ b/deps/v8/test/cctest/test-weakmaps.cc
@@ -48,11 +48,11 @@ static Handle<JSWeakMap> AllocateJSWeakMap() {

static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
Handle<JSObject> key,
- int value) {
+ Handle<Object> value) {
Handle<ObjectHashTable> table = PutIntoObjectHashTable(
Handle<ObjectHashTable>(ObjectHashTable::cast(weakmap->table())),
Handle<JSObject>(JSObject::cast(*key)),
- Handle<Smi>(Smi::FromInt(value)));
+ value);
weakmap->set_table(*table);
}

@@ -65,6 +65,7 @@ static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {


TEST(Weakness) {
+ FLAG_incremental_marking = false;
LocalContext context;
v8::HandleScope scope;
Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
@@ -83,7 +84,9 @@ TEST(Weakness) {
// Put entry into weak map.
{
v8::HandleScope scope;
- PutIntoWeakMap(weakmap, Handle<JSObject>(JSObject::cast(*key)), 23);
+ PutIntoWeakMap(weakmap,
+ Handle<JSObject>(JSObject::cast(*key)),
+ Handle<Smi>(Smi::FromInt(23)));
}
CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());

@@ -133,7 +136,7 @@ TEST(Shrinking) {
Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
for (int i = 0; i < 32; i++) {
Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
- PutIntoWeakMap(weakmap, object, i);
+ PutIntoWeakMap(weakmap, object, Handle<Smi>(Smi::FromInt(i)));
}
}

@@ -152,3 +155,72 @@ TEST(Shrinking)
// Check shrunk capacity.
CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());
}
+
+
+// Test that weak map values on an evacuation candidate which are not reachable
+// by other paths are correctly recorded in the slots buffer.
+TEST(Regress2060a) {
+ FLAG_always_compact = true;
+ LocalContext context;
+ v8::HandleScope scope;
+ Handle<JSFunction> function =
+ FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());
+ Handle<JSObject> key = FACTORY->NewJSObject(function);
+ Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+
+ // Start second old-space page so that values land on evacuation candidate.
+ Page* first_page = HEAP->old_pointer_space()->anchor()->next_page();
+ FACTORY->NewFixedArray(900 * KB / kPointerSize, TENURED);
+
+ // Fill up weak map with values on an evacuation candidate.
+ {
+ v8::HandleScope scope;
+ for (int i = 0; i < 32; i++) {
+ Handle<JSObject> object = FACTORY->NewJSObject(function, TENURED);
+ CHECK(!HEAP->InNewSpace(object->address()));
+ CHECK(!first_page->Contains(object->address()));
+ PutIntoWeakMap(weakmap, key, object);
+ }
+ }
+
+ // Force compacting garbage collection.
+ CHECK(FLAG_always_compact);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+}
+
+
+// Test that weak map keys on an evacuation candidate which are reachable by
+// other strong paths are correctly recorded in the slots buffer.
+TEST(Regress2060b) {
+ FLAG_always_compact = true;
+#ifdef DEBUG
+ FLAG_verify_heap = true;
+#endif
+ LocalContext context;
+ v8::HandleScope scope;
+ Handle<JSFunction> function =
+ FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());
+
+ // Start second old-space page so that keys land on evacuation candidate.
+ Page* first_page = HEAP->old_pointer_space()->anchor()->next_page();
+ FACTORY->NewFixedArray(900 * KB / kPointerSize, TENURED);
+
+ // Fill up weak map with keys on an evacuation candidate.
+ Handle<JSObject> keys[32];
+ for (int i = 0; i < 32; i++) {
+ keys[i] = FACTORY->NewJSObject(function, TENURED);
+ CHECK(!HEAP->InNewSpace(keys[i]->address()));
+ CHECK(!first_page->Contains(keys[i]->address()));
+ }
+ Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+ for (int i = 0; i < 32; i++) {
+ PutIntoWeakMap(weakmap, keys[i], Handle<Smi>(Smi::FromInt(i)));
+ }
+
+ // Force compacting garbage collection. The subsequent collections are used
+ // to verify that key references were actually updated.
+ CHECK(FLAG_always_compact);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+}