// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>
#include <utility>

#include "src/v8.h"

#include "src/compilation-cache.h"
#include "src/execution.h"
#include "src/factory.h"
#include "src/global-handles.h"
#include "src/ic/ic.h"
#include "src/macro-assembler.h"
#include "test/cctest/cctest.h"

using namespace v8::internal;

static void CheckMap(Map* map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
#ifdef DEBUG
  CHECK(CcTest::heap()->Contains(map));
#endif
  CHECK_EQ(CcTest::heap()->meta_map(), map->map());
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
}


TEST(HeapMaps) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
}


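// The helpers below check that Execution::ToString produces the expected
// textual representation for oddballs, Smis and heap numbers.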
static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
  CHECK(obj->IsOddball());
  Handle<Object> handle(obj, isolate);
  Object* print_string =
      *Execution::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckSmi(Isolate* isolate, int value, const char* string) {
  Handle<Object> handle(Smi::FromInt(value), isolate);
  Object* print_string =
      *Execution::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckNumber(Isolate* isolate, double value, const char* string) {
  Handle<Object> number = isolate->factory()->NewNumber(value);
  CHECK(number->IsNumber());
  Handle<Object> print_string =
      Execution::ToString(isolate, number).ToHandleChecked();
  CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
#define __ assm.
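// "__" is the conventional V8 shorthand for emitting instructions through the
// local Assembler; here it simply expands to "assm.".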

  Assembler assm(isolate, NULL, 0);

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  CHECK(code->IsCode());

  HeapObject* obj = HeapObject::cast(*code);
  Address obj_addr = obj->address();

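  // Every interior address of the code object should map back to the same
  // Code object via Isolate::FindCodeObject.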
  for (int i = 0; i < obj->Size(); i += kPointerSize) {
    Object* found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(*code, found);
  }

  Handle<Code> copy = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  HeapObject* obj_copy = HeapObject::cast(*copy);
  Object* not_right = isolate->FindCodeObject(obj_copy->address() +
                                              obj_copy->Size() / 2);
  CHECK(not_right != *code);
}


TEST(HandleNull) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope outer_scope(isolate);
  LocalContext context;
  Handle<Object> n(reinterpret_cast<Object*>(NULL), isolate);
  CHECK(!n.is_null());
}


TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());

  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());

#if !defined(V8_TARGET_ARCH_X64) && !defined(V8_TARGET_ARCH_ARM64) && \
    !defined(V8_TARGET_ARCH_MIPS64)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // nan oddball checks
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  v8::Maybe<bool> maybe = JSReceiver::HasOwnProperty(global, object_string);
  CHECK(maybe.has_value);
  CHECK(maybe.value);

  // Check ToString for oddballs
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");

  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}


TEST(Tagging) {
  CcTest::InitializeVM();
  int request = 24;
  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
}


TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  heap->CollectGarbage(NEW_SPACE);

  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function = factory->NewFunction(name);
    JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
    // Allocate an object.  Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(obj, prop_namex).ToHandleChecked());
  }

  heap->CollectGarbage(NEW_SPACE);

  // Function should be alive.
  v8::Maybe<bool> maybe = JSReceiver::HasOwnProperty(global, name);
  CHECK(maybe.has_value);
  CHECK(maybe.value);
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  }

  // After gc, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  maybe = JSReceiver::HasOwnProperty(global, obj_name);
  CHECK(maybe.has_value);
  CHECK(maybe.value);
  Handle<Object> obj =
      Object::GetProperty(global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
}


static void VerifyStringAllocation(Isolate* isolate, const char* string) {
  HandleScope scope(isolate);
  Handle<String> s = isolate->factory()->NewStringFromUtf8(
      CStrVector(string)).ToHandleChecked();
  CHECK_EQ(StrLength(string), s->length());
  for (int index = 0; index < s->length(); index++) {
    CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
  }
}


TEST(String) {
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());

  VerifyStringAllocation(isolate, "a");
  VerifyStringAllocation(isolate, "ab");
  VerifyStringAllocation(isolate, "abc");
  VerifyStringAllocation(isolate, "abcd");
  VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
}


TEST(LocalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope scope(CcTest::isolate());
  const char* name = "Kasper the spunky";
  Handle<String> string = factory->NewStringFromAsciiChecked(name);
  CHECK_EQ(StrLength(name), string->length());
}


TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // after gc, it should survive
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  CHECK_EQ(*h3, *h1);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}


static bool WeakPointerCleared = false;

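// Weak callback shared by the tests below: it receives the
// (persistent handle, id) pair registered via MakeWeak, records that the
// weak pointer was cleared when the id matches, and resets the handle.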
static void TestWeakGlobalHandleCallback(
    const v8::WeakCallbackData<v8::Value, void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
  if (p->second == 1234) WeakPointerCleared = true;
  p->first->Reset();
}


TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scavenge treats weak pointers as normal roots.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}


TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_POINTER_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  CHECK((*h1)->IsString());

  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}


TEST(DeleteWeakGlobalHandle) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    h = global_handles->Create(*i);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scavenge does not recognize weak references.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak references properly.
  heap->CollectGarbage(OLD_POINTER_SPACE);

  CHECK(WeakPointerCleared);
}


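// A table of JavaScript keywords and (future) reserved words, used below to
// exercise repeated internalization through the string table.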
static const char* not_so_random_string_table[] = {
  "abstract",
  "boolean",
  "break",
  "byte",
  "case",
  "catch",
  "char",
  "class",
  "const",
  "continue",
  "debugger",
  "default",
  "delete",
  "do",
  "double",
  "else",
  "enum",
  "export",
  "extends",
  "false",
  "final",
  "finally",
  "float",
  "for",
  "function",
  "goto",
  "if",
  "implements",
  "import",
  "in",
  "instanceof",
  "int",
  "interface",
  "long",
  "native",
  "new",
  "null",
  "package",
  "private",
  "protected",
  "public",
  "return",
  "short",
  "static",
  "super",
  "switch",
  "synchronized",
  "this",
  "throw",
  "throws",
  "transient",
  "true",
  "try",
  "typeof",
  "var",
  "void",
  "volatile",
  "while",
  "with",
  0
};


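// Internalize every table entry twice and check that all lookups yield the
// same internalized string.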
static void CheckInternalizedStrings(const char** strings) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  for (const char* string = *strings; *strings != 0; string = *strings++) {
    HandleScope scope(isolate);
    Handle<String> a =
        isolate->factory()->InternalizeUtf8String(CStrVector(string));
    // InternalizeUtf8String may return a failure if a GC is needed.
    CHECK(a->IsInternalizedString());
    Handle<String> b = factory->InternalizeUtf8String(string);
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
    b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
  }
}


TEST(StringTable) {
  CcTest::InitializeVM();

  v8::HandleScope sc(CcTest::isolate());
  CheckInternalizedStrings(not_so_random_string_table);
  CheckInternalizedStrings(not_so_random_string_table);
}


TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
  // Check that we can add properties to function objects.
  JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(24),
           *Object::GetProperty(function, prop_name).ToHandleChecked());
}


TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  v8::Maybe<bool> maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  CHECK(!maybe.value);

  // add first
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  CHECK(maybe.value);

  // delete first
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  CHECK(!maybe.value);

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  CHECK(maybe.value);
  maybe = JSReceiver::HasOwnProperty(obj, second);
  CHECK(maybe.has_value);
  CHECK(maybe.value);

  // delete first and then second
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check();
  maybe = JSReceiver::HasOwnProperty(obj, second);
  CHECK(maybe.has_value);
  CHECK(maybe.value);
  JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  CHECK(!maybe.value);
  maybe = JSReceiver::HasOwnProperty(obj, second);
  CHECK(maybe.has_value);
  CHECK(!maybe.value);

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  CHECK(maybe.value);
  maybe = JSReceiver::HasOwnProperty(obj, second);
  CHECK(maybe.has_value);
  CHECK(maybe.value);

  // delete second and then first
  JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  CHECK(maybe.value);
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  CHECK(!maybe.value);
  maybe = JSReceiver::HasOwnProperty(obj, second);
  CHECK(maybe.has_value);
  CHECK(!maybe.value);

  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  maybe = JSReceiver::HasOwnProperty(obj, s1_string);
  CHECK(maybe.has_value);
  CHECK(maybe.value);

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  maybe = JSReceiver::HasOwnProperty(obj, s2);
  CHECK(maybe.has_value);
  CHECK(maybe.value);
}


TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Handle<Map> initial_map(function->initial_map());

  // Set a property
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());

  // Check the map has changed
  CHECK(*initial_map != obj->map());
}


TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Handle<Object> fun_obj = Object::GetProperty(
      CcTest::i_isolate()->global_object(), name).ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetElementsLength(array, handle(Smi::FromInt(0), isolate)).Check();
  CHECK_EQ(Smi::FromInt(0), array->length());
  // Must be in fast mode.
  CHECK(array->HasFastSmiOrObjectElements());

  // array[length] = name.
  JSReceiver::SetElement(array, 0, name, NONE, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set the array length to a value larger than the Smi range.
  Handle<Object> length =
      factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  JSArray::SetElementsLength(array, length).Check();

  uint32_t int_length = 0;
  CHECK(length->ToArrayIndex(&int_length));
  CHECK_EQ(*length, array->length());
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  JSReceiver::SetElement(array, int_length, name, NONE, SLOPPY).Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}


TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();

  JSReceiver::SetElement(obj, 0, first, NONE, SLOPPY).Check();
  JSReceiver::SetElement(obj, 1, second, NONE, SLOPPY).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values.
  JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
  JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();

  JSReceiver::SetElement(clone, 0, second, NONE, SLOPPY).Check();
  JSReceiver::SetElement(clone, 1, first, NONE, SLOPPY).Check();

  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}


TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
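  // 0xe5 0xa4 0xa7 is the UTF-8 encoding of a single three-byte character
  // (U+5927), so each logical character below occupies three bytes.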
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_one_byte = NewArray<char>(3 * length + 1);
    char* one_byte = NewArray<char>(length + 1);
    non_one_byte[3 * length] = 0;
    one_byte[length] = 0;
    for (int i = 0; i < length; i++) {
      one_byte[i] = 'a';
      non_one_byte[3 * i] = chars[0];
      non_one_byte[3 * i + 1] = chars[1];
      non_one_byte[3 * i + 2] = chars[2];
    }
    Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
        Vector<const char>(non_one_byte, 3 * length));
    CHECK_EQ(length, non_one_byte_sym->length());
    Handle<String> one_byte_sym =
        factory->InternalizeOneByteString(OneByteVector(one_byte, length));
    CHECK_EQ(length, one_byte_sym->length());
    Handle<String> non_one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
            .ToHandleChecked();
    non_one_byte_str->Hash();
    CHECK_EQ(length, non_one_byte_str->length());
    Handle<String> one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
            .ToHandleChecked();
    one_byte_str->Hash();
    CHECK_EQ(length, one_byte_str->length());
    DeleteArray(non_one_byte);
    DeleteArray(one_byte);
  }
}


static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
  // Count the number of objects found in the heap.
  int found_count = 0;
  HeapIterator iterator(heap);
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    for (int i = 0; i < size; i++) {
      if (*objs[i] == obj) {
        found_count++;
      }
    }
  }
  return found_count;
}


TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan the heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] = factory->NewJSArray(10,
                                                FAST_HOLEY_ELEMENTS,
                                                TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", TENURED);

  // Allocate a large string (for large object space).
  int large_size = Page::kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}


TEST(EmptyHandleEscapeFrom) {
  CcTest::InitializeVM();

  v8::HandleScope scope(CcTest::isolate());
  Handle<JSObject> runaway;

  {
      v8::EscapableHandleScope nested(CcTest::isolate());
      Handle<JSObject> empty;
      runaway = empty.EscapeFrom(&nested);
  }

  CHECK(runaway.is_null());
}


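// Converts a FixedArray allocation size in bytes into the corresponding
// number of elements (used by the Regression39128 test below).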
static int LenFromSize(int size) {
  return (size - FixedArray::kHeaderSize) / kPointerSize;
}


TEST(Regression39128) {
  // Test case for crbug.com/39128.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  TestHeap* heap = CcTest::test_heap();

  // Increase the chance of 'bump-the-pointer' allocation in old space.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  v8::HandleScope scope(CcTest::isolate());

  // The plan: create JSObject which references objects in new space.
  // Then clone this object (forcing it to go into old space) and check
  // that region dirty marks are updated correctly.

  // Step 1: prepare a map for the object.  We add 1 inobject property to it.
  // Create a map with single inobject property.
  Handle<Map> my_map = Map::Create(CcTest::i_isolate(), 1);
  int n_properties = my_map->inobject_properties();
  CHECK_GT(n_properties, 0);

  int object_size = my_map->instance_size();

  // Step 2: allocate a lot of objects so as to almost fill new space: we need
  // just enough room left to allocate the JSObject and thus fill up new space.

  int allocation_amount = Min(FixedArray::kMaxSize,
                              Page::kMaxRegularHeapObjectSize + kPointerSize);
  int allocation_len = LenFromSize(allocation_amount);
  NewSpace* new_space = heap->new_space();
  Address* top_addr = new_space->allocation_top_address();
  Address* limit_addr = new_space->allocation_limit_address();
  while ((*limit_addr - *top_addr) > allocation_amount) {
    CHECK(!heap->always_allocate());
    Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
    CHECK(new_space->Contains(array));
  }

  // Step 3: now allocate fixed array and JSObject to fill the whole new space.
  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
  int fixed_array_len = LenFromSize(to_fill);
  CHECK(fixed_array_len < FixedArray::kMaxLength);

  CHECK(!heap->always_allocate());
  Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
  CHECK(new_space->Contains(array));

  Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
  CHECK(new_space->Contains(object));
  JSObject* jsobject = JSObject::cast(object);
  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
  CHECK_EQ(0, jsobject->properties()->length());
  // Create a reference to object in new space in jsobject.
  FieldIndex index = FieldIndex::ForInObjectOffset(
      JSObject::kHeaderSize - kPointerSize);
  jsobject->FastPropertyAtPut(index, array);

  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));

  // Step 4: clone jsobject, but force always allocate first to create a clone
  // in old pointer space.
  Address old_pointer_space_top = heap->old_pointer_space()->top();
  AlwaysAllocateScope aa_scope(isolate);
  Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
  JSObject* clone = JSObject::cast(clone_obj);
  if (clone->address() != old_pointer_space_top) {
    // Alas, it was allocated from the free list, so we cannot do the checks.
    return;
  }
  CHECK(heap->old_pointer_space()->Contains(clone->address()));
}


UNINITIALIZED_TEST(TestCodeFlushing) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  v8::Isolate* isolate = v8::Isolate::New();
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  isolate->Enter();
  Factory* factory = i_isolate->factory();
  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
                                                    foo_name).ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
    i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
    }

    // foo should no longer be in the compilation cache
    CHECK(!function->shared()->is_compiled() || function->IsOptimized());
    CHECK(!function->is_compiled() || function->IsOptimized());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
  isolate->Exit();
  isolate->Dispose();
}


TEST(TestCodeFlushingPreAged) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile foo, but don't run it.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function again twice, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GCs now that it is young again.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  }

  // foo should no longer be in the compilation cache
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}


TEST(TestCodeFlushingIncremental) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    if (!function->next_function_link()->IsUndefined()) break;
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalScavenge) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());
  Handle<Object> func_value2 =
      Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  SimulateIncrementalMarking(CcTest::heap());
  *function2.location() = NULL;
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalAbort) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking(heap);

  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


// Count the number of native contexts in the weak list of native contexts.
int CountNativeContexts() {
  int count = 0;
  Object* object = CcTest::heap()->native_contexts_list();
  while (!object->IsUndefined()) {
    count++;
    object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
  }
  return count;
}


// Count the number of user functions in the weak list of optimized
// functions attached to a native context.
static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
  int count = 0;
  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
  Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
  while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
    count++;
    object = JSFunction::cast(object)->next_function_link();
  }
  return count;
}


TEST(TestInternalWeakLists) {
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  v8::Handle<v8::Context> ctx[kNumTestContexts];

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage(Heap::kNoGCFlags);

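    // With --always-opt (and Crankshaft available) every function executed
    // below is added to the context's list of optimized functions.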
    bool opt = (FLAG_always_opt && isolate->use_crankshaft());

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    const char* source = "function f1() { };"
                         "function f2() { };"
                         "function f3() { };"
                         "function f4() { };"
                         "function f5() { };";
    CompileRun(source);
    CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f1()");
    CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f2()");
    CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f3()");
    CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f4()");
    CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5()");
    CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));

1456     // Remove function f1 by clearing the only reference to it.
1457     CompileRun("f1=null");
1458 
1459     // Scavenge treats these references as strong.
1460     for (int j = 0; j < 10; j++) {
1461       CcTest::heap()->CollectGarbage(NEW_SPACE);
1462       CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1463     }
1464 
1465     // Mark compact handles the weak references.
1466     isolate->compilation_cache()->Clear();
1467     heap->CollectAllGarbage(Heap::kNoGCFlags);
1468     CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1469 
1470     // Get rid of f3 and f5 in the same way.
1471     CompileRun("f3=null");
1472     for (int j = 0; j < 10; j++) {
1473       CcTest::heap()->CollectGarbage(NEW_SPACE);
1474       CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1475     }
1476     CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1477     CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1478     CompileRun("f5=null");
1479     for (int j = 0; j < 10; j++) {
1480       CcTest::heap()->CollectGarbage(NEW_SPACE);
1481       CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1482     }
1483     CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1484     CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1485 
1486     ctx[i]->Exit();
1487   }
1488 
1489   // Force compilation cache cleanup.
1490   CcTest::heap()->NotifyContextDisposed();
1491   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1492 
1493   // Dispose the native contexts one by one.
1494   for (int i = 0; i < kNumTestContexts; i++) {
1495     // TODO(dcarney): is there a better way to do this?
1496     i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1497     *unsafe = CcTest::heap()->undefined_value();
1498     ctx[i].Clear();
1499 
1500     // Scavenge treats these references as strong.
1501     for (int j = 0; j < 10; j++) {
1502       CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1503       CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1504     }
1505 
1506     // Mark compact handles the weak references.
1507     CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1508     CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1509   }
1510 
1511   CHECK_EQ(0, CountNativeContexts());
1512 }
1513 
1514 
1515 // Count the number of native contexts in the weak list of native contexts
1516 // causing a GC after the specified number of elements.
1517 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1518   Heap* heap = isolate->heap();
1519   int count = 0;
1520   Handle<Object> object(heap->native_contexts_list(), isolate);
1521   while (!object->IsUndefined()) {
1522     count++;
1523     if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
1524     object =
1525         Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1526                        isolate);
1527   }
1528   return count;
1529 }
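// Illustrative sketch (hypothetical snippet): the *WithGC variants exist to
// show that a full GC forced in the middle of the traversal does not corrupt
// the weak list, so counting with and without the forced GC must agree.
#if 0
  int count = CountNativeContexts();
  CHECK_EQ(count,
           CountNativeContextsWithGC(CcTest::i_isolate(), count / 2 + 1));
#endif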
1530 
1531 
1532 // Count the number of user functions in the weak list of optimized
1533 // functions attached to a native context causing a GC after the
1534 // specified number of elements.
1535 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1536                                              int n) {
1537   int count = 0;
1538   Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1539   Isolate* isolate = icontext->GetIsolate();
1540   Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1541                         isolate);
1542   while (object->IsJSFunction() &&
1543          !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1544     count++;
1545     if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
1546     object = Handle<Object>(
1547         Object::cast(JSFunction::cast(*object)->next_function_link()),
1548         isolate);
1549   }
1550   return count;
1551 }
1552 
1553 
1554 TEST(TestInternalWeakListsTraverseWithGC) {
1555   v8::V8::Initialize();
1556   Isolate* isolate = CcTest::i_isolate();
1557 
1558   static const int kNumTestContexts = 10;
1559 
1560   HandleScope scope(isolate);
1561   v8::Handle<v8::Context> ctx[kNumTestContexts];
1562 
1563   CHECK_EQ(0, CountNativeContexts());
1564 
1565   // Create a number of contexts and check the length of the weak list both
1566   // with and without GCs while iterating the list.
1567   for (int i = 0; i < kNumTestContexts; i++) {
1568     ctx[i] = v8::Context::New(CcTest::isolate());
1569     CHECK_EQ(i + 1, CountNativeContexts());
1570     CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1571   }
1572 
1573   bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1574 
1575   // Compile a number of functions and check the length of the weak list of
1576   // optimized functions both with and without GCs while iterating the list.
1577   ctx[0]->Enter();
1578   const char* source = "function f1() { };"
1579                        "function f2() { };"
1580                        "function f3() { };"
1581                        "function f4() { };"
1582                        "function f5() { };";
1583   CompileRun(source);
1584   CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1585   CompileRun("f1()");
1586   CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
1587   CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1588   CompileRun("f2()");
1589   CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
1590   CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1591   CompileRun("f3()");
1592   CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
1593   CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1594   CompileRun("f4()");
1595   CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
1596   CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1597   CompileRun("f5()");
1598   CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
1599   CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
1600 
1601   ctx[0]->Exit();
1602 }
1603 
1604 
1605 TEST(TestSizeOfObjects) {
1606   v8::V8::Initialize();
1607 
1608   // Get initial heap size after several full GCs, which will stabilize
1609   // the heap size and return with sweeping finished completely.
1610   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1611   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1612   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1613   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1614   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1615   MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1616   if (collector->sweeping_in_progress()) {
1617     collector->EnsureSweepingCompleted();
1618   }
1619   int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1620 
1621   {
1622     // Allocate objects on several different old-space pages so that
1623     // concurrent sweeper threads will be busy sweeping the old space on
1624     // subsequent GC runs.
1625     AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1626     int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1627     for (int i = 1; i <= 100; i++) {
1628       CcTest::test_heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
1629       CHECK_EQ(initial_size + i * filler_size,
1630                static_cast<int>(CcTest::heap()->SizeOfObjects()));
1631     }
1632   }
1633 
1634   // The heap size should go back to initial size after a full GC, even
1635   // though sweeping didn't finish yet.
1636   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1637 
1638   // Normally sweeping would not be complete here, but there are no guarantees.
1639 
1640   CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1641 
1642   // Waiting for sweeper threads should not change heap size.
1643   if (collector->sweeping_in_progress()) {
1644     collector->EnsureSweepingCompleted();
1645   }
1646   CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1647 }
1648 
1649 
1650 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1651   CcTest::InitializeVM();
1652   HeapIterator iterator(CcTest::heap());
1653   intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1654   intptr_t size_of_objects_2 = 0;
1655   for (HeapObject* obj = iterator.next();
1656        obj != NULL;
1657        obj = iterator.next()) {
1658     if (!obj->IsFreeSpace()) {
1659       size_of_objects_2 += obj->Size();
1660     }
1661   }
1662   // Delta must be within 5% of the larger result.
1663   // TODO(gc): Tighten this up by distinguishing between byte
1664   // arrays that are real and those that merely mark free space
1665   // on the heap.
1666   if (size_of_objects_1 > size_of_objects_2) {
1667     intptr_t delta = size_of_objects_1 - size_of_objects_2;
1668     PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1669            "Iterator: %" V8_PTR_PREFIX "d, "
1670            "delta: %" V8_PTR_PREFIX "d\n",
1671            size_of_objects_1, size_of_objects_2, delta);
1672     CHECK_GT(size_of_objects_1 / 20, delta);
1673   } else {
1674     intptr_t delta = size_of_objects_2 - size_of_objects_1;
1675     PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1676            "Iterator: %" V8_PTR_PREFIX "d, "
1677            "delta: %" V8_PTR_PREFIX "d\n",
1678            size_of_objects_1, size_of_objects_2, delta);
1679     CHECK_GT(size_of_objects_2 / 20, delta);
1680   }
1681 }
1682 
1683 
1684 static void FillUpNewSpace(NewSpace* new_space) {
1685   // Fill up new space to the point that it is completely full. Make sure
1686   // that the scavenger does not undo the filling.
1687   Heap* heap = new_space->heap();
1688   Isolate* isolate = heap->isolate();
1689   Factory* factory = isolate->factory();
1690   HandleScope scope(isolate);
1691   AlwaysAllocateScope always_allocate(isolate);
1692   intptr_t available = new_space->Capacity() - new_space->Size();
1693   intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1694   for (intptr_t i = 0; i < number_of_fillers; i++) {
1695     CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
1696   }
1697 }
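// Illustrative sketch (hypothetical helper): typical use of FillUpNewSpace().
// Once the space is full, a scavenge is expected to evacuate the filler
// arrays and bring the used size back under the previous capacity.
#if 0
static void SketchFillUpNewSpace(Heap* heap) {
  NewSpace* new_space = heap->new_space();
  intptr_t capacity_before = new_space->TotalCapacity();
  FillUpNewSpace(new_space);
  heap->CollectGarbage(NEW_SPACE);  // Let the scavenger empty the space.
  CHECK_LE(new_space->Size(), capacity_before);
}
#endif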
1698 
1699 
1700 TEST(GrowAndShrinkNewSpace) {
1701   CcTest::InitializeVM();
1702   Heap* heap = CcTest::heap();
1703   NewSpace* new_space = heap->new_space();
1704 
1705   if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
1706       heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1707     // The max size cannot exceed the reserved size, since semispaces must
1708     // always be within the reserved space.  We can't test new space growing and
1709     // shrinking if the reserved size is the same as the minimum (initial) size.
1710     return;
1711   }
1712 
1713   // Explicitly growing should double the space capacity.
1714   intptr_t old_capacity, new_capacity;
1715   old_capacity = new_space->TotalCapacity();
1716   new_space->Grow();
1717   new_capacity = new_space->TotalCapacity();
1718   CHECK(2 * old_capacity == new_capacity);
1719 
1720   old_capacity = new_space->TotalCapacity();
1721   FillUpNewSpace(new_space);
1722   new_capacity = new_space->TotalCapacity();
1723   CHECK(old_capacity == new_capacity);
1724 
1725   // Explicitly shrinking should not affect space capacity.
1726   old_capacity = new_space->TotalCapacity();
1727   new_space->Shrink();
1728   new_capacity = new_space->TotalCapacity();
1729   CHECK(old_capacity == new_capacity);
1730 
1731   // Let the scavenger empty the new space.
1732   heap->CollectGarbage(NEW_SPACE);
1733   CHECK_LE(new_space->Size(), old_capacity);
1734 
1735   // Explicitly shrinking should halve the space capacity.
1736   old_capacity = new_space->TotalCapacity();
1737   new_space->Shrink();
1738   new_capacity = new_space->TotalCapacity();
1739   CHECK(old_capacity == 2 * new_capacity);
1740 
1741   // Consecutive shrinking should not affect space capacity.
1742   old_capacity = new_space->TotalCapacity();
1743   new_space->Shrink();
1744   new_space->Shrink();
1745   new_space->Shrink();
1746   new_capacity = new_space->TotalCapacity();
1747   CHECK(old_capacity == new_capacity);
1748 }
1749 
1750 
1751 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
1752   CcTest::InitializeVM();
1753   Heap* heap = CcTest::heap();
1754   if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
1755       heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1756     // The max size cannot exceed the reserved size, since semispaces must
1757     // always be within the reserved space.  We can't test new space growing and
1758     // shrinking if the reserved size is the same as the minimum (initial) size.
1759     return;
1760   }
1761 
1762   v8::HandleScope scope(CcTest::isolate());
1763   NewSpace* new_space = heap->new_space();
1764   intptr_t old_capacity, new_capacity;
1765   old_capacity = new_space->TotalCapacity();
1766   new_space->Grow();
1767   new_capacity = new_space->TotalCapacity();
1768   CHECK(2 * old_capacity == new_capacity);
1769   FillUpNewSpace(new_space);
1770   heap->CollectAllAvailableGarbage();
1771   new_capacity = new_space->TotalCapacity();
1772   CHECK(old_capacity == new_capacity);
1773 }
1774 
1775 
1776 static int NumberOfGlobalObjects() {
1777   int count = 0;
1778   HeapIterator iterator(CcTest::heap());
1779   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1780     if (obj->IsGlobalObject()) count++;
1781   }
1782   return count;
1783 }
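// Illustrative sketch (hypothetical helper): in these tests every live native
// context accounts for two heap objects that satisfy IsGlobalObject(), which
// is why the leak tests below expect 4, then 2, then 0.  Assumes no other
// contexts are alive in the isolate.
#if 0
static void SketchNumberOfGlobalObjects(v8::Isolate* isolate) {
  v8::HandleScope scope(isolate);
  v8::Persistent<v8::Context> ctx;
  ctx.Reset(isolate, v8::Context::New(isolate));
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());
  ctx.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
#endif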
1784 
1785 
1786 // Test that we don't embed maps from foreign contexts into
1787 // optimized code.
1788 TEST(LeakNativeContextViaMap) {
1789   i::FLAG_allow_natives_syntax = true;
1790   v8::Isolate* isolate = CcTest::isolate();
1791   v8::HandleScope outer_scope(isolate);
1792   v8::Persistent<v8::Context> ctx1p;
1793   v8::Persistent<v8::Context> ctx2p;
1794   {
1795     v8::HandleScope scope(isolate);
1796     ctx1p.Reset(isolate, v8::Context::New(isolate));
1797     ctx2p.Reset(isolate, v8::Context::New(isolate));
1798     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1799   }
1800 
1801   CcTest::heap()->CollectAllAvailableGarbage();
1802   CHECK_EQ(4, NumberOfGlobalObjects());
1803 
1804   {
1805     v8::HandleScope inner_scope(isolate);
1806     CompileRun("var v = {x: 42}");
1807     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1808     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1809     v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1810     ctx2->Enter();
1811     ctx2->Global()->Set(v8_str("o"), v);
1812     v8::Local<v8::Value> res = CompileRun(
1813         "function f() { return o.x; }"
1814         "for (var i = 0; i < 10; ++i) f();"
1815         "%OptimizeFunctionOnNextCall(f);"
1816         "f();");
1817     CHECK_EQ(42, res->Int32Value());
1818     ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1819     ctx2->Exit();
1820     v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
1821     ctx1p.Reset();
1822     isolate->ContextDisposedNotification();
1823   }
1824   CcTest::heap()->CollectAllAvailableGarbage();
1825   CHECK_EQ(2, NumberOfGlobalObjects());
1826   ctx2p.Reset();
1827   CcTest::heap()->CollectAllAvailableGarbage();
1828   CHECK_EQ(0, NumberOfGlobalObjects());
1829 }
1830 
1831 
1832 // Test that we don't embed functions from foreign contexts into
1833 // optimized code.
1834 TEST(LeakNativeContextViaFunction) {
1835   i::FLAG_allow_natives_syntax = true;
1836   v8::Isolate* isolate = CcTest::isolate();
1837   v8::HandleScope outer_scope(isolate);
1838   v8::Persistent<v8::Context> ctx1p;
1839   v8::Persistent<v8::Context> ctx2p;
1840   {
1841     v8::HandleScope scope(isolate);
1842     ctx1p.Reset(isolate, v8::Context::New(isolate));
1843     ctx2p.Reset(isolate, v8::Context::New(isolate));
1844     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1845   }
1846 
1847   CcTest::heap()->CollectAllAvailableGarbage();
1848   CHECK_EQ(4, NumberOfGlobalObjects());
1849 
1850   {
1851     v8::HandleScope inner_scope(isolate);
1852     CompileRun("var v = function() { return 42; }");
1853     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1854     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1855     v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1856     ctx2->Enter();
1857     ctx2->Global()->Set(v8_str("o"), v);
1858     v8::Local<v8::Value> res = CompileRun(
1859         "function f(x) { return x(); }"
1860         "for (var i = 0; i < 10; ++i) f(o);"
1861         "%OptimizeFunctionOnNextCall(f);"
1862         "f(o);");
1863     CHECK_EQ(42, res->Int32Value());
1864     ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1865     ctx2->Exit();
1866     ctx1->Exit();
1867     ctx1p.Reset();
1868     isolate->ContextDisposedNotification();
1869   }
1870   CcTest::heap()->CollectAllAvailableGarbage();
1871   CHECK_EQ(2, NumberOfGlobalObjects());
1872   ctx2p.Reset();
1873   CcTest::heap()->CollectAllAvailableGarbage();
1874   CHECK_EQ(0, NumberOfGlobalObjects());
1875 }
1876 
1877 
1878 TEST(LeakNativeContextViaMapKeyed) {
1879   i::FLAG_allow_natives_syntax = true;
1880   v8::Isolate* isolate = CcTest::isolate();
1881   v8::HandleScope outer_scope(isolate);
1882   v8::Persistent<v8::Context> ctx1p;
1883   v8::Persistent<v8::Context> ctx2p;
1884   {
1885     v8::HandleScope scope(isolate);
1886     ctx1p.Reset(isolate, v8::Context::New(isolate));
1887     ctx2p.Reset(isolate, v8::Context::New(isolate));
1888     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1889   }
1890 
1891   CcTest::heap()->CollectAllAvailableGarbage();
1892   CHECK_EQ(4, NumberOfGlobalObjects());
1893 
1894   {
1895     v8::HandleScope inner_scope(isolate);
1896     CompileRun("var v = [42, 43]");
1897     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1898     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1899     v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1900     ctx2->Enter();
1901     ctx2->Global()->Set(v8_str("o"), v);
1902     v8::Local<v8::Value> res = CompileRun(
1903         "function f() { return o[0]; }"
1904         "for (var i = 0; i < 10; ++i) f();"
1905         "%OptimizeFunctionOnNextCall(f);"
1906         "f();");
1907     CHECK_EQ(42, res->Int32Value());
1908     ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1909     ctx2->Exit();
1910     ctx1->Exit();
1911     ctx1p.Reset();
1912     isolate->ContextDisposedNotification();
1913   }
1914   CcTest::heap()->CollectAllAvailableGarbage();
1915   CHECK_EQ(2, NumberOfGlobalObjects());
1916   ctx2p.Reset();
1917   CcTest::heap()->CollectAllAvailableGarbage();
1918   CHECK_EQ(0, NumberOfGlobalObjects());
1919 }
1920 
1921 
1922 TEST(LeakNativeContextViaMapProto) {
1923   i::FLAG_allow_natives_syntax = true;
1924   v8::Isolate* isolate = CcTest::isolate();
1925   v8::HandleScope outer_scope(isolate);
1926   v8::Persistent<v8::Context> ctx1p;
1927   v8::Persistent<v8::Context> ctx2p;
1928   {
1929     v8::HandleScope scope(isolate);
1930     ctx1p.Reset(isolate, v8::Context::New(isolate));
1931     ctx2p.Reset(isolate, v8::Context::New(isolate));
1932     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1933   }
1934 
1935   CcTest::heap()->CollectAllAvailableGarbage();
1936   CHECK_EQ(4, NumberOfGlobalObjects());
1937 
1938   {
1939     v8::HandleScope inner_scope(isolate);
1940     CompileRun("var v = { y: 42}");
1941     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1942     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1943     v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1944     ctx2->Enter();
1945     ctx2->Global()->Set(v8_str("o"), v);
1946     v8::Local<v8::Value> res = CompileRun(
1947         "function f() {"
1948         "  var p = {x: 42};"
1949         "  p.__proto__ = o;"
1950         "  return p.x;"
1951         "}"
1952         "for (var i = 0; i < 10; ++i) f();"
1953         "%OptimizeFunctionOnNextCall(f);"
1954         "f();");
1955     CHECK_EQ(42, res->Int32Value());
1956     ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1957     ctx2->Exit();
1958     ctx1->Exit();
1959     ctx1p.Reset();
1960     isolate->ContextDisposedNotification();
1961   }
1962   CcTest::heap()->CollectAllAvailableGarbage();
1963   CHECK_EQ(2, NumberOfGlobalObjects());
1964   ctx2p.Reset();
1965   CcTest::heap()->CollectAllAvailableGarbage();
1966   CHECK_EQ(0, NumberOfGlobalObjects());
1967 }
1968 
1969 
1970 TEST(InstanceOfStubWriteBarrier) {
1971   i::FLAG_allow_natives_syntax = true;
1972 #ifdef VERIFY_HEAP
1973   i::FLAG_verify_heap = true;
1974 #endif
1975 
1976   CcTest::InitializeVM();
1977   if (!CcTest::i_isolate()->use_crankshaft()) return;
1978   if (i::FLAG_force_marking_deque_overflows) return;
1979   v8::HandleScope outer_scope(CcTest::isolate());
1980 
1981   {
1982     v8::HandleScope scope(CcTest::isolate());
1983     CompileRun(
1984         "function foo () { }"
1985         "function mkbar () { return new (new Function(\"\")) (); }"
1986         "function f (x) { return (x instanceof foo); }"
1987         "function g () { f(mkbar()); }"
1988         "f(new foo()); f(new foo());"
1989         "%OptimizeFunctionOnNextCall(f);"
1990         "f(new foo()); g();");
1991   }
1992 
1993   IncrementalMarking* marking = CcTest::heap()->incremental_marking();
1994   marking->Abort();
1995   marking->Start();
1996 
1997   Handle<JSFunction> f =
1998       v8::Utils::OpenHandle(
1999           *v8::Handle<v8::Function>::Cast(
2000               CcTest::global()->Get(v8_str("f"))));
2001 
2002   CHECK(f->IsOptimized());
2003 
2004   while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2005          !marking->IsStopped()) {
2006     // Discard any pending GC requests otherwise we will get GC when we enter
2007     // code below.
2008     marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2009   }
2010 
2011   CHECK(marking->IsMarking());
2012 
2013   {
2014     v8::HandleScope scope(CcTest::isolate());
2015     v8::Handle<v8::Object> global = CcTest::global();
2016     v8::Handle<v8::Function> g =
2017         v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
2018     g->Call(global, 0, NULL);
2019   }
2020 
2021   CcTest::heap()->incremental_marking()->set_should_hurry(true);
2022   CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE);
2023 }
2024 
2025 
2026 TEST(PrototypeTransitionClearing) {
2027   if (FLAG_never_compact) return;
2028   CcTest::InitializeVM();
2029   Isolate* isolate = CcTest::i_isolate();
2030   Factory* factory = isolate->factory();
2031   v8::HandleScope scope(CcTest::isolate());
2032 
2033   CompileRun("var base = {};");
2034   Handle<JSObject> baseObject =
2035       v8::Utils::OpenHandle(
2036           *v8::Handle<v8::Object>::Cast(
2037               CcTest::global()->Get(v8_str("base"))));
2038   int initialTransitions = baseObject->map()->NumberOfProtoTransitions();
2039 
2040   CompileRun(
2041       "var live = [];"
2042       "for (var i = 0; i < 10; i++) {"
2043       "  var object = {};"
2044       "  var prototype = {};"
2045       "  object.__proto__ = prototype;"
2046       "  if (i >= 3) live.push(object, prototype);"
2047       "}");
2048 
2049   // Verify that only dead prototype transitions are cleared.
2050   CHECK_EQ(initialTransitions + 10,
2051       baseObject->map()->NumberOfProtoTransitions());
2052   CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2053   const int transitions = 10 - 3;
2054   CHECK_EQ(initialTransitions + transitions,
2055       baseObject->map()->NumberOfProtoTransitions());
2056 
2057   // Verify that prototype transitions array was compacted.
2058   FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
2059   for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
2060     int j = Map::kProtoTransitionHeaderSize +
2061         i * Map::kProtoTransitionElementsPerEntry;
2062     CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
2063     Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
2064     CHECK(proto->IsJSObject());
2065   }
2066 
2067   // Make sure the next prototype is placed on an old-space evacuation candidate.
2068   Handle<JSObject> prototype;
2069   PagedSpace* space = CcTest::heap()->old_pointer_space();
2070   {
2071     AlwaysAllocateScope always_allocate(isolate);
2072     SimulateFullSpace(space);
2073     prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
2074   }
2075 
2076   // Add a prototype on an evacuation candidate and verify that transition
2077   // clearing correctly records slots in the prototype transition array.
2078   i::FLAG_always_compact = true;
2079   Handle<Map> map(baseObject->map());
2080   CHECK(!space->LastPage()->Contains(
2081       map->GetPrototypeTransitions()->address()));
2082   CHECK(space->LastPage()->Contains(prototype->address()));
2083 }
2084 
2085 
2086 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2087   i::FLAG_stress_compaction = false;
2088   i::FLAG_allow_natives_syntax = true;
2089 #ifdef VERIFY_HEAP
2090   i::FLAG_verify_heap = true;
2091 #endif
2092 
2093   CcTest::InitializeVM();
2094   if (!CcTest::i_isolate()->use_crankshaft()) return;
2095   v8::HandleScope outer_scope(CcTest::isolate());
2096 
2097   {
2098     v8::HandleScope scope(CcTest::isolate());
2099     CompileRun(
2100         "function f () {"
2101         "  var s = 0;"
2102         "  for (var i = 0; i < 100; i++)  s += i;"
2103         "  return s;"
2104         "}"
2105         "f(); f();"
2106         "%OptimizeFunctionOnNextCall(f);"
2107         "f();");
2108   }
2109   Handle<JSFunction> f =
2110       v8::Utils::OpenHandle(
2111           *v8::Handle<v8::Function>::Cast(
2112               CcTest::global()->Get(v8_str("f"))));
2113   CHECK(f->IsOptimized());
2114 
2115   IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2116   marking->Abort();
2117   marking->Start();
2118 
2119   // The following two calls will increment CcTest::heap()->global_ic_age().
2120   const int kLongIdlePauseInMs = 1000;
2121   CcTest::isolate()->ContextDisposedNotification();
2122   CcTest::isolate()->IdleNotification(kLongIdlePauseInMs);
2123 
2124   while (!marking->IsStopped() && !marking->IsComplete()) {
2125     marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2126   }
2127   if (!marking->IsStopped() || marking->should_hurry()) {
2128     // We don't normally finish a GC via Step(); we normally finish by
2129     // setting the stack guard and then doing the final steps in the stack
2130     // guard interrupt.  But here we didn't ask for that, and there is no
2131     // JS code running to trigger the interrupt, so we explicitly finalize
2132     // here.
2133     CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags,
2134                             "Test finalizing incremental mark-sweep");
2135   }
2136 
2137   CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2138   CHECK_EQ(0, f->shared()->opt_count());
2139   CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2140 }
2141 
2142 
2143 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2144   i::FLAG_stress_compaction = false;
2145   i::FLAG_allow_natives_syntax = true;
2146 #ifdef VERIFY_HEAP
2147   i::FLAG_verify_heap = true;
2148 #endif
2149 
2150   CcTest::InitializeVM();
2151   if (!CcTest::i_isolate()->use_crankshaft()) return;
2152   v8::HandleScope outer_scope(CcTest::isolate());
2153 
2154   {
2155     v8::HandleScope scope(CcTest::isolate());
2156     CompileRun(
2157         "function f () {"
2158         "  var s = 0;"
2159         "  for (var i = 0; i < 100; i++)  s += i;"
2160         "  return s;"
2161         "}"
2162         "f(); f();"
2163         "%OptimizeFunctionOnNextCall(f);"
2164         "f();");
2165   }
2166   Handle<JSFunction> f =
2167       v8::Utils::OpenHandle(
2168           *v8::Handle<v8::Function>::Cast(
2169               CcTest::global()->Get(v8_str("f"))));
2170   CHECK(f->IsOptimized());
2171 
2172   CcTest::heap()->incremental_marking()->Abort();
2173 
2174   // The following two calls will increment CcTest::heap()->global_ic_age().
2175   // Since incremental marking is off, IdleNotification will do full GC.
2176   const int kLongIdlePauseInMs = 1000;
2177   CcTest::isolate()->ContextDisposedNotification();
2178   CcTest::isolate()->IdleNotification(kLongIdlePauseInMs);
2179 
2180   CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2181   CHECK_EQ(0, f->shared()->opt_count());
2182   CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2183 }
2184 
2185 
2186 // Test that HAllocateObject will always return an object in new-space.
2187 TEST(OptimizedAllocationAlwaysInNewSpace) {
2188   i::FLAG_allow_natives_syntax = true;
2189   CcTest::InitializeVM();
2190   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2191   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2192   v8::HandleScope scope(CcTest::isolate());
2193 
2194   SimulateFullSpace(CcTest::heap()->new_space());
2195   AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2196   v8::Local<v8::Value> res = CompileRun(
2197       "function c(x) {"
2198       "  this.x = x;"
2199       "  for (var i = 0; i < 32; i++) {"
2200       "    this['x' + i] = x;"
2201       "  }"
2202       "}"
2203       "function f(x) { return new c(x); };"
2204       "f(1); f(2); f(3);"
2205       "%OptimizeFunctionOnNextCall(f);"
2206       "f(4);");
2207   CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2208 
2209   Handle<JSObject> o =
2210       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2211 
2212   CHECK(CcTest::heap()->InNewSpace(*o));
2213 }
2214 
2215 
2216 TEST(OptimizedPretenuringAllocationFolding) {
2217   i::FLAG_allow_natives_syntax = true;
2218   i::FLAG_expose_gc = true;
2219   CcTest::InitializeVM();
2220   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2221   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2222   v8::HandleScope scope(CcTest::isolate());
2223 
2224   // Grow new space until maximum capacity is reached.
2225   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2226     CcTest::heap()->new_space()->Grow();
2227   }
2228 
2229   i::ScopedVector<char> source(1024);
2230   i::SNPrintF(
2231       source,
2232       "var number_elements = %d;"
2233       "var elements = new Array();"
2234       "function f() {"
2235       "  for (var i = 0; i < number_elements; i++) {"
2236       "    elements[i] = [[{}], [1.1]];"
2237       "  }"
2238       "  return elements[number_elements-1]"
2239       "};"
2240       "f(); gc();"
2241       "f(); f();"
2242       "%%OptimizeFunctionOnNextCall(f);"
2243       "f();",
2244       AllocationSite::kPretenureMinimumCreated);
2245 
2246   v8::Local<v8::Value> res = CompileRun(source.start());
2247 
2248   v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2249   Handle<JSObject> int_array_handle =
2250       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2251   v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2252   Handle<JSObject> double_array_handle =
2253       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2254 
2255   Handle<JSObject> o =
2256       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2257   CHECK(CcTest::heap()->InOldPointerSpace(*o));
2258   CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
2259   CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
2260   CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
2261   CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
2262 }
2263 
2264 
2265 TEST(OptimizedPretenuringObjectArrayLiterals) {
2266   i::FLAG_allow_natives_syntax = true;
2267   i::FLAG_expose_gc = true;
2268   CcTest::InitializeVM();
2269   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2270   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2271   v8::HandleScope scope(CcTest::isolate());
2272 
2273   // Grow new space until maximum capacity is reached.
2274   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2275     CcTest::heap()->new_space()->Grow();
2276   }
2277 
2278   i::ScopedVector<char> source(1024);
2279   i::SNPrintF(
2280       source,
2281       "var number_elements = %d;"
2282       "var elements = new Array(number_elements);"
2283       "function f() {"
2284       "  for (var i = 0; i < number_elements; i++) {"
2285       "    elements[i] = [{}, {}, {}];"
2286       "  }"
2287       "  return elements[number_elements - 1];"
2288       "};"
2289       "f(); gc();"
2290       "f(); f();"
2291       "%%OptimizeFunctionOnNextCall(f);"
2292       "f();",
2293       AllocationSite::kPretenureMinimumCreated);
2294 
2295   v8::Local<v8::Value> res = CompileRun(source.start());
2296 
2297   Handle<JSObject> o =
2298       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2299 
2300   CHECK(CcTest::heap()->InOldPointerSpace(o->elements()));
2301   CHECK(CcTest::heap()->InOldPointerSpace(*o));
2302 }
2303 
2304 
2305 TEST(OptimizedPretenuringMixedInObjectProperties) {
2306   i::FLAG_allow_natives_syntax = true;
2307   i::FLAG_expose_gc = true;
2308   CcTest::InitializeVM();
2309   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2310   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2311   v8::HandleScope scope(CcTest::isolate());
2312 
2313   // Grow new space until maximum capacity is reached.
2314   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2315     CcTest::heap()->new_space()->Grow();
2316   }
2317 
2318 
2319   i::ScopedVector<char> source(1024);
2320   i::SNPrintF(
2321       source,
2322       "var number_elements = %d;"
2323       "var elements = new Array(number_elements);"
2324       "function f() {"
2325       "  for (var i = 0; i < number_elements; i++) {"
2326       "    elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2327       "  }"
2328       "  return elements[number_elements - 1];"
2329       "};"
2330       "f(); gc();"
2331       "f(); f();"
2332       "%%OptimizeFunctionOnNextCall(f);"
2333       "f();",
2334       AllocationSite::kPretenureMinimumCreated);
2335 
2336   v8::Local<v8::Value> res = CompileRun(source.start());
2337 
2338   Handle<JSObject> o =
2339       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2340 
2341   CHECK(CcTest::heap()->InOldPointerSpace(*o));
2342   FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2343   FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2344   CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(idx1)));
2345   CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(idx2)));
2346 
2347   JSObject* inner_object =
2348       reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
2349   CHECK(CcTest::heap()->InOldPointerSpace(inner_object));
2350   CHECK(CcTest::heap()->InOldDataSpace(inner_object->RawFastPropertyAt(idx1)));
2351   CHECK(CcTest::heap()->InOldPointerSpace(
2352       inner_object->RawFastPropertyAt(idx2)));
2353 }
2354 
2355 
2356 TEST(OptimizedPretenuringDoubleArrayProperties) {
2357   i::FLAG_allow_natives_syntax = true;
2358   i::FLAG_expose_gc = true;
2359   CcTest::InitializeVM();
2360   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2361   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2362   v8::HandleScope scope(CcTest::isolate());
2363 
2364   // Grow new space until maximum capacity is reached.
2365   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2366     CcTest::heap()->new_space()->Grow();
2367   }
2368 
2369   i::ScopedVector<char> source(1024);
2370   i::SNPrintF(
2371       source,
2372       "var number_elements = %d;"
2373       "var elements = new Array(number_elements);"
2374       "function f() {"
2375       "  for (var i = 0; i < number_elements; i++) {"
2376       "    elements[i] = {a: 1.1, b: 2.2};"
2377       "  }"
2378       "  return elements[i - 1];"
2379       "};"
2380       "f(); gc();"
2381       "f(); f();"
2382       "%%OptimizeFunctionOnNextCall(f);"
2383       "f();",
2384       AllocationSite::kPretenureMinimumCreated);
2385 
2386   v8::Local<v8::Value> res = CompileRun(source.start());
2387 
2388   Handle<JSObject> o =
2389       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2390 
2391   CHECK(CcTest::heap()->InOldPointerSpace(*o));
2392   CHECK(CcTest::heap()->InOldDataSpace(o->properties()));
2393 }
2394 
2395 
2396 TEST(OptimizedPretenuringdoubleArrayLiterals) {
2397   i::FLAG_allow_natives_syntax = true;
2398   i::FLAG_expose_gc = true;
2399   CcTest::InitializeVM();
2400   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2401   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2402   v8::HandleScope scope(CcTest::isolate());
2403 
2404   // Grow new space until maximum capacity is reached.
2405   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2406     CcTest::heap()->new_space()->Grow();
2407   }
2408 
2409   i::ScopedVector<char> source(1024);
2410   i::SNPrintF(
2411       source,
2412       "var number_elements = %d;"
2413       "var elements = new Array(number_elements);"
2414       "function f() {"
2415       "  for (var i = 0; i < number_elements; i++) {"
2416       "    elements[i] = [1.1, 2.2, 3.3];"
2417       "  }"
2418       "  return elements[number_elements - 1];"
2419       "};"
2420       "f(); gc();"
2421       "f(); f();"
2422       "%%OptimizeFunctionOnNextCall(f);"
2423       "f();",
2424       AllocationSite::kPretenureMinimumCreated);
2425 
2426   v8::Local<v8::Value> res = CompileRun(source.start());
2427 
2428   Handle<JSObject> o =
2429       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2430 
2431   CHECK(CcTest::heap()->InOldDataSpace(o->elements()));
2432   CHECK(CcTest::heap()->InOldPointerSpace(*o));
2433 }
2434 
2435 
2436 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2437   i::FLAG_allow_natives_syntax = true;
2438   i::FLAG_expose_gc = true;
2439   CcTest::InitializeVM();
2440   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2441   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2442   v8::HandleScope scope(CcTest::isolate());
2443 
2444   // Grow new space until maximum capacity is reached.
2445   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2446     CcTest::heap()->new_space()->Grow();
2447   }
2448 
2449   i::ScopedVector<char> source(1024);
2450   i::SNPrintF(
2451       source,
2452       "var number_elements = 100;"
2453       "var elements = new Array(number_elements);"
2454       "function f() {"
2455       "  for (var i = 0; i < number_elements; i++) {"
2456       "    elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2457       "  }"
2458       "  return elements[number_elements - 1];"
2459       "};"
2460       "f(); gc();"
2461       "f(); f();"
2462       "%%OptimizeFunctionOnNextCall(f);"
2463       "f();");
2464 
2465   v8::Local<v8::Value> res = CompileRun(source.start());
2466 
2467   v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2468   Handle<JSObject> int_array_handle =
2469       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2470   v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2471   Handle<JSObject> double_array_handle =
2472       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2473 
2474   Handle<JSObject> o =
2475       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2476   CHECK(CcTest::heap()->InOldPointerSpace(*o));
2477   CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
2478   CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
2479   CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
2480   CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
2481 }
2482 
2483 
2484 TEST(OptimizedPretenuringNestedObjectLiterals) {
2485   i::FLAG_allow_natives_syntax = true;
2486   i::FLAG_expose_gc = true;
2487   CcTest::InitializeVM();
2488   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2489   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2490   v8::HandleScope scope(CcTest::isolate());
2491 
2492   // Grow new space until maximum capacity is reached.
2493   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2494     CcTest::heap()->new_space()->Grow();
2495   }
2496 
2497   i::ScopedVector<char> source(1024);
2498   i::SNPrintF(
2499       source,
2500       "var number_elements = %d;"
2501       "var elements = new Array(number_elements);"
2502       "function f() {"
2503       "  for (var i = 0; i < number_elements; i++) {"
2504       "    elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
2505       "  }"
2506       "  return elements[number_elements - 1];"
2507       "};"
2508       "f(); gc();"
2509       "f(); f();"
2510       "%%OptimizeFunctionOnNextCall(f);"
2511       "f();",
2512       AllocationSite::kPretenureMinimumCreated);
2513 
2514   v8::Local<v8::Value> res = CompileRun(source.start());
2515 
2516   v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
2517   Handle<JSObject> int_array_handle_1 =
2518       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
2519   v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
2520   Handle<JSObject> int_array_handle_2 =
2521       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
2522 
2523   Handle<JSObject> o =
2524       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2525   CHECK(CcTest::heap()->InOldPointerSpace(*o));
2526   CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_1));
2527   CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_1->elements()));
2528   CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_2));
2529   CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_2->elements()));
2530 }
2531 
2532 
2533 TEST(OptimizedPretenuringNestedDoubleLiterals) {
2534   i::FLAG_allow_natives_syntax = true;
2535   i::FLAG_expose_gc = true;
2536   CcTest::InitializeVM();
2537   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2538   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2539   v8::HandleScope scope(CcTest::isolate());
2540 
2541   // Grow new space until maximum capacity is reached.
2542   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2543     CcTest::heap()->new_space()->Grow();
2544   }
2545 
2546   i::ScopedVector<char> source(1024);
2547   i::SNPrintF(
2548       source,
2549       "var number_elements = %d;"
2550       "var elements = new Array(number_elements);"
2551       "function f() {"
2552       "  for (var i = 0; i < number_elements; i++) {"
2553       "    elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2554       "  }"
2555       "  return elements[number_elements - 1];"
2556       "};"
2557       "f(); gc();"
2558       "f(); f();"
2559       "%%OptimizeFunctionOnNextCall(f);"
2560       "f();",
2561       AllocationSite::kPretenureMinimumCreated);
2562 
2563   v8::Local<v8::Value> res = CompileRun(source.start());
2564 
2565   v8::Local<v8::Value> double_array_1 =
2566       v8::Object::Cast(*res)->Get(v8_str("0"));
2567   Handle<JSObject> double_array_handle_1 =
2568       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
2569   v8::Local<v8::Value> double_array_2 =
2570       v8::Object::Cast(*res)->Get(v8_str("1"));
2571   Handle<JSObject> double_array_handle_2 =
2572       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
2573 
2574   Handle<JSObject> o =
2575       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2576   CHECK(CcTest::heap()->InOldPointerSpace(*o));
2577   CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_1));
2578   CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_1->elements()));
2579   CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_2));
2580   CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_2->elements()));
2581 }
2582 
2583 
2584 // Make sure pretenuring feedback is gathered for constructed objects as well
2585 // as for literals.
2586 TEST(OptimizedPretenuringConstructorCalls) {
2587   if (!i::FLAG_pretenuring_call_new) {
2588     // FLAG_pretenuring_call_new needs to be synced with the snapshot.
2589     return;
2590   }
2591   i::FLAG_allow_natives_syntax = true;
2592   i::FLAG_expose_gc = true;
2593   CcTest::InitializeVM();
2594   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2595   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2596   v8::HandleScope scope(CcTest::isolate());
2597 
2598   // Grow new space until maximum capacity is reached.
2599   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2600     CcTest::heap()->new_space()->Grow();
2601   }
2602 
2603   i::ScopedVector<char> source(1024);
2604   // 'new' calls do slack tracking for the first
2605   // JSFunction::kGenerousAllocationCount allocations, and we can't find
2606   // mementos during that time.
2607   i::SNPrintF(
2608       source,
2609       "var number_elements = %d;"
2610       "var elements = new Array(number_elements);"
2611       "function foo() {"
2612       "  this.a = 3;"
2613       "  this.b = {};"
2614       "}"
2615       "function f() {"
2616       "  for (var i = 0; i < number_elements; i++) {"
2617       "    elements[i] = new foo();"
2618       "  }"
2619       "  return elements[number_elements - 1];"
2620       "};"
2621       "f(); gc();"
2622       "f(); f();"
2623       "%%OptimizeFunctionOnNextCall(f);"
2624       "f();",
2625       AllocationSite::kPretenureMinimumCreated +
2626       JSFunction::kGenerousAllocationCount);
2627 
2628   v8::Local<v8::Value> res = CompileRun(source.start());
2629 
2630   Handle<JSObject> o =
2631       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2632 
2633   CHECK(CcTest::heap()->InOldPointerSpace(*o));
2634 }
2635 
2636 
2637 TEST(OptimizedPretenuringCallNew) {
2638   if (!i::FLAG_pretenuring_call_new) {
2639     // FLAG_pretenuring_call_new needs to be synced with the snapshot.
2640     return;
2641   }
2642   i::FLAG_allow_natives_syntax = true;
2643   i::FLAG_expose_gc = true;
2644   CcTest::InitializeVM();
2645   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2646   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2647   v8::HandleScope scope(CcTest::isolate());
2648 
2649   // Grow new space until maximum capacity is reached.
2650   while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2651     CcTest::heap()->new_space()->Grow();
2652   }
2653 
2654   i::ScopedVector<char> source(1024);
2655   // 'new' calls do slack tracking for the first
2656   // JSFunction::kGenerousAllocationCount allocations, and we can't find
2657   // mementos during that time.
2658   i::SNPrintF(
2659       source,
2660       "var number_elements = %d;"
2661       "var elements = new Array(number_elements);"
2662       "function g() { this.a = 0; }"
2663       "function f() {"
2664       "  for (var i = 0; i < number_elements; i++) {"
2665       "    elements[i] = new g();"
2666       "  }"
2667       "  return elements[number_elements - 1];"
2668       "};"
2669       "f(); gc();"
2670       "f(); f();"
2671       "%%OptimizeFunctionOnNextCall(f);"
2672       "f();",
2673       AllocationSite::kPretenureMinimumCreated +
2674       JSFunction::kGenerousAllocationCount);
2675 
2676   v8::Local<v8::Value> res = CompileRun(source.start());
2677 
2678   Handle<JSObject> o =
2679       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2680   CHECK(CcTest::heap()->InOldPointerSpace(*o));
2681 }
2682 
2683 
2684 // Test regular array literals allocation.
2685 TEST(OptimizedAllocationArrayLiterals) {
2686   i::FLAG_allow_natives_syntax = true;
2687   CcTest::InitializeVM();
2688   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2689   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2690   v8::HandleScope scope(CcTest::isolate());
2691 
2692   v8::Local<v8::Value> res = CompileRun(
2693       "function f() {"
2694       "  var numbers = new Array(1, 2, 3);"
2695       "  numbers[0] = 3.14;"
2696       "  return numbers;"
2697       "};"
2698       "f(); f(); f();"
2699       "%OptimizeFunctionOnNextCall(f);"
2700       "f();");
2701   CHECK_EQ(static_cast<int>(3.14),
2702            v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
2703 
2704   Handle<JSObject> o =
2705       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2706 
2707   CHECK(CcTest::heap()->InNewSpace(o->elements()));
2708 }
2709 
2710 
2711 static int CountMapTransitions(Map* map) {
2712   return map->transitions()->number_of_transitions();
2713 }
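// Illustrative sketch (hypothetical helper): each distinct property name added
// to a fresh instance of F installs one transition on F's initial map, which
// is what CountMapTransitions() reports in the tests below (assuming no GC
// clears the transitions in between).
#if 0
static void SketchCountMapTransitions() {
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function F() {}"
             "var a = new F; a.x = 1;"
             "var b = new F; b.y = 2;"
             "var root = new F;");
  Handle<JSObject> root = v8::Utils::OpenHandle(
      *v8::Handle<v8::Object>::Cast(CcTest::global()->Get(v8_str("root"))));
  CHECK_EQ(2, CountMapTransitions(root->map()));
}
#endif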
2714 
2715 
2716 // Test that map transitions are cleared and maps are collected with
2717 // incremental marking as well.
2718 TEST(Regress1465) {
2719   i::FLAG_stress_compaction = false;
2720   i::FLAG_allow_natives_syntax = true;
2721   i::FLAG_trace_incremental_marking = true;
2722   CcTest::InitializeVM();
2723   v8::HandleScope scope(CcTest::isolate());
2724   static const int transitions_count = 256;
2725 
2726   CompileRun("function F() {}");
2727   {
2728     AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2729     for (int i = 0; i < transitions_count; i++) {
2730       EmbeddedVector<char, 64> buffer;
2731       SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2732       CompileRun(buffer.start());
2733     }
2734     CompileRun("var root = new F;");
2735   }
2736 
2737   Handle<JSObject> root =
2738       v8::Utils::OpenHandle(
2739           *v8::Handle<v8::Object>::Cast(
2740               CcTest::global()->Get(v8_str("root"))));
2741 
2742   // Count number of live transitions before marking.
2743   int transitions_before = CountMapTransitions(root->map());
2744   CompileRun("%DebugPrint(root);");
2745   CHECK_EQ(transitions_count, transitions_before);
2746 
2747   SimulateIncrementalMarking(CcTest::heap());
2748   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2749 
2750   // Count number of live transitions after marking.  Note that one transition
2751   // is left, because 'o' still holds an instance of one transition target.
2752   int transitions_after = CountMapTransitions(root->map());
2753   CompileRun("%DebugPrint(root);");
2754   CHECK_EQ(1, transitions_after);
2755 }
2756 
2757 
2758 #ifdef DEBUG
2759 static void AddTransitions(int transitions_count) {
2760   AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2761   for (int i = 0; i < transitions_count; i++) {
2762     EmbeddedVector<char, 64> buffer;
2763     SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2764     CompileRun(buffer.start());
2765   }
2766 }
2767 
2768 
2769 static Handle<JSObject> GetByName(const char* name) {
2770   return v8::Utils::OpenHandle(
2771       *v8::Handle<v8::Object>::Cast(
2772           CcTest::global()->Get(v8_str(name))));
2773 }
2774 
2775 
2776 static void AddPropertyTo(
2777     int gc_count, Handle<JSObject> object, const char* property_name) {
2778   Isolate* isolate = CcTest::i_isolate();
2779   Factory* factory = isolate->factory();
2780   Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
2781   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
2782   i::FLAG_gc_interval = gc_count;
2783   i::FLAG_gc_global = true;
2784   CcTest::heap()->set_allocation_timeout(gc_count);
2785   JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
2786 }
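// Illustrative sketch (not a complete test): the TransitionArray* tests below
// all follow this pattern -- re-resolve "root", add a property while a GC is
// forced during the allocation, then count the transitions that survived on
// the parent map.
#if 0
  Handle<JSObject> root = GetByName("root");
  AddPropertyTo(2, root, "funny");
  int live_transitions = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
#endif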
2787 
2788 
2789 TEST(TransitionArrayShrinksDuringAllocToZero) {
2790   i::FLAG_stress_compaction = false;
2791   i::FLAG_allow_natives_syntax = true;
2792   CcTest::InitializeVM();
2793   v8::HandleScope scope(CcTest::isolate());
2794   static const int transitions_count = 10;
2795   CompileRun("function F() { }");
2796   AddTransitions(transitions_count);
2797   CompileRun("var root = new F;");
2798   Handle<JSObject> root = GetByName("root");
2799 
2800   // Count number of live transitions before marking.
2801   int transitions_before = CountMapTransitions(root->map());
2802   CHECK_EQ(transitions_count, transitions_before);
2803 
2804   // Get rid of o
2805   CompileRun("o = new F;"
2806              "root = new F");
2807   root = GetByName("root");
2808   AddPropertyTo(2, root, "funny");
2809 
2810   // Count number of live transitions after marking.  Note that one transition
2811   // is left, because 'o' still holds an instance of one transition target.
2812   int transitions_after = CountMapTransitions(
2813       Map::cast(root->map()->GetBackPointer()));
2814   CHECK_EQ(1, transitions_after);
2815 }
2816 
2817 
2818 TEST(TransitionArrayShrinksDuringAllocToOne) {
2819   i::FLAG_stress_compaction = false;
2820   i::FLAG_allow_natives_syntax = true;
2821   CcTest::InitializeVM();
2822   v8::HandleScope scope(CcTest::isolate());
2823   static const int transitions_count = 10;
2824   CompileRun("function F() {}");
2825   AddTransitions(transitions_count);
2826   CompileRun("var root = new F;");
2827   Handle<JSObject> root = GetByName("root");
2828 
2829   // Count number of live transitions before marking.
2830   int transitions_before = CountMapTransitions(root->map());
2831   CHECK_EQ(transitions_count, transitions_before);
2832 
2833   root = GetByName("root");
2834   AddPropertyTo(2, root, "funny");
2835 
2836   // Count number of live transitions after marking.  Two transitions are
2837   // expected here: the surviving transition target plus the one just added.
2838   int transitions_after = CountMapTransitions(
2839       Map::cast(root->map()->GetBackPointer()));
2840   CHECK_EQ(2, transitions_after);
2841 }
2842 
2843 
2844 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
2845   i::FLAG_stress_compaction = false;
2846   i::FLAG_allow_natives_syntax = true;
2847   CcTest::InitializeVM();
2848   v8::HandleScope scope(CcTest::isolate());
2849   static const int transitions_count = 10;
2850   CompileRun("function F() {}");
2851   AddTransitions(transitions_count);
2852   CompileRun("var root = new F;");
2853   Handle<JSObject> root = GetByName("root");
2854 
2855   // Count number of live transitions before marking.
2856   int transitions_before = CountMapTransitions(root->map());
2857   CHECK_EQ(transitions_count, transitions_before);
2858 
2859   root = GetByName("root");
2860   AddPropertyTo(0, root, "prop9");
2861   CcTest::i_isolate()->heap()->CollectGarbage(OLD_POINTER_SPACE);
2862 
2863   // Count number of live transitions after marking.  Note that one transition
2864   // is left: the store of 'prop9' finds an already existing transition.
2865   int transitions_after = CountMapTransitions(
2866       Map::cast(root->map()->GetBackPointer()));
2867   CHECK_EQ(1, transitions_after);
2868 }
2869 
2870 
2871 TEST(TransitionArraySimpleToFull) {
2872   i::FLAG_stress_compaction = false;
2873   i::FLAG_allow_natives_syntax = true;
2874   CcTest::InitializeVM();
2875   v8::HandleScope scope(CcTest::isolate());
2876   static const int transitions_count = 1;
2877   CompileRun("function F() {}");
2878   AddTransitions(transitions_count);
2879   CompileRun("var root = new F;");
2880   Handle<JSObject> root = GetByName("root");
2881 
2882   // Count number of live transitions before marking.
2883   int transitions_before = CountMapTransitions(root->map());
2884   CHECK_EQ(transitions_count, transitions_before);
2885 
2886   CompileRun("o = new F;"
2887              "root = new F");
2888   root = GetByName("root");
2889   DCHECK(root->map()->transitions()->IsSimpleTransition());
2890   AddPropertyTo(2, root, "happy");
2891 
2892   // Count number of live transitions after marking.  Note that one transition
2893   // is left, because 'o' still holds an instance of one transition target.
2894   int transitions_after = CountMapTransitions(
2895       Map::cast(root->map()->GetBackPointer()));
2896   CHECK_EQ(1, transitions_after);
2897 }
2898 #endif  // DEBUG
2899 
2900 
2901 TEST(Regress2143a) {
2902   i::FLAG_collect_maps = true;
2903   i::FLAG_incremental_marking = true;
2904   CcTest::InitializeVM();
2905   v8::HandleScope scope(CcTest::isolate());
2906 
2907   // Prepare a map transition from the root object together with a yet
2908   // untransitioned root object.
2909   CompileRun("var root = new Object;"
2910              "root.foo = 0;"
2911              "root = new Object;");
2912 
2913   SimulateIncrementalMarking(CcTest::heap());
2914 
2915   // Compile a StoreIC that performs the prepared map transition. This
2916   // will restart incremental marking and should make sure the root is
2917   // marked grey again.
2918   CompileRun("function f(o) {"
2919              "  o.foo = 0;"
2920              "}"
2921              "f(new Object);"
2922              "f(root);");
2923 
2924   // This bug only triggers with aggressive IC clearing.
2925   CcTest::heap()->AgeInlineCaches();
2926 
2927   // Explicitly request GC to perform final marking step and sweeping.
2928   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2929 
2930   Handle<JSObject> root =
2931       v8::Utils::OpenHandle(
2932           *v8::Handle<v8::Object>::Cast(
2933               CcTest::global()->Get(v8_str("root"))));
2934 
2935   // The root object should be in a sane state.
2936   CHECK(root->IsJSObject());
2937   CHECK(root->map()->IsMap());
2938 }
2939 
2940 
2941 TEST(Regress2143b) {
2942   i::FLAG_collect_maps = true;
2943   i::FLAG_incremental_marking = true;
2944   i::FLAG_allow_natives_syntax = true;
2945   CcTest::InitializeVM();
2946   v8::HandleScope scope(CcTest::isolate());
2947 
2948   // Prepare a map transition from the root object together with a yet
2949   // untransitioned root object.
2950   CompileRun("var root = new Object;"
2951              "root.foo = 0;"
2952              "root = new Object;");
2953 
2954   SimulateIncrementalMarking(CcTest::heap());
2955 
2956   // Compile an optimized LStoreNamedField that performs the prepared
2957   // map transition. This will restart incremental marking and should
2958   // make sure the root is marked grey again.
2959   CompileRun("function f(o) {"
2960              "  o.foo = 0;"
2961              "}"
2962              "f(new Object);"
2963              "f(new Object);"
2964              "%OptimizeFunctionOnNextCall(f);"
2965              "f(root);"
2966              "%DeoptimizeFunction(f);");
2967 
2968   // This bug only triggers with aggressive IC clearing.
2969   CcTest::heap()->AgeInlineCaches();
2970 
2971   // Explicitly request GC to perform final marking step and sweeping.
2972   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2973 
2974   Handle<JSObject> root =
2975       v8::Utils::OpenHandle(
2976           *v8::Handle<v8::Object>::Cast(
2977               CcTest::global()->Get(v8_str("root"))));
2978 
2979   // The root object should be in a sane state.
2980   CHECK(root->IsJSObject());
2981   CHECK(root->map()->IsMap());
2982 }
2983 
2984 
2985 TEST(ReleaseOverReservedPages) {
2986   if (FLAG_never_compact) return;
2987   i::FLAG_trace_gc = true;
2988   // The optimizer can allocate stuff, messing up the test.
2989   i::FLAG_crankshaft = false;
2990   i::FLAG_always_opt = false;
2991   CcTest::InitializeVM();
2992   Isolate* isolate = CcTest::i_isolate();
2993   Factory* factory = isolate->factory();
2994   Heap* heap = isolate->heap();
2995   v8::HandleScope scope(CcTest::isolate());
2996   static const int number_of_test_pages = 20;
2997 
2998   // Prepare many pages with low live-bytes count.
2999   PagedSpace* old_pointer_space = heap->old_pointer_space();
3000   CHECK_EQ(1, old_pointer_space->CountTotalPages());
3001   for (int i = 0; i < number_of_test_pages; i++) {
3002     AlwaysAllocateScope always_allocate(isolate);
3003     SimulateFullSpace(old_pointer_space);
3004     factory->NewFixedArray(1, TENURED);
3005   }
3006   CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
3007 
3008   // Triggering one GC will cause a lot of garbage to be discovered but
3009   // evenly spread across all allocated pages.
3010   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
3011                           "triggered for preparation");
3012   CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
3013 
3014   // Triggering subsequent GCs should cause at least half of the pages
3015   // to be released to the OS after at most two cycles.
3016   heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
3017   CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
3018   heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
3019   CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
3020 
3021   // Triggering a last-resort GC should cause all pages to be released to the
3022   // OS so that other processes can seize the memory.  If we get a failure here
3023   // where there are 2 pages left instead of 1, then we should increase the
3024   // size of the first page a little in SizeOfFirstPage in spaces.cc.  The
3025   // first page should be small in order to reduce memory used when the VM
3026   // boots, but if the 20 small arrays don't fit on the first page then that's
3027   // an indication that it is too small.
3028   heap->CollectAllAvailableGarbage("triggered really hard");
3029   CHECK_EQ(1, old_pointer_space->CountTotalPages());
3030 }
3031 
3032 
3033 TEST(Regress2237) {
3034   i::FLAG_stress_compaction = false;
3035   CcTest::InitializeVM();
3036   Isolate* isolate = CcTest::i_isolate();
3037   Factory* factory = isolate->factory();
3038   v8::HandleScope scope(CcTest::isolate());
3039   Handle<String> slice(CcTest::heap()->empty_string());
3040 
3041   {
3042     // Generate a parent that lives in new-space.
3043     v8::HandleScope inner_scope(CcTest::isolate());
3044     const char* c = "This text is long enough to trigger sliced strings.";
3045     Handle<String> s = factory->NewStringFromAsciiChecked(c);
3046     CHECK(s->IsSeqOneByteString());
3047     CHECK(CcTest::heap()->InNewSpace(*s));
3048 
3049     // Generate a sliced string that is based on the above parent and
3050     // lives in old-space.
3051     SimulateFullSpace(CcTest::heap()->new_space());
3052     AlwaysAllocateScope always_allocate(isolate);
3053     Handle<String> t = factory->NewProperSubString(s, 5, 35);
3054     CHECK(t->IsSlicedString());
3055     CHECK(!CcTest::heap()->InNewSpace(*t));
3056     *slice.location() = *t.location();
3057   }
3058 
3059   CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
3060   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3061   CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
3062 }
3063 
3064 
3065 #ifdef OBJECT_PRINT
3066 TEST(PrintSharedFunctionInfo) {
3067   CcTest::InitializeVM();
3068   v8::HandleScope scope(CcTest::isolate());
3069   const char* source = "f = function() { return 987654321; }\n"
3070                        "g = function() { return 123456789; }\n";
3071   CompileRun(source);
3072   Handle<JSFunction> g =
3073       v8::Utils::OpenHandle(
3074           *v8::Handle<v8::Function>::Cast(
3075               CcTest::global()->Get(v8_str("g"))));
3076 
3077   OFStream os(stdout);
3078   g->shared()->Print(os);
3079   os << endl;
3080 }
3081 #endif  // OBJECT_PRINT
3082 
3083 
3084 TEST(Regress2211) {
3085   CcTest::InitializeVM();
3086   v8::HandleScope scope(CcTest::isolate());
3087 
3088   v8::Handle<v8::String> value = v8_str("val string");
3089   Smi* hash = Smi::FromInt(321);
3090   Factory* factory = CcTest::i_isolate()->factory();
3091 
3092   for (int i = 0; i < 2; i++) {
3093     // Store identity hash first and common hidden property second.
3094     v8::Handle<v8::Object> obj = v8::Object::New(CcTest::isolate());
3095     Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
3096     CHECK(internal_obj->HasFastProperties());
3097 
3098     // In the first iteration, set hidden value first and identity hash second.
3099     // In the second iteration, reverse the order.
3100     if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
3101     JSObject::SetIdentityHash(internal_obj, handle(hash, CcTest::i_isolate()));
3102     if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
3103 
3104     // Check values.
3105     CHECK_EQ(hash,
3106              internal_obj->GetHiddenProperty(factory->identity_hash_string()));
3107     CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
3108 
3109     // Check size.
3110     FieldIndex index = FieldIndex::ForDescriptor(internal_obj->map(), 0);
3111     ObjectHashTable* hashtable = ObjectHashTable::cast(
3112         internal_obj->RawFastPropertyAt(index));
3113     // HashTable header (5) and 4 initial entries (8).
3114     CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
3115   }
3116 }
3117 
3118 
3119 TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
3120   if (i::FLAG_always_opt) return;
3121   CcTest::InitializeVM();
3122   v8::HandleScope scope(CcTest::isolate());
3123   v8::Local<v8::Value> fun1, fun2;
3124 
3125   {
3126     LocalContext env;
3127     CompileRun("function fun() {};");
3128     fun1 = env->Global()->Get(v8_str("fun"));
3129   }
3130 
3131   {
3132     LocalContext env;
3133     CompileRun("function fun() {};");
3134     fun2 = env->Global()->Get(v8_str("fun"));
3135   }
3136 
3137   // Prepare function f that contains type feedback for closures
3138   // originating from two different native contexts.
3139   CcTest::global()->Set(v8_str("fun1"), fun1);
3140   CcTest::global()->Set(v8_str("fun2"), fun2);
3141   CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3142 
3143   Handle<JSFunction> f =
3144       v8::Utils::OpenHandle(
3145           *v8::Handle<v8::Function>::Cast(
3146               CcTest::global()->Get(v8_str("f"))));
3147 
3148   Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
3149 
3150   int expected_length = FLAG_vector_ics ? 4 : 2;
3151   CHECK_EQ(expected_length, feedback_vector->length());
3152   for (int i = 0; i < expected_length; i++) {
3153     if ((i % 2) == 1) {
3154       CHECK(feedback_vector->get(i)->IsJSFunction());
3155     }
3156   }
3157 
3158   SimulateIncrementalMarking(CcTest::heap());
3159   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3160 
3161   CHECK_EQ(expected_length, feedback_vector->length());
3162   for (int i = 0; i < expected_length; i++) {
3163     CHECK_EQ(feedback_vector->get(i),
3164              *TypeFeedbackVector::UninitializedSentinel(CcTest::i_isolate()));
3165   }
3166 }
3167 
3168 
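     // Walks the relocation info of |code| and returns the first inline cache
     // stub of the requested kind, or NULL if none is found.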
3169 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3170   int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3171              RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
3172              RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3173   for (RelocIterator it(code, mask); !it.done(); it.next()) {
3174     RelocInfo* info = it.rinfo();
3175     Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3176     if (target->is_inline_cache_stub() && target->kind() == kind) {
3177       return target;
3178     }
3179   }
3180   return NULL;
3181 }
3182 
3183 
3184 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3185   if (i::FLAG_always_opt) return;
3186   CcTest::InitializeVM();
3187   v8::HandleScope scope(CcTest::isolate());
3188 
3189   // Prepare function f that contains a monomorphic IC for object
3190   // originating from the same native context.
3191   CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3192              "function f(o) { return o.x; } f(obj); f(obj);");
3193   Handle<JSFunction> f =
3194       v8::Utils::OpenHandle(
3195           *v8::Handle<v8::Function>::Cast(
3196               CcTest::global()->Get(v8_str("f"))));
3197 
3198   Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3199   CHECK(ic_before->ic_state() == MONOMORPHIC);
3200 
3201   SimulateIncrementalMarking(CcTest::heap());
3202   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3203 
3204   Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3205   CHECK(ic_after->ic_state() == MONOMORPHIC);
3206 }
3207 
3208 
3209 TEST(IncrementalMarkingClearsMonomorphicIC) {
3210   if (i::FLAG_always_opt) return;
3211   CcTest::InitializeVM();
3212   v8::HandleScope scope(CcTest::isolate());
3213   v8::Local<v8::Value> obj1;
3214 
3215   {
3216     LocalContext env;
3217     CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3218     obj1 = env->Global()->Get(v8_str("obj"));
3219   }
3220 
3221   // Prepare function f that contains a monomorphic IC for object
3222   // originating from a different native context.
3223   CcTest::global()->Set(v8_str("obj1"), obj1);
3224   CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
3225   Handle<JSFunction> f =
3226       v8::Utils::OpenHandle(
3227           *v8::Handle<v8::Function>::Cast(
3228               CcTest::global()->Get(v8_str("f"))));
3229 
3230   Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3231   CHECK(ic_before->ic_state() == MONOMORPHIC);
3232 
3233   // Fire context dispose notification.
3234   CcTest::isolate()->ContextDisposedNotification();
3235   SimulateIncrementalMarking(CcTest::heap());
3236   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3237 
3238   Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3239   CHECK(IC::IsCleared(ic_after));
3240 }
3241 
3242 
3243 TEST(IncrementalMarkingClearsPolymorphicIC) {
3244   if (i::FLAG_always_opt) return;
3245   CcTest::InitializeVM();
3246   v8::HandleScope scope(CcTest::isolate());
3247   v8::Local<v8::Value> obj1, obj2;
3248 
3249   {
3250     LocalContext env;
3251     CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3252     obj1 = env->Global()->Get(v8_str("obj"));
3253   }
3254 
3255   {
3256     LocalContext env;
3257     CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3258     obj2 = env->Global()->Get(v8_str("obj"));
3259   }
3260 
3261   // Prepare function f that contains a polymorphic IC for objects
3262   // originating from two different native contexts.
3263   CcTest::global()->Set(v8_str("obj1"), obj1);
3264   CcTest::global()->Set(v8_str("obj2"), obj2);
3265   CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3266   Handle<JSFunction> f =
3267       v8::Utils::OpenHandle(
3268           *v8::Handle<v8::Function>::Cast(
3269               CcTest::global()->Get(v8_str("f"))));
3270 
3271   Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3272   CHECK(ic_before->ic_state() == POLYMORPHIC);
3273 
3274   // Fire context dispose notification.
3275   CcTest::isolate()->ContextDisposedNotification();
3276   SimulateIncrementalMarking(CcTest::heap());
3277   CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3278 
3279   Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3280   CHECK(IC::IsCleared(ic_after));
3281 }
3282 
3283 
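     // External one-byte string resource that lets the tests observe, via
     // IsDisposed(), when V8 has released the underlying source data.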
3284 class SourceResource : public v8::String::ExternalOneByteStringResource {
3285  public:
3286   explicit SourceResource(const char* data)
3287     : data_(data), length_(strlen(data)) { }
3288 
3289   virtual void Dispose() {
3290     i::DeleteArray(data_);
3291     data_ = NULL;
3292   }
3293 
3294   const char* data() const { return data_; }
3295 
3296   size_t length() const { return length_; }
3297 
3298   bool IsDisposed() { return data_ == NULL; }
3299 
3300  private:
3301   const char* data_;
3302   size_t length_;
3303 };
3304 
3305 
3306 void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
3307                                const char* accessor) {
3308   // Test that the data retained by the Error.stack accessor is released
3309   // after the first time the accessor is fired.  We use external string
3310   // to check whether the data is being released since the external string
3311   // resource's callback is fired when the external string is GC'ed.
3312   i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3313   v8::HandleScope scope(isolate);
3314   SourceResource* resource = new SourceResource(i::StrDup(source));
3315   {
3316     v8::HandleScope scope(isolate);
3317     v8::Handle<v8::String> source_string =
3318         v8::String::NewExternal(isolate, resource);
3319     i_isolate->heap()->CollectAllAvailableGarbage();
3320     v8::Script::Compile(source_string)->Run();
3321     CHECK(!resource->IsDisposed());
3322   }
3323   // i_isolate->heap()->CollectAllAvailableGarbage();
3324   CHECK(!resource->IsDisposed());
3325 
3326   CompileRun(accessor);
3327   i_isolate->heap()->CollectAllAvailableGarbage();
3328 
3329   // External source has been released.
3330   CHECK(resource->IsDisposed());
3331   delete resource;
3332 }
3333 
3334 
3335 UNINITIALIZED_TEST(ReleaseStackTraceData) {
3336   if (i::FLAG_always_opt) {
3337     // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
3338     // See: https://codereview.chromium.org/181833004/
3339     return;
3340   }
3341   FLAG_use_ic = false;  // ICs retain objects.
3342   FLAG_concurrent_recompilation = false;
3343   v8::Isolate* isolate = v8::Isolate::New();
3344   {
3345     v8::Isolate::Scope isolate_scope(isolate);
3346     v8::HandleScope handle_scope(isolate);
3347     v8::Context::New(isolate)->Enter();
3348     static const char* source1 = "var error = null;            "
3349     /* Normal Error */           "try {                        "
3350                                  "  throw new Error();         "
3351                                  "} catch (e) {                "
3352                                  "  error = e;                 "
3353                                  "}                            ";
3354     static const char* source2 = "var error = null;            "
3355     /* Stack overflow */         "try {                        "
3356                                  "  (function f() { f(); })(); "
3357                                  "} catch (e) {                "
3358                                  "  error = e;                 "
3359                                  "}                            ";
3360     static const char* source3 = "var error = null;            "
3361     /* Normal Error */           "try {                        "
3362     /* as prototype */           "  throw new Error();         "
3363                                  "} catch (e) {                "
3364                                  "  error = {};                "
3365                                  "  error.__proto__ = e;       "
3366                                  "}                            ";
3367     static const char* source4 = "var error = null;            "
3368     /* Stack overflow */         "try {                        "
3369     /* as prototype   */         "  (function f() { f(); })(); "
3370                                  "} catch (e) {                "
3371                                  "  error = {};                "
3372                                  "  error.__proto__ = e;       "
3373                                  "}                            ";
3374     static const char* getter = "error.stack";
3375     static const char* setter = "error.stack = 0";
3376 
3377     ReleaseStackTraceDataTest(isolate, source1, setter);
3378     ReleaseStackTraceDataTest(isolate, source2, setter);
3379     // We do not test source3 and source4 with setter, since the setter is
3380     // supposed to (untypically) write to the receiver, not the holder.  This is
3381     // to emulate the behavior of a data property.
3382 
3383     ReleaseStackTraceDataTest(isolate, source1, getter);
3384     ReleaseStackTraceDataTest(isolate, source2, getter);
3385     ReleaseStackTraceDataTest(isolate, source3, getter);
3386     ReleaseStackTraceDataTest(isolate, source4, getter);
3387   }
3388   isolate->Dispose();
3389 }
3390 
3391 
3392 TEST(Regress159140) {
3393   i::FLAG_allow_natives_syntax = true;
3394   i::FLAG_flush_code_incrementally = true;
3395   CcTest::InitializeVM();
3396   Isolate* isolate = CcTest::i_isolate();
3397   Heap* heap = isolate->heap();
3398   HandleScope scope(isolate);
3399 
3400   // Perform one initial GC to enable code flushing.
3401   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3402 
3403   // Prepare several closures that are all eligible for code flushing
3404   // because all reachable ones are not optimized. Make sure that the
3405   // optimized code object is directly reachable through a handle so
3406   // that it is marked black during incremental marking.
3407   Handle<Code> code;
3408   {
3409     HandleScope inner_scope(isolate);
3410     CompileRun("function h(x) {}"
3411                "function mkClosure() {"
3412                "  return function(x) { return x + 1; };"
3413                "}"
3414                "var f = mkClosure();"
3415                "var g = mkClosure();"
3416                "f(1); f(2);"
3417                "g(1); g(2);"
3418                "h(1); h(2);"
3419                "%OptimizeFunctionOnNextCall(f); f(3);"
3420                "%OptimizeFunctionOnNextCall(h); h(3);");
3421 
3422     Handle<JSFunction> f =
3423         v8::Utils::OpenHandle(
3424             *v8::Handle<v8::Function>::Cast(
3425                 CcTest::global()->Get(v8_str("f"))));
3426     CHECK(f->is_compiled());
3427     CompileRun("f = null;");
3428 
3429     Handle<JSFunction> g =
3430         v8::Utils::OpenHandle(
3431             *v8::Handle<v8::Function>::Cast(
3432                 CcTest::global()->Get(v8_str("g"))));
3433     CHECK(g->is_compiled());
3434     const int kAgingThreshold = 6;
3435     for (int i = 0; i < kAgingThreshold; i++) {
3436       g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3437     }
3438 
3439     code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
3440   }
3441 
3442   // Simulate incremental marking so that the functions are enqueued as
3443   // code flushing candidates. Then optimize one function. Finally
3444   // finish the GC to complete code flushing.
3445   SimulateIncrementalMarking(heap);
3446   CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
3447   heap->CollectAllGarbage(Heap::kNoGCFlags);
3448 
3449   // Unoptimized code is missing and the deoptimizer will go ballistic.
3450   CompileRun("g('bozo');");
3451 }
3452 
3453 
3454 TEST(Regress165495) {
3455   i::FLAG_allow_natives_syntax = true;
3456   i::FLAG_flush_code_incrementally = true;
3457   CcTest::InitializeVM();
3458   Isolate* isolate = CcTest::i_isolate();
3459   Heap* heap = isolate->heap();
3460   HandleScope scope(isolate);
3461 
3462   // Perform one initial GC to enable code flushing.
3463   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3464 
3465   // Prepare an optimized closure that the optimized code map will get
3466   // populated. Then age the unoptimized code to trigger code flushing
3467   // but make sure the optimized code is unreachable.
3468   {
3469     HandleScope inner_scope(isolate);
3470     CompileRun("function mkClosure() {"
3471                "  return function(x) { return x + 1; };"
3472                "}"
3473                "var f = mkClosure();"
3474                "f(1); f(2);"
3475                "%OptimizeFunctionOnNextCall(f); f(3);");
3476 
3477     Handle<JSFunction> f =
3478         v8::Utils::OpenHandle(
3479             *v8::Handle<v8::Function>::Cast(
3480                 CcTest::global()->Get(v8_str("f"))));
3481     CHECK(f->is_compiled());
3482     const int kAgingThreshold = 6;
3483     for (int i = 0; i < kAgingThreshold; i++) {
3484       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3485     }
3486 
3487     CompileRun("f = null;");
3488   }
3489 
3490   // Simulate incremental marking so that unoptimized code is flushed
3491   // even though it still is cached in the optimized code map.
3492   SimulateIncrementalMarking(heap);
3493   heap->CollectAllGarbage(Heap::kNoGCFlags);
3494 
3495   // Make a new closure that will get code installed from the code map.
3496   // Unoptimized code is missing and the deoptimizer will go ballistic.
3497   CompileRun("var g = mkClosure(); g('bozo');");
3498 }
3499 
3500 
3501 TEST(Regress169209) {
3502   i::FLAG_stress_compaction = false;
3503   i::FLAG_allow_natives_syntax = true;
3504   i::FLAG_flush_code_incrementally = true;
3505 
3506   CcTest::InitializeVM();
3507   Isolate* isolate = CcTest::i_isolate();
3508   Heap* heap = isolate->heap();
3509   HandleScope scope(isolate);
3510 
3511   // Perform one initial GC to enable code flushing.
3512   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3513 
3514   // Prepare a shared function info eligible for code flushing for which
3515   // the unoptimized code will be replaced during optimization.
3516   Handle<SharedFunctionInfo> shared1;
3517   {
3518     HandleScope inner_scope(isolate);
3519     CompileRun("function f() { return 'foobar'; }"
3520                "function g(x) { if (x) f(); }"
3521                "f();"
3522                "g(false);"
3523                "g(false);");
3524 
3525     Handle<JSFunction> f =
3526         v8::Utils::OpenHandle(
3527             *v8::Handle<v8::Function>::Cast(
3528                 CcTest::global()->Get(v8_str("f"))));
3529     CHECK(f->is_compiled());
3530     const int kAgingThreshold = 6;
3531     for (int i = 0; i < kAgingThreshold; i++) {
3532       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3533     }
3534 
3535     shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
3536   }
3537 
3538   // Prepare a shared function info eligible for code flushing that will
3539   // represent the dangling tail of the candidate list.
3540   Handle<SharedFunctionInfo> shared2;
3541   {
3542     HandleScope inner_scope(isolate);
3543     CompileRun("function flushMe() { return 0; }"
3544                "flushMe(1);");
3545 
3546     Handle<JSFunction> f =
3547         v8::Utils::OpenHandle(
3548             *v8::Handle<v8::Function>::Cast(
3549                 CcTest::global()->Get(v8_str("flushMe"))));
3550     CHECK(f->is_compiled());
3551     const int kAgingThreshold = 6;
3552     for (int i = 0; i < kAgingThreshold; i++) {
3553       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3554     }
3555 
3556     shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
3557   }
3558 
3559   // Simulate incremental marking and collect code flushing candidates.
3560   SimulateIncrementalMarking(heap);
3561   CHECK(shared1->code()->gc_metadata() != NULL);
3562 
3563   // Optimize function and make sure the unoptimized code is replaced.
3564 #ifdef DEBUG
3565   FLAG_stop_at = "f";
3566 #endif
3567   CompileRun("%OptimizeFunctionOnNextCall(g);"
3568              "g(false);");
3569 
3570   // Finish garbage collection cycle.
3571   heap->CollectAllGarbage(Heap::kNoGCFlags);
3572   CHECK(shared1->code()->gc_metadata() == NULL);
3573 }
3574 
3575 
3576 // Helper function that fills new space until only |extra_bytes| remain free.
3577 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
3578                                         int extra_bytes) {
3579   int space_remaining = static_cast<int>(
3580       *space->allocation_limit_address() - *space->allocation_top_address());
3581   CHECK(space_remaining >= extra_bytes);
3582   int new_linear_size = space_remaining - extra_bytes;
3583   v8::internal::AllocationResult allocation =
3584       space->AllocateRaw(new_linear_size);
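       // Format the raw allocation as a free-list node so that the skipped
       // region is covered by a well-formed filler object.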
3585   v8::internal::FreeListNode* node =
3586       v8::internal::FreeListNode::cast(allocation.ToObjectChecked());
3587   node->set_size(space->heap(), new_linear_size);
3588 }
3589 
3590 
3591 TEST(Regress169928) {
3592   i::FLAG_allow_natives_syntax = true;
3593   i::FLAG_crankshaft = false;
3594   CcTest::InitializeVM();
3595   Isolate* isolate = CcTest::i_isolate();
3596   Factory* factory = isolate->factory();
3597   v8::HandleScope scope(CcTest::isolate());
3598 
3599   // Some flags turn Scavenge collections into Mark-sweep collections
3600   // and hence are incompatible with this test case.
3601   if (FLAG_gc_global || FLAG_stress_compaction) return;
3602 
3603   // Prepare the environment
3604   CompileRun("function fastliteralcase(literal, value) {"
3605              "    literal[0] = value;"
3606              "    return literal;"
3607              "}"
3608              "function get_standard_literal() {"
3609              "    var literal = [1, 2, 3];"
3610              "    return literal;"
3611              "}"
3612              "obj = fastliteralcase(get_standard_literal(), 1);"
3613              "obj = fastliteralcase(get_standard_literal(), 1.5);"
3614              "obj = fastliteralcase(get_standard_literal(), 2);");
3615 
3616   // Prepare the heap.
3617   v8::Local<v8::String> mote_code_string =
3618       v8_str("fastliteralcase(mote, 2.5);");
3619 
3620   v8::Local<v8::String> array_name = v8_str("mote");
3621   CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
3622 
3623   // First make sure we flip spaces
3624   CcTest::heap()->CollectGarbage(NEW_SPACE);
3625 
3626   // Allocate the object.
3627   Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
3628   array_data->set(0, Smi::FromInt(1));
3629   array_data->set(1, Smi::FromInt(2));
3630 
3631   AllocateAllButNBytes(CcTest::heap()->new_space(),
3632                        JSArray::kSize + AllocationMemento::kSize +
3633                        kPointerSize);
3634 
3635   Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,
3636                                                           FAST_SMI_ELEMENTS,
3637                                                           NOT_TENURED);
3638 
3639   CHECK_EQ(Smi::FromInt(2), array->length());
3640   CHECK(array->HasFastSmiOrObjectElements());
3641 
3642   // We need a filler the size of an AllocationMemento object, plus an extra
3643   // pointer-sized fill value.
3644   HeapObject* obj = NULL;
3645   AllocationResult allocation = CcTest::heap()->new_space()->AllocateRaw(
3646       AllocationMemento::kSize + kPointerSize);
3647   CHECK(allocation.To(&obj));
3648   Address addr_obj = obj->address();
3649   CcTest::heap()->CreateFillerObjectAt(
3650       addr_obj, AllocationMemento::kSize + kPointerSize);
3651 
3652   // Give the array a name, making sure not to allocate strings.
3653   v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
3654   CcTest::global()->Set(array_name, array_obj);
3655 
3656   // This should crash with a protection violation if we are running a build
3657   // with the bug.
3658   AlwaysAllocateScope aa_scope(isolate);
3659   v8::Script::Compile(mote_code_string)->Run();
3660 }
3661 
3662 
3663 TEST(Regress168801) {
3664   if (i::FLAG_never_compact) return;
3665   i::FLAG_always_compact = true;
3666   i::FLAG_cache_optimized_code = false;
3667   i::FLAG_allow_natives_syntax = true;
3668   i::FLAG_flush_code_incrementally = true;
3669   CcTest::InitializeVM();
3670   Isolate* isolate = CcTest::i_isolate();
3671   Heap* heap = isolate->heap();
3672   HandleScope scope(isolate);
3673 
3674   // Perform one initial GC to enable code flushing.
3675   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3676 
3677   // Ensure the code ends up on an evacuation candidate.
3678   SimulateFullSpace(heap->code_space());
3679 
3680   // Prepare an unoptimized function that is eligible for code flushing.
3681   Handle<JSFunction> function;
3682   {
3683     HandleScope inner_scope(isolate);
3684     CompileRun("function mkClosure() {"
3685                "  return function(x) { return x + 1; };"
3686                "}"
3687                "var f = mkClosure();"
3688                "f(1); f(2);");
3689 
3690     Handle<JSFunction> f =
3691         v8::Utils::OpenHandle(
3692             *v8::Handle<v8::Function>::Cast(
3693                 CcTest::global()->Get(v8_str("f"))));
3694     CHECK(f->is_compiled());
3695     const int kAgingThreshold = 6;
3696     for (int i = 0; i < kAgingThreshold; i++) {
3697       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3698     }
3699 
3700     function = inner_scope.CloseAndEscape(handle(*f, isolate));
3701   }
3702 
3703   // Simulate incremental marking so that unoptimized function is enqueued as a
3704   // candidate for code flushing. The shared function info however will not be
3705   // explicitly enqueued.
3706   SimulateIncrementalMarking(heap);
3707 
3708   // Now optimize the function so that it is taken off the candidate list.
3709   {
3710     HandleScope inner_scope(isolate);
3711     CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
3712   }
3713 
3714   // This cycle will bust the heap and subsequent cycles will go ballistic.
3715   heap->CollectAllGarbage(Heap::kNoGCFlags);
3716   heap->CollectAllGarbage(Heap::kNoGCFlags);
3717 }
3718 
3719 
3720 TEST(Regress173458) {
3721   if (i::FLAG_never_compact) return;
3722   i::FLAG_always_compact = true;
3723   i::FLAG_cache_optimized_code = false;
3724   i::FLAG_allow_natives_syntax = true;
3725   i::FLAG_flush_code_incrementally = true;
3726   CcTest::InitializeVM();
3727   Isolate* isolate = CcTest::i_isolate();
3728   Heap* heap = isolate->heap();
3729   HandleScope scope(isolate);
3730 
3731   // Perform one initial GC to enable code flushing.
3732   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3733 
3734   // Ensure the code ends up on an evacuation candidate.
3735   SimulateFullSpace(heap->code_space());
3736 
3737   // Prepare an unoptimized function that is eligible for code flushing.
3738   Handle<JSFunction> function;
3739   {
3740     HandleScope inner_scope(isolate);
3741     CompileRun("function mkClosure() {"
3742                "  return function(x) { return x + 1; };"
3743                "}"
3744                "var f = mkClosure();"
3745                "f(1); f(2);");
3746 
3747     Handle<JSFunction> f =
3748         v8::Utils::OpenHandle(
3749             *v8::Handle<v8::Function>::Cast(
3750                 CcTest::global()->Get(v8_str("f"))));
3751     CHECK(f->is_compiled());
3752     const int kAgingThreshold = 6;
3753     for (int i = 0; i < kAgingThreshold; i++) {
3754       f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3755     }
3756 
3757     function = inner_scope.CloseAndEscape(handle(*f, isolate));
3758   }
3759 
3760   // Simulate incremental marking so that unoptimized function is enqueued as a
3761   // candidate for code flushing. The shared function info however will not be
3762   // explicitly enqueued.
3763   SimulateIncrementalMarking(heap);
3764 
3765   // Now enable the debugger which in turn will disable code flushing.
3766   CHECK(isolate->debug()->Load());
3767 
3768   // This cycle will bust the heap and subsequent cycles will go ballistic.
3769   heap->CollectAllGarbage(Heap::kNoGCFlags);
3770   heap->CollectAllGarbage(Heap::kNoGCFlags);
3771 }
3772 
3773 
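     // Visitor that deliberately ignores all pointers; the DeferredHandles test
     // below only needs an ObjectVisitor instance to pass to Iterate().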
3774 class DummyVisitor : public ObjectVisitor {
3775  public:
3776   void VisitPointers(Object** start, Object** end) { }
3777 };
3778 
3779 
3780 TEST(DeferredHandles) {
3781   CcTest::InitializeVM();
3782   Isolate* isolate = CcTest::i_isolate();
3783   Heap* heap = isolate->heap();
3784   v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
3785   HandleScopeData* data = isolate->handle_scope_data();
3786   Handle<Object> init(heap->empty_string(), isolate);
3787   while (data->next < data->limit) {
3788     Handle<Object> obj(heap->empty_string(), isolate);
3789   }
3790   // An entire block of handles has been filled.
3791   // Next handle would require a new block.
3792   DCHECK(data->next == data->limit);
3793 
3794   DeferredHandleScope deferred(isolate);
3795   DummyVisitor visitor;
3796   isolate->handle_scope_implementer()->Iterate(&visitor);
3797   delete deferred.Detach();
3798 }
3799 
3800 
3801 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
3802   CcTest::InitializeVM();
3803   v8::HandleScope scope(CcTest::isolate());
3804   CompileRun("function f(n) {"
3805              "    var a = new Array(n);"
3806              "    for (var i = 0; i < n; i += 100) a[i] = i;"
3807              "};"
3808              "f(10 * 1024 * 1024);");
3809   IncrementalMarking* marking = CcTest::heap()->incremental_marking();
3810   if (marking->IsStopped()) marking->Start();
3811   // This big step should be sufficient to mark the whole array.
3812   marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
3813   DCHECK(marking->IsComplete());
3814 }
3815 
3816 
3817 TEST(DisableInlineAllocation) {
3818   i::FLAG_allow_natives_syntax = true;
3819   CcTest::InitializeVM();
3820   v8::HandleScope scope(CcTest::isolate());
3821   CompileRun("function test() {"
3822              "  var x = [];"
3823              "  for (var i = 0; i < 10; i++) {"
3824              "    x[i] = [ {}, [1,2,3], [1,x,3] ];"
3825              "  }"
3826              "}"
3827              "function run() {"
3828              "  %OptimizeFunctionOnNextCall(test);"
3829              "  test();"
3830              "  %DeoptimizeFunction(test);"
3831              "}");
3832 
3833   // Warm-up with inline allocation enabled.
3834   CompileRun("test(); test(); run();");
3835 
3836   // Run test with inline allocation disabled.
3837   CcTest::heap()->DisableInlineAllocation();
3838   CompileRun("run()");
3839 
3840   // Run test with inline allocation re-enabled.
3841   CcTest::heap()->EnableInlineAllocation();
3842   CompileRun("run()");
3843 }
3844 
3845 
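     // Returns the length of the heap's weak list of AllocationSites.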
3846 static int AllocationSitesCount(Heap* heap) {
3847   int count = 0;
3848   for (Object* site = heap->allocation_sites_list();
3849        !(site->IsUndefined());
3850        site = AllocationSite::cast(site)->weak_next()) {
3851     count++;
3852   }
3853   return count;
3854 }
3855 
3856 
3857 TEST(EnsureAllocationSiteDependentCodesProcessed) {
3858   if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
3859   i::FLAG_allow_natives_syntax = true;
3860   CcTest::InitializeVM();
3861   Isolate* isolate = CcTest::i_isolate();
3862   v8::internal::Heap* heap = CcTest::heap();
3863   GlobalHandles* global_handles = isolate->global_handles();
3864 
3865   if (!isolate->use_crankshaft()) return;
3866 
3867   // The allocation site at the head of the list is ours.
3868   Handle<AllocationSite> site;
3869   {
3870     LocalContext context;
3871     v8::HandleScope scope(context->GetIsolate());
3872 
3873     int count = AllocationSitesCount(heap);
3874     CompileRun("var bar = function() { return (new Array()); };"
3875                "var a = bar();"
3876                "bar();"
3877                "bar();");
3878 
3879     // One allocation site should have been created.
3880     int new_count = AllocationSitesCount(heap);
3881     CHECK_EQ(new_count, (count + 1));
3882     site = Handle<AllocationSite>::cast(
3883         global_handles->Create(
3884             AllocationSite::cast(heap->allocation_sites_list())));
3885 
3886     CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
3887 
3888     DependentCode::GroupStartIndexes starts(site->dependent_code());
3889     CHECK_GE(starts.number_of_entries(), 1);
3890     int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
3891     CHECK(site->dependent_code()->is_code_at(index));
3892     Code* function_bar = site->dependent_code()->code_at(index);
3893     Handle<JSFunction> bar_handle =
3894         v8::Utils::OpenHandle(
3895             *v8::Handle<v8::Function>::Cast(
3896                 CcTest::global()->Get(v8_str("bar"))));
3897     CHECK_EQ(bar_handle->code(), function_bar);
3898   }
3899 
3900   // Now make sure that a GC gets rid of the function, even though we
3901   // still have the allocation site alive.
3902   for (int i = 0; i < 4; i++) {
3903     heap->CollectAllGarbage(Heap::kNoGCFlags);
3904   }
3905 
3906   // The site still exists because of our global handle, but the code is no
3907   // longer referred to by dependent_code().
3908   DependentCode::GroupStartIndexes starts(site->dependent_code());
3909   int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
3910   CHECK(!(site->dependent_code()->is_code_at(index)));
3911 }
3912 
3913 
3914 TEST(CellsInOptimizedCodeAreWeak) {
3915   if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
3916   i::FLAG_weak_embedded_objects_in_optimized_code = true;
3917   i::FLAG_allow_natives_syntax = true;
3918   CcTest::InitializeVM();
3919   Isolate* isolate = CcTest::i_isolate();
3920   v8::internal::Heap* heap = CcTest::heap();
3921 
3922   if (!isolate->use_crankshaft()) return;
3923   HandleScope outer_scope(heap->isolate());
3924   Handle<Code> code;
3925   {
3926     LocalContext context;
3927     HandleScope scope(heap->isolate());
3928 
3929     CompileRun("bar = (function() {"
3930                "  function bar() {"
3931                "    return foo(1);"
3932                "  };"
3933                "  var foo = function(x) { with (x) { return 1 + x; } };"
3934                "  bar(foo);"
3935                "  bar(foo);"
3936                "  bar(foo);"
3937                "  %OptimizeFunctionOnNextCall(bar);"
3938                "  bar(foo);"
3939                "  return bar;})();");
3940 
3941     Handle<JSFunction> bar =
3942         v8::Utils::OpenHandle(
3943             *v8::Handle<v8::Function>::Cast(
3944                 CcTest::global()->Get(v8_str("bar"))));
3945     code = scope.CloseAndEscape(Handle<Code>(bar->code()));
3946   }
3947 
3948   // Now make sure that a GC gets rid of the function.
3949   for (int i = 0; i < 4; i++) {
3950     heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3951   }
3952 
3953   DCHECK(code->marked_for_deoptimization());
3954 }
3955 
3956 
3957 TEST(ObjectsInOptimizedCodeAreWeak) {
3958   if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
3959   i::FLAG_weak_embedded_objects_in_optimized_code = true;
3960   i::FLAG_allow_natives_syntax = true;
3961   CcTest::InitializeVM();
3962   Isolate* isolate = CcTest::i_isolate();
3963   v8::internal::Heap* heap = CcTest::heap();
3964 
3965   if (!isolate->use_crankshaft()) return;
3966   HandleScope outer_scope(heap->isolate());
3967   Handle<Code> code;
3968   {
3969     LocalContext context;
3970     HandleScope scope(heap->isolate());
3971 
3972     CompileRun("function bar() {"
3973                "  return foo(1);"
3974                "};"
3975                "function foo(x) { with (x) { return 1 + x; } };"
3976                "bar();"
3977                "bar();"
3978                "bar();"
3979                "%OptimizeFunctionOnNextCall(bar);"
3980                "bar();");
3981 
3982     Handle<JSFunction> bar =
3983         v8::Utils::OpenHandle(
3984             *v8::Handle<v8::Function>::Cast(
3985                 CcTest::global()->Get(v8_str("bar"))));
3986     code = scope.CloseAndEscape(Handle<Code>(bar->code()));
3987   }
3988 
3989   // Now make sure that a GC gets rid of the function.
3990   for (int i = 0; i < 4; i++) {
3991     heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3992   }
3993 
3994   DCHECK(code->marked_for_deoptimization());
3995 }
3996 
3997 
3998 TEST(NoWeakHashTableLeakWithIncrementalMarking) {
3999   if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4000   if (!i::FLAG_incremental_marking) return;
4001   i::FLAG_weak_embedded_objects_in_optimized_code = true;
4002   i::FLAG_allow_natives_syntax = true;
4003   i::FLAG_compilation_cache = false;
4004   CcTest::InitializeVM();
4005   Isolate* isolate = CcTest::i_isolate();
4006   v8::internal::Heap* heap = CcTest::heap();
4007 
4008   if (!isolate->use_crankshaft()) return;
4009   HandleScope outer_scope(heap->isolate());
4010   for (int i = 0; i < 3; i++) {
4011     SimulateIncrementalMarking(heap);
4012     {
4013       LocalContext context;
4014       HandleScope scope(heap->isolate());
4015       EmbeddedVector<char, 256> source;
4016       SNPrintF(source,
4017                "function bar%d() {"
4018                "  return foo%d(1);"
4019                "};"
4020                "function foo%d(x) { with (x) { return 1 + x; } };"
4021                "bar%d();"
4022                "bar%d();"
4023                "bar%d();"
4024                "%%OptimizeFunctionOnNextCall(bar%d);"
4025                "bar%d();", i, i, i, i, i, i, i, i);
4026       CompileRun(source.start());
4027     }
4028     heap->CollectAllGarbage(i::Heap::kNoGCFlags);
4029   }
4030   int elements = 0;
4031   if (heap->weak_object_to_code_table()->IsHashTable()) {
4032     WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4033     elements = t->NumberOfElements();
4034   }
4035   CHECK_EQ(0, elements);
4036 }
4037 
4038 
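     // Compiles, warms up and optimizes a trivial function named |name| and
     // returns a handle to it.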
4039 static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
4040   EmbeddedVector<char, 256> source;
4041   SNPrintF(source,
4042           "function %s() { return 0; }"
4043           "%s(); %s();"
4044           "%%OptimizeFunctionOnNextCall(%s);"
4045           "%s();", name, name, name, name, name);
4046   CompileRun(source.start());
4047   Handle<JSFunction> fun =
4048       v8::Utils::OpenHandle(
4049           *v8::Handle<v8::Function>::Cast(
4050               CcTest::global()->Get(v8_str(name))));
4051   return fun;
4052 }
4053 
4054 
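     // Counts how many Code objects follow |code| on its next_code_link chain.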
4055 static int GetCodeChainLength(Code* code) {
4056   int result = 0;
4057   while (code->next_code_link()->IsCode()) {
4058     result++;
4059     code = Code::cast(code->next_code_link());
4060   }
4061   return result;
4062 }
4063 
4064 
4065 TEST(NextCodeLinkIsWeak) {
4066   i::FLAG_allow_natives_syntax = true;
4067   i::FLAG_turbo_deoptimization = true;
4068   CcTest::InitializeVM();
4069   Isolate* isolate = CcTest::i_isolate();
4070   v8::internal::Heap* heap = CcTest::heap();
4071 
4072   if (!isolate->use_crankshaft()) return;
4073   HandleScope outer_scope(heap->isolate());
4074   Handle<Code> code;
4075   heap->CollectAllAvailableGarbage();
4076   int code_chain_length_before, code_chain_length_after;
4077   {
4078     HandleScope scope(heap->isolate());
4079     Handle<JSFunction> mortal = OptimizeDummyFunction("mortal");
4080     Handle<JSFunction> immortal = OptimizeDummyFunction("immortal");
4081     CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
4082     code_chain_length_before = GetCodeChainLength(immortal->code());
4083     // Keep the immortal code and let the mortal code die.
4084     code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
4085     CompileRun("mortal = null; immortal = null;");
4086   }
4087   heap->CollectAllAvailableGarbage();
4088   // Now mortal code should be dead.
4089   code_chain_length_after = GetCodeChainLength(*code);
4090   CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
4091 }
4092 
4093 
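     // Assembles a tiny push/drop stub flagged as OPTIMIZED_FUNCTION so it can
     // be threaded onto a context's optimized code list.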
4094 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
4095   i::byte buffer[i::Assembler::kMinimalBufferSize];
4096   MacroAssembler masm(isolate, buffer, sizeof(buffer));
4097   CodeDesc desc;
4098   masm.Push(isolate->factory()->undefined_value());
4099   masm.Drop(1);
4100   masm.GetCode(&desc);
4101   Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
4102   Handle<Code> code = isolate->factory()->NewCode(
4103       desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
4104   CHECK(code->IsCode());
4105   return code;
4106 }
4107 
4108 
4109 TEST(NextCodeLinkIsWeak2) {
4110   i::FLAG_allow_natives_syntax = true;
4111   CcTest::InitializeVM();
4112   Isolate* isolate = CcTest::i_isolate();
4113   v8::internal::Heap* heap = CcTest::heap();
4114 
4115   if (!isolate->use_crankshaft()) return;
4116   HandleScope outer_scope(heap->isolate());
4117   heap->CollectAllAvailableGarbage();
4118   Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
4119   Handle<Code> new_head;
4120   Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
4121   {
4122     HandleScope scope(heap->isolate());
4123     Handle<Code> immortal = DummyOptimizedCode(isolate);
4124     Handle<Code> mortal = DummyOptimizedCode(isolate);
4125     mortal->set_next_code_link(*old_head);
4126     immortal->set_next_code_link(*mortal);
4127     context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
4128     new_head = scope.CloseAndEscape(immortal);
4129   }
4130   heap->CollectAllAvailableGarbage();
4131   // Now mortal code should be dead.
4132   CHECK_EQ(*old_head, new_head->next_code_link());
4133 }
4134 
4135 
4136 static bool weak_ic_cleared = false;
4137 
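     // Weak callback: records that it ran and resets the persistent handle.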
4138 static void ClearWeakIC(const v8::WeakCallbackData<v8::Object, void>& data) {
4139   printf("ClearWeakIC callback is called\n");
4140   weak_ic_cleared = true;
4141   v8::Persistent<v8::Value>* p =
4142       reinterpret_cast<v8::Persistent<v8::Value>*>(data.GetParameter());
4143   CHECK(p->IsNearDeath());
4144   p->Reset();
4145 }
4146 
4147 
4148 // Checks that the value returned by execution of the source is weak.
4149 void CheckWeakness(const char* source) {
4150   i::FLAG_stress_compaction = false;
4151   CcTest::InitializeVM();
4152   v8::Isolate* isolate = CcTest::isolate();
4153   v8::HandleScope scope(isolate);
4154   v8::Persistent<v8::Object> garbage;
4155   {
4156     v8::HandleScope scope(isolate);
4157     garbage.Reset(isolate, CompileRun(source)->ToObject());
4158   }
4159   weak_ic_cleared = false;
4160   garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC);
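       // After SetWeak, the object should be reachable only from the IC that
       // |source| created, so a full GC is expected to fire the callback above.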
4161   Heap* heap = CcTest::i_isolate()->heap();
4162   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4163   CHECK(weak_ic_cleared);
4164 }
4165 
4166 
4167 // Each of the following "weak IC" tests creates an IC that embeds a map with
4168 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
4169 TEST(WeakMapInMonomorphicLoadIC) {
4170   CheckWeakness("function loadIC(obj) {"
4171                 "  return obj.name;"
4172                 "}"
4173                 " (function() {"
4174                 "   var proto = {'name' : 'weak'};"
4175                 "   var obj = Object.create(proto);"
4176                 "   loadIC(obj);"
4177                 "   loadIC(obj);"
4178                 "   loadIC(obj);"
4179                 "   return proto;"
4180                 " })();");
4181 }
4182 
4183 
4184 TEST(WeakMapInMonomorphicKeyedLoadIC) {
4185   CheckWeakness("function keyedLoadIC(obj, field) {"
4186                 "  return obj[field];"
4187                 "}"
4188                 " (function() {"
4189                 "   var proto = {'name' : 'weak'};"
4190                 "   var obj = Object.create(proto);"
4191                 "   keyedLoadIC(obj, 'name');"
4192                 "   keyedLoadIC(obj, 'name');"
4193                 "   keyedLoadIC(obj, 'name');"
4194                 "   return proto;"
4195                 " })();");
4196 }
4197 
4198 
4199 TEST(WeakMapInMonomorphicStoreIC) {
4200   CheckWeakness("function storeIC(obj, value) {"
4201                 "  obj.name = value;"
4202                 "}"
4203                 " (function() {"
4204                 "   var proto = {'name' : 'weak'};"
4205                 "   var obj = Object.create(proto);"
4206                 "   storeIC(obj, 'x');"
4207                 "   storeIC(obj, 'x');"
4208                 "   storeIC(obj, 'x');"
4209                 "   return proto;"
4210                 " })();");
4211 }
4212 
4213 
4214 TEST(WeakMapInMonomorphicKeyedStoreIC) {
4215   CheckWeakness("function keyedStoreIC(obj, field, value) {"
4216                 "  obj[field] = value;"
4217                 "}"
4218                 " (function() {"
4219                 "   var proto = {'name' : 'weak'};"
4220                 "   var obj = Object.create(proto);"
4221                 "   keyedStoreIC(obj, 'x');"
4222                 "   keyedStoreIC(obj, 'x');"
4223                 "   keyedStoreIC(obj, 'x');"
4224                 "   return proto;"
4225                 " })();");
4226 }
4227 
4228 
4229 TEST(WeakMapInMonomorphicCompareNilIC) {
4230   CheckWeakness("function compareNilIC(obj) {"
4231                 "  return obj == null;"
4232                 "}"
4233                 " (function() {"
4234                 "   var proto = {'name' : 'weak'};"
4235                 "   var obj = Object.create(proto);"
4236                 "   compareNilIC(obj);"
4237                 "   compareNilIC(obj);"
4238                 "   compareNilIC(obj);"
4239                 "   return proto;"
4240                 " })();");
4241 }
4242 
4243 
4244 #ifdef DEBUG
4245 TEST(AddInstructionChangesNewSpacePromotion) {
4246   i::FLAG_allow_natives_syntax = true;
4247   i::FLAG_expose_gc = true;
4248   i::FLAG_stress_compaction = true;
4249   i::FLAG_gc_interval = 1000;
4250   CcTest::InitializeVM();
4251   if (!i::FLAG_allocation_site_pretenuring) return;
4252   v8::HandleScope scope(CcTest::isolate());
4253   Isolate* isolate = CcTest::i_isolate();
4254   Heap* heap = isolate->heap();
4255 
4256   CompileRun(
4257       "function add(a, b) {"
4258       "  return a + b;"
4259       "}"
4260       "add(1, 2);"
4261       "add(\"a\", \"b\");"
4262       "var oldSpaceObject;"
4263       "gc();"
4264       "function crash(x) {"
4265       "  var object = {a: null, b: null};"
4266       "  var result = add(1.5, x | 0);"
4267       "  object.a = result;"
4268       "  oldSpaceObject = object;"
4269       "  return object;"
4270       "}"
4271       "crash(1);"
4272       "crash(1);"
4273       "%OptimizeFunctionOnNextCall(crash);"
4274       "crash(1);");
4275 
4276   v8::Handle<v8::Object> global = CcTest::global();
4277     v8::Handle<v8::Function> g =
4278         v8::Handle<v8::Function>::Cast(global->Get(v8_str("crash")));
4279   v8::Handle<v8::Value> args1[] = { v8_num(1) };
4280   heap->DisableInlineAllocation();
4281   heap->set_allocation_timeout(1);
4282   g->Call(global, 1, args1);
4283   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4284 }
4285 
4286 
OnFatalErrorExpectOOM(const char * location,const char * message)4287 void OnFatalErrorExpectOOM(const char* location, const char* message) {
4288   // Exit with 0 if the location matches our expectation.
4289   exit(strcmp(location, "CALL_AND_RETRY_LAST"));
4290 }
4291 
4292 
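// a.unshift(1) allocates inside a runtime call entered through the C entry
// stub. With --gc-interval=1 set from script, that allocation can run out of
// retries; the fatal error handler above accepts only the
// "CALL_AND_RETRY_LAST" location in that case. Otherwise the call succeeds
// and returns the new array length.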
TEST(CEntryStubOOM) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::V8::SetFatalErrorHandler(OnFatalErrorExpectOOM);

  v8::Handle<v8::Value> result = CompileRun(
      "%SetFlags('--gc-interval=1');"
      "var a = [];"
      "a.__proto__ = [];"
      "a.unshift(1)");

  CHECK(result->IsNumber());
}

#endif  // DEBUG


static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }


static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
  CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
}


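// Regression test 357137. The interrupt() function installed on the global
// template requests a V8 interrupt, which is delivered through the stack
// guard and therefore looks like a stack overflow to the stack check in f
// (see the inline comment below). The test checks that f()() still returns
// 42 afterwards.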
TEST(Regress357137) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope hscope(isolate);
  v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
              v8::FunctionTemplate::New(isolate, RequestInterrupt));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  DCHECK(!context.IsEmpty());
  v8::Context::Scope cscope(context);

  v8::Local<v8::Value> result = CompileRun(
      "var locals = '';"
      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
      "interrupt();"  // This triggers a fake stack overflow in f.
      "f()()");
  CHECK_EQ(42.0, result->ToNumber()->Value());
}


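// array.shift() is expected to left-trim the elements backing store in place,
// rewriting memory on a page that may still be owned by the concurrent
// sweeper. The checks below verify that the elements ended up in old pointer
// space and that the page is either done with (or about to finalize) parallel
// sweeping, or the elements object is already marked black.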
TEST(ArrayShiftSweeping) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  v8::Local<v8::Value> result = CompileRun(
      "var array = new Array(40000);"
      "var tmp = new Array(100000);"
      "array[0] = 10;"
      "gc();"
      "gc();"
      "array.shift();"
      "array;");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
  CHECK(heap->InOldPointerSpace(o->elements()));
  CHECK(heap->InOldPointerSpace(*o));
  Page* page = Page::FromAddress(o->elements()->address());
  CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
        Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
}


UNINITIALIZED_TEST(PromotionQueue) {
  i::FLAG_expose_gc = true;
  i::FLAG_max_semi_space_size = 2;
  v8::Isolate* isolate = v8::Isolate::New();
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();
    NewSpace* new_space = heap->new_space();

    // In this test we will try to overwrite the promotion queue, which sits at
    // the end of to-space. To make that possible we need at least two
    // semi-space pages and some fragmentation:
    // (1) Grow the semi-space to two pages.
    // (2) Create a few small long-living objects and call the scavenger to
    //     move them to the other semi-space.
    // (3) Create a huge object, i.e. the remainder of the first semi-space
    //     page, and another huge object of the maximum allocatable size of
    //     the second semi-space page.
    // (4) Call the scavenger again.
    // What will happen: the scavenger promotes the objects created in (2) to
    // the old generation and creates promotion queue entries at the end of the
    // second semi-space page during that scavenge. The first allocation of (3)
    // fills up the first semi-space page. The second allocation in (3) does
    // not fit into the first semi-space page, so it would overwrite the
    // promotion queue entries in the second semi-space page. If the right
    // guards are in place, the promotion queue is evacuated in that case.

    // Grow the semi-space to two pages to make semi-space copy overwrite the
    // promotion queue, which will be at the end of the second page.
    intptr_t old_capacity = new_space->TotalCapacity();

    // If we are in a low memory config, we can't grow to two pages and we
    // can't run this test. This also means the issue we are testing cannot
    // arise, as there is no fragmentation.
    if (new_space->IsAtMaximumCapacity()) return;

    new_space->Grow();
    CHECK(new_space->IsAtMaximumCapacity());
    CHECK(2 * old_capacity == new_space->TotalCapacity());

    // Call the scavenger twice to get an empty new space.
    heap->CollectGarbage(NEW_SPACE);
    heap->CollectGarbage(NEW_SPACE);

    // First create a few objects which will survive a scavenge and will get
    // promoted to the old generation later on. These objects will create
    // promotion queue entries at the end of the second semi-space page.
    const int number_handles = 12;
    Handle<FixedArray> handles[number_handles];
    for (int i = 0; i < number_handles; i++) {
      handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
    }
    heap->CollectGarbage(NEW_SPACE);

    // Create the first huge object, which will exactly fit the first
    // semi-space page.
    int new_linear_size =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
    Handle<FixedArray> first =
        i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
    CHECK(heap->InNewSpace(*first));

    // Create the second huge object of the maximum allocatable size of the
    // second semi-space page.
    new_linear_size =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    length = Page::kMaxRegularHeapObjectSize / kPointerSize -
             FixedArray::kHeaderSize;
    Handle<FixedArray> second =
        i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
    CHECK(heap->InNewSpace(*second));

    // This scavenge will corrupt memory if the promotion queue is not
    // evacuated.
    heap->CollectGarbage(NEW_SPACE);
  }
  isolate->Dispose();
}


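// Regression test 388880: allocate an object so that it ends exactly at a
// page boundary, then migrate it to a map with an extra field while
// incremental marking is running. JSObject::MigrateFastToFast() calls
// Heap::AdjustLiveBytes(), which used to crash for this layout; the inline
// comments below walk through the setup.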
TEST(Regress388880) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  Handle<Map> map1 = Map::Create(isolate, 1);
  Handle<Map> map2 =
      Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
                         HeapType::Any(isolate), NONE, Representation::Tagged(),
                         OMIT_TRANSITION).ToHandleChecked();

  int desired_offset = Page::kPageSize - map1->instance_size();

  // Allocate a fixed array in old pointer space so that the object allocated
  // afterwards ends exactly at the end of the page.
  {
    SimulateFullSpace(heap->old_pointer_space());
    int padding_size = desired_offset - Page::kObjectStartOffset;
    int padding_array_length =
        (padding_size - FixedArray::kHeaderSize) / kPointerSize;

    Handle<FixedArray> temp2 =
        factory->NewFixedArray(padding_array_length, TENURED);
    Page* page = Page::FromAddress(temp2->address());
    CHECK_EQ(Page::kObjectStartOffset, page->Offset(temp2->address()));
  }

  Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED, false);
  o->set_properties(*factory->empty_fixed_array());

  // Ensure that the object was allocated where we need it.
  Page* page = Page::FromAddress(o->address());
  CHECK_EQ(desired_offset, page->Offset(o->address()));

  // Now we have an object right at the end of the page.

  // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
  // that would cause a crash.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Abort();
  marking->Start();
  CHECK(marking->IsMarking());

  // Now everything is set up for crashing in JSObject::MigrateFastToFast()
  // when it calls heap->AdjustLiveBytes(...).
  JSObject::MigrateToMap(o, map2);
}


#ifdef DEBUG
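// Smoke test for the debug-only path tracer: TracePathToObject() searches the
// heap for the given object (here the string "abc") and prints the retaining
// path it finds.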
TEST(PathTracer) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  v8::Local<v8::Value> result = CompileRun("'abc'");
  Handle<Object> o = v8::Utils::OpenHandle(*result);
  CcTest::i_isolate()->heap()->TracePathToObject(*o);
}
#endif  // DEBUG