// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#include "v8.h"

#include "compilation-cache.h"
#include "execution.h"
#include "factory.h"
#include "macro-assembler.h"
#include "global-handles.h"
#include "stub-cache.h"
#include "cctest.h"

using namespace v8::internal;


// Go through all incremental marking steps in one swoop.
static void SimulateIncrementalMarking() {
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (collector->IsConcurrentSweepingInProgress()) {
    collector->WaitUntilSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    marking->Start();
  }
  CHECK(marking->IsMarking());
  while (!marking->IsComplete()) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
  CHECK(marking->IsComplete());
}


static void CheckMap(Map* map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
#ifdef DEBUG
  CHECK(CcTest::heap()->Contains(map));
#endif
  CHECK_EQ(CcTest::heap()->meta_map(), map->map());
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
}


TEST(HeapMaps) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
}


static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
  CHECK(obj->IsOddball());
  bool exc;
  Handle<Object> handle(obj, isolate);
  Object* print_string =
      *Execution::ToString(isolate, handle, &exc);
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckSmi(Isolate* isolate, int value, const char* string) {
  bool exc;
  Handle<Object> handle(Smi::FromInt(value), isolate);
  Object* print_string =
      *Execution::ToString(isolate, handle, &exc);
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckNumber(Isolate* isolate, double value, const char* string) {
  Object* obj = CcTest::heap()->NumberFromDouble(value)->ToObjectChecked();
  CHECK(obj->IsNumber());
  bool exc;
  Handle<Object> handle(obj, isolate);
  Object* print_string =
      *Execution::ToString(isolate, handle, &exc);
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
#define __ assm.

  Assembler assm(isolate, NULL, 0);

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(&desc);
  Heap* heap = isolate->heap();
  Object* code = heap->CreateCode(
      desc,
      Code::ComputeFlags(Code::STUB),
      Handle<Code>())->ToObjectChecked();
  CHECK(code->IsCode());

  HeapObject* obj = HeapObject::cast(code);
  Address obj_addr = obj->address();

  for (int i = 0; i < obj->Size(); i += kPointerSize) {
    Object* found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(code, found);
  }

  Object* copy = heap->CreateCode(
      desc,
      Code::ComputeFlags(Code::STUB),
      Handle<Code>())->ToObjectChecked();
  CHECK(copy->IsCode());
  HeapObject* obj_copy = HeapObject::cast(copy);
  Object* not_right = isolate->FindCodeObject(obj_copy->address() +
                                              obj_copy->Size() / 2);
  CHECK(not_right != code);
}


TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  Object* value = heap->NumberFromDouble(1.000123)->ToObjectChecked();
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  value = heap->NumberFromDouble(1.0)->ToObjectChecked();
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = heap->NumberFromInt32(1024)->ToObjectChecked();
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  value = heap->NumberFromInt32(Smi::kMinValue)->ToObjectChecked();
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Smi::cast(value)->value());

  value = heap->NumberFromInt32(Smi::kMaxValue)->ToObjectChecked();
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Smi::cast(value)->value());

#ifndef V8_TARGET_ARCH_X64
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = heap->NumberFromInt32(Smi::kMinValue - 1)->ToObjectChecked();
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  MaybeObject* maybe_value =
      heap->NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  value = maybe_value->ToObjectChecked();
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  maybe_value = heap->NumberFromUint32(static_cast<uint32_t>(1) << 31);
  value = maybe_value->ToObjectChecked();
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // nan oddball checks
  CHECK(heap->nan_value()->IsNumber());
  CHECK(std::isnan(heap->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromAscii(CStrVector("fisk hest "));
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  CHECK(JSReceiver::HasLocalProperty(global, object_string));

  // Check ToString for oddballs
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");

  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}


TEST(Tagging) {
  CcTest::InitializeVM();
  int request = 24;
  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Failure::RetryAfterGC(NEW_SPACE)->IsFailure());
  CHECK_EQ(NEW_SPACE,
           Failure::RetryAfterGC(NEW_SPACE)->allocation_space());
  CHECK_EQ(OLD_POINTER_SPACE,
           Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space());
  CHECK(Failure::Exception()->IsFailure());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
}


TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  heap->CollectGarbage(NEW_SPACE);

  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function =
        factory->NewFunction(name, factory->undefined_value());
    Handle<Map> initial_map =
        factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    function->set_initial_map(*initial_map);
    JSReceiver::SetProperty(global, name, function, NONE, kNonStrictMode);
    // Allocate an object.  Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, kNonStrictMode);
    JSReceiver::SetProperty(obj, prop_namex, twenty_four, NONE, kNonStrictMode);

    CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
    CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex));
  }

  heap->CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(JSReceiver::HasLocalProperty(global, name));
  // Check function is retained.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(global, obj_name, obj, NONE, kNonStrictMode);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, kNonStrictMode);
  }

  // After gc, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(JSReceiver::HasLocalProperty(global, obj_name));
  CHECK(CcTest::i_isolate()->context()->global_object()->
        GetProperty(*obj_name)->ToObjectChecked()->IsJSObject());
  Object* obj = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*obj_name)->ToObjectChecked();
  JSObject* js_obj = JSObject::cast(obj);
  CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name));
}


static void VerifyStringAllocation(Isolate* isolate, const char* string) {
  HandleScope scope(isolate);
  Handle<String> s = isolate->factory()->NewStringFromUtf8(CStrVector(string));
  CHECK_EQ(StrLength(string), s->length());
  for (int index = 0; index < s->length(); index++) {
    CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
  }
}


TEST(String) {
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());

  VerifyStringAllocation(isolate, "a");
  VerifyStringAllocation(isolate, "ab");
  VerifyStringAllocation(isolate, "abc");
  VerifyStringAllocation(isolate, "abcd");
  VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
}


TEST(LocalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope scope(CcTest::isolate());
  const char* name = "Kasper the spunky";
  Handle<String> string = factory->NewStringFromAscii(CStrVector(name));
  CHECK_EQ(StrLength(name), string->length());
}


TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // after gc, it should survive
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  CHECK_EQ(*h3, *h1);
  global_handles->Destroy(h1.location());
  global_handles->Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  global_handles->Destroy(h2.location());
  global_handles->Destroy(h4.location());
}


static bool WeakPointerCleared = false;

// Weak-handle callback: records that the expected parameter was passed and
// disposes the handle.
static void TestWeakGlobalHandleCallback(v8::Isolate* isolate,
                                         v8::Persistent<v8::Value>* handle,
                                         void* id) {
  if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true;
  handle->Dispose();
}


TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  global_handles->MakeWeak(h2.location(),
                           reinterpret_cast<void*>(1234),
                           &TestWeakGlobalHandleCallback);

  // Scavenge treats weak pointers as normal roots.
  heap->PerformScavenge();

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  global_handles->Destroy(h1.location());
  global_handles->Destroy(h2.location());
}


TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_POINTER_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));

  global_handles->MakeWeak(h2.location(),
                           reinterpret_cast<void*>(1234),
                           &TestWeakGlobalHandleCallback);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  CHECK((*h1)->IsString());

  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  global_handles->Destroy(h1.location());
}


TEST(DeleteWeakGlobalHandle) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    h = global_handles->Create(*i);
  }

  global_handles->MakeWeak(h.location(),
                           reinterpret_cast<void*>(1234),
                           &TestWeakGlobalHandleCallback);

  // Scavenge does not recognize weak reference.
  heap->PerformScavenge();

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak reference properly.
  heap->CollectGarbage(OLD_POINTER_SPACE);

  CHECK(WeakPointerCleared);
}


static const char* not_so_random_string_table[] = {
  "abstract",
  "boolean",
  "break",
  "byte",
  "case",
  "catch",
  "char",
  "class",
  "const",
  "continue",
  "debugger",
  "default",
  "delete",
  "do",
  "double",
  "else",
  "enum",
  "export",
  "extends",
  "false",
  "final",
  "finally",
  "float",
  "for",
  "function",
  "goto",
  "if",
  "implements",
  "import",
  "in",
  "instanceof",
  "int",
  "interface",
  "long",
  "native",
  "new",
  "null",
  "package",
  "private",
  "protected",
  "public",
  "return",
  "short",
  "static",
  "super",
  "switch",
  "synchronized",
  "this",
  "throw",
  "throws",
  "transient",
  "true",
  "try",
  "typeof",
  "var",
  "void",
  "volatile",
  "while",
  "with",
  0
};


static void CheckInternalizedStrings(const char** strings) {
  for (const char* string = *strings; *strings != 0; string = *strings++) {
    Object* a;
    MaybeObject* maybe_a = CcTest::heap()->InternalizeUtf8String(string);
    // InternalizeUtf8String may return a failure if a GC is needed.
    if (!maybe_a->ToObject(&a)) continue;
    CHECK(a->IsInternalizedString());
    Object* b;
    MaybeObject* maybe_b = CcTest::heap()->InternalizeUtf8String(string);
    if (!maybe_b->ToObject(&b)) continue;
    CHECK_EQ(b, a);
    CHECK(String::cast(b)->IsUtf8EqualTo(CStrVector(string)));
  }
}


TEST(StringTable) {
  CcTest::InitializeVM();

  CheckInternalizedStrings(not_so_random_string_table);
  CheckInternalizedStrings(not_so_random_string_table);
}


TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function =
      factory->NewFunction(name, factory->undefined_value());
  Handle<Map> initial_map =
      factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  function->set_initial_map(*initial_map);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, kNonStrictMode);
  CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
  // Check that we can add properties to function objects.
  JSReceiver::SetProperty(function, prop_name, twenty_four, NONE,
                          kNonStrictMode);
  CHECK_EQ(Smi::FromInt(24), function->GetProperty(*prop_name));
}


TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  String* object_string = String::cast(CcTest::heap()->Object_string());
  Object* raw_object = CcTest::i_isolate()->context()->global_object()->
      GetProperty(object_string)->ToObjectChecked();
  JSFunction* object_function = JSFunction::cast(raw_object);
  Handle<JSFunction> constructor(object_function);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  CHECK(!JSReceiver::HasLocalProperty(obj, first));

  // add first
  JSReceiver::SetProperty(obj, first, one, NONE, kNonStrictMode);
  CHECK(JSReceiver::HasLocalProperty(obj, first));

  // delete first
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
  CHECK(!JSReceiver::HasLocalProperty(obj, first));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, NONE, kNonStrictMode);
  JSReceiver::SetProperty(obj, second, two, NONE, kNonStrictMode);
  CHECK(JSReceiver::HasLocalProperty(obj, first));
  CHECK(JSReceiver::HasLocalProperty(obj, second));

  // delete first and then second
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
  CHECK(JSReceiver::HasLocalProperty(obj, second));
  JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION);
  CHECK(!JSReceiver::HasLocalProperty(obj, first));
  CHECK(!JSReceiver::HasLocalProperty(obj, second));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, NONE, kNonStrictMode);
  JSReceiver::SetProperty(obj, second, two, NONE, kNonStrictMode);
  CHECK(JSReceiver::HasLocalProperty(obj, first));
  CHECK(JSReceiver::HasLocalProperty(obj, second));

  // delete second and then first
  JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION);
  CHECK(JSReceiver::HasLocalProperty(obj, first));
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
  CHECK(!JSReceiver::HasLocalProperty(obj, first));
  CHECK(!JSReceiver::HasLocalProperty(obj, second));

  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAscii(CStrVector(string1));
  JSReceiver::SetProperty(obj, s1, one, NONE, kNonStrictMode);
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(JSReceiver::HasLocalProperty(obj, s1_string));

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, NONE, kNonStrictMode);
  Handle<String> s2 = factory->NewStringFromAscii(CStrVector(string2));
  CHECK(JSReceiver::HasLocalProperty(obj, s2));
}


TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function =
      factory->NewFunction(name, factory->undefined_value());
  Handle<Map> initial_map =
      factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  function->set_initial_map(*initial_map);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);

  // Set a property.
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, kNonStrictMode);
  CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));

  // Check the map has changed.
  CHECK(*initial_map != obj->map());
}


TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Object* raw_object = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*name)->ToObjectChecked();
  Handle<JSFunction> function = Handle<JSFunction>(
      JSFunction::cast(raw_object));

  // Allocate the object.
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  array->Initialize(0)->ToObjectChecked();

  // Set array length to 0.
  array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked();
  CHECK_EQ(Smi::FromInt(0), array->length());
  // Must be in fast mode.
  CHECK(array->HasFastSmiOrObjectElements());

  // array[length] = name.
  array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
  CHECK_EQ(Smi::FromInt(1), array->length());
  CHECK_EQ(array->GetElement(isolate, 0), *name);

  // Set array length with larger than smi value.
  Handle<Object> length =
      factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  array->SetElementsLength(*length)->ToObjectChecked();

  uint32_t int_length = 0;
  CHECK(length->ToArrayIndex(&int_length));
  CHECK_EQ(*length, array->length());
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  array->SetElement(int_length, *name, NONE, kNonStrictMode)->ToObjectChecked();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  CHECK_EQ(array->GetElement(isolate, int_length), *name);
  CHECK_EQ(array->GetElement(isolate, 0), *name);
}


TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  String* object_string = String::cast(CcTest::heap()->Object_string());
  Object* raw_object = CcTest::i_isolate()->context()->global_object()->
      GetProperty(object_string)->ToObjectChecked();
  JSFunction* object_function = JSFunction::cast(raw_object);
  Handle<JSFunction> constructor(object_function);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  JSReceiver::SetProperty(obj, first, one, NONE, kNonStrictMode);
  JSReceiver::SetProperty(obj, second, two, NONE, kNonStrictMode);

  obj->SetElement(0, *first, NONE, kNonStrictMode)->ToObjectChecked();
  obj->SetElement(1, *second, NONE, kNonStrictMode)->ToObjectChecked();

  // Make the clone.
  Handle<JSObject> clone = JSObject::Copy(obj);
  CHECK(!clone.is_identical_to(obj));

  CHECK_EQ(obj->GetElement(isolate, 0), clone->GetElement(isolate, 0));
  CHECK_EQ(obj->GetElement(isolate, 1), clone->GetElement(isolate, 1));

  CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*first));
  CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*second));

  // Flip the values.
  JSReceiver::SetProperty(clone, first, two, NONE, kNonStrictMode);
  JSReceiver::SetProperty(clone, second, one, NONE, kNonStrictMode);

  clone->SetElement(0, *second, NONE, kNonStrictMode)->ToObjectChecked();
  clone->SetElement(1, *first, NONE, kNonStrictMode)->ToObjectChecked();

  CHECK_EQ(obj->GetElement(isolate, 1), clone->GetElement(isolate, 0));
  CHECK_EQ(obj->GetElement(isolate, 0), clone->GetElement(isolate, 1));

  CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*first));
  CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*second));
}


TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_ascii = NewArray<char>(3 * length + 1);
    char* ascii = NewArray<char>(length + 1);
    non_ascii[3 * length] = 0;
    ascii[length] = 0;
    for (int i = 0; i < length; i++) {
      ascii[i] = 'a';
      non_ascii[3 * i] = chars[0];
      non_ascii[3 * i + 1] = chars[1];
      non_ascii[3 * i + 2] = chars[2];
    }
    Handle<String> non_ascii_sym =
        factory->InternalizeUtf8String(
            Vector<const char>(non_ascii, 3 * length));
    CHECK_EQ(length, non_ascii_sym->length());
    Handle<String> ascii_sym =
        factory->InternalizeOneByteString(OneByteVector(ascii, length));
    CHECK_EQ(length, ascii_sym->length());
    Handle<String> non_ascii_str =
        factory->NewStringFromUtf8(Vector<const char>(non_ascii, 3 * length));
    non_ascii_str->Hash();
    CHECK_EQ(length, non_ascii_str->length());
    Handle<String> ascii_str =
        factory->NewStringFromUtf8(Vector<const char>(ascii, length));
    ascii_str->Hash();
    CHECK_EQ(length, ascii_str->length());
    DeleteArray(non_ascii);
    DeleteArray(ascii);
  }
}


static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
  // Count the number of objects found in the heap.
  int found_count = 0;
  heap->EnsureHeapIsIterable();
  HeapIterator iterator(heap);
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    for (int i = 0; i < size; i++) {
      if (*objs[i] == obj) {
        found_count++;
      }
    }
  }
  return found_count;
}


TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan the heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] = factory->NewJSArray(10,
                                                FAST_HOLEY_ELEMENTS,
                                                TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] =
      factory->NewStringFromAscii(CStrVector("abcdefghij"));
  objs[next_objs_index++] =
      factory->NewStringFromAscii(CStrVector("abcdefghij"), TENURED);

  // Allocate a large string (for large object space).
  int large_size = Page::kMaxNonCodeHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] =
      factory->NewStringFromAscii(CStrVector(str), TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}


TEST(EmptyHandleEscapeFrom) {
  CcTest::InitializeVM();

  v8::HandleScope scope(CcTest::isolate());
  Handle<JSObject> runaway;

  {
      v8::HandleScope nested(CcTest::isolate());
      Handle<JSObject> empty;
      runaway = empty.EscapeFrom(&nested);
  }

  CHECK(runaway.is_null());
}


// Number of elements in a FixedArray whose total size is |size| bytes.
static int LenFromSize(int size) {
  return (size - FixedArray::kHeaderSize) / kPointerSize;
}


TEST(Regression39128) {
  // Test case for crbug.com/39128.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  // Increase the chance of 'bump-the-pointer' allocation in old space.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  v8::HandleScope scope(CcTest::isolate());

  // The plan: create JSObject which references objects in new space.
  // Then clone this object (forcing it to go into old space) and check
  // that region dirty marks are updated correctly.

  // Step 1: prepare a map for the object.  We add 1 inobject property to it.
  Handle<JSFunction> object_ctor(
      CcTest::i_isolate()->native_context()->object_function());
  CHECK(object_ctor->has_initial_map());
  Handle<Map> object_map(object_ctor->initial_map());
  // Create a map with single inobject property.
  Handle<Map> my_map = factory->CopyMap(object_map, 1);
  int n_properties = my_map->inobject_properties();
  CHECK_GT(n_properties, 0);

  int object_size = my_map->instance_size();

  // Step 2: allocate a lot of objects so as to almost fill new space: we need
  // just enough room to allocate a JSObject and thus fill the new space.

  int allocation_amount = Min(FixedArray::kMaxSize,
                              Page::kMaxNonCodeHeapObjectSize + kPointerSize);
  int allocation_len = LenFromSize(allocation_amount);
  NewSpace* new_space = heap->new_space();
  Address* top_addr = new_space->allocation_top_address();
  Address* limit_addr = new_space->allocation_limit_address();
  while ((*limit_addr - *top_addr) > allocation_amount) {
    CHECK(!heap->always_allocate());
    Object* array = heap->AllocateFixedArray(allocation_len)->ToObjectChecked();
    CHECK(!array->IsFailure());
    CHECK(new_space->Contains(array));
  }

  // Step 3: now allocate fixed array and JSObject to fill the whole new space.
  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
  int fixed_array_len = LenFromSize(to_fill);
  CHECK(fixed_array_len < FixedArray::kMaxLength);

  CHECK(!heap->always_allocate());
  Object* array = heap->AllocateFixedArray(fixed_array_len)->ToObjectChecked();
  CHECK(!array->IsFailure());
  CHECK(new_space->Contains(array));

  Object* object = heap->AllocateJSObjectFromMap(*my_map)->ToObjectChecked();
  CHECK(new_space->Contains(object));
  JSObject* jsobject = JSObject::cast(object);
  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
  CHECK_EQ(0, jsobject->properties()->length());
  // Create a reference to object in new space in jsobject.
  jsobject->FastPropertyAtPut(-1, array);

  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));

  // Step 4: clone jsobject, but force always allocate first to create a clone
  // in old pointer space.
  Address old_pointer_space_top = heap->old_pointer_space()->top();
  AlwaysAllocateScope aa_scope;
  Object* clone_obj = heap->CopyJSObject(jsobject)->ToObjectChecked();
  JSObject* clone = JSObject::cast(clone_obj);
  if (clone->address() != old_pointer_space_top) {
    // Alas, got allocated from free list, we cannot do checks.
    return;
  }
  CHECK(heap->old_pointer_space()->Contains(clone->address()));
}


TEST(TestCodeFlushing) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  }

  // foo should no longer be in the compilation cache
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}


TEST(TestCodeFlushingPreAged) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile foo, but don't run it.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = Isolate::Current()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function again twice, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GCs now that it is young again.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  }

  // foo should no longer be in the compilation cache
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}


TEST(TestCodeFlushingIncremental) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking();
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking();
    if (!function->next_function_link()->IsUndefined()) break;
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalScavenge) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());
  Object* func_value2 = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*bar_name)->ToObjectChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2(JSFunction::cast(func_value2));
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  SimulateIncrementalMarking();
  *function2.location() = NULL;
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalAbort) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();
#endif  // ENABLE_DEBUGGER_SUPPORT

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


// Count the number of native contexts in the weak list of native contexts.
int CountNativeContexts() {
  int count = 0;
  Object* object = CcTest::heap()->native_contexts_list();
  while (!object->IsUndefined()) {
    count++;
    object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
  }
  return count;
}


// Count the number of user functions in the weak list of optimized
// functions attached to a native context.
static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
  int count = 0;
  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
  Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
  while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
    count++;
    object = JSFunction::cast(object)->next_function_link();
  }
  return count;
}

    
1375

    
1376
TEST(TestInternalWeakLists) {
1377
  v8::V8::Initialize();
1378

    
1379
  // Some flags turn Scavenge collections into Mark-sweep collections
1380
  // and hence are incompatible with this test case.
1381
  if (FLAG_gc_global || FLAG_stress_compaction) return;
1382

    
1383
  static const int kNumTestContexts = 10;
1384

    
1385
  Isolate* isolate = CcTest::i_isolate();
1386
  Heap* heap = isolate->heap();
1387
  HandleScope scope(isolate);
1388
  v8::Handle<v8::Context> ctx[kNumTestContexts];
1389

    
1390
  CHECK_EQ(0, CountNativeContexts());
1391

    
1392
  // Create a number of global contests which gets linked together.
1393
  for (int i = 0; i < kNumTestContexts; i++) {
1394
    ctx[i] = v8::Context::New(CcTest::isolate());
1395

    
1396
    // Collect garbage that might have been created by one of the
1397
    // installed extensions.
1398
    isolate->compilation_cache()->Clear();
1399
    heap->CollectAllGarbage(Heap::kNoGCFlags);
1400

    
1401
    bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1402

    
1403
    CHECK_EQ(i + 1, CountNativeContexts());
1404

    
1405
    ctx[i]->Enter();
1406

    
1407
    // Create a handle scope so no function objects get stuch in the outer
1408
    // handle scope
1409
    HandleScope scope(isolate);
1410
    const char* source = "function f1() { };"
1411
                         "function f2() { };"
1412
                         "function f3() { };"
1413
                         "function f4() { };"
1414
                         "function f5() { };";
1415
    CompileRun(source);
1416
    CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1417
    CompileRun("f1()");
1418
    CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
1419
    CompileRun("f2()");
1420
    CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1421
    CompileRun("f3()");
1422
    CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1423
    CompileRun("f4()");
1424
    CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1425
    CompileRun("f5()");
1426
    CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1427

    
1428
    // Remove the reference to function f1 so it can be collected below.
1429
    CompileRun("f1=null");
1430

    
1431
    // Scavenge treats these references as strong.
1432
    for (int j = 0; j < 10; j++) {
1433
      CcTest::heap()->PerformScavenge();
1434
      CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1435
    }
1436

    
1437
    // Mark compact handles the weak references.
1438
    isolate->compilation_cache()->Clear();
1439
    heap->CollectAllGarbage(Heap::kNoGCFlags);
1440
    CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1441

    
1442
    // Get rid of f3 and f5 in the same way.
1443
    CompileRun("f3=null");
1444
    for (int j = 0; j < 10; j++) {
1445
      CcTest::heap()->PerformScavenge();
1446
      CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1447
    }
1448
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1449
    CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1450
    CompileRun("f5=null");
1451
    for (int j = 0; j < 10; j++) {
1452
      CcTest::heap()->PerformScavenge();
1453
      CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1454
    }
1455
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1456
    CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1457

    
1458
    ctx[i]->Exit();
1459
  }
1460

    
1461
  // Force compilation cache cleanup.
1462
  CcTest::heap()->NotifyContextDisposed();
1463
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1464

    
1465
  // Dispose the native contexts one by one.
1466
  for (int i = 0; i < kNumTestContexts; i++) {
1467
    // TODO(dcarney): is there a better way to do this?
1468
    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1469
    *unsafe = CcTest::heap()->undefined_value();
1470
    ctx[i].Clear();
1471

    
1472
    // Scavenge treats these references as strong.
1473
    for (int j = 0; j < 10; j++) {
1474
      CcTest::heap()->PerformScavenge();
1475
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1476
    }
1477

    
1478
    // Mark compact handles the weak references.
1479
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1480
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1481
  }
1482

    
1483
  CHECK_EQ(0, CountNativeContexts());
1484
}
1485

    
1486

    
1487
// Count the number of native contexts in the weak list of native contexts
1488
// causing a GC after the specified number of elements.
1489
static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1490
  Heap* heap = isolate->heap();
1491
  int count = 0;
1492
  Handle<Object> object(heap->native_contexts_list(), isolate);
1493
  while (!object->IsUndefined()) {
1494
    count++;
1495
    if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
1496
    object =
1497
        Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1498
                       isolate);
1499
  }
1500
  return count;
1501
}
1502

    
1503

    
1504
// Count the number of user functions in the weak list of optimized
1505
// functions attached to a native context causing a GC after the
1506
// specified number of elements.
1507
static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1508
                                             int n) {
1509
  int count = 0;
1510
  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1511
  Isolate* isolate = icontext->GetIsolate();
1512
  Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1513
                        isolate);
1514
  while (object->IsJSFunction() &&
1515
         !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1516
    count++;
1517
    if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
1518
    object = Handle<Object>(
1519
        Object::cast(JSFunction::cast(*object)->next_function_link()),
1520
        isolate);
1521
  }
1522
  return count;
1523
}
1524

    
1525

    
1526
TEST(TestInternalWeakListsTraverseWithGC) {
1527
  v8::V8::Initialize();
1528
  Isolate* isolate = CcTest::i_isolate();
1529

    
1530
  static const int kNumTestContexts = 10;
1531

    
1532
  HandleScope scope(isolate);
1533
  v8::Handle<v8::Context> ctx[kNumTestContexts];
1534

    
1535
  CHECK_EQ(0, CountNativeContexts());
1536

    
1537
  // Create a number of contexts and check the length of the weak list both
1538
  // with and without GCs while iterating the list.
1539
  for (int i = 0; i < kNumTestContexts; i++) {
1540
    ctx[i] = v8::Context::New(CcTest::isolate());
1541
    CHECK_EQ(i + 1, CountNativeContexts());
1542
    CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1543
  }
1544

    
1545
  bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1546

    
1547
  // Compile a number of functions and check the length of the weak list of
  // optimized functions both with and without GCs while iterating the list.
1549
  ctx[0]->Enter();
1550
  const char* source = "function f1() { };"
1551
                       "function f2() { };"
1552
                       "function f3() { };"
1553
                       "function f4() { };"
1554
                       "function f5() { };";
1555
  CompileRun(source);
1556
  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1557
  CompileRun("f1()");
1558
  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
1559
  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1560
  CompileRun("f2()");
1561
  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
1562
  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1563
  CompileRun("f3()");
1564
  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
1565
  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1566
  CompileRun("f4()");
1567
  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
1568
  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1569
  CompileRun("f5()");
1570
  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
1571
  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
1572

    
1573
  ctx[0]->Exit();
1574
}
1575

    
1576

    
1577
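// Check that Heap::SizeOfObjects stays consistent while old-space pages are
// being filled and while lazy sweeping is advanced step by step.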
TEST(TestSizeOfObjects) {
1578
  v8::V8::Initialize();
1579

    
1580
  // Get initial heap size after several full GCs, which will stabilize
1581
  // the heap size and return with sweeping finished completely.
1582
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1583
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1584
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1585
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1586
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1587
  CHECK(CcTest::heap()->old_pointer_space()->IsLazySweepingComplete());
1588
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1589

    
1590
  {
1591
    // Allocate objects on several different old-space pages so that
1592
    // lazy sweeping kicks in for subsequent GC runs.
1593
    AlwaysAllocateScope always_allocate;
1594
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1595
    for (int i = 1; i <= 100; i++) {
1596
      CcTest::heap()->AllocateFixedArray(8192, TENURED)->ToObjectChecked();
1597
      CHECK_EQ(initial_size + i * filler_size,
1598
               static_cast<int>(CcTest::heap()->SizeOfObjects()));
1599
    }
1600
  }
1601

    
1602
  // The heap size should go back to initial size after a full GC, even
1603
  // though sweeping didn't finish yet.
1604
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1605

    
1606
  // Normally sweeping would not be complete here, but there are no guarantees.
1607

    
1608
  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1609

    
1610
  // Advancing the sweeper step-wise should not change the heap size.
1611
  while (!CcTest::heap()->old_pointer_space()->IsLazySweepingComplete()) {
1612
    CcTest::heap()->old_pointer_space()->AdvanceSweeper(KB);
1613
    CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1614
  }
1615
}
1616

    
1617

    
1618
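// Compare Heap::SizeOfObjects with the sum of object sizes reported by a
// HeapIterator; the two may differ by at most 5% of the larger value.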
TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1619
  CcTest::InitializeVM();
1620
  CcTest::heap()->EnsureHeapIsIterable();
1621
  intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1622
  HeapIterator iterator(CcTest::heap());
1623
  intptr_t size_of_objects_2 = 0;
1624
  for (HeapObject* obj = iterator.next();
1625
       obj != NULL;
1626
       obj = iterator.next()) {
1627
    if (!obj->IsFreeSpace()) {
1628
      size_of_objects_2 += obj->Size();
1629
    }
1630
  }
1631
  // Delta must be within 5% of the larger result.
1632
  // TODO(gc): Tighten this up by distinguishing between byte
1633
  // arrays that are real and those that merely mark free space
1634
  // on the heap.
1635
  if (size_of_objects_1 > size_of_objects_2) {
1636
    intptr_t delta = size_of_objects_1 - size_of_objects_2;
1637
    PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1638
           "Iterator: %" V8_PTR_PREFIX "d, "
1639
           "delta: %" V8_PTR_PREFIX "d\n",
1640
           size_of_objects_1, size_of_objects_2, delta);
1641
    CHECK_GT(size_of_objects_1 / 20, delta);
1642
  } else {
1643
    intptr_t delta = size_of_objects_2 - size_of_objects_1;
1644
    PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1645
           "Iterator: %" V8_PTR_PREFIX "d, "
1646
           "delta: %" V8_PTR_PREFIX "d\n",
1647
           size_of_objects_1, size_of_objects_2, delta);
1648
    CHECK_GT(size_of_objects_2 / 20, delta);
1649
  }
1650
}
1651

    
1652

    
1653
static void FillUpNewSpace(NewSpace* new_space) {
1654
  // Fill up new space to the point that it is completely full. Make sure
1655
  // that the scavenger does not undo the filling.
1656
  Heap* heap = new_space->heap();
1657
  Isolate* isolate = heap->isolate();
1658
  Factory* factory = isolate->factory();
1659
  HandleScope scope(isolate);
1660
  AlwaysAllocateScope always_allocate;
1661
  intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
1662
  intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1663
  for (intptr_t i = 0; i < number_of_fillers; i++) {
1664
    CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
1665
  }
1666
}
1667

    
1668

    
1669
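// Exercise explicit growing and shrinking of the new space: the semispace
// capacity should double on Grow(), stay put while the space is in use, and
// halve again once the scavenger has emptied it.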
TEST(GrowAndShrinkNewSpace) {
1670
  CcTest::InitializeVM();
1671
  Heap* heap = CcTest::heap();
1672
  NewSpace* new_space = heap->new_space();
1673

    
1674
  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
1675
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1676
    // The max size cannot exceed the reserved size, since semispaces must
    // always be within the reserved space.  We can't test new space growing
    // and shrinking if the reserved size is the same as the minimum (initial)
    // size.
1679
    return;
1680
  }
1681

    
1682
  // Explicitly growing should double the space capacity.
1683
  intptr_t old_capacity, new_capacity;
1684
  old_capacity = new_space->Capacity();
1685
  new_space->Grow();
1686
  new_capacity = new_space->Capacity();
1687
  CHECK(2 * old_capacity == new_capacity);
1688

    
1689
  old_capacity = new_space->Capacity();
1690
  FillUpNewSpace(new_space);
1691
  new_capacity = new_space->Capacity();
1692
  CHECK(old_capacity == new_capacity);
1693

    
1694
  // Explicitly shrinking should not affect space capacity.
1695
  old_capacity = new_space->Capacity();
1696
  new_space->Shrink();
1697
  new_capacity = new_space->Capacity();
1698
  CHECK(old_capacity == new_capacity);
1699

    
1700
  // Let the scavenger empty the new space.
1701
  heap->CollectGarbage(NEW_SPACE);
1702
  CHECK_LE(new_space->Size(), old_capacity);
1703

    
1704
  // Explicitly shrinking should halve the space capacity.
1705
  old_capacity = new_space->Capacity();
1706
  new_space->Shrink();
1707
  new_capacity = new_space->Capacity();
1708
  CHECK(old_capacity == 2 * new_capacity);
1709

    
1710
  // Consecutive shrinking should not affect space capacity.
1711
  old_capacity = new_space->Capacity();
1712
  new_space->Shrink();
1713
  new_space->Shrink();
1714
  new_space->Shrink();
1715
  new_capacity = new_space->Capacity();
1716
  CHECK(old_capacity == new_capacity);
1717
}
1718

    
1719

    
1720
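// After growing and filling up the new space, collecting all available
// garbage should shrink its capacity back to the original value.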
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
1721
  CcTest::InitializeVM();
1722
  Heap* heap = CcTest::heap();
1723
  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
1724
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1725
    // The max size cannot exceed the reserved size, since semispaces must
    // always be within the reserved space.  We can't test new space growing
    // and shrinking if the reserved size is the same as the minimum (initial)
    // size.
1728
    return;
1729
  }
1730

    
1731
  v8::HandleScope scope(CcTest::isolate());
1732
  NewSpace* new_space = heap->new_space();
1733
  intptr_t old_capacity, new_capacity;
1734
  old_capacity = new_space->Capacity();
1735
  new_space->Grow();
1736
  new_capacity = new_space->Capacity();
1737
  CHECK(2 * old_capacity == new_capacity);
1738
  FillUpNewSpace(new_space);
1739
  heap->CollectAllAvailableGarbage();
1740
  new_capacity = new_space->Capacity();
1741
  CHECK(old_capacity == new_capacity);
1742
}
1743

    
1744

    
1745
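// Count the global objects currently on the heap.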
static int NumberOfGlobalObjects() {
1746
  int count = 0;
1747
  HeapIterator iterator(CcTest::heap());
1748
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1749
    if (obj->IsGlobalObject()) count++;
1750
  }
1751
  return count;
1752
}
1753

    
1754

    
1755
// Test that we don't embed maps from foreign contexts into
1756
// optimized code.
1757
TEST(LeakNativeContextViaMap) {
1758
  i::FLAG_allow_natives_syntax = true;
1759
  v8::Isolate* isolate = CcTest::isolate();
1760
  v8::HandleScope outer_scope(isolate);
1761
  v8::Persistent<v8::Context> ctx1p;
1762
  v8::Persistent<v8::Context> ctx2p;
1763
  {
1764
    v8::HandleScope scope(isolate);
1765
    ctx1p.Reset(isolate, v8::Context::New(isolate));
1766
    ctx2p.Reset(isolate, v8::Context::New(isolate));
1767
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1768
  }
1769

    
1770
  CcTest::heap()->CollectAllAvailableGarbage();
1771
  CHECK_EQ(4, NumberOfGlobalObjects());
1772

    
1773
  {
1774
    v8::HandleScope inner_scope(isolate);
1775
    CompileRun("var v = {x: 42}");
1776
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1777
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1778
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1779
    ctx2->Enter();
1780
    ctx2->Global()->Set(v8_str("o"), v);
1781
    v8::Local<v8::Value> res = CompileRun(
1782
        "function f() { return o.x; }"
1783
        "for (var i = 0; i < 10; ++i) f();"
1784
        "%OptimizeFunctionOnNextCall(f);"
1785
        "f();");
1786
    CHECK_EQ(42, res->Int32Value());
1787
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1788
    ctx2->Exit();
1789
    v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
1790
    ctx1p.Dispose();
1791
    v8::V8::ContextDisposedNotification();
1792
  }
1793
  CcTest::heap()->CollectAllAvailableGarbage();
1794
  CHECK_EQ(2, NumberOfGlobalObjects());
1795
  ctx2p.Dispose();
1796
  CcTest::heap()->CollectAllAvailableGarbage();
1797
  CHECK_EQ(0, NumberOfGlobalObjects());
1798
}
1799

    
1800

    
1801
// Test that we don't embed functions from foreign contexts into
1802
// optimized code.
1803
TEST(LeakNativeContextViaFunction) {
1804
  i::FLAG_allow_natives_syntax = true;
1805
  v8::Isolate* isolate = CcTest::isolate();
1806
  v8::HandleScope outer_scope(isolate);
1807
  v8::Persistent<v8::Context> ctx1p;
1808
  v8::Persistent<v8::Context> ctx2p;
1809
  {
1810
    v8::HandleScope scope(isolate);
1811
    ctx1p.Reset(isolate, v8::Context::New(isolate));
1812
    ctx2p.Reset(isolate, v8::Context::New(isolate));
1813
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1814
  }
1815

    
1816
  CcTest::heap()->CollectAllAvailableGarbage();
1817
  CHECK_EQ(4, NumberOfGlobalObjects());
1818

    
1819
  {
1820
    v8::HandleScope inner_scope(isolate);
1821
    CompileRun("var v = function() { return 42; }");
1822
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1823
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1824
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1825
    ctx2->Enter();
1826
    ctx2->Global()->Set(v8_str("o"), v);
1827
    v8::Local<v8::Value> res = CompileRun(
1828
        "function f(x) { return x(); }"
1829
        "for (var i = 0; i < 10; ++i) f(o);"
1830
        "%OptimizeFunctionOnNextCall(f);"
1831
        "f(o);");
1832
    CHECK_EQ(42, res->Int32Value());
1833
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1834
    ctx2->Exit();
1835
    ctx1->Exit();
1836
    ctx1p.Dispose();
1837
    v8::V8::ContextDisposedNotification();
1838
  }
1839
  CcTest::heap()->CollectAllAvailableGarbage();
1840
  CHECK_EQ(2, NumberOfGlobalObjects());
1841
  ctx2p.Dispose();
1842
  CcTest::heap()->CollectAllAvailableGarbage();
1843
  CHECK_EQ(0, NumberOfGlobalObjects());
1844
}
1845

    
1846

    
1847
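// Same as above, but using a keyed element access so that keyed loads do not
// embed maps from a foreign context into optimized code.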
TEST(LeakNativeContextViaMapKeyed) {
1848
  i::FLAG_allow_natives_syntax = true;
1849
  v8::Isolate* isolate = CcTest::isolate();
1850
  v8::HandleScope outer_scope(isolate);
1851
  v8::Persistent<v8::Context> ctx1p;
1852
  v8::Persistent<v8::Context> ctx2p;
1853
  {
1854
    v8::HandleScope scope(isolate);
1855
    ctx1p.Reset(isolate, v8::Context::New(isolate));
1856
    ctx2p.Reset(isolate, v8::Context::New(isolate));
1857
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1858
  }
1859

    
1860
  CcTest::heap()->CollectAllAvailableGarbage();
1861
  CHECK_EQ(4, NumberOfGlobalObjects());
1862

    
1863
  {
1864
    v8::HandleScope inner_scope(isolate);
1865
    CompileRun("var v = [42, 43]");
1866
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1867
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1868
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1869
    ctx2->Enter();
1870
    ctx2->Global()->Set(v8_str("o"), v);
1871
    v8::Local<v8::Value> res = CompileRun(
1872
        "function f() { return o[0]; }"
1873
        "for (var i = 0; i < 10; ++i) f();"
1874
        "%OptimizeFunctionOnNextCall(f);"
1875
        "f();");
1876
    CHECK_EQ(42, res->Int32Value());
1877
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1878
    ctx2->Exit();
1879
    ctx1->Exit();
1880
    ctx1p.Dispose();
1881
    v8::V8::ContextDisposedNotification();
1882
  }
1883
  CcTest::heap()->CollectAllAvailableGarbage();
1884
  CHECK_EQ(2, NumberOfGlobalObjects());
1885
  ctx2p.Dispose();
1886
  CcTest::heap()->CollectAllAvailableGarbage();
1887
  CHECK_EQ(0, NumberOfGlobalObjects());
1888
}
1889

    
1890

    
1891
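// Same as above, but accessing the foreign object through a prototype chain.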
TEST(LeakNativeContextViaMapProto) {
1892
  i::FLAG_allow_natives_syntax = true;
1893
  v8::Isolate* isolate = CcTest::isolate();
1894
  v8::HandleScope outer_scope(isolate);
1895
  v8::Persistent<v8::Context> ctx1p;
1896
  v8::Persistent<v8::Context> ctx2p;
1897
  {
1898
    v8::HandleScope scope(isolate);
1899
    ctx1p.Reset(isolate, v8::Context::New(isolate));
1900
    ctx2p.Reset(isolate, v8::Context::New(isolate));
1901
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1902
  }
1903

    
1904
  CcTest::heap()->CollectAllAvailableGarbage();
1905
  CHECK_EQ(4, NumberOfGlobalObjects());
1906

    
1907
  {
1908
    v8::HandleScope inner_scope(isolate);
1909
    CompileRun("var v = { y: 42}");
1910
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1911
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1912
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1913
    ctx2->Enter();
1914
    ctx2->Global()->Set(v8_str("o"), v);
1915
    v8::Local<v8::Value> res = CompileRun(
1916
        "function f() {"
1917
        "  var p = {x: 42};"
1918
        "  p.__proto__ = o;"
1919
        "  return p.x;"
1920
        "}"
1921
        "for (var i = 0; i < 10; ++i) f();"
1922
        "%OptimizeFunctionOnNextCall(f);"
1923
        "f();");
1924
    CHECK_EQ(42, res->Int32Value());
1925
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1926
    ctx2->Exit();
1927
    ctx1->Exit();
1928
    ctx1p.Dispose();
1929
    v8::V8::ContextDisposedNotification();
1930
  }
1931
  CcTest::heap()->CollectAllAvailableGarbage();
1932
  CHECK_EQ(2, NumberOfGlobalObjects());
1933
  ctx2p.Dispose();
1934
  CcTest::heap()->CollectAllAvailableGarbage();
1935
  CHECK_EQ(0, NumberOfGlobalObjects());
1936
}
1937

    
1938

    
1939
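// Check the write barrier used by the instanceof stub: step incremental
// marking manually while an optimized function using instanceof runs, then
// hurry the marker and finish with a full collection.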
TEST(InstanceOfStubWriteBarrier) {
1940
  i::FLAG_allow_natives_syntax = true;
1941
#ifdef VERIFY_HEAP
1942
  i::FLAG_verify_heap = true;
1943
#endif
1944

    
1945
  CcTest::InitializeVM();
1946
  if (!CcTest::i_isolate()->use_crankshaft()) return;
1947
  if (i::FLAG_force_marking_deque_overflows) return;
1948
  v8::HandleScope outer_scope(CcTest::isolate());
1949

    
1950
  {
1951
    v8::HandleScope scope(CcTest::isolate());
1952
    CompileRun(
1953
        "function foo () { }"
1954
        "function mkbar () { return new (new Function(\"\")) (); }"
1955
        "function f (x) { return (x instanceof foo); }"
1956
        "function g () { f(mkbar()); }"
1957
        "f(new foo()); f(new foo());"
1958
        "%OptimizeFunctionOnNextCall(f);"
1959
        "f(new foo()); g();");
1960
  }
1961

    
1962
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
1963
  marking->Abort();
1964
  marking->Start();
1965

    
1966
  Handle<JSFunction> f =
1967
      v8::Utils::OpenHandle(
1968
          *v8::Handle<v8::Function>::Cast(
1969
              CcTest::global()->Get(v8_str("f"))));
1970

    
1971
  CHECK(f->IsOptimized());
1972

    
1973
  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
1974
         !marking->IsStopped()) {
1975
    // Discard any pending GC requests, otherwise we will get a GC when we
    // enter the code below.
1977
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
1978
  }
1979

    
1980
  CHECK(marking->IsMarking());
1981

    
1982
  {
1983
    v8::HandleScope scope(CcTest::isolate());
1984
    v8::Handle<v8::Object> global = CcTest::global();
1985
    v8::Handle<v8::Function> g =
1986
        v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
1987
    g->Call(global, 0, NULL);
1988
  }
1989

    
1990
  CcTest::heap()->incremental_marking()->set_should_hurry(true);
1991
  CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE);
1992
}
1993

    
1994

    
1995
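// Check that dead prototype transitions are cleared from a map, that the
// transition array is compacted afterwards, and that slots into an
// evacuation candidate are recorded when a new transition is added.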
TEST(PrototypeTransitionClearing) {
1996
  CcTest::InitializeVM();
1997
  Isolate* isolate = CcTest::i_isolate();
1998
  Factory* factory = isolate->factory();
1999
  v8::HandleScope scope(CcTest::isolate());
2000

    
2001
  CompileRun(
2002
      "var base = {};"
2003
      "var live = [];"
2004
      "for (var i = 0; i < 10; i++) {"
2005
      "  var object = {};"
2006
      "  var prototype = {};"
2007
      "  object.__proto__ = prototype;"
2008
      "  if (i >= 3) live.push(object, prototype);"
2009
      "}");
2010

    
2011
  Handle<JSObject> baseObject =
2012
      v8::Utils::OpenHandle(
2013
          *v8::Handle<v8::Object>::Cast(
2014
              CcTest::global()->Get(v8_str("base"))));
2015

    
2016
  // Verify that only dead prototype transitions are cleared.
2017
  CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
2018
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2019
  const int transitions = 10 - 3;
2020
  CHECK_EQ(transitions, baseObject->map()->NumberOfProtoTransitions());
2021

    
2022
  // Verify that the prototype transitions array was compacted.
2023
  FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
2024
  for (int i = 0; i < transitions; i++) {
2025
    int j = Map::kProtoTransitionHeaderSize +
2026
        i * Map::kProtoTransitionElementsPerEntry;
2027
    CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
2028
    Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
2029
    CHECK(proto->IsTheHole() || proto->IsJSObject());
2030
  }
2031

    
2032
  // Make sure the next prototype is placed on an old-space evacuation
  // candidate.
2033
  Handle<JSObject> prototype;
2034
  PagedSpace* space = CcTest::heap()->old_pointer_space();
2035
  {
2036
    AlwaysAllocateScope always_allocate;
2037
    SimulateFullSpace(space);
2038
    prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
2039
  }
2040

    
2041
  // Add a prototype on an evacuation candidate and verify that transition
  // clearing correctly records slots in the prototype transition array.
2043
  i::FLAG_always_compact = true;
2044
  Handle<Map> map(baseObject->map());
2045
  CHECK(!space->LastPage()->Contains(
2046
      map->GetPrototypeTransitions()->address()));
2047
  CHECK(space->LastPage()->Contains(prototype->address()));
2048
  JSObject::SetPrototype(baseObject, prototype, false);
2049
  CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap());
2050
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2051
  CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap());
2052
}
2053

    
2054

    
2055
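// Bumping the global IC age during incremental marking should reset the
// opt_count and profiler tick counters of an optimized function's shared
// function info.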
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2056
  i::FLAG_stress_compaction = false;
2057
  i::FLAG_allow_natives_syntax = true;
2058
#ifdef VERIFY_HEAP
2059
  i::FLAG_verify_heap = true;
2060
#endif
2061

    
2062
  CcTest::InitializeVM();
2063
  if (!CcTest::i_isolate()->use_crankshaft()) return;
2064
  v8::HandleScope outer_scope(CcTest::isolate());
2065

    
2066
  {
2067
    v8::HandleScope scope(CcTest::isolate());
2068
    CompileRun(
2069
        "function f () {"
2070
        "  var s = 0;"
2071
        "  for (var i = 0; i < 100; i++)  s += i;"
2072
        "  return s;"
2073
        "}"
2074
        "f(); f();"
2075
        "%OptimizeFunctionOnNextCall(f);"
2076
        "f();");
2077
  }
2078
  Handle<JSFunction> f =
2079
      v8::Utils::OpenHandle(
2080
          *v8::Handle<v8::Function>::Cast(
2081
              CcTest::global()->Get(v8_str("f"))));
2082
  CHECK(f->IsOptimized());
2083

    
2084
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2085
  marking->Abort();
2086
  marking->Start();
2087

    
2088
  // The following two calls will increment CcTest::heap()->global_ic_age().
2089
  const int kLongIdlePauseInMs = 1000;
2090
  v8::V8::ContextDisposedNotification();
2091
  v8::V8::IdleNotification(kLongIdlePauseInMs);
2092

    
2093
  while (!marking->IsStopped() && !marking->IsComplete()) {
2094
    marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2095
  }
2096
  if (!marking->IsStopped() || marking->should_hurry()) {
2097
    // We don't normally finish a GC via Step(); we normally finish by
    // setting the stack guard and then doing the final steps in the stack
    // guard interrupt.  But here we didn't ask for that, and there is no
    // JS code running to trigger the interrupt, so we explicitly finalize
    // here.
2102
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags,
2103
                            "Test finalizing incremental mark-sweep");
2104
  }
2105

    
2106
  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2107
  CHECK_EQ(0, f->shared()->opt_count());
2108
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2109
}
2110

    
2111

    
2112
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2113
  i::FLAG_stress_compaction = false;
2114
  i::FLAG_allow_natives_syntax = true;
2115
#ifdef VERIFY_HEAP
2116
  i::FLAG_verify_heap = true;
2117
#endif
2118

    
2119
  CcTest::InitializeVM();
2120
  if (!CcTest::i_isolate()->use_crankshaft()) return;
2121
  v8::HandleScope outer_scope(CcTest::isolate());
2122

    
2123
  {
2124
    v8::HandleScope scope(CcTest::isolate());
2125
    CompileRun(
2126
        "function f () {"
2127
        "  var s = 0;"
2128
        "  for (var i = 0; i < 100; i++)  s += i;"
2129
        "  return s;"
2130
        "}"
2131
        "f(); f();"
2132
        "%OptimizeFunctionOnNextCall(f);"
2133
        "f();");
2134
  }
2135
  Handle<JSFunction> f =
2136
      v8::Utils::OpenHandle(
2137
          *v8::Handle<v8::Function>::Cast(
2138
              CcTest::global()->Get(v8_str("f"))));
2139
  CHECK(f->IsOptimized());
2140

    
2141
  CcTest::heap()->incremental_marking()->Abort();
2142

    
2143
  // The following two calls will increment CcTest::heap()->global_ic_age().
  // Since incremental marking is off, IdleNotification will do a full GC.
2145
  const int kLongIdlePauseInMs = 1000;
2146
  v8::V8::ContextDisposedNotification();
2147
  v8::V8::IdleNotification(kLongIdlePauseInMs);
2148

    
2149
  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2150
  CHECK_EQ(0, f->shared()->opt_count());
2151
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2152
}
2153

    
2154

    
2155
// Test that HAllocateObject will always return an object in new-space.
2156
TEST(OptimizedAllocationAlwaysInNewSpace) {
2157
  i::FLAG_allow_natives_syntax = true;
2158
  CcTest::InitializeVM();
2159
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2160
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2161
  v8::HandleScope scope(CcTest::isolate());
2162

    
2163
  SimulateFullSpace(CcTest::heap()->new_space());
2164
  AlwaysAllocateScope always_allocate;
2165
  v8::Local<v8::Value> res = CompileRun(
2166
      "function c(x) {"
2167
      "  this.x = x;"
2168
      "  for (var i = 0; i < 32; i++) {"
2169
      "    this['x' + i] = x;"
2170
      "  }"
2171
      "}"
2172
      "function f(x) { return new c(x); };"
2173
      "f(1); f(2); f(3);"
2174
      "%OptimizeFunctionOnNextCall(f);"
2175
      "f(4);");
2176
  CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2177

    
2178
  Handle<JSObject> o =
2179
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2180

    
2181
  CHECK(CcTest::heap()->InNewSpace(*o));
2182
}
2183

    
2184

    
2185
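// With new-space high promotion mode active, the folded allocations of the
// optimized constructor should be pretenured: double fields end up in old
// data space and object fields in old pointer space.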
TEST(OptimizedPretenuringAllocationFolding) {
2186
  i::FLAG_allow_natives_syntax = true;
2187
  CcTest::InitializeVM();
2188
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2189
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2190
  v8::HandleScope scope(CcTest::isolate());
2191
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
2192

    
2193
  v8::Local<v8::Value> res = CompileRun(
2194
      "function DataObject() {"
2195
      "  this.a = 1.1;"
2196
      "  this.b = [{}];"
2197
      "  this.c = 1.2;"
2198
      "  this.d = [{}];"
2199
      "  this.e = 1.3;"
2200
      "  this.f = [{}];"
2201
      "}"
2202
      "function f() {"
2203
      "  return new DataObject();"
2204
      "};"
2205
      "f(); f(); f();"
2206
      "%OptimizeFunctionOnNextCall(f);"
2207
      "f();");
2208

    
2209
  Handle<JSObject> o =
2210
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2211

    
2212
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(0)));
2213
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(1)));
2214
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(2)));
2215
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(3)));
2216
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(4)));
2217
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(5)));
2218
}
2219

    
2220

    
2221
TEST(OptimizedPretenuringAllocationFoldingBlocks) {
2222
  i::FLAG_allow_natives_syntax = true;
2223
  CcTest::InitializeVM();
2224
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2225
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2226
  v8::HandleScope scope(CcTest::isolate());
2227
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
2228

    
2229
  v8::Local<v8::Value> res = CompileRun(
2230
      "function DataObject() {"
2231
      "  this.a = [{}];"
2232
      "  this.b = [{}];"
2233
      "  this.c = 1.1;"
2234
      "  this.d = 1.2;"
2235
      "  this.e = [{}];"
2236
      "  this.f = 1.3;"
2237
      "}"
2238
      "function f() {"
2239
      "  return new DataObject();"
2240
      "};"
2241
      "f(); f(); f();"
2242
      "%OptimizeFunctionOnNextCall(f);"
2243
      "f();");
2244

    
2245
  Handle<JSObject> o =
2246
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2247

    
2248
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(0)));
2249
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(1)));
2250
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(2)));
2251
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(3)));
2252
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(4)));
2253
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(5)));
2254
}
2255

    
2256

    
2257
TEST(OptimizedPretenuringObjectArrayLiterals) {
2258
  i::FLAG_allow_natives_syntax = true;
2259
  CcTest::InitializeVM();
2260
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2261
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2262
  v8::HandleScope scope(CcTest::isolate());
2263
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
2264

    
2265
  v8::Local<v8::Value> res = CompileRun(
2266
      "function f() {"
2267
      "  var numbers = [{}, {}, {}];"
2268
      "  return numbers;"
2269
      "};"
2270
      "f(); f(); f();"
2271
      "%OptimizeFunctionOnNextCall(f);"
2272
      "f();");
2273

    
2274
  Handle<JSObject> o =
2275
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2276

    
2277
  CHECK(CcTest::heap()->InOldPointerSpace(o->elements()));
2278
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
2279
}
2280

    
2281

    
2282
TEST(OptimizedPretenuringMixedInObjectProperties) {
2283
  i::FLAG_allow_natives_syntax = true;
2284
  CcTest::InitializeVM();
2285
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2286
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2287
  v8::HandleScope scope(CcTest::isolate());
2288
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
2289

    
2290
  v8::Local<v8::Value> res = CompileRun(
2291
      "function f() {"
2292
      "  var numbers = {a: {c: 2.2, d: {}}, b: 1.1};"
2293
      "  return numbers;"
2294
      "};"
2295
      "f(); f(); f();"
2296
      "%OptimizeFunctionOnNextCall(f);"
2297
      "f();");
2298

    
2299
  Handle<JSObject> o =
2300
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2301

    
2302
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
2303
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(0)));
2304
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(1)));
2305

    
2306
  JSObject* inner_object = reinterpret_cast<JSObject*>(o->RawFastPropertyAt(0));
2307
  CHECK(CcTest::heap()->InOldPointerSpace(inner_object));
2308
  CHECK(CcTest::heap()->InOldDataSpace(inner_object->RawFastPropertyAt(0)));
2309
  CHECK(CcTest::heap()->InOldPointerSpace(inner_object->RawFastPropertyAt(1)));
2310
}
2311

    
2312

    
2313
TEST(OptimizedPretenuringDoubleArrayProperties) {
2314
  i::FLAG_allow_natives_syntax = true;
2315
  CcTest::InitializeVM();
2316
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2317
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2318
  v8::HandleScope scope(CcTest::isolate());
2319
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
2320

    
2321
  v8::Local<v8::Value> res = CompileRun(
2322
      "function f() {"
2323
      "  var numbers = {a: 1.1, b: 2.2};"
2324
      "  return numbers;"
2325
      "};"
2326
      "f(); f(); f();"
2327
      "%OptimizeFunctionOnNextCall(f);"
2328
      "f();");
2329

    
2330
  Handle<JSObject> o =
2331
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2332

    
2333
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
2334
  CHECK(CcTest::heap()->InOldDataSpace(o->properties()));
2335
}
2336

    
2337

    
2338
TEST(OptimizedPretenuringdoubleArrayLiterals) {
2339
  i::FLAG_allow_natives_syntax = true;
2340
  CcTest::InitializeVM();
2341
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2342
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2343
  v8::HandleScope scope(CcTest::isolate());
2344
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
2345

    
2346
  v8::Local<v8::Value> res = CompileRun(
2347
      "function f() {"
2348
      "  var numbers = [1.1, 2.2, 3.3];"
2349
      "  return numbers;"
2350
      "};"
2351
      "f(); f(); f();"
2352
      "%OptimizeFunctionOnNextCall(f);"
2353
      "f();");
2354

    
2355
  Handle<JSObject> o =
2356
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2357

    
2358
  CHECK(CcTest::heap()->InOldDataSpace(o->elements()));
2359
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
2360
}
2361

    
2362

    
2363
TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2364
  i::FLAG_allow_natives_syntax = true;
2365
  CcTest::InitializeVM();
2366
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2367
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2368
  v8::HandleScope scope(CcTest::isolate());
2369
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
2370

    
2371
  v8::Local<v8::Value> res = CompileRun(
2372
      "function f() {"
2373
      "  var numbers = [[{}, {}, {}],[1.1, 2.2, 3.3]];"
2374
      "  return numbers;"
2375
      "};"
2376
      "f(); f(); f();"
2377
      "%OptimizeFunctionOnNextCall(f);"
2378
      "f();");
2379

    
2380
  v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2381
  Handle<JSObject> int_array_handle =
2382
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2383
  v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2384
  Handle<JSObject> double_array_handle =
2385
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2386

    
2387
  Handle<JSObject> o =
2388
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2389
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
2390
  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
2391
  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
2392
  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
2393
  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
2394
}
2395

    
2396

    
2397
TEST(OptimizedPretenuringNestedObjectLiterals) {
2398
  i::FLAG_allow_natives_syntax = true;
2399
  CcTest::InitializeVM();
2400
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2401
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2402
  v8::HandleScope scope(CcTest::isolate());
2403
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
2404

    
2405
  v8::Local<v8::Value> res = CompileRun(
2406
      "function f() {"
2407
      "  var numbers = [[{}, {}, {}],[{}, {}, {}]];"
2408
      "  return numbers;"
2409
      "};"
2410
      "f(); f(); f();"
2411
      "%OptimizeFunctionOnNextCall(f);"
2412
      "f();");
2413

    
2414
  v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
2415
  Handle<JSObject> int_array_handle_1 =
2416
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
2417
  v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
2418
  Handle<JSObject> int_array_handle_2 =
2419
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
2420

    
2421
  Handle<JSObject> o =
2422
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2423
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
2424
  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_1));
2425
  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_1->elements()));
2426
  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_2));
2427
  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_2->elements()));
2428
}
2429

    
2430

    
2431
TEST(OptimizedPretenuringNestedDoubleLiterals) {
2432
  i::FLAG_allow_natives_syntax = true;
2433
  CcTest::InitializeVM();
2434
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2435
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2436
  v8::HandleScope scope(CcTest::isolate());
2437
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
2438

    
2439
  v8::Local<v8::Value> res = CompileRun(
2440
      "function f() {"
2441
      "  var numbers = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2442
      "  return numbers;"
2443
      "};"
2444
      "f(); f(); f();"
2445
      "%OptimizeFunctionOnNextCall(f);"
2446
      "f();");
2447

    
2448
  v8::Local<v8::Value> double_array_1 =
2449
      v8::Object::Cast(*res)->Get(v8_str("0"));
2450
  Handle<JSObject> double_array_handle_1 =
2451
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
2452
  v8::Local<v8::Value> double_array_2 =
2453
      v8::Object::Cast(*res)->Get(v8_str("1"));
2454
  Handle<JSObject> double_array_handle_2 =
2455
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
2456

    
2457
  Handle<JSObject> o =
2458
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2459
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
2460
  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_1));
2461
  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_1->elements()));
2462
  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_2));
2463
  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_2->elements()));
2464
}
2465

    
2466

    
2467
// Test regular array literals allocation.
2468
TEST(OptimizedAllocationArrayLiterals) {
2469
  i::FLAG_allow_natives_syntax = true;
2470
  CcTest::InitializeVM();
2471
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2472
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2473
  v8::HandleScope scope(CcTest::isolate());
2474

    
2475
  v8::Local<v8::Value> res = CompileRun(
2476
      "function f() {"
2477
      "  var numbers = new Array(1, 2, 3);"
2478
      "  numbers[0] = 3.14;"
2479
      "  return numbers;"
2480
      "};"
2481
      "f(); f(); f();"
2482
      "%OptimizeFunctionOnNextCall(f);"
2483
      "f();");
2484
  CHECK_EQ(static_cast<int>(3.14),
2485
           v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
2486

    
2487
  Handle<JSObject> o =
2488
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2489

    
2490
  CHECK(CcTest::heap()->InNewSpace(o->elements()));
2491
}
2492

    
2493

    
2494
TEST(OptimizedPretenuringCallNew) {
2495
  i::FLAG_allow_natives_syntax = true;
2496
  i::FLAG_pretenuring_call_new = true;
2497
  CcTest::InitializeVM();
2498
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2499
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2500
  v8::HandleScope scope(CcTest::isolate());
2501
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
2502

    
2503
  AlwaysAllocateScope always_allocate;
2504
  v8::Local<v8::Value> res = CompileRun(
2505
      "function g() { this.a = 0; }"
2506
      "function f() {"
2507
      "  return new g();"
2508
      "};"
2509
      "f(); f(); f();"
2510
      "%OptimizeFunctionOnNextCall(f);"
2511
      "f();");
2512

    
2513
  Handle<JSObject> o =
2514
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2515
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
2516
}
2517

    
2518

    
2519
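// Count the transitions recorded in a map's transition array.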
static int CountMapTransitions(Map* map) {
2520
  return map->transitions()->number_of_transitions();
2521
}
2522

    
2523

    
2524
// Test that map transitions are cleared and maps are collected with
2525
// incremental marking as well.
2526
TEST(Regress1465) {
2527
  i::FLAG_stress_compaction = false;
2528
  i::FLAG_allow_natives_syntax = true;
2529
  i::FLAG_trace_incremental_marking = true;
2530
  CcTest::InitializeVM();
2531
  v8::HandleScope scope(CcTest::isolate());
2532
  static const int transitions_count = 256;
2533

    
2534
  {
2535
    AlwaysAllocateScope always_allocate;
2536
    for (int i = 0; i < transitions_count; i++) {
2537
      EmbeddedVector<char, 64> buffer;
2538
      OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
2539
      CompileRun(buffer.start());
2540
    }
2541
    CompileRun("var root = new Object;");
2542
  }
2543

    
2544
  Handle<JSObject> root =
2545
      v8::Utils::OpenHandle(
2546
          *v8::Handle<v8::Object>::Cast(
2547
              CcTest::global()->Get(v8_str("root"))));
2548

    
2549
  // Count number of live transitions before marking.
2550
  int transitions_before = CountMapTransitions(root->map());
2551
  CompileRun("%DebugPrint(root);");
2552
  CHECK_EQ(transitions_count, transitions_before);
2553

    
2554
  SimulateIncrementalMarking();
2555
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2556

    
2557
  // Count number of live transitions after marking.  Note that one transition
2558
  // is left, because 'o' still holds an instance of one transition target.
2559
  int transitions_after = CountMapTransitions(root->map());
2560
  CompileRun("%DebugPrint(root);");
2561
  CHECK_EQ(1, transitions_after);
2562
}
2563

    
2564

    
2565
TEST(Regress2143a) {
2566
  i::FLAG_collect_maps = true;
2567
  i::FLAG_incremental_marking = true;
2568
  CcTest::InitializeVM();
2569
  v8::HandleScope scope(CcTest::isolate());
2570

    
2571
  // Prepare a map transition from the root object together with a yet
2572
  // untransitioned root object.
2573
  CompileRun("var root = new Object;"
2574
             "root.foo = 0;"
2575
             "root = new Object;");
2576

    
2577
  SimulateIncrementalMarking();
2578

    
2579
  // Compile a StoreIC that performs the prepared map transition. This
2580
  // will restart incremental marking and should make sure the root is
2581
  // marked grey again.
2582
  CompileRun("function f(o) {"
2583
             "  o.foo = 0;"
2584
             "}"
2585
             "f(new Object);"
2586
             "f(root);");
2587

    
2588
  // This bug only triggers with aggressive IC clearing.
2589
  CcTest::heap()->AgeInlineCaches();
2590

    
2591
  // Explicitly request GC to perform final marking step and sweeping.
2592
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2593

    
2594
  Handle<JSObject> root =
2595
      v8::Utils::OpenHandle(
2596
          *v8::Handle<v8::Object>::Cast(
2597
              CcTest::global()->Get(v8_str("root"))));
2598

    
2599
  // The root object should be in a sane state.
2600
  CHECK(root->IsJSObject());
2601
  CHECK(root->map()->IsMap());
2602
}
2603

    
2604

    
2605
TEST(Regress2143b) {
2606
  i::FLAG_collect_maps = true;
2607
  i::FLAG_incremental_marking = true;
2608
  i::FLAG_allow_natives_syntax = true;
2609
  CcTest::InitializeVM();
2610
  v8::HandleScope scope(CcTest::isolate());
2611

    
2612
  // Prepare a map transition from the root object together with a yet
2613
  // untransitioned root object.
2614
  CompileRun("var root = new Object;"
2615
             "root.foo = 0;"
2616
             "root = new Object;");
2617

    
2618
  SimulateIncrementalMarking();
2619

    
2620
  // Compile an optimized LStoreNamedField that performs the prepared
2621
  // map transition. This will restart incremental marking and should
2622
  // make sure the root is marked grey again.
2623
  CompileRun("function f(o) {"
2624
             "  o.foo = 0;"
2625
             "}"
2626
             "f(new Object);"
2627
             "f(new Object);"
2628
             "%OptimizeFunctionOnNextCall(f);"
2629
             "f(root);"
2630
             "%DeoptimizeFunction(f);");
2631

    
2632
  // This bug only triggers with aggressive IC clearing.
2633
  CcTest::heap()->AgeInlineCaches();
2634

    
2635
  // Explicitly request GC to perform final marking step and sweeping.
2636
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2637

    
2638
  Handle<JSObject> root =
2639
      v8::Utils::OpenHandle(
2640
          *v8::Handle<v8::Object>::Cast(
2641
              CcTest::global()->Get(v8_str("root"))));
2642

    
2643
  // The root object should be in a sane state.
2644
  CHECK(root->IsJSObject());
2645
  CHECK(root->map()->IsMap());
2646
}
2647

    
2648

    
2649
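// Fill old pointer space with pages that are mostly garbage and check that
// repeated full GCs release the over-reserved pages back to the OS, ending
// with a single page after a last-resort collection.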
TEST(ReleaseOverReservedPages) {
2650
  i::FLAG_trace_gc = true;
2651
  // The optimizer can allocate stuff, messing up the test.
2652
  i::FLAG_crankshaft = false;
2653
  i::FLAG_always_opt = false;
2654
  CcTest::InitializeVM();
2655
  Isolate* isolate = CcTest::i_isolate();
2656
  Factory* factory = isolate->factory();
2657
  Heap* heap = isolate->heap();
2658
  v8::HandleScope scope(CcTest::isolate());
2659
  static const int number_of_test_pages = 20;
2660

    
2661
  // Prepare many pages with low live-bytes count.
2662
  PagedSpace* old_pointer_space = heap->old_pointer_space();
2663
  CHECK_EQ(1, old_pointer_space->CountTotalPages());
2664
  for (int i = 0; i < number_of_test_pages; i++) {
2665
    AlwaysAllocateScope always_allocate;
2666
    SimulateFullSpace(old_pointer_space);
2667
    factory->NewFixedArray(1, TENURED);
2668
  }
2669
  CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
2670

    
2671
  // Triggering one GC will cause a lot of garbage to be discovered, but
  // evenly spread across all allocated pages.
2673
  heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
2674
  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
2675

    
2676
  // Triggering subsequent GCs should cause at least half of the pages
2677
  // to be released to the OS after at most two cycles.
2678
  heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
2679
  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
2680
  heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
2681
  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
2682

    
2683
  // Triggering a last-resort GC should cause all pages to be released to the
2684
  // OS so that other processes can seize the memory.  If we get a failure here
2685
  // where there are 2 pages left instead of 1, then we should increase the
2686
  // size of the first page a little in SizeOfFirstPage in spaces.cc.  The
2687
  // first page should be small in order to reduce memory used when the VM
2688
  // boots, but if the 20 small arrays don't fit on the first page then that's
2689
  // an indication that it is too small.
2690
  heap->CollectAllAvailableGarbage("triggered really hard");
2691
  CHECK_EQ(1, old_pointer_space->CountTotalPages());
2692
}
2693

    
2694

    
2695
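// Regression test for issue 2237: a sliced string promoted to old space must
// keep its new-space parent (a sequential string) alive across a full GC.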
TEST(Regress2237) {
2696
  i::FLAG_stress_compaction = false;
2697
  CcTest::InitializeVM();
2698
  Isolate* isolate = CcTest::i_isolate();
2699
  Factory* factory = isolate->factory();
2700
  v8::HandleScope scope(CcTest::isolate());
2701
  Handle<String> slice(CcTest::heap()->empty_string());
2702

    
2703
  {
2704
    // Generate a parent that lives in new-space.
2705
    v8::HandleScope inner_scope(CcTest::isolate());
2706
    const char* c = "This text is long enough to trigger sliced strings.";
2707
    Handle<String> s = factory->NewStringFromAscii(CStrVector(c));
2708
    CHECK(s->IsSeqOneByteString());
2709
    CHECK(CcTest::heap()->InNewSpace(*s));
2710

    
2711
    // Generate a sliced string that is based on the above parent and
2712
    // lives in old-space.
2713
    SimulateFullSpace(CcTest::heap()->new_space());
2714
    AlwaysAllocateScope always_allocate;
2715
    Handle<String> t = factory->NewProperSubString(s, 5, 35);
2716
    CHECK(t->IsSlicedString());
2717
    CHECK(!CcTest::heap()->InNewSpace(*t));
2718
    *slice.location() = *t.location();
2719
  }
2720

    
2721
  CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
2722
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2723
  CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
2724
}
2725

    
2726

    
2727
#ifdef OBJECT_PRINT
2728
TEST(PrintSharedFunctionInfo) {
2729
  CcTest::InitializeVM();
2730
  v8::HandleScope scope(CcTest::isolate());
2731
  const char* source = "f = function() { return 987654321; }\n"
2732
                       "g = function() { return 123456789; }\n";
2733
  CompileRun(source);
2734
  Handle<JSFunction> g =
2735
      v8::Utils::OpenHandle(
2736
          *v8::Handle<v8::Function>::Cast(
2737
              CcTest::global()->Get(v8_str("g"))));
2738

    
2739
  DisallowHeapAllocation no_allocation;
2740
  g->shared()->PrintLn();
2741
}
2742
#endif  // OBJECT_PRINT
2743

    
2744

    
2745
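// Regression test for issue 2211: setting a hidden value and an identity
// hash in either order must preserve both and keep the backing hash table
// small.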
TEST(Regress2211) {
2746
  CcTest::InitializeVM();
2747
  v8::HandleScope scope(CcTest::isolate());
2748

    
2749
  v8::Handle<v8::String> value = v8_str("val string");
2750
  Smi* hash = Smi::FromInt(321);
2751
  Heap* heap = CcTest::heap();
2752

    
2753
  for (int i = 0; i < 2; i++) {
2754
    // Store an identity hash and a common hidden property on a fresh object.
2755
    v8::Handle<v8::Object> obj = v8::Object::New();
2756
    Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
2757
    CHECK(internal_obj->HasFastProperties());
2758

    
2759
    // In the first iteration, set hidden value first and identity hash second.
2760
    // In the second iteration, reverse the order.
2761
    if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
2762
    JSObject::SetIdentityHash(internal_obj, hash);
2763
    if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
2764

    
2765
    // Check values.
2766
    CHECK_EQ(hash,
2767
             internal_obj->GetHiddenProperty(heap->identity_hash_string()));
2768
    CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
2769

    
2770
    // Check size.
2771
    DescriptorArray* descriptors = internal_obj->map()->instance_descriptors();
2772
    ObjectHashTable* hashtable = ObjectHashTable::cast(
2773
        internal_obj->RawFastPropertyAt(descriptors->GetFieldIndex(0)));
2774
    // HashTable header (5) and 4 initial entries (8).
2775
    CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
2776
  }
2777
}
2778

    
2779

    
2780
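// Type feedback cells referring to closures from other native contexts
// should be cleared (set to the hole) by incremental marking followed by a
// full GC.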
TEST(IncrementalMarkingClearsTypeFeedbackCells) {
2781
  if (i::FLAG_always_opt) return;
2782
  CcTest::InitializeVM();
2783
  v8::HandleScope scope(CcTest::isolate());
2784
  v8::Local<v8::Value> fun1, fun2;
2785

    
2786
  {
2787
    LocalContext env;
2788
    CompileRun("function fun() {};");
2789
    fun1 = env->Global()->Get(v8_str("fun"));
2790
  }
2791

    
2792
  {
2793
    LocalContext env;
2794
    CompileRun("function fun() {};");
2795
    fun2 = env->Global()->Get(v8_str("fun"));
2796
  }
2797

    
2798
  // Prepare function f that contains type feedback for closures
2799
  // originating from two different native contexts.
2800
  CcTest::global()->Set(v8_str("fun1"), fun1);
2801
  CcTest::global()->Set(v8_str("fun2"), fun2);
2802
  CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
2803
  Handle<JSFunction> f =
2804
      v8::Utils::OpenHandle(
2805
          *v8::Handle<v8::Function>::Cast(
2806
              CcTest::global()->Get(v8_str("f"))));
2807
  Handle<TypeFeedbackCells> cells(TypeFeedbackInfo::cast(
2808
      f->shared()->code()->type_feedback_info())->type_feedback_cells());
2809

    
2810
  CHECK_EQ(2, cells->CellCount());
2811
  CHECK(cells->GetCell(0)->value()->IsJSFunction());
2812
  CHECK(cells->GetCell(1)->value()->IsJSFunction());
2813

    
2814
  SimulateIncrementalMarking();
2815
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2816

    
2817
  CHECK_EQ(2, cells->CellCount());
2818
  CHECK(cells->GetCell(0)->value()->IsTheHole());
2819
  CHECK(cells->GetCell(1)->value()->IsTheHole());
2820
}
2821

    
2822

    
2823
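// Return the first inline cache of the given kind targeted by the given
// code object, or NULL if there is none.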
static Code* FindFirstIC(Code* code, Code::Kind kind) {
  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
             RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
  for (RelocIterator it(code, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Code* target = Code::GetCodeFromTargetAddress(info->target_address());
    if (target->is_inline_cache_stub() && target->kind() == kind) {
      return target;
    }
  }
  return NULL;
}


TEST(IncrementalMarkingPreservesMonomorhpicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
             "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_before->ic_state() == MONOMORPHIC);

  SimulateIncrementalMarking();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_after->ic_state() == MONOMORPHIC);
}


TEST(IncrementalMarkingClearsMonomorhpicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(v8_str("obj"));
  }

  // Prepare function f that contains a monomorphic IC for object
  // originating from a different native context.
  CcTest::global()->Set(v8_str("obj1"), obj1);
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_before->ic_state() == MONOMORPHIC);

  // Fire context dispose notification.
  v8::V8::ContextDisposedNotification();
  SimulateIncrementalMarking();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(IC::IsCleared(ic_after));
}


TEST(IncrementalMarkingClearsPolymorhpicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(v8_str("obj"));
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(v8_str("obj"));
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CcTest::global()->Set(v8_str("obj1"), obj1);
  CcTest::global()->Set(v8_str("obj2"), obj2);
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_before->ic_state() == POLYMORPHIC);

  // Fire context dispose notification.
  v8::V8::ContextDisposedNotification();
  SimulateIncrementalMarking();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(IC::IsCleared(ic_after));
}


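// External ASCII string resource that releases its underlying data on
// Dispose() and remembers that it has been disposed, so tests can observe
// when the external source string is reclaimed by the GC.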
class SourceResource: public v8::String::ExternalAsciiStringResource {
 public:
  explicit SourceResource(const char* data)
    : data_(data), length_(strlen(data)) { }

  virtual void Dispose() {
    i::DeleteArray(data_);
    data_ = NULL;
  }

  const char* data() const { return data_; }

  size_t length() const { return length_; }

  bool IsDisposed() { return data_ == NULL; }

 private:
  const char* data_;
  size_t length_;
};


void ReleaseStackTraceDataTest(const char* source, const char* accessor) {
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired.  We use an external string
  // to check whether the data is released, since the external string
  // resource's callback is fired when the external string is GC'ed.
  FLAG_use_ic = false;  // ICs retain objects.
  FLAG_concurrent_recompilation = false;
  v8::HandleScope scope(CcTest::isolate());
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    v8::HandleScope scope(CcTest::isolate());
    v8::Handle<v8::String> source_string = v8::String::NewExternal(resource);
    CcTest::heap()->CollectAllAvailableGarbage();
    v8::Script::Compile(source_string)->Run();
    CHECK(!resource->IsDisposed());
  }
  // CcTest::heap()->CollectAllAvailableGarbage();
  CHECK(!resource->IsDisposed());

  CompileRun(accessor);
  CcTest::heap()->CollectAllAvailableGarbage();

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}


TEST(ReleaseStackTraceData) {
  CcTest::InitializeVM();
  static const char* source1 = "var error = null;            "
  /* Normal Error */           "try {                        "
                               "  throw new Error();         "
                               "} catch (e) {                "
                               "  error = e;                 "
                               "}                            ";
  static const char* source2 = "var error = null;            "
  /* Stack overflow */         "try {                        "
                               "  (function f() { f(); })(); "
                               "} catch (e) {                "
                               "  error = e;                 "
                               "}                            ";
  static const char* source3 = "var error = null;            "
  /* Normal Error */           "try {                        "
  /* as prototype */           "  throw new Error();         "
                               "} catch (e) {                "
                               "  error = {};                "
                               "  error.__proto__ = e;       "
                               "}                            ";
  static const char* source4 = "var error = null;            "
  /* Stack overflow */         "try {                        "
  /* as prototype   */         "  (function f() { f(); })(); "
                               "} catch (e) {                "
                               "  error = {};                "
                               "  error.__proto__ = e;       "
                               "}                            ";
  static const char* getter = "error.stack";
  static const char* setter = "error.stack = 0";

  ReleaseStackTraceDataTest(source1, setter);
  ReleaseStackTraceDataTest(source2, setter);
  // We do not test source3 and source4 with setter, since the setter is
  // supposed to (untypically) write to the receiver, not the holder.  This is
  // to emulate the behavior of a data property.

  ReleaseStackTraceDataTest(source1, getter);
  ReleaseStackTraceDataTest(source2, getter);
  ReleaseStackTraceDataTest(source3, getter);
  ReleaseStackTraceDataTest(source4, getter);
}


TEST(Regression144230) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // First, make sure that the uninitialized CallIC stub is on a single page
  // that will later be selected as an evacuation candidate.
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate;
    SimulateFullSpace(heap->code_space());
    isolate->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET);
  }

  // Second, compile a CallIC and execute it once so that it gets patched to
  // the pre-monomorphic stub. These code objects are on yet another page.
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate;
    SimulateFullSpace(heap->code_space());
    CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};"
               "function call() { o.f(1,2,3,4,5,6,7,8,9); };"
               "call();");
  }

  // Third, we fill up the last page of the code space so that it does not get
  // chosen as an evacuation candidate.
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate;
    CompileRun("for (var i = 0; i < 2000; i++) {"
               "  eval('function f' + i + '() { return ' + i +'; };' +"
               "       'f' + i + '();');"
               "}");
  }
  heap->CollectAllGarbage(Heap::kNoGCFlags);

  // Fourth is the tricky part. Make sure the code containing the CallIC is
  // visited first without clearing the IC. The shared function info is then
  // visited later, causing the CallIC to be cleared.
  Handle<String> name = isolate->factory()->InternalizeUtf8String("call");
  Handle<GlobalObject> global(isolate->context()->global_object());
  Handle<Smi> zero(Smi::FromInt(0), isolate);
  MaybeObject* maybe_call = global->GetProperty(*name);
  JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked());
  JSReceiver::SetProperty(global, name, zero, NONE, kNonStrictMode);
  isolate->compilation_cache()->Clear();
  call->shared()->set_ic_age(heap->global_ic_age() + 1);
  Handle<Object> call_code(call->code(), isolate);
  Handle<Object> call_function(call, isolate);

  // Now we are ready to mess up the heap.
  heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);

  // Either heap verification caught the problem already or we go kaboom once
  // the CallIC is executed the next time.
  JSReceiver::SetProperty(global, name, call_function, NONE, kNonStrictMode);
  CompileRun("call();");
}


TEST(Regress159140) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare several closures that are all eligible for code flushing
  // because all reachable ones are not optimized. Make sure that the
  // optimized code object is directly reachable through a handle so
  // that it is marked black during incremental marking.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function h(x) {}"
               "function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "var g = mkClosure();"
               "f(1); f(2);"
               "g(1); g(2);"
               "h(1); h(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);"
               "%OptimizeFunctionOnNextCall(h); h(3);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    CompileRun("f = null;");

    Handle<JSFunction> g =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("g"))));
    CHECK(g->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then optimize one function. Finally
  // finish the GC to complete code flushing.
  SimulateIncrementalMarking();
  CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
  heap->CollectAllGarbage(Heap::kNoGCFlags);

  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("g('bozo');");
}


TEST(Regress165495) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare an optimized closure so that the optimized code map gets
  // populated. Then age the unoptimized code to trigger code flushing,
  // but make sure the optimized code is unreachable.
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    CompileRun("f = null;");
  }

  // Simulate incremental marking so that unoptimized code is flushed
  // even though it still is cached in the optimized code map.
  SimulateIncrementalMarking();
  heap->CollectAllGarbage(Heap::kNoGCFlags);

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var g = mkClosure(); g('bozo');");
}


TEST(Regress169209) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare a shared function info eligible for code flushing for which
  // the unoptimized code will be replaced during optimization.
  Handle<SharedFunctionInfo> shared1;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 'foobar'; }"
               "function g(x) { if (x) f(); }"
               "f();"
               "g(false);"
               "g(false);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Prepare a shared function info eligible for code flushing that will
  // represent the dangling tail of the candidate list.
  Handle<SharedFunctionInfo> shared2;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function flushMe() { return 0; }"
               "flushMe(1);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("flushMe"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Simulate incremental marking and collect code flushing candidates.
  SimulateIncrementalMarking();
  CHECK(shared1->code()->gc_metadata() != NULL);

  // Optimize function and make sure the unoptimized code is replaced.
#ifdef DEBUG
  FLAG_stop_at = "f";
#endif
  CompileRun("%OptimizeFunctionOnNextCall(g);"
             "g(false);");

  // Finish garbage collection cycle.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(shared1->code()->gc_metadata() == NULL);
}


// Helper function that fills up new-space in the heap, leaving only
// |extra_bytes| of linear allocation space remaining.
static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
                                        int extra_bytes) {
  int space_remaining = static_cast<int>(
      *space->allocation_limit_address() - *space->allocation_top_address());
  CHECK(space_remaining >= extra_bytes);
  int new_linear_size = space_remaining - extra_bytes;
  v8::internal::MaybeObject* maybe = space->AllocateRaw(new_linear_size);
  v8::internal::FreeListNode* node = v8::internal::FreeListNode::cast(maybe);
  node->set_size(space->heap(), new_linear_size);
}


TEST(Regress169928) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_crankshaft = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  // Prepare the environment.
  CompileRun("function fastliteralcase(literal, value) {"
             "    literal[0] = value;"
             "    return literal;"
             "}"
             "function get_standard_literal() {"
             "    var literal = [1, 2, 3];"
             "    return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // Prepare the heap.
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  CcTest::global()->Set(array_name, v8::Int32::New(0));

  // First make sure we flip spaces.
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  AllocateAllButNBytes(CcTest::heap()->new_space(),
                       JSArray::kSize + AllocationMemento::kSize +
                       kPointerSize);

  Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,
                                                          FAST_SMI_ELEMENTS,
                                                          NOT_TENURED);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasFastSmiOrObjectElements());

  // We need a filler the size of an AllocationMemento object, plus an extra
  // pointer-size filler word.
  MaybeObject* maybe_object = CcTest::heap()->AllocateRaw(
      AllocationMemento::kSize + kPointerSize, NEW_SPACE, OLD_POINTER_SPACE);
  Object* obj = NULL;
  CHECK(maybe_object->ToObject(&obj));
  Address addr_obj = reinterpret_cast<Address>(
      reinterpret_cast<byte*>(obj - kHeapObjectTag));
  CcTest::heap()->CreateFillerObjectAt(addr_obj,
                             AllocationMemento::kSize + kPointerSize);

  // Give the array a name, making sure not to allocate strings.
  v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
  CcTest::global()->Set(array_name, array_obj);

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope;
  v8::Script::Compile(mote_code_string)->Run();
}


TEST(Regress168801) {
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    function = inner_scope.CloseAndEscape(handle(*f, isolate));
  }

  // Simulate incremental marking so that unoptimized function is enqueued as a
  // candidate for code flushing. The shared function info however will not be
  // explicitly enqueued.
  SimulateIncrementalMarking();

  // Now optimize the function so that it is taken off the candidate list.
  {
    HandleScope inner_scope(isolate);
    CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
  }

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}


TEST(Regress173458) {
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    function = inner_scope.CloseAndEscape(handle(*f, isolate));
  }

  // Simulate incremental marking so that unoptimized function is enqueued as a
  // candidate for code flushing. The shared function info however will not be
  // explicitly enqueued.
  SimulateIncrementalMarking();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Now enable the debugger which in turn will disable code flushing.
  CHECK(isolate->debug()->Load());
#endif  // ENABLE_DEBUGGER_SUPPORT

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}


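// Object visitor that ignores all pointers; used below only to force
// iteration over the handle scope implementer's blocks.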
class DummyVisitor : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) { }
};


TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  v8::ImplementationUtilities::HandleScopeData* data =
      isolate->handle_scope_data();
  Handle<Object> init(heap->empty_string(), isolate);
  while (data->next < data->limit) {
    Handle<Object> obj(heap->empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  ASSERT(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
  deferred.Detach();
}


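// A single large incremental marking step should be able to finish marking
// the large array allocated below in one go.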
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function f(n) {"
             "    var a = new Array(n);"
             "    for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) marking->Start();
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  ASSERT(marking->IsComplete());
}