// deps/v8/src/heap.cc @ 40c0f755

// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "accessors.h"
#include "api.h"
#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compilation-cache.h"
#include "debug.h"
#include "global-handles.h"
#include "mark-compact.h"
#include "natives.h"
#include "scanner.h"
#include "scopeinfo.h"
#include "v8threads.h"

namespace v8 { namespace internal {

#define ROOT_ALLOCATION(type, name) type* Heap::name##_;
  ROOT_LIST(ROOT_ALLOCATION)
#undef ROOT_ALLOCATION


#define STRUCT_ALLOCATION(NAME, Name, name) Map* Heap::name##_map_;
  STRUCT_LIST(STRUCT_ALLOCATION)
#undef STRUCT_ALLOCATION


#define SYMBOL_ALLOCATION(name, string) String* Heap::name##_;
  SYMBOL_LIST(SYMBOL_ALLOCATION)
#undef SYMBOL_ALLOCATION

String* Heap::hidden_symbol_;

NewSpace Heap::new_space_;
OldSpace* Heap::old_pointer_space_ = NULL;
OldSpace* Heap::old_data_space_ = NULL;
OldSpace* Heap::code_space_ = NULL;
MapSpace* Heap::map_space_ = NULL;
LargeObjectSpace* Heap::lo_space_ = NULL;

static const int kMinimumPromotionLimit = 2*MB;
static const int kMinimumAllocationLimit = 8*MB;

int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;

int Heap::old_gen_exhausted_ = false;

int Heap::amount_of_external_allocated_memory_ = 0;
int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;

// semispace_size_ should be a power of 2 and old_generation_size_ should be
// a multiple of Page::kPageSize.
int Heap::semispace_size_  = 2*MB;
int Heap::old_generation_size_ = 512*MB;
int Heap::initial_semispace_size_ = 256*KB;

GCCallback Heap::global_gc_prologue_callback_ = NULL;
GCCallback Heap::global_gc_epilogue_callback_ = NULL;

// Variables set based on semispace_size_ and old_generation_size_ in
// ConfigureHeap.
int Heap::young_generation_size_ = 0;  // Will be 2 * semispace_size_.

// Double the new space after this many scavenge collections.
int Heap::new_space_growth_limit_ = 8;
int Heap::scavenge_count_ = 0;
Heap::HeapState Heap::gc_state_ = NOT_IN_GC;

int Heap::mc_count_ = 0;
int Heap::gc_count_ = 0;

int Heap::always_allocate_scope_depth_ = 0;
bool Heap::context_disposed_pending_ = false;

#ifdef DEBUG
bool Heap::allocation_allowed_ = true;

int Heap::allocation_timeout_ = 0;
bool Heap::disallow_allocation_failure_ = false;
#endif  // DEBUG


int Heap::Capacity() {
  if (!HasBeenSetup()) return 0;

  return new_space_.Capacity() +
      old_pointer_space_->Capacity() +
      old_data_space_->Capacity() +
      code_space_->Capacity() +
      map_space_->Capacity();
}


int Heap::Available() {
  if (!HasBeenSetup()) return 0;

  return new_space_.Available() +
      old_pointer_space_->Available() +
      old_data_space_->Available() +
      code_space_->Available() +
      map_space_->Available();
}


bool Heap::HasBeenSetup() {
  return old_pointer_space_ != NULL &&
         old_data_space_ != NULL &&
         code_space_ != NULL &&
         map_space_ != NULL &&
         lo_space_ != NULL;
}


GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
  // Is global GC requested?
  if (space != NEW_SPACE || FLAG_gc_global) {
    Counters::gc_compactor_caused_by_request.Increment();
    return MARK_COMPACTOR;
  }

  // Is enough data promoted to justify a global GC?
  if (OldGenerationPromotionLimitReached()) {
    Counters::gc_compactor_caused_by_promoted_data.Increment();
    return MARK_COMPACTOR;
  }

  // Have allocation in OLD and LO failed?
  if (old_gen_exhausted_) {
    Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
    return MARK_COMPACTOR;
  }

  // Is there enough space left in OLD to guarantee that a scavenge can
  // succeed?
  //
  // Note that MemoryAllocator->MaxAvailable() undercounts the memory available
  // for object promotion. It counts only the bytes that the memory
  // allocator has not yet allocated from the OS and assigned to any space,
  // and does not count available bytes already in the old space or code
  // space.  Undercounting is safe---we may get an unrequested full GC when
  // a scavenge would have succeeded.
  if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) {
    Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
    return MARK_COMPACTOR;
  }

  // Default
  return SCAVENGER;
}


// TODO(1238405): Combine the infrastructure for --heap-stats and
// --log-gc to avoid the complicated preprocessor and flag testing.
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
void Heap::ReportStatisticsBeforeGC() {
  // Heap::ReportHeapStatistics will also log NewSpace statistics when
  // compiled with ENABLE_LOGGING_AND_PROFILING and --log-gc is set.  The
  // following logic is used to avoid double logging.
#if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
  if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
  if (FLAG_heap_stats) {
    ReportHeapStatistics("Before GC");
  } else if (FLAG_log_gc) {
    new_space_.ReportStatistics();
  }
  if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
#elif defined(DEBUG)
  if (FLAG_heap_stats) {
    new_space_.CollectStatistics();
    ReportHeapStatistics("Before GC");
    new_space_.ClearHistograms();
  }
#elif defined(ENABLE_LOGGING_AND_PROFILING)
  if (FLAG_log_gc) {
    new_space_.CollectStatistics();
    new_space_.ReportStatistics();
    new_space_.ClearHistograms();
  }
#endif
}


// TODO(1238405): Combine the infrastructure for --heap-stats and
// --log-gc to avoid the complicated preprocessor and flag testing.
void Heap::ReportStatisticsAfterGC() {
  // Similar to the before GC, we use some complicated logic to ensure that
  // NewSpace statistics are logged exactly once when --log-gc is turned on.
#if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
  if (FLAG_heap_stats) {
    ReportHeapStatistics("After GC");
  } else if (FLAG_log_gc) {
    new_space_.ReportStatistics();
  }
#elif defined(DEBUG)
  if (FLAG_heap_stats) ReportHeapStatistics("After GC");
#elif defined(ENABLE_LOGGING_AND_PROFILING)
  if (FLAG_log_gc) new_space_.ReportStatistics();
#endif
}
#endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)


void Heap::GarbageCollectionPrologue() {
  gc_count_++;
#ifdef DEBUG
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
  allow_allocation(false);

  if (FLAG_verify_heap) {
    Verify();
  }

  if (FLAG_gc_verbose) Print();

  if (FLAG_print_rset) {
    // Not all spaces have remembered set bits that we care about.
    old_pointer_space_->PrintRSet();
    map_space_->PrintRSet();
    lo_space_->PrintRSet();
  }
#endif

#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  ReportStatisticsBeforeGC();
#endif
}

int Heap::SizeOfObjects() {
  int total = 0;
  AllSpaces spaces;
  while (Space* space = spaces.next()) total += space->Size();
  return total;
}

void Heap::GarbageCollectionEpilogue() {
#ifdef DEBUG
  allow_allocation(true);
  ZapFromSpace();

  if (FLAG_verify_heap) {
    Verify();
  }

  if (FLAG_print_global_handles) GlobalHandles::Print();
  if (FLAG_print_handles) PrintHandles();
  if (FLAG_gc_verbose) Print();
  if (FLAG_code_stats) ReportCodeStatistics("After GC");
#endif

  Counters::alive_after_last_gc.Set(SizeOfObjects());

  SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table_);
  Counters::symbol_table_capacity.Set(symbol_table->Capacity());
  Counters::number_of_symbols.Set(symbol_table->NumberOfElements());
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  ReportStatisticsAfterGC();
#endif
}


void Heap::CollectAllGarbage() {
  // Since we are ignoring the return value, the exact choice of space does
  // not matter, so long as we do not specify NEW_SPACE, which would not
  // cause a full GC.
  CollectGarbage(0, OLD_POINTER_SPACE);
}


void Heap::CollectAllGarbageIfContextDisposed() {
  // If the garbage collector interface is exposed through the global
  // gc() function, we avoid being clever about forcing GCs when
  // contexts are disposed and leave it to the embedder to make
  // informed decisions about when to force a collection.
  if (!FLAG_expose_gc && context_disposed_pending_) {
    HistogramTimerScope scope(&Counters::gc_context);
    CollectAllGarbage();
  }
  context_disposed_pending_ = false;
}


void Heap::NotifyContextDisposed() {
  context_disposed_pending_ = true;
}


bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
  // The VM is in the GC state until exiting this function.
  VMState state(GC);

#ifdef DEBUG
  // Reset the allocation timeout to the GC interval, but make sure to
  // allow at least a few allocations after a collection. The reason
  // for this is that we have a lot of allocation sequences and we
  // assume that a garbage collection will allow the subsequent
  // allocation attempts to go through.
  allocation_timeout_ = Max(6, FLAG_gc_interval);
#endif

  { GCTracer tracer;
    GarbageCollectionPrologue();
    // The GC count was incremented in the prologue.  Tell the tracer about
    // it.
    tracer.set_gc_count(gc_count_);

    GarbageCollector collector = SelectGarbageCollector(space);
    // Tell the tracer which collector we've selected.
    tracer.set_collector(collector);

    HistogramTimer* rate = (collector == SCAVENGER)
        ? &Counters::gc_scavenger
        : &Counters::gc_compactor;
    rate->Start();
    PerformGarbageCollection(space, collector, &tracer);
    rate->Stop();

    GarbageCollectionEpilogue();
  }


#ifdef ENABLE_LOGGING_AND_PROFILING
  if (FLAG_log_gc) HeapProfiler::WriteSample();
#endif

  switch (space) {
    case NEW_SPACE:
      return new_space_.Available() >= requested_size;
    case OLD_POINTER_SPACE:
      return old_pointer_space_->Available() >= requested_size;
    case OLD_DATA_SPACE:
      return old_data_space_->Available() >= requested_size;
    case CODE_SPACE:
      return code_space_->Available() >= requested_size;
    case MAP_SPACE:
      return map_space_->Available() >= requested_size;
    case LO_SPACE:
      return lo_space_->Available() >= requested_size;
  }
  return false;
}


void Heap::PerformScavenge() {
  GCTracer tracer;
  PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer);
}


#ifdef DEBUG
// Helper class for verifying the symbol table.
class SymbolTableVerifier : public ObjectVisitor {
 public:
  SymbolTableVerifier() { }
  void VisitPointers(Object** start, Object** end) {
    // Visit all HeapObject pointers in [start, end).
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject()) {
        // Check that the symbol is actually a symbol.
        ASSERT((*p)->IsNull() || (*p)->IsUndefined() || (*p)->IsSymbol());
      }
    }
  }
};
#endif  // DEBUG


static void VerifySymbolTable() {
#ifdef DEBUG
  SymbolTableVerifier verifier;
  SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table());
  symbol_table->IterateElements(&verifier);
#endif  // DEBUG
}


void Heap::PerformGarbageCollection(AllocationSpace space,
                                    GarbageCollector collector,
                                    GCTracer* tracer) {
  VerifySymbolTable();
  if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
    ASSERT(!allocation_allowed_);
    global_gc_prologue_callback_();
  }

  if (collector == MARK_COMPACTOR) {
    MarkCompact(tracer);

    int old_gen_size = PromotedSpaceSize();
    old_gen_promotion_limit_ =
        old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
    old_gen_allocation_limit_ =
        old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 3);
    old_gen_exhausted_ = false;

    // If we have used the mark-compact collector to collect the new
    // space, and it has not compacted the new space, we force a
    // separate scavenge collection.  This is a hack.  It covers the
    // case where (1) a new space collection was requested, (2) the
    // collector selection policy selected the mark-compact collector,
    // and (3) the mark-compact collector policy selected not to
    // compact the new space.  In that case, there is no more (usable)
    // free space in the new space after the collection compared to
    // before.
    if (space == NEW_SPACE && !MarkCompactCollector::HasCompacted()) {
      Scavenge();
    }
  } else {
    Scavenge();
  }
  Counters::objs_since_last_young.Set(0);

  PostGarbageCollectionProcessing();

  if (collector == MARK_COMPACTOR) {
    // Register the amount of external allocated memory.
    amount_of_external_allocated_memory_at_last_global_gc_ =
        amount_of_external_allocated_memory_;
  }

  if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
    ASSERT(!allocation_allowed_);
    global_gc_epilogue_callback_();
  }
  VerifySymbolTable();
}


void Heap::PostGarbageCollectionProcessing() {
  // Process weak handles post gc.
  GlobalHandles::PostGarbageCollectionProcessing();
  // Update flat string readers.
  FlatStringReader::PostGarbageCollectionProcessing();
}


void Heap::MarkCompact(GCTracer* tracer) {
  gc_state_ = MARK_COMPACT;
  mc_count_++;
  tracer->set_full_gc_count(mc_count_);
  LOG(ResourceEvent("markcompact", "begin"));

  MarkCompactCollector::Prepare(tracer);

  bool is_compacting = MarkCompactCollector::IsCompacting();

  MarkCompactPrologue(is_compacting);

  MarkCompactCollector::CollectGarbage();

  MarkCompactEpilogue(is_compacting);

  LOG(ResourceEvent("markcompact", "end"));

  gc_state_ = NOT_IN_GC;

  Shrink();

  Counters::objs_since_last_full.Set(0);
  context_disposed_pending_ = false;
}


void Heap::MarkCompactPrologue(bool is_compacting) {
  // At any old GC clear the keyed lookup cache to enable collection of unused
  // maps.
  ClearKeyedLookupCache();

  CompilationCache::MarkCompactPrologue();

  Top::MarkCompactPrologue(is_compacting);
  ThreadManager::MarkCompactPrologue(is_compacting);
}


void Heap::MarkCompactEpilogue(bool is_compacting) {
  Top::MarkCompactEpilogue(is_compacting);
  ThreadManager::MarkCompactEpilogue(is_compacting);
}


Object* Heap::FindCodeObject(Address a) {
  Object* obj = code_space_->FindObject(a);
  if (obj->IsFailure()) {
    obj = lo_space_->FindObject(a);
  }
  ASSERT(!obj->IsFailure());
  return obj;
}


// Helper class for copying HeapObjects
class ScavengeVisitor: public ObjectVisitor {
 public:

  void VisitPointer(Object** p) { ScavengePointer(p); }

  void VisitPointers(Object** start, Object** end) {
    // Copy all HeapObject pointers in [start, end)
    for (Object** p = start; p < end; p++) ScavengePointer(p);
  }

 private:
  void ScavengePointer(Object** p) {
    Object* object = *p;
    if (!Heap::InNewSpace(object)) return;
    Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                         reinterpret_cast<HeapObject*>(object));
  }
};


// Shared state read by the scavenge collector and set by ScavengeObject.
static Address promoted_top = NULL;


#ifdef DEBUG
// Visitor class to verify pointers in code or data space do not point into
// new space.
class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        ASSERT(!Heap::InNewSpace(HeapObject::cast(*current)));
      }
    }
  }
};
#endif

void Heap::Scavenge() {
#ifdef DEBUG
  if (FLAG_enable_slow_asserts) {
    VerifyNonPointerSpacePointersVisitor v;
    HeapObjectIterator it(code_space_);
    while (it.has_next()) {
      HeapObject* object = it.next();
      if (object->IsCode()) {
        Code::cast(object)->ConvertICTargetsFromAddressToObject();
      }
      object->Iterate(&v);
      if (object->IsCode()) {
        Code::cast(object)->ConvertICTargetsFromObjectToAddress();
      }
    }
  }
#endif

  gc_state_ = SCAVENGE;

  // Implements Cheney's copying algorithm
  LOG(ResourceEvent("scavenge", "begin"));

  scavenge_count_++;
  if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
      scavenge_count_ > new_space_growth_limit_) {
    // Double the size of the new space, and double the limit.  The next
    // doubling attempt will occur after the current new_space_growth_limit_
    // more collections.
    // TODO(1240712): NewSpace::Double has a return value which is
    // ignored here.
    new_space_.Double();
    new_space_growth_limit_ *= 2;
  }

  // Flip the semispaces.  After flipping, to space is empty, from space has
  // live objects.
  new_space_.Flip();
  new_space_.ResetAllocationInfo();

  // We need to sweep newly copied objects which can be in either the to space
  // or the old space.  For to space objects, we use a mark.  Newly copied
  // objects lie between the mark and the allocation top.  For objects
  // promoted to old space, we write their addresses downward from the top of
  // the new space.  Sweeping newly promoted objects requires an allocation
  // pointer and a mark.  Note that the allocation pointer 'top' actually
  // moves downward from the high address in the to space.
  //
  // There is guaranteed to be enough room at the top of the to space for the
  // addresses of promoted objects: every object promoted frees up its size in
  // bytes from the top of the new space, and objects are at least one pointer
  // in size.  Using the new space to record promoted addresses makes the
  // scavenge collector agnostic to the allocation strategy (eg, linear or
  // free-list) used in old space.
  Address new_mark = new_space_.ToSpaceLow();
  Address promoted_mark = new_space_.ToSpaceHigh();
  promoted_top = new_space_.ToSpaceHigh();

  ScavengeVisitor scavenge_visitor;
  // Copy roots.
  IterateRoots(&scavenge_visitor);

  // Copy objects reachable from the old generation.  By definition, there
  // are no intergenerational pointers in code or data spaces.
  IterateRSet(old_pointer_space_, &ScavengePointer);
  IterateRSet(map_space_, &ScavengePointer);
  lo_space_->IterateRSet(&ScavengePointer);

  bool has_processed_weak_pointers = false;

  while (true) {
    ASSERT(new_mark <= new_space_.top());
    ASSERT(promoted_mark >= promoted_top);

    // Copy objects reachable from newly copied objects.
    while (new_mark < new_space_.top() || promoted_mark > promoted_top) {
      // Sweep newly copied objects in the to space.  The allocation pointer
      // can change during sweeping.
      Address previous_top = new_space_.top();
      SemiSpaceIterator new_it(new_space(), new_mark);
      while (new_it.has_next()) {
        new_it.next()->Iterate(&scavenge_visitor);
      }
      new_mark = previous_top;

      // Sweep newly copied objects in the old space.  The promotion 'top'
      // pointer could change during sweeping.
      previous_top = promoted_top;
      for (Address current = promoted_mark - kPointerSize;
           current >= previous_top;
           current -= kPointerSize) {
        HeapObject* object = HeapObject::cast(Memory::Object_at(current));
        object->Iterate(&scavenge_visitor);
        UpdateRSet(object);
      }
      promoted_mark = previous_top;
    }

    if (has_processed_weak_pointers) break;  // We are done.
    // Copy objects reachable from weak pointers.
    GlobalHandles::IterateWeakRoots(&scavenge_visitor);
    has_processed_weak_pointers = true;
  }

  // Set age mark.
  new_space_.set_age_mark(new_mark);

  LOG(ResourceEvent("scavenge", "end"));

  gc_state_ = NOT_IN_GC;
}
670

    
671

    
672
void Heap::ClearRSetRange(Address start, int size_in_bytes) {
673
  uint32_t start_bit;
674
  Address start_word_address =
675
      Page::ComputeRSetBitPosition(start, 0, &start_bit);
676
  uint32_t end_bit;
677
  Address end_word_address =
678
      Page::ComputeRSetBitPosition(start + size_in_bytes - kIntSize,
679
                                   0,
680
                                   &end_bit);
681

    
682
  // We want to clear the bits in the starting word starting with the
683
  // first bit, and in the ending word up to and including the last
684
  // bit.  Build a pair of bitmasks to do that.
685
  uint32_t start_bitmask = start_bit - 1;
686
  uint32_t end_bitmask = ~((end_bit << 1) - 1);
687

    
688
  // If the start address and end address are the same, we mask that
689
  // word once, otherwise mask the starting and ending word
690
  // separately and all the ones in between.
691
  if (start_word_address == end_word_address) {
692
    Memory::uint32_at(start_word_address) &= (start_bitmask | end_bitmask);
693
  } else {
694
    Memory::uint32_at(start_word_address) &= start_bitmask;
695
    Memory::uint32_at(end_word_address) &= end_bitmask;
696
    start_word_address += kIntSize;
697
    memset(start_word_address, 0, end_word_address - start_word_address);
698
  }
699
}
700

    
701

    
702
class UpdateRSetVisitor: public ObjectVisitor {
703
 public:
704

    
705
  void VisitPointer(Object** p) {
706
    UpdateRSet(p);
707
  }
708

    
709
  void VisitPointers(Object** start, Object** end) {
710
    // Update a store into slots [start, end), used (a) to update remembered
711
    // set when promoting a young object to old space or (b) to rebuild
712
    // remembered sets after a mark-compact collection.
713
    for (Object** p = start; p < end; p++) UpdateRSet(p);
714
  }
715
 private:
716

    
717
  void UpdateRSet(Object** p) {
718
    // The remembered set should not be set.  It should be clear for objects
719
    // newly copied to old space, and it is cleared before rebuilding in the
720
    // mark-compact collector.
721
    ASSERT(!Page::IsRSetSet(reinterpret_cast<Address>(p), 0));
722
    if (Heap::InNewSpace(*p)) {
723
      Page::SetRSet(reinterpret_cast<Address>(p), 0);
724
    }
725
  }
726
};
727

    
728

    
729
int Heap::UpdateRSet(HeapObject* obj) {
730
  ASSERT(!InNewSpace(obj));
731
  // Special handling of fixed arrays to iterate the body based on the start
732
  // address and offset.  Just iterating the pointers as in UpdateRSetVisitor
733
  // will not work because Page::SetRSet needs to have the start of the
734
  // object.
735
  if (obj->IsFixedArray()) {
736
    FixedArray* array = FixedArray::cast(obj);
737
    int length = array->length();
738
    for (int i = 0; i < length; i++) {
739
      int offset = FixedArray::kHeaderSize + i * kPointerSize;
740
      ASSERT(!Page::IsRSetSet(obj->address(), offset));
741
      if (Heap::InNewSpace(array->get(i))) {
742
        Page::SetRSet(obj->address(), offset);
743
      }
744
    }
745
  } else if (!obj->IsCode()) {
746
    // Skip code object, we know it does not contain inter-generational
747
    // pointers.
748
    UpdateRSetVisitor v;
749
    obj->Iterate(&v);
750
  }
751
  return obj->Size();
752
}
753

    
754

    
755
void Heap::RebuildRSets() {
756
  // By definition, we do not care about remembered set bits in code or data
757
  // spaces.
758
  map_space_->ClearRSet();
759
  RebuildRSets(map_space_);
760

    
761
  old_pointer_space_->ClearRSet();
762
  RebuildRSets(old_pointer_space_);
763

    
764
  Heap::lo_space_->ClearRSet();
765
  RebuildRSets(lo_space_);
766
}
767

    
768

    
769
void Heap::RebuildRSets(PagedSpace* space) {
770
  HeapObjectIterator it(space);
771
  while (it.has_next()) Heap::UpdateRSet(it.next());
772
}
773

    
774

    
775
void Heap::RebuildRSets(LargeObjectSpace* space) {
776
  LargeObjectIterator it(space);
777
  while (it.has_next()) Heap::UpdateRSet(it.next());
778
}
779

    
780

    
781
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
782
void Heap::RecordCopiedObject(HeapObject* obj) {
783
  bool should_record = false;
784
#ifdef DEBUG
785
  should_record = FLAG_heap_stats;
786
#endif
787
#ifdef ENABLE_LOGGING_AND_PROFILING
788
  should_record = should_record || FLAG_log_gc;
789
#endif
790
  if (should_record) {
791
    if (new_space_.Contains(obj)) {
792
      new_space_.RecordAllocation(obj);
793
    } else {
794
      new_space_.RecordPromotion(obj);
795
    }
796
  }
797
}
798
#endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
799

    
800

    
801

    
802
HeapObject* Heap::MigrateObject(HeapObject* source,
803
                                HeapObject* target,
804
                                int size) {
805
  // Copy the content of source to target.
806
  CopyBlock(reinterpret_cast<Object**>(target->address()),
807
            reinterpret_cast<Object**>(source->address()),
808
            size);
809

    
810
  // Set the forwarding address.
811
  source->set_map_word(MapWord::FromForwardingAddress(target));
812

    
813
  // Update NewSpace stats if necessary.
814
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
815
  RecordCopiedObject(target);
816
#endif
817

    
818
  return target;
819
}
820

    
821

    
822
// Inlined function.
823
void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
824
  ASSERT(InFromSpace(object));
825

    
826
  // We use the first word (where the map pointer usually is) of a heap
827
  // object to record the forwarding pointer.  A forwarding pointer can
828
  // point to an old space, the code space, or the to space of the new
829
  // generation.
830
  MapWord first_word = object->map_word();
831

    
832
  // If the first word is a forwarding address, the object has already been
833
  // copied.
834
  if (first_word.IsForwardingAddress()) {
835
    *p = first_word.ToForwardingAddress();
836
    return;
837
  }
838

    
839
  // Call the slow part of scavenge object.
840
  return ScavengeObjectSlow(p, object);
841
}
842

    
843

    
844
static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
845
  STATIC_ASSERT(kNotStringTag != 0 && kSymbolTag != 0);
846
  ASSERT(object->map() == map);
847
  InstanceType type = map->instance_type();
848
  if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
849
  ASSERT(object->IsString() && !object->IsSymbol());
850
  return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
851
}
852

    
853

    
854
void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
855
  ASSERT(InFromSpace(object));
856
  MapWord first_word = object->map_word();
857
  ASSERT(!first_word.IsForwardingAddress());
858

    
859
  // Optimization: Bypass flattened ConsString objects.
860
  if (IsShortcutCandidate(object, first_word.ToMap())) {
861
    object = HeapObject::cast(ConsString::cast(object)->unchecked_first());
862
    *p = object;
863
    // After patching *p we have to repeat the checks that object is in the
864
    // active semispace of the young generation and not already copied.
865
    if (!InNewSpace(object)) return;
866
    first_word = object->map_word();
867
    if (first_word.IsForwardingAddress()) {
868
      *p = first_word.ToForwardingAddress();
869
      return;
870
    }
871
  }
872

    
873
  int object_size = object->SizeFromMap(first_word.ToMap());
874
  // If the object should be promoted, we try to copy it to old space.
875
  if (ShouldBePromoted(object->address(), object_size)) {
876
    OldSpace* target_space = Heap::TargetSpace(object);
877
    ASSERT(target_space == Heap::old_pointer_space_ ||
878
           target_space == Heap::old_data_space_);
879
    Object* result = target_space->AllocateRaw(object_size);
880
    if (!result->IsFailure()) {
881
      *p = MigrateObject(object, HeapObject::cast(result), object_size);
882
      if (target_space == Heap::old_pointer_space_) {
883
        // Record the object's address at the top of the to space, to allow
884
        // it to be swept by the scavenger.
885
        promoted_top -= kPointerSize;
886
        Memory::Object_at(promoted_top) = *p;
887
      } else {
888
#ifdef DEBUG
889
        // Objects promoted to the data space should not have pointers to
890
        // new space.
891
        VerifyNonPointerSpacePointersVisitor v;
892
        (*p)->Iterate(&v);
893
#endif
894
      }
895
      return;
896
    }
897
  }
898

    
899
  // The object should remain in new space or the old space allocation failed.
900
  Object* result = new_space_.AllocateRaw(object_size);
901
  // Failed allocation at this point is utterly unexpected.
902
  ASSERT(!result->IsFailure());
903
  *p = MigrateObject(object, HeapObject::cast(result), object_size);
904
}
905

    
906

    
907
void Heap::ScavengePointer(HeapObject** p) {
908
  ScavengeObject(p, *p);
909
}
910

    
911

    
912
Object* Heap::AllocatePartialMap(InstanceType instance_type,
913
                                 int instance_size) {
914
  Object* result = AllocateRawMap(Map::kSize);
915
  if (result->IsFailure()) return result;
916

    
917
  // Map::cast cannot be used due to uninitialized map field.
918
  reinterpret_cast<Map*>(result)->set_map(meta_map());
919
  reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
920
  reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
921
  reinterpret_cast<Map*>(result)->set_inobject_properties(0);
922
  reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
923
  return result;
924
}
925

    
926

    
927
Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
928
  Object* result = AllocateRawMap(Map::kSize);
929
  if (result->IsFailure()) return result;
930

    
931
  Map* map = reinterpret_cast<Map*>(result);
932
  map->set_map(meta_map());
933
  map->set_instance_type(instance_type);
934
  map->set_prototype(null_value());
935
  map->set_constructor(null_value());
936
  map->set_instance_size(instance_size);
937
  map->set_inobject_properties(0);
938
  map->set_instance_descriptors(empty_descriptor_array());
939
  map->set_code_cache(empty_fixed_array());
940
  map->set_unused_property_fields(0);
941
  map->set_bit_field(0);
942
  return map;
943
}
944

    
945

    
946
bool Heap::CreateInitialMaps() {
947
  Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
948
  if (obj->IsFailure()) return false;
949

    
950
  // Map::cast cannot be used due to uninitialized map field.
951
  meta_map_ = reinterpret_cast<Map*>(obj);
952
  meta_map()->set_map(meta_map());
953

    
954
  obj = AllocatePartialMap(FIXED_ARRAY_TYPE, Array::kHeaderSize);
955
  if (obj->IsFailure()) return false;
956
  fixed_array_map_ = Map::cast(obj);
957

    
958
  obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize);
959
  if (obj->IsFailure()) return false;
960
  oddball_map_ = Map::cast(obj);
961

    
962
  // Allocate the empty array
963
  obj = AllocateEmptyFixedArray();
964
  if (obj->IsFailure()) return false;
965
  empty_fixed_array_ = FixedArray::cast(obj);
966

    
967
  obj = Allocate(oddball_map(), OLD_DATA_SPACE);
968
  if (obj->IsFailure()) return false;
969
  null_value_ = obj;
970

    
971
  // Allocate the empty descriptor array.  AllocateMap can now be used.
972
  obj = AllocateEmptyFixedArray();
973
  if (obj->IsFailure()) return false;
974
  // There is a check against empty_descriptor_array() in cast().
975
  empty_descriptor_array_ = reinterpret_cast<DescriptorArray*>(obj);
976

    
977
  // Fix the instance_descriptors for the existing maps.
978
  meta_map()->set_instance_descriptors(empty_descriptor_array());
979
  meta_map()->set_code_cache(empty_fixed_array());
980

    
981
  fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
982
  fixed_array_map()->set_code_cache(empty_fixed_array());
983

    
984
  oddball_map()->set_instance_descriptors(empty_descriptor_array());
985
  oddball_map()->set_code_cache(empty_fixed_array());
986

    
987
  // Fix prototype object for existing maps.
988
  meta_map()->set_prototype(null_value());
989
  meta_map()->set_constructor(null_value());
990

    
991
  fixed_array_map()->set_prototype(null_value());
992
  fixed_array_map()->set_constructor(null_value());
993
  oddball_map()->set_prototype(null_value());
994
  oddball_map()->set_constructor(null_value());
995

    
996
  obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
997
  if (obj->IsFailure()) return false;
998
  heap_number_map_ = Map::cast(obj);
999

    
1000
  obj = AllocateMap(PROXY_TYPE, Proxy::kSize);
1001
  if (obj->IsFailure()) return false;
1002
  proxy_map_ = Map::cast(obj);
1003

    
1004
#define ALLOCATE_STRING_MAP(type, size, name)   \
1005
    obj = AllocateMap(type, size);              \
1006
    if (obj->IsFailure()) return false;         \
1007
    name##_map_ = Map::cast(obj);
1008
  STRING_TYPE_LIST(ALLOCATE_STRING_MAP);
1009
#undef ALLOCATE_STRING_MAP
1010

    
1011
  obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kHeaderSize);
1012
  if (obj->IsFailure()) return false;
1013
  undetectable_short_string_map_ = Map::cast(obj);
1014
  undetectable_short_string_map_->set_is_undetectable();
1015

    
1016
  obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kHeaderSize);
1017
  if (obj->IsFailure()) return false;
1018
  undetectable_medium_string_map_ = Map::cast(obj);
1019
  undetectable_medium_string_map_->set_is_undetectable();
1020

    
1021
  obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kHeaderSize);
1022
  if (obj->IsFailure()) return false;
1023
  undetectable_long_string_map_ = Map::cast(obj);
1024
  undetectable_long_string_map_->set_is_undetectable();
1025

    
1026
  obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
1027
  if (obj->IsFailure()) return false;
1028
  undetectable_short_ascii_string_map_ = Map::cast(obj);
1029
  undetectable_short_ascii_string_map_->set_is_undetectable();
1030

    
1031
  obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
1032
  if (obj->IsFailure()) return false;
1033
  undetectable_medium_ascii_string_map_ = Map::cast(obj);
1034
  undetectable_medium_ascii_string_map_->set_is_undetectable();
1035

    
1036
  obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
1037
  if (obj->IsFailure()) return false;
1038
  undetectable_long_ascii_string_map_ = Map::cast(obj);
1039
  undetectable_long_ascii_string_map_->set_is_undetectable();
1040

    
1041
  obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kHeaderSize);
1042
  if (obj->IsFailure()) return false;
1043
  byte_array_map_ = Map::cast(obj);
1044

    
1045
  obj = AllocateMap(CODE_TYPE, Code::kHeaderSize);
1046
  if (obj->IsFailure()) return false;
1047
  code_map_ = Map::cast(obj);
1048

    
1049
  obj = AllocateMap(FILLER_TYPE, kPointerSize);
1050
  if (obj->IsFailure()) return false;
1051
  one_word_filler_map_ = Map::cast(obj);
1052

    
1053
  obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
1054
  if (obj->IsFailure()) return false;
1055
  two_word_filler_map_ = Map::cast(obj);
1056

    
1057
#define ALLOCATE_STRUCT_MAP(NAME, Name, name)      \
1058
  obj = AllocateMap(NAME##_TYPE, Name::kSize);     \
1059
  if (obj->IsFailure()) return false;              \
1060
  name##_map_ = Map::cast(obj);
1061
  STRUCT_LIST(ALLOCATE_STRUCT_MAP)
1062
#undef ALLOCATE_STRUCT_MAP
1063

    
1064
  obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1065
  if (obj->IsFailure()) return false;
1066
  hash_table_map_ = Map::cast(obj);
1067

    
1068
  obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1069
  if (obj->IsFailure()) return false;
1070
  context_map_ = Map::cast(obj);
1071

    
1072
  obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1073
  if (obj->IsFailure()) return false;
1074
  catch_context_map_ = Map::cast(obj);
1075

    
1076
  obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
1077
  if (obj->IsFailure()) return false;
1078
  global_context_map_ = Map::cast(obj);
1079

    
1080
  obj = AllocateMap(JS_FUNCTION_TYPE, JSFunction::kSize);
1081
  if (obj->IsFailure()) return false;
1082
  boilerplate_function_map_ = Map::cast(obj);
1083

    
1084
  obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kSize);
1085
  if (obj->IsFailure()) return false;
1086
  shared_function_info_map_ = Map::cast(obj);
1087

    
1088
  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
1089
  return true;
1090
}
1091

    
1092

    
1093
Object* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
1094
  // Statically ensure that it is safe to allocate heap numbers in paged
1095
  // spaces.
1096
  STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
1097
  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
1098
  Object* result = AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
1099
  if (result->IsFailure()) return result;
1100

    
1101
  HeapObject::cast(result)->set_map(heap_number_map());
1102
  HeapNumber::cast(result)->set_value(value);
1103
  return result;
1104
}
1105

    
1106

    
1107
Object* Heap::AllocateHeapNumber(double value) {
1108
  // Use general version, if we're forced to always allocate.
1109
  if (always_allocate()) return AllocateHeapNumber(value, NOT_TENURED);
1110
  // This version of AllocateHeapNumber is optimized for
1111
  // allocation in new space.
1112
  STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
1113
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
1114
  Object* result = new_space_.AllocateRaw(HeapNumber::kSize);
1115
  if (result->IsFailure()) return result;
1116
  HeapObject::cast(result)->set_map(heap_number_map());
1117
  HeapNumber::cast(result)->set_value(value);
1118
  return result;
1119
}
1120

    
1121

    
1122
Object* Heap::CreateOddball(Map* map,
1123
                            const char* to_string,
1124
                            Object* to_number) {
1125
  Object* result = Allocate(map, OLD_DATA_SPACE);
1126
  if (result->IsFailure()) return result;
1127
  return Oddball::cast(result)->Initialize(to_string, to_number);
1128
}
1129

    
1130

    
1131
bool Heap::CreateApiObjects() {
1132
  Object* obj;
1133

    
1134
  obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
1135
  if (obj->IsFailure()) return false;
1136
  neander_map_ = Map::cast(obj);
1137

    
1138
  obj = Heap::AllocateJSObjectFromMap(neander_map_);
1139
  if (obj->IsFailure()) return false;
1140
  Object* elements = AllocateFixedArray(2);
1141
  if (elements->IsFailure()) return false;
1142
  FixedArray::cast(elements)->set(0, Smi::FromInt(0));
1143
  JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
1144
  message_listeners_ = JSObject::cast(obj);
1145

    
1146
  return true;
1147
}
1148

    
1149
void Heap::CreateFixedStubs() {
1150
  // Here we create roots for fixed stubs. They are needed at GC
1151
  // for cooking and uncooking (check out frames.cc).
1152
  // The eliminates the need for doing dictionary lookup in the
1153
  // stub cache for these stubs.
1154
  HandleScope scope;
1155
  {
1156
    CEntryStub stub;
1157
    c_entry_code_ = *stub.GetCode();
1158
  }
1159
  {
1160
    CEntryDebugBreakStub stub;
1161
    c_entry_debug_break_code_ = *stub.GetCode();
1162
  }
1163
  {
1164
    JSEntryStub stub;
1165
    js_entry_code_ = *stub.GetCode();
1166
  }
1167
  {
1168
    JSConstructEntryStub stub;
1169
    js_construct_entry_code_ = *stub.GetCode();
1170
  }
1171
}
1172

    
1173

    
1174
bool Heap::CreateInitialObjects() {
1175
  Object* obj;
1176

    
1177
  // The -0 value must be set before NumberFromDouble works.
1178
  obj = AllocateHeapNumber(-0.0, TENURED);
1179
  if (obj->IsFailure()) return false;
1180
  minus_zero_value_ = obj;
1181
  ASSERT(signbit(minus_zero_value_->Number()) != 0);
1182

    
1183
  obj = AllocateHeapNumber(OS::nan_value(), TENURED);
1184
  if (obj->IsFailure()) return false;
1185
  nan_value_ = obj;
1186

    
1187
  obj = Allocate(oddball_map(), OLD_DATA_SPACE);
1188
  if (obj->IsFailure()) return false;
1189
  undefined_value_ = obj;
1190
  ASSERT(!InNewSpace(undefined_value()));
1191

    
1192
  // Allocate initial symbol table.
1193
  obj = SymbolTable::Allocate(kInitialSymbolTableSize);
1194
  if (obj->IsFailure()) return false;
1195
  symbol_table_ = obj;
1196

    
1197
  // Assign the print strings for oddballs after creating symboltable.
1198
  Object* symbol = LookupAsciiSymbol("undefined");
1199
  if (symbol->IsFailure()) return false;
1200
  Oddball::cast(undefined_value_)->set_to_string(String::cast(symbol));
1201
  Oddball::cast(undefined_value_)->set_to_number(nan_value_);
1202

    
1203
  // Assign the print strings for oddballs after creating symboltable.
1204
  symbol = LookupAsciiSymbol("null");
1205
  if (symbol->IsFailure()) return false;
1206
  Oddball::cast(null_value_)->set_to_string(String::cast(symbol));
1207
  Oddball::cast(null_value_)->set_to_number(Smi::FromInt(0));
1208

    
1209
  // Allocate the null_value
1210
  obj = Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
1211
  if (obj->IsFailure()) return false;
1212

    
1213
  obj = CreateOddball(oddball_map(), "true", Smi::FromInt(1));
1214
  if (obj->IsFailure()) return false;
1215
  true_value_ = obj;
1216

    
1217
  obj = CreateOddball(oddball_map(), "false", Smi::FromInt(0));
1218
  if (obj->IsFailure()) return false;
1219
  false_value_ = obj;
1220

    
1221
  obj = CreateOddball(oddball_map(), "hole", Smi::FromInt(-1));
1222
  if (obj->IsFailure()) return false;
1223
  the_hole_value_ = obj;
1224

    
1225
  // Allocate the empty string.
1226
  obj = AllocateRawAsciiString(0, TENURED);
1227
  if (obj->IsFailure()) return false;
1228
  empty_string_ = String::cast(obj);
1229

    
1230
#define SYMBOL_INITIALIZE(name, string)                 \
1231
  obj = LookupAsciiSymbol(string);                      \
1232
  if (obj->IsFailure()) return false;                   \
1233
  (name##_) = String::cast(obj);
1234
  SYMBOL_LIST(SYMBOL_INITIALIZE)
1235
#undef SYMBOL_INITIALIZE
1236

    
1237
  // Allocate the hidden symbol which is used to identify the hidden properties
1238
  // in JSObjects. The hash code has a special value so that it will not match
1239
  // the empty string when searching for the property. It cannot be part of the
1240
  // SYMBOL_LIST because it needs to be allocated manually with the special
1241
  // hash code in place. The hash code for the hidden_symbol is zero to ensure
1242
  // that it will always be at the first entry in property descriptors.
1243
  obj = AllocateSymbol(CStrVector(""), 0, String::kHashComputedMask);
1244
  if (obj->IsFailure()) return false;
1245
  hidden_symbol_ = String::cast(obj);
1246

    
1247
  // Allocate the proxy for __proto__.
1248
  obj = AllocateProxy((Address) &Accessors::ObjectPrototype);
1249
  if (obj->IsFailure()) return false;
1250
  prototype_accessors_ = Proxy::cast(obj);
1251

    
1252
  // Allocate the code_stubs dictionary.
1253
  obj = Dictionary::Allocate(4);
1254
  if (obj->IsFailure()) return false;
1255
  code_stubs_ = Dictionary::cast(obj);
1256

    
1257
  // Allocate the non_monomorphic_cache used in stub-cache.cc
1258
  obj = Dictionary::Allocate(4);
1259
  if (obj->IsFailure()) return false;
1260
  non_monomorphic_cache_ =  Dictionary::cast(obj);
1261

    
1262
  CreateFixedStubs();
1263

    
1264
  // Allocate the number->string conversion cache
1265
  obj = AllocateFixedArray(kNumberStringCacheSize * 2);
1266
  if (obj->IsFailure()) return false;
1267
  number_string_cache_ = FixedArray::cast(obj);
1268

    
1269
  // Allocate cache for single character strings.
1270
  obj = AllocateFixedArray(String::kMaxAsciiCharCode+1);
1271
  if (obj->IsFailure()) return false;
1272
  single_character_string_cache_ = FixedArray::cast(obj);
1273

    
1274
  // Allocate cache for external strings pointing to native source code.
1275
  obj = AllocateFixedArray(Natives::GetBuiltinsCount());
1276
  if (obj->IsFailure()) return false;
1277
  natives_source_cache_ = FixedArray::cast(obj);
1278

    
1279
  // Handling of script id generation is in Factory::NewScript.
1280
  last_script_id_ = undefined_value();
1281

    
1282
  // Initialize keyed lookup cache.
1283
  ClearKeyedLookupCache();
1284

    
1285
  // Initialize compilation cache.
1286
  CompilationCache::Clear();
1287

    
1288
  return true;
1289
}
1290

    
1291

    
1292
static inline int double_get_hash(double d) {
1293
  DoubleRepresentation rep(d);
1294
  return ((static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32)) &
1295
          (Heap::kNumberStringCacheSize - 1));
1296
}
1297

    
1298

    
1299
static inline int smi_get_hash(Smi* smi) {
1300
  return (smi->value() & (Heap::kNumberStringCacheSize - 1));
1301
}
1302

    
1303

    
1304

    
1305
Object* Heap::GetNumberStringCache(Object* number) {
1306
  int hash;
1307
  if (number->IsSmi()) {
1308
    hash = smi_get_hash(Smi::cast(number));
1309
  } else {
1310
    hash = double_get_hash(number->Number());
1311
  }
1312
  Object* key = number_string_cache_->get(hash * 2);
1313
  if (key == number) {
1314
    return String::cast(number_string_cache_->get(hash * 2 + 1));
1315
  } else if (key->IsHeapNumber() &&
1316
             number->IsHeapNumber() &&
1317
             key->Number() == number->Number()) {
1318
    return String::cast(number_string_cache_->get(hash * 2 + 1));
1319
  }
1320
  return undefined_value();
1321
}
1322

    
1323

    
1324
void Heap::SetNumberStringCache(Object* number, String* string) {
1325
  int hash;
1326
  if (number->IsSmi()) {
1327
    hash = smi_get_hash(Smi::cast(number));
1328
    number_string_cache_->set(hash * 2, number, SKIP_WRITE_BARRIER);
1329
  } else {
1330
    hash = double_get_hash(number->Number());
1331
    number_string_cache_->set(hash * 2, number);
1332
  }
1333
  number_string_cache_->set(hash * 2 + 1, string);
1334
}
1335

    
1336

    
1337
Object* Heap::SmiOrNumberFromDouble(double value,
1338
                                    bool new_object,
1339
                                    PretenureFlag pretenure) {
1340
  // We need to distinguish the minus zero value and this cannot be
1341
  // done after conversion to int. Doing this by comparing bit
1342
  // patterns is faster than using fpclassify() et al.
1343
  static const DoubleRepresentation plus_zero(0.0);
1344
  static const DoubleRepresentation minus_zero(-0.0);
1345
  static const DoubleRepresentation nan(OS::nan_value());
1346
  ASSERT(minus_zero_value_ != NULL);
1347
  ASSERT(sizeof(plus_zero.value) == sizeof(plus_zero.bits));
1348

    
1349
  DoubleRepresentation rep(value);
1350
  if (rep.bits == plus_zero.bits) return Smi::FromInt(0);  // not uncommon
1351
  if (rep.bits == minus_zero.bits) {
1352
    return new_object ? AllocateHeapNumber(-0.0, pretenure)
1353
                      : minus_zero_value_;
1354
  }
1355
  if (rep.bits == nan.bits) {
1356
    return new_object
1357
        ? AllocateHeapNumber(OS::nan_value(), pretenure)
1358
        : nan_value_;
1359
  }
1360

    
1361
  // Try to represent the value as a tagged small integer.
1362
  int int_value = FastD2I(value);
1363
  if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
1364
    return Smi::FromInt(int_value);
1365
  }
1366

    
1367
  // Materialize the value in the heap.
1368
  return AllocateHeapNumber(value, pretenure);
1369
}
1370

    
1371

    
1372
Object* Heap::NewNumberFromDouble(double value, PretenureFlag pretenure) {
1373
  return SmiOrNumberFromDouble(value,
1374
                               true /* number object must be new */,
1375
                               pretenure);
1376
}
1377

    
1378

    
1379
Object* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
1380
  return SmiOrNumberFromDouble(value,
1381
                               false /* use preallocated NaN, -0.0 */,
1382
                               pretenure);
1383
}
1384

    
1385

    
1386
Object* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) {
1387
  // Statically ensure that it is safe to allocate proxies in paged spaces.
1388
  STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize);
1389
  AllocationSpace space =
1390
      (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
1391
  Object* result = Allocate(proxy_map(), space);
1392
  if (result->IsFailure()) return result;
1393

    
1394
  Proxy::cast(result)->set_proxy(proxy);
1395
  return result;
1396
}
1397

    
1398

    
1399
Object* Heap::AllocateSharedFunctionInfo(Object* name) {
  Object* result = Allocate(shared_function_info_map(), NEW_SPACE);
  if (result->IsFailure()) return result;

  SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
  share->set_name(name);
  Code* illegal = Builtins::builtin(Builtins::Illegal);
  share->set_code(illegal);
  share->set_expected_nof_properties(0);
  share->set_length(0);
  share->set_formal_parameter_count(0);
  share->set_instance_class_name(Object_symbol());
  share->set_function_data(undefined_value());
  share->set_lazy_load_data(undefined_value());
  share->set_script(undefined_value());
  share->set_start_position_and_type(0);
  share->set_debug_info(undefined_value());
  share->set_inferred_name(empty_string());
  return result;
}


Object* Heap::AllocateConsString(String* first,
                                 String* second) {
  int first_length = first->length();
  int second_length = second->length();
  int length = first_length + second_length;
  bool is_ascii = StringShape(first).IsAsciiRepresentation()
      && StringShape(second).IsAsciiRepresentation();

  // If the resulting string is small make a flat string.
  if (length < String::kMinNonFlatLength) {
    ASSERT(first->IsFlat());
    ASSERT(second->IsFlat());
    if (is_ascii) {
      Object* result = AllocateRawAsciiString(length);
      if (result->IsFailure()) return result;
      // Copy the characters into the new object.
      char* dest = SeqAsciiString::cast(result)->GetChars();
      String::WriteToFlat(first, dest, 0, first_length);
      String::WriteToFlat(second, dest + first_length, 0, second_length);
      return result;
    } else {
      Object* result = AllocateRawTwoByteString(length);
      if (result->IsFailure()) return result;
      // Copy the characters into the new object.
      uc16* dest = SeqTwoByteString::cast(result)->GetChars();
      String::WriteToFlat(first, dest, 0, first_length);
      String::WriteToFlat(second, dest + first_length, 0, second_length);
      return result;
    }
  }

  Map* map;
  if (length <= String::kMaxShortStringSize) {
    map = is_ascii ? short_cons_ascii_string_map()
      : short_cons_string_map();
  } else if (length <= String::kMaxMediumStringSize) {
    map = is_ascii ? medium_cons_ascii_string_map()
      : medium_cons_string_map();
  } else {
    map = is_ascii ? long_cons_ascii_string_map()
      : long_cons_string_map();
  }

  Object* result = Allocate(map, NEW_SPACE);
  if (result->IsFailure()) return result;
  ASSERT(InNewSpace(result));
  ConsString* cons_string = ConsString::cast(result);
  cons_string->set_first(first, SKIP_WRITE_BARRIER);
  cons_string->set_second(second, SKIP_WRITE_BARRIER);
  cons_string->set_length(length);
  return result;
}


Object* Heap::AllocateSlicedString(String* buffer,
                                   int start,
                                   int end) {
  int length = end - start;

  // If the resulting string is small make a substring.
  if (end - start <= String::kMinNonFlatLength) {
    return Heap::AllocateSubString(buffer, start, end);
  }

  Map* map;
  if (length <= String::kMaxShortStringSize) {
    map = StringShape(buffer).IsAsciiRepresentation() ?
      short_sliced_ascii_string_map() :
      short_sliced_string_map();
  } else if (length <= String::kMaxMediumStringSize) {
    map = StringShape(buffer).IsAsciiRepresentation() ?
      medium_sliced_ascii_string_map() :
      medium_sliced_string_map();
  } else {
    map = StringShape(buffer).IsAsciiRepresentation() ?
      long_sliced_ascii_string_map() :
      long_sliced_string_map();
  }

  Object* result = Allocate(map, NEW_SPACE);
  if (result->IsFailure()) return result;

  SlicedString* sliced_string = SlicedString::cast(result);
  sliced_string->set_buffer(buffer);
  sliced_string->set_start(start);
  sliced_string->set_length(length);

  return result;
}


Object* Heap::AllocateSubString(String* buffer,
                                int start,
                                int end) {
  int length = end - start;

  if (length == 1) {
    return Heap::LookupSingleCharacterStringFromCode(
        buffer->Get(start));
  }

  // Make an attempt to flatten the buffer to reduce access time.
  if (!buffer->IsFlat()) {
    buffer->TryFlatten();
  }

  Object* result = StringShape(buffer).IsAsciiRepresentation()
      ? AllocateRawAsciiString(length)
      : AllocateRawTwoByteString(length);
  if (result->IsFailure()) return result;

  // Copy the characters into the new object.
  String* string_result = String::cast(result);
  StringHasher hasher(length);
  int i = 0;
  for (; i < length && hasher.is_array_index(); i++) {
    uc32 c = buffer->Get(start + i);
    hasher.AddCharacter(c);
    string_result->Set(i, c);
  }
  for (; i < length; i++) {
    uc32 c = buffer->Get(start + i);
    hasher.AddCharacterNoIndex(c);
    string_result->Set(i, c);
  }
  string_result->set_length_field(hasher.GetHashField());
  return result;
}


Object* Heap::AllocateExternalStringFromAscii(
    ExternalAsciiString::Resource* resource) {
  Map* map;
  int length = resource->length();
  if (length <= String::kMaxShortStringSize) {
    map = short_external_ascii_string_map();
  } else if (length <= String::kMaxMediumStringSize) {
    map = medium_external_ascii_string_map();
  } else {
    map = long_external_ascii_string_map();
  }

  Object* result = Allocate(map, NEW_SPACE);
  if (result->IsFailure()) return result;

  ExternalAsciiString* external_string = ExternalAsciiString::cast(result);
  external_string->set_length(length);
  external_string->set_resource(resource);

  return result;
}


Object* Heap::AllocateExternalStringFromTwoByte(
    ExternalTwoByteString::Resource* resource) {
  int length = resource->length();

  Map* map = ExternalTwoByteString::StringMap(length);
  Object* result = Allocate(map, NEW_SPACE);
  if (result->IsFailure()) return result;

  ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result);
  external_string->set_length(length);
  external_string->set_resource(resource);

  return result;
}


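// Returns the canonical one-character string for the given character code.
// ASCII codes are served from (and added to) the single character string
// cache; codes above the ASCII range get a fresh, uncached two-byte string.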
Object* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
  if (code <= String::kMaxAsciiCharCode) {
    Object* value = Heap::single_character_string_cache()->get(code);
    if (value != Heap::undefined_value()) return value;

    char buffer[1];
    buffer[0] = static_cast<char>(code);
    Object* result = LookupSymbol(Vector<const char>(buffer, 1));

    if (result->IsFailure()) return result;
    Heap::single_character_string_cache()->set(code, result);
    return result;
  }

  Object* result = Heap::AllocateRawTwoByteString(1);
  if (result->IsFailure()) return result;
  String* answer = String::cast(result);
  answer->Set(0, code);
  return answer;
}


Object* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
  if (pretenure == NOT_TENURED) {
    return AllocateByteArray(length);
  }
  int size = ByteArray::SizeFor(length);
  AllocationSpace space =
      size > MaxHeapObjectSize() ? LO_SPACE : OLD_DATA_SPACE;

  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);

  if (result->IsFailure()) return result;

  reinterpret_cast<Array*>(result)->set_map(byte_array_map());
  reinterpret_cast<Array*>(result)->set_length(length);
  return result;
}


Object* Heap::AllocateByteArray(int length) {
  int size = ByteArray::SizeFor(length);
  AllocationSpace space =
      size > MaxHeapObjectSize() ? LO_SPACE : NEW_SPACE;

  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);

  if (result->IsFailure()) return result;

  reinterpret_cast<Array*>(result)->set_map(byte_array_map());
  reinterpret_cast<Array*>(result)->set_length(length);
  return result;
}


void Heap::CreateFillerObjectAt(Address addr, int size) {
  if (size == 0) return;
  HeapObject* filler = HeapObject::FromAddress(addr);
  if (size == kPointerSize) {
    filler->set_map(Heap::one_word_filler_map());
  } else {
    filler->set_map(Heap::byte_array_map());
    ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size));
  }
}


Object* Heap::CreateCode(const CodeDesc& desc,
                         ScopeInfo<>* sinfo,
                         Code::Flags flags,
                         Handle<Object> self_reference) {
  // Compute size
  int body_size = RoundUp(desc.instr_size + desc.reloc_size, kObjectAlignment);
  int sinfo_size = 0;
  if (sinfo != NULL) sinfo_size = sinfo->Serialize(NULL);
  int obj_size = Code::SizeFor(body_size, sinfo_size);
  ASSERT(IsAligned(obj_size, Code::kCodeAlignment));
  Object* result;
  if (obj_size > MaxHeapObjectSize()) {
    result = lo_space_->AllocateRawCode(obj_size);
  } else {
    result = code_space_->AllocateRaw(obj_size);
  }

  if (result->IsFailure()) return result;

  // Initialize the object
  HeapObject::cast(result)->set_map(code_map());
  Code* code = Code::cast(result);
  code->set_instruction_size(desc.instr_size);
  code->set_relocation_size(desc.reloc_size);
  code->set_sinfo_size(sinfo_size);
  code->set_flags(flags);
  code->set_ic_flag(Code::IC_TARGET_IS_ADDRESS);
  // Allow self references to created code object by patching the handle to
  // point to the newly allocated Code object.
  if (!self_reference.is_null()) {
    *(self_reference.location()) = code;
  }
  // Migrate generated code.
  // The generated code can contain Object** values (typically from handles)
  // that are dereferenced during the copy to point directly to the actual heap
  // objects. These pointers can include references to the code object itself,
  // through the self_reference parameter.
  code->CopyFrom(desc);
  if (sinfo != NULL) sinfo->Serialize(code);  // write scope info
  LOG(CodeAllocateEvent(code, desc.origin));

#ifdef DEBUG
  code->Verify();
#endif
  return code;
}


Object* Heap::CopyCode(Code* code) {
  // Allocate an object the same size as the code object.
  int obj_size = code->Size();
  Object* result;
  if (obj_size > MaxHeapObjectSize()) {
    result = lo_space_->AllocateRawCode(obj_size);
  } else {
    result = code_space_->AllocateRaw(obj_size);
  }

  if (result->IsFailure()) return result;

  // Copy code object.
  Address old_addr = code->address();
  Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
  CopyBlock(reinterpret_cast<Object**>(new_addr),
            reinterpret_cast<Object**>(old_addr),
            obj_size);
  // Relocate the copy.
  Code* new_code = Code::cast(result);
  new_code->Relocate(new_addr - old_addr);
  return new_code;
}


Object* Heap::Allocate(Map* map, AllocationSpace space) {
  ASSERT(gc_state_ == NOT_IN_GC);
  ASSERT(map->instance_type() != MAP_TYPE);
  Object* result = AllocateRaw(map->instance_size(),
                               space,
                               TargetSpaceId(map->instance_type()));
  if (result->IsFailure()) return result;
  HeapObject::cast(result)->set_map(map);
  return result;
}


Object* Heap::InitializeFunction(JSFunction* function,
                                 SharedFunctionInfo* shared,
                                 Object* prototype) {
  ASSERT(!prototype->IsMap());
  function->initialize_properties();
  function->initialize_elements();
  function->set_shared(shared);
  function->set_prototype_or_initial_map(prototype);
  function->set_context(undefined_value());
  function->set_literals(empty_fixed_array(), SKIP_WRITE_BARRIER);
  return function;
}


Object* Heap::AllocateFunctionPrototype(JSFunction* function) {
  // Allocate the prototype.  Make sure to use the object function
  // from the function's context, since the function can be from a
  // different context.
  JSFunction* object_function =
      function->context()->global_context()->object_function();
  Object* prototype = AllocateJSObject(object_function);
  if (prototype->IsFailure()) return prototype;
  // When creating the prototype for the function we must set its
  // constructor to the function.
  Object* result =
      JSObject::cast(prototype)->SetProperty(constructor_symbol(),
                                             function,
                                             DONT_ENUM);
  if (result->IsFailure()) return result;
  return prototype;
}


Object* Heap::AllocateFunction(Map* function_map,
                               SharedFunctionInfo* shared,
                               Object* prototype) {
  Object* result = Allocate(function_map, OLD_POINTER_SPACE);
  if (result->IsFailure()) return result;
  return InitializeFunction(JSFunction::cast(result), shared, prototype);
}


Object* Heap::AllocateArgumentsObject(Object* callee, int length) {
  // To get fast allocation and map sharing for arguments objects we
  // allocate them based on an arguments boilerplate.

  // This calls Copy directly rather than using Heap::AllocateRaw so we
  // duplicate the check here.
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);

  JSObject* boilerplate =
      Top::context()->global_context()->arguments_boilerplate();

  // Make the clone.
  Map* map = boilerplate->map();
  int object_size = map->instance_size();
  Object* result = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
  if (result->IsFailure()) return result;

  // Copy the content. The arguments boilerplate doesn't have any
  // fields that point to new space so it's safe to skip the write
  // barrier here.
  CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(result)->address()),
            reinterpret_cast<Object**>(boilerplate->address()),
            object_size);

  // Set the two properties.
  JSObject::cast(result)->InObjectPropertyAtPut(arguments_callee_index,
                                                callee);
  JSObject::cast(result)->InObjectPropertyAtPut(arguments_length_index,
                                                Smi::FromInt(length),
                                                SKIP_WRITE_BARRIER);

  // Check the state of the object
  ASSERT(JSObject::cast(result)->HasFastProperties());
  ASSERT(JSObject::cast(result)->HasFastElements());

  return result;
}


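// Computes the initial map for instances of the given function.  The map is
// sized so that the expected number of properties (capped so the instance
// does not exceed JSObject::kMaxInstanceSize) can be stored in-object, and
// its prototype is the function's existing instance prototype or a freshly
// allocated one.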
Object* Heap::AllocateInitialMap(JSFunction* fun) {
  ASSERT(!fun->has_initial_map());

  // First create a new map with the expected number of properties being
  // allocated in-object.
  int expected_nof_properties = fun->shared()->expected_nof_properties();
  int instance_size = JSObject::kHeaderSize +
                      expected_nof_properties * kPointerSize;
  if (instance_size > JSObject::kMaxInstanceSize) {
    instance_size = JSObject::kMaxInstanceSize;
    expected_nof_properties = (instance_size - JSObject::kHeaderSize) /
                              kPointerSize;
  }
  Object* map_obj = Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
  if (map_obj->IsFailure()) return map_obj;

  // Fetch or allocate prototype.
  Object* prototype;
  if (fun->has_instance_prototype()) {
    prototype = fun->instance_prototype();
  } else {
    prototype = AllocateFunctionPrototype(fun);
    if (prototype->IsFailure()) return prototype;
  }
  Map* map = Map::cast(map_obj);
  map->set_inobject_properties(expected_nof_properties);
  map->set_unused_property_fields(expected_nof_properties);
  map->set_prototype(prototype);
  return map;
}


void Heap::InitializeJSObjectFromMap(JSObject* obj,
                                     FixedArray* properties,
                                     Map* map) {
  obj->set_properties(properties);
  obj->initialize_elements();
  // TODO(1240798): Initialize the object's body using valid initial values
  // according to the object's initial map.  For example, if the map's
  // instance type is JS_ARRAY_TYPE, the length field should be initialized
  // to a number (e.g., Smi::FromInt(0)) and the elements initialized to a
  // fixed array (e.g., Heap::empty_fixed_array()).  Currently, the object
  // verification code has to cope with (temporarily) invalid objects.  See,
  // for example, JSArray::JSArrayVerify.
  obj->InitializeBody(map->instance_size());
}


Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
  // JSFunctions should be allocated using AllocateFunction to be
  // properly initialized.
  ASSERT(map->instance_type() != JS_FUNCTION_TYPE);

  // Allocate the backing storage for the properties.
  int prop_size = map->unused_property_fields() - map->inobject_properties();
  Object* properties = AllocateFixedArray(prop_size);
  if (properties->IsFailure()) return properties;

  // Allocate the JSObject.
  AllocationSpace space =
      (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
  if (map->instance_size() > MaxHeapObjectSize()) space = LO_SPACE;
  Object* obj = Allocate(map, space);
  if (obj->IsFailure()) return obj;

  // Initialize the JSObject.
  InitializeJSObjectFromMap(JSObject::cast(obj),
                            FixedArray::cast(properties),
                            map);
  return obj;
}


Object* Heap::AllocateJSObject(JSFunction* constructor,
                               PretenureFlag pretenure) {
  // Allocate the initial map if absent.
  if (!constructor->has_initial_map()) {
    Object* initial_map = AllocateInitialMap(constructor);
    if (initial_map->IsFailure()) return initial_map;
    constructor->set_initial_map(Map::cast(initial_map));
    Map::cast(initial_map)->set_constructor(constructor);
  }
  // Allocate the object based on the constructor's initial map.
  return AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
}


Object* Heap::CopyJSObject(JSObject* source) {
  // Never used to copy functions.  If functions need to be copied we
  // have to be careful to clear the literals array.
  ASSERT(!source->IsJSFunction());

  // Make the clone.
  Map* map = source->map();
  int object_size = map->instance_size();
  Object* clone;

  // If we're forced to always allocate, we use the general allocation
  // functions which may leave us with an object in old space.
  if (always_allocate()) {
    clone = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
    if (clone->IsFailure()) return clone;
    Address clone_address = HeapObject::cast(clone)->address();
    CopyBlock(reinterpret_cast<Object**>(clone_address),
              reinterpret_cast<Object**>(source->address()),
              object_size);
    // Update write barrier for all fields that lie beyond the header.
    for (int offset = JSObject::kHeaderSize;
         offset < object_size;
         offset += kPointerSize) {
      RecordWrite(clone_address, offset);
    }
  } else {
    clone = new_space_.AllocateRaw(object_size);
    if (clone->IsFailure()) return clone;
    ASSERT(Heap::InNewSpace(clone));
    // Since we know the clone is allocated in new space, we can copy
    // the contents without worrying about updating the write barrier.
    CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(clone)->address()),
              reinterpret_cast<Object**>(source->address()),
              object_size);
  }

  FixedArray* elements = FixedArray::cast(source->elements());
  FixedArray* properties = FixedArray::cast(source->properties());
  // Update elements if necessary.
  if (elements->length() > 0) {
    Object* elem = CopyFixedArray(elements);
    if (elem->IsFailure()) return elem;
    JSObject::cast(clone)->set_elements(FixedArray::cast(elem));
  }
  // Update properties if necessary.
  if (properties->length() > 0) {
    Object* prop = CopyFixedArray(properties);
    if (prop->IsFailure()) return prop;
    JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
  }
  // Return the new clone.
  return clone;
}


Object* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
                                        JSGlobalProxy* object) {
  // Allocate initial map if absent.
  if (!constructor->has_initial_map()) {
    Object* initial_map = AllocateInitialMap(constructor);
    if (initial_map->IsFailure()) return initial_map;
    constructor->set_initial_map(Map::cast(initial_map));
    Map::cast(initial_map)->set_constructor(constructor);
  }

  Map* map = constructor->initial_map();

  // Check that the already allocated object has the same size as
  // objects allocated using the constructor.
  ASSERT(map->instance_size() == object->map()->instance_size());

  // Allocate the backing storage for the properties.
  int prop_size = map->unused_property_fields() - map->inobject_properties();
  Object* properties = AllocateFixedArray(prop_size);
  if (properties->IsFailure()) return properties;

  // Reset the map for the object.
  object->set_map(constructor->initial_map());

  // Reinitialize the object from the constructor map.
  InitializeJSObjectFromMap(object, FixedArray::cast(properties), map);
  return object;
}


Object* Heap::AllocateStringFromAscii(Vector<const char> string,
                                      PretenureFlag pretenure) {
  Object* result = AllocateRawAsciiString(string.length(), pretenure);
  if (result->IsFailure()) return result;

  // Copy the characters into the new object.
  SeqAsciiString* string_result = SeqAsciiString::cast(result);
  for (int i = 0; i < string.length(); i++) {
    string_result->SeqAsciiStringSet(i, string[i]);
  }
  return result;
}


Object* Heap::AllocateStringFromUtf8(Vector<const char> string,
                                     PretenureFlag pretenure) {
  // Count the number of characters in the UTF-8 string and check if
  // it is an ASCII string.
  Access<Scanner::Utf8Decoder> decoder(Scanner::utf8_decoder());
  decoder->Reset(string.start(), string.length());
  int chars = 0;
  bool is_ascii = true;
  while (decoder->has_more()) {
    uc32 r = decoder->GetNext();
    if (r > String::kMaxAsciiCharCode) is_ascii = false;
    chars++;
  }

  // If the string is ASCII, we do not need to convert the characters
  // since UTF-8 is backwards compatible with ASCII.
  if (is_ascii) return AllocateStringFromAscii(string, pretenure);

  Object* result = AllocateRawTwoByteString(chars, pretenure);
  if (result->IsFailure()) return result;

  // Convert and copy the characters into the new object.
  String* string_result = String::cast(result);
  decoder->Reset(string.start(), string.length());
  for (int i = 0; i < chars; i++) {
    uc32 r = decoder->GetNext();
    string_result->Set(i, r);
  }
  return result;
}


Object* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
                                        PretenureFlag pretenure) {
  // Check if the string is an ASCII string.
  int i = 0;
  while (i < string.length() && string[i] <= String::kMaxAsciiCharCode) i++;

  Object* result;
  if (i == string.length()) {  // It's an ASCII string.
    result = AllocateRawAsciiString(string.length(), pretenure);
  } else {  // It's not an ASCII string.
    result = AllocateRawTwoByteString(string.length(), pretenure);
  }
  if (result->IsFailure()) return result;

  // Copy the characters into the new object, which may be either ASCII or
  // UTF-16.
  String* string_result = String::cast(result);
  for (int i = 0; i < string.length(); i++) {
    string_result->Set(i, string[i]);
  }
  return result;
}


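// Returns the symbol map that matches the given string's representation
// (sequential, cons, sliced or external; ASCII or two-byte) and size class,
// or NULL if the string cannot be converted to a symbol in place, for
// example because it still lives in new space.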
Map* Heap::SymbolMapForString(String* string) {
  // If the string is in new space it cannot be used as a symbol.
  if (InNewSpace(string)) return NULL;

  // Find the corresponding symbol map for strings.
  Map* map = string->map();

  if (map == short_ascii_string_map()) return short_ascii_symbol_map();
  if (map == medium_ascii_string_map()) return medium_ascii_symbol_map();
  if (map == long_ascii_string_map()) return long_ascii_symbol_map();

  if (map == short_string_map()) return short_symbol_map();
  if (map == medium_string_map()) return medium_symbol_map();
  if (map == long_string_map()) return long_symbol_map();

  if (map == short_cons_string_map()) return short_cons_symbol_map();
  if (map == medium_cons_string_map()) return medium_cons_symbol_map();
  if (map == long_cons_string_map()) return long_cons_symbol_map();

  if (map == short_cons_ascii_string_map()) {
    return short_cons_ascii_symbol_map();
  }
  if (map == medium_cons_ascii_string_map()) {
    return medium_cons_ascii_symbol_map();
  }
  if (map == long_cons_ascii_string_map()) {
    return long_cons_ascii_symbol_map();
  }

  if (map == short_sliced_string_map()) return short_sliced_symbol_map();
  if (map == medium_sliced_string_map()) return medium_sliced_symbol_map();
  if (map == long_sliced_string_map()) return long_sliced_symbol_map();

  if (map == short_sliced_ascii_string_map()) {
    return short_sliced_ascii_symbol_map();
  }
  if (map == medium_sliced_ascii_string_map()) {
    return medium_sliced_ascii_symbol_map();
  }
  if (map == long_sliced_ascii_string_map()) {
    return long_sliced_ascii_symbol_map();
  }

  if (map == short_external_string_map()) {
    return short_external_symbol_map();
  }
  if (map == medium_external_string_map()) {
    return medium_external_symbol_map();
  }
  if (map == long_external_string_map()) {
    return long_external_symbol_map();
  }

  if (map == short_external_ascii_string_map()) {
    return short_external_ascii_symbol_map();
  }
  if (map == medium_external_ascii_string_map()) {
    return medium_external_ascii_symbol_map();
  }
  if (map == long_external_ascii_string_map()) {
    return long_external_ascii_symbol_map();
  }

  // No match found.
  return NULL;
}


Object* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
                                     int chars,
                                     uint32_t length_field) {
  // Ensure the chars matches the number of characters in the buffer.
  ASSERT(static_cast<unsigned>(chars) == buffer->Length());
  // Determine whether the string is ascii.
  bool is_ascii = true;
  while (buffer->has_more() && is_ascii) {
    if (buffer->GetNext() > unibrow::Utf8::kMaxOneByteChar) is_ascii = false;
  }
  buffer->Rewind();

  // Compute map and object size.
  int size;
  Map* map;

  if (is_ascii) {
    if (chars <= String::kMaxShortStringSize) {
      map = short_ascii_symbol_map();
    } else if (chars <= String::kMaxMediumStringSize) {
      map = medium_ascii_symbol_map();
    } else {
      map = long_ascii_symbol_map();
    }
    size = SeqAsciiString::SizeFor(chars);
  } else {
    if (chars <= String::kMaxShortStringSize) {
      map = short_symbol_map();
    } else if (chars <= String::kMaxMediumStringSize) {
      map = medium_symbol_map();
    } else {
      map = long_symbol_map();
    }
    size = SeqTwoByteString::SizeFor(chars);
  }

  // Allocate string.
  AllocationSpace space =
      (size > MaxHeapObjectSize()) ? LO_SPACE : OLD_DATA_SPACE;
  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
  if (result->IsFailure()) return result;

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // The hash value contains the length of the string.
  String* answer = String::cast(result);
  answer->set_length_field(length_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  for (int i = 0; i < chars; i++) {
    answer->Set(i, buffer->GetNext());
  }
  return answer;
}


Object* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
  int size = SeqAsciiString::SizeFor(length);
  if (size > MaxHeapObjectSize()) {
    space = LO_SPACE;
  }

  // Use AllocateRaw rather than Allocate because the object's size cannot be
  // determined from the map.
  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
  if (result->IsFailure()) return result;

  // Determine the map based on the string's length.
  Map* map;
  if (length <= String::kMaxShortStringSize) {
    map = short_ascii_string_map();
  } else if (length <= String::kMaxMediumStringSize) {
    map = medium_ascii_string_map();
  } else {
    map = long_ascii_string_map();
  }

  // Partially initialize the object.
  HeapObject::cast(result)->set_map(map);
  String::cast(result)->set_length(length);
  ASSERT_EQ(size, HeapObject::cast(result)->Size());
  return result;
}


Object* Heap::AllocateRawTwoByteString(int length, PretenureFlag pretenure) {
  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
  int size = SeqTwoByteString::SizeFor(length);
  if (size > MaxHeapObjectSize()) {
    space = LO_SPACE;
  }

  // Use AllocateRaw rather than Allocate because the object's size cannot be
  // determined from the map.
  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
  if (result->IsFailure()) return result;

  // Determine the map based on the string's length.
  Map* map;
  if (length <= String::kMaxShortStringSize) {
    map = short_string_map();
  } else if (length <= String::kMaxMediumStringSize) {
    map = medium_string_map();
  } else {
    map = long_string_map();
  }

  // Partially initialize the object.
  HeapObject::cast(result)->set_map(map);
  String::cast(result)->set_length(length);
  ASSERT_EQ(size, HeapObject::cast(result)->Size());
  return result;
}


Object* Heap::AllocateEmptyFixedArray() {
  int size = FixedArray::SizeFor(0);
  Object* result = AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
  if (result->IsFailure()) return result;
  // Initialize the object.
  reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
  reinterpret_cast<Array*>(result)->set_length(0);
  return result;
}


Object* Heap::AllocateRawFixedArray(int length) {
  // Use the general function if we're forced to always allocate.
  if (always_allocate()) return AllocateFixedArray(length, NOT_TENURED);
  // Allocate the raw data for a fixed array.
  int size = FixedArray::SizeFor(length);
  return (size > MaxHeapObjectSize())
      ? lo_space_->AllocateRawFixedArray(size)
      : new_space_.AllocateRaw(size);
}


Object* Heap::CopyFixedArray(FixedArray* src) {
  int len = src->length();
  Object* obj = AllocateRawFixedArray(len);
  if (obj->IsFailure()) return obj;
  if (Heap::InNewSpace(obj)) {
    HeapObject* dst = HeapObject::cast(obj);
    CopyBlock(reinterpret_cast<Object**>(dst->address()),
              reinterpret_cast<Object**>(src->address()),
              FixedArray::SizeFor(len));
    return obj;
  }
  HeapObject::cast(obj)->set_map(src->map());
  FixedArray* result = FixedArray::cast(obj);
  result->set_length(len);
  // Copy the content
  WriteBarrierMode mode = result->GetWriteBarrierMode();
  for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
  return result;
}


Object* Heap::AllocateFixedArray(int length) {
  if (length == 0) return empty_fixed_array();
  Object* result = AllocateRawFixedArray(length);
  if (!result->IsFailure()) {
    // Initialize header.
    reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
    FixedArray* array = FixedArray::cast(result);
    array->set_length(length);
    Object* value = undefined_value();
    // Initialize body.
    for (int index = 0; index < length; index++) {
      array->set(index, value, SKIP_WRITE_BARRIER);
    }
  }
  return result;
}


Object* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
  ASSERT(empty_fixed_array()->IsFixedArray());
  if (length == 0) return empty_fixed_array();

  int size = FixedArray::SizeFor(length);
  Object* result;
  if (size > MaxHeapObjectSize()) {
    result = lo_space_->AllocateRawFixedArray(size);
  } else {
    AllocationSpace space =
        (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
    result = AllocateRaw(size, space, OLD_POINTER_SPACE);
  }
  if (result->IsFailure()) return result;

  // Initialize the object.
  reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
  FixedArray* array = FixedArray::cast(result);
  array->set_length(length);
  Object* value = undefined_value();
  for (int index = 0; index < length; index++) {
    array->set(index, value, SKIP_WRITE_BARRIER);
  }
  return array;
}


Object* Heap::AllocateFixedArrayWithHoles(int length) {
  if (length == 0) return empty_fixed_array();
  Object* result = AllocateRawFixedArray(length);
  if (!result->IsFailure()) {
    // Initialize header.
    reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
    FixedArray* array = FixedArray::cast(result);
    array->set_length(length);
    // Initialize body.
    Object* value = the_hole_value();
    for (int index = 0; index < length; index++) {
      array->set(index, value, SKIP_WRITE_BARRIER);
    }
  }
  return result;
}


Object* Heap::AllocateHashTable(int length) {
  Object* result = Heap::AllocateFixedArray(length);
  if (result->IsFailure()) return result;
  reinterpret_cast<Array*>(result)->set_map(hash_table_map());
  ASSERT(result->IsDictionary());
  return result;
}


Object* Heap::AllocateGlobalContext() {
  Object* result = Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
  if (result->IsFailure()) return result;
  Context* context = reinterpret_cast<Context*>(result);
  context->set_map(global_context_map());
  ASSERT(context->IsGlobalContext());
  ASSERT(result->IsContext());
  return result;
}


Object* Heap::AllocateFunctionContext(int length, JSFunction* function) {
  ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
  Object* result = Heap::AllocateFixedArray(length);
  if (result->IsFailure()) return result;
  Context* context = reinterpret_cast<Context*>(result);
  context->set_map(context_map());
  context->set_closure(function);
  context->set_fcontext(context);
  context->set_previous(NULL);
  context->set_extension(NULL);
  context->set_global(function->context()->global());
  ASSERT(!context->IsGlobalContext());
  ASSERT(context->is_function_context());
  ASSERT(result->IsContext());
  return result;
}


Object* Heap::AllocateWithContext(Context* previous,
                                  JSObject* extension,
                                  bool is_catch_context) {
  Object* result = Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
  if (result->IsFailure()) return result;
  Context* context = reinterpret_cast<Context*>(result);
  context->set_map(is_catch_context ? catch_context_map() : context_map());
  context->set_closure(previous->closure());
  context->set_fcontext(previous->fcontext());
  context->set_previous(previous);
  context->set_extension(extension);
  context->set_global(previous->global());
  ASSERT(!context->IsGlobalContext());
  ASSERT(!context->is_function_context());
  ASSERT(result->IsContext());
  return result;
}


Object* Heap::AllocateStruct(InstanceType type) {
  Map* map;
  switch (type) {
#define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break;
STRUCT_LIST(MAKE_CASE)
#undef MAKE_CASE
    default:
      UNREACHABLE();
      return Failure::InternalError();
  }
  int size = map->instance_size();
  AllocationSpace space =
      (size > MaxHeapObjectSize()) ? LO_SPACE : OLD_POINTER_SPACE;
  Object* result = Heap::Allocate(map, space);
  if (result->IsFailure()) return result;
  Struct::cast(result)->InitializeBody(size);
  return result;
}


#ifdef DEBUG

void Heap::Print() {
  if (!HasBeenSetup()) return;
  Top::PrintStack();
  AllSpaces spaces;
  while (Space* space = spaces.next()) space->Print();
}


void Heap::ReportCodeStatistics(const char* title) {
  PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
  PagedSpace::ResetCodeStatistics();
  // We do not look for code in new space, map space, or old space.  If code
  // somehow ends up in those spaces, we would miss it here.
  code_space_->CollectCodeStatistics();
  lo_space_->CollectCodeStatistics();
  PagedSpace::ReportCodeStatistics();
}


// This function expects that NewSpace's allocated objects histogram is
// populated (via a call to CollectStatistics or else as a side effect of a
// just-completed scavenge collection).
void Heap::ReportHeapStatistics(const char* title) {
  USE(title);
  PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
         title, gc_count_);
  PrintF("mark-compact GC : %d\n", mc_count_);
  PrintF("old_gen_promotion_limit_ %d\n", old_gen_promotion_limit_);
  PrintF("old_gen_allocation_limit_ %d\n", old_gen_allocation_limit_);

  PrintF("\n");
  PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
  GlobalHandles::PrintStats();
  PrintF("\n");

  PrintF("Heap statistics : ");
  MemoryAllocator::ReportStatistics();
  PrintF("To space : ");
  new_space_.ReportStatistics();
  PrintF("Old pointer space : ");
  old_pointer_space_->ReportStatistics();
  PrintF("Old data space : ");
  old_data_space_->ReportStatistics();
  PrintF("Code space : ");
  code_space_->ReportStatistics();
  PrintF("Map space : ");
  map_space_->ReportStatistics();
  PrintF("Large object space : ");
  lo_space_->ReportStatistics();
  PrintF(">>>>>> ========================================= >>>>>>\n");
}

#endif  // DEBUG

bool Heap::Contains(HeapObject* value) {
  return Contains(value->address());
}


bool Heap::Contains(Address addr) {
  if (OS::IsOutsideAllocatedSpace(addr)) return false;
  return HasBeenSetup() &&
    (new_space_.ToSpaceContains(addr) ||
     old_pointer_space_->Contains(addr) ||
     old_data_space_->Contains(addr) ||
     code_space_->Contains(addr) ||
     map_space_->Contains(addr) ||
     lo_space_->SlowContains(addr));
}


bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
  return InSpace(value->address(), space);
}


bool Heap::InSpace(Address addr, AllocationSpace space) {
  if (OS::IsOutsideAllocatedSpace(addr)) return false;
  if (!HasBeenSetup()) return false;

  switch (space) {
    case NEW_SPACE:
      return new_space_.ToSpaceContains(addr);
    case OLD_POINTER_SPACE:
      return old_pointer_space_->Contains(addr);
    case OLD_DATA_SPACE:
      return old_data_space_->Contains(addr);
    case CODE_SPACE:
      return code_space_->Contains(addr);
    case MAP_SPACE:
      return map_space_->Contains(addr);
    case LO_SPACE:
      return lo_space_->SlowContains(addr);
  }

  return false;
}


#ifdef DEBUG
void Heap::Verify() {
  ASSERT(HasBeenSetup());

  VerifyPointersVisitor visitor;
  Heap::IterateRoots(&visitor);

  AllSpaces spaces;
  while (Space* space = spaces.next()) {
    space->Verify();
  }
}
#endif  // DEBUG


Object* Heap::LookupSymbol(Vector<const char> string) {
  Object* symbol = NULL;
  Object* new_table =
      SymbolTable::cast(symbol_table_)->LookupSymbol(string, &symbol);
  if (new_table->IsFailure()) return new_table;
  symbol_table_ = new_table;
  ASSERT(symbol != NULL);
  return symbol;
}


Object* Heap::LookupSymbol(String* string) {
  if (string->IsSymbol()) return string;
  Object* symbol = NULL;
  Object* new_table =
      SymbolTable::cast(symbol_table_)->LookupString(string, &symbol);
  if (new_table->IsFailure()) return new_table;
  symbol_table_ = new_table;
  ASSERT(symbol != NULL);
  return symbol;
}


bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
  if (string->IsSymbol()) {
    *symbol = string;
    return true;
  }
  SymbolTable* table = SymbolTable::cast(symbol_table_);
  return table->LookupSymbolIfExists(string, symbol);
}


#ifdef DEBUG
void Heap::ZapFromSpace() {
  ASSERT(HAS_HEAP_OBJECT_TAG(kFromSpaceZapValue));
  for (Address a = new_space_.FromSpaceLow();
       a < new_space_.FromSpaceHigh();
       a += kPointerSize) {
    Memory::Address_at(a) = kFromSpaceZapValue;
  }
}
#endif  // DEBUG


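// Iterates the remembered set bits for the range [object_start, object_end).
// Each bit corresponds to one pointer-size word; for every set bit the
// callback is invoked if the slot still points into new space, and the bit
// is cleared once the slot no longer does.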
void Heap::IterateRSetRange(Address object_start,
                            Address object_end,
                            Address rset_start,
                            ObjectSlotCallback copy_object_func) {
  Address object_address = object_start;
  Address rset_address = rset_start;

  // Loop over all the pointers in [object_start, object_end).
  while (object_address < object_end) {
    uint32_t rset_word = Memory::uint32_at(rset_address);
    if (rset_word != 0) {
      uint32_t result_rset = rset_word;
      for (uint32_t bitmask = 1; bitmask != 0; bitmask = bitmask << 1) {
        // Do not dereference pointers at or past object_end.
        if ((rset_word & bitmask) != 0 && object_address < object_end) {
          Object** object_p = reinterpret_cast<Object**>(object_address);
          if (Heap::InNewSpace(*object_p)) {
            copy_object_func(reinterpret_cast<HeapObject**>(object_p));
          }
          // If this pointer does not need to be remembered anymore, clear
          // the remembered set bit.
          if (!Heap::InNewSpace(*object_p)) result_rset &= ~bitmask;
        }
        object_address += kPointerSize;
      }
      // Update the remembered set if it has changed.
      if (result_rset != rset_word) {
        Memory::uint32_at(rset_address) = result_rset;
      }
    } else {
      // No bits in the word were set.  This is the common case.
      object_address += kPointerSize * kBitsPerInt;
    }
    rset_address += kIntSize;
  }
}


void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) {
  ASSERT(Page::is_rset_in_use());
  ASSERT(space == old_pointer_space_ || space == map_space_);

  PageIterator it(space, PageIterator::PAGES_IN_USE);
  while (it.has_next()) {
    Page* page = it.next();
    IterateRSetRange(page->ObjectAreaStart(), page->AllocationTop(),
                     page->RSetStart(), copy_object_func);
  }
}


#ifdef DEBUG
#define SYNCHRONIZE_TAG(tag) v->Synchronize(tag)
#else
#define SYNCHRONIZE_TAG(tag)
#endif

void Heap::IterateRoots(ObjectVisitor* v) {
  IterateStrongRoots(v);
  v->VisitPointer(reinterpret_cast<Object**>(&symbol_table_));
  SYNCHRONIZE_TAG("symbol_table");
}


void Heap::IterateStrongRoots(ObjectVisitor* v) {
#define ROOT_ITERATE(type, name) \
  v->VisitPointer(bit_cast<Object**, type**>(&name##_));
  STRONG_ROOT_LIST(ROOT_ITERATE);
#undef ROOT_ITERATE
  SYNCHRONIZE_TAG("strong_root_list");

#define STRUCT_MAP_ITERATE(NAME, Name, name) \
  v->VisitPointer(bit_cast<Object**, Map**>(&name##_map_));
  STRUCT_LIST(STRUCT_MAP_ITERATE);
#undef STRUCT_MAP_ITERATE
  SYNCHRONIZE_TAG("struct_map");

#define SYMBOL_ITERATE(name, string) \
  v->VisitPointer(bit_cast<Object**, String**>(&name##_));
  SYMBOL_LIST(SYMBOL_ITERATE)
#undef SYMBOL_ITERATE
  v->VisitPointer(bit_cast<Object**, String**>(&hidden_symbol_));
  SYNCHRONIZE_TAG("symbol");

  Bootstrapper::Iterate(v);
  SYNCHRONIZE_TAG("bootstrapper");
  Top::Iterate(v);
  SYNCHRONIZE_TAG("top");
  Debug::Iterate(v);
  SYNCHRONIZE_TAG("debug");
  CompilationCache::Iterate(v);
  SYNCHRONIZE_TAG("compilationcache");

  // Iterate over local handles in handle scopes.
  HandleScopeImplementer::Iterate(v);
  SYNCHRONIZE_TAG("handlescope");

  // Iterate over the builtin code objects and code stubs in the heap. Note
  // that it is not strictly necessary to iterate over code objects on
  // scavenge collections.  We still do it here because this same function
  // is used by the mark-sweep collector and the deserializer.
  Builtins::IterateBuiltins(v);
  SYNCHRONIZE_TAG("builtins");

  // Iterate over global handles.
  GlobalHandles::IterateRoots(v);
  SYNCHRONIZE_TAG("globalhandles");

  // Iterate over pointers being held by inactive threads.
  ThreadManager::Iterate(v);
  SYNCHRONIZE_TAG("threadmanager");
}
#undef SYNCHRONIZE_TAG


// Flag is set when the heap has been configured.  The heap can be repeatedly
// configured through the API until it is set up.
static bool heap_configured = false;

// TODO(1236194): Since the heap size is configurable on the command line
// and through the API, we should gracefully handle the case that the heap
// size is not big enough to fit all the initial objects.
bool Heap::ConfigureHeap(int semispace_size, int old_gen_size) {
  if (HasBeenSetup()) return false;

  if (semispace_size > 0) semispace_size_ = semispace_size;
  if (old_gen_size > 0) old_generation_size_ = old_gen_size;

  // The new space size must be a power of two to support single-bit testing
  // for containment.
  semispace_size_ = RoundUpToPowerOf2(semispace_size_);
  initial_semispace_size_ = Min(initial_semispace_size_, semispace_size_);
  young_generation_size_ = 2 * semispace_size_;

  // The old generation is paged.
  old_generation_size_ = RoundUp(old_generation_size_, Page::kPageSize);

  heap_configured = true;
  return true;
}


bool Heap::ConfigureHeapDefault() {
  return ConfigureHeap(FLAG_new_space_size, FLAG_old_space_size);
}


int Heap::PromotedSpaceSize() {
  return old_pointer_space_->Size()
      + old_data_space_->Size()
      + code_space_->Size()
      + map_space_->Size()
      + lo_space_->Size();
}


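// Returns the amount of external memory registered since the last global
// garbage collection; if the current total has dropped to or below the value
// recorded at that collection, zero is returned.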
int Heap::PromotedExternalMemorySize() {
  if (amount_of_external_allocated_memory_
      <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
  return amount_of_external_allocated_memory_
      - amount_of_external_allocated_memory_at_last_global_gc_;
}


bool Heap::Setup(bool create_heap_objects) {
  // Initialize heap spaces and initial maps and objects. Whenever something
  // goes wrong, just return false. The caller should check the results and
  // call Heap::TearDown() to release allocated memory.
  //
  // If the heap is not yet configured (e.g., through the API), configure it.
  // Configuration is based on the flags new-space-size (really the semispace
  // size) and old-space-size if set or the initial values of semispace_size_
  // and old_generation_size_ otherwise.
  if (!heap_configured) {
    if (!ConfigureHeapDefault()) return false;
  }

  // Set up the memory allocator and allocate an initial chunk of memory.  The
  // initial chunk is double the size of the new space to ensure that we can
  // find a pair of semispaces that are contiguous and aligned to their size.
  if (!MemoryAllocator::Setup(MaxCapacity())) return false;
  void* chunk
      = MemoryAllocator::ReserveInitialChunk(2 * young_generation_size_);
  if (chunk == NULL) return false;

  // Put the initial chunk of the code space at the start of the initial
  // chunk, then the two new space semispaces, then the initial chunk of
  // old space.  Align the pair of semispaces to their size, which must be
  // a power of 2.
  ASSERT(IsPowerOf2(young_generation_size_));
  Address code_space_start = reinterpret_cast<Address>(chunk);
  Address new_space_start = RoundUp(code_space_start, young_generation_size_);
  Address old_space_start = new_space_start + young_generation_size_;
  int code_space_size = new_space_start - code_space_start;
  int old_space_size = young_generation_size_ - code_space_size;

  // Initialize new space.
  if (!new_space_.Setup(new_space_start, young_generation_size_)) return false;

  // Initialize old space, set the maximum capacity to the old generation
  // size. It will not contain code.
  old_pointer_space_ =
      new OldSpace(old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
  if (old_pointer_space_ == NULL) return false;
  if (!old_pointer_space_->Setup(old_space_start, old_space_size >> 1)) {
    return false;
  }
  old_data_space_ =
      new OldSpace(old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
  if (old_data_space_ == NULL) return false;
  if (!old_data_space_->Setup(old_space_start + (old_space_size >> 1),
                              old_space_size >> 1)) {
    return false;
  }

  // Initialize the code space, set its maximum capacity to the old
  // generation size. It needs executable memory.
  code_space_ =
      new OldSpace(old_generation_size_, CODE_SPACE, EXECUTABLE);
  if (code_space_ == NULL) return false;
  if (!code_space_->Setup(code_space_start, code_space_size)) return false;

  // Initialize map space.
  map_space_ = new MapSpace(kMaxMapSpaceSize, MAP_SPACE);
  if (map_space_ == NULL) return false;
  // Setting up a paged space without giving it a virtual memory range big
  // enough to hold at least a page will cause it to allocate.
  if (!map_space_->Setup(NULL, 0)) return false;

  // The large object code space may contain code or data.  We set the memory
  // to be non-executable here for safety, but this means we need to enable it
  // explicitly when allocating large code objects.
  lo_space_ = new LargeObjectSpace(LO_SPACE);
  if (lo_space_ == NULL) return false;
  if (!lo_space_->Setup()) return false;

  if (create_heap_objects) {
    // Create initial maps.
    if (!CreateInitialMaps()) return false;
    if (!CreateApiObjects()) return false;

    // Create initial objects.
    if (!CreateInitialObjects()) return false;
  }

  LOG(IntEvent("heap-capacity", Capacity()));
  LOG(IntEvent("heap-available", Available()));

  return true;
}


void Heap::TearDown() {
2847
  GlobalHandles::TearDown();
2848

    
2849
  new_space_.TearDown();
2850

    
2851
  if (old_pointer_space_ != NULL) {
2852
    old_pointer_space_->TearDown();
2853
    delete old_pointer_space_;
2854
    old_pointer_space_ = NULL;
2855
  }
2856

    
2857
  if (old_data_space_ != NULL) {
2858
    old_data_space_->TearDown();
2859
    delete old_data_space_;
2860
    old_data_space_ = NULL;
2861
  }
2862

    
2863
  if (code_space_ != NULL) {
2864
    code_space_->TearDown();
2865
    delete code_space_;
2866
    code_space_ = NULL;
2867
  }
2868

    
2869
  if (map_space_ != NULL) {
2870
    map_space_->TearDown();
2871
    delete map_space_;
2872
    map_space_ = NULL;
2873
  }
2874

    
2875
  if (lo_space_ != NULL) {
2876
    lo_space_->TearDown();
2877
    delete lo_space_;
2878
    lo_space_ = NULL;
2879
  }
2880

    
2881
  MemoryAllocator::TearDown();
2882
}
2883

    
2884

    
2885
void Heap::Shrink() {
2886
  // Try to shrink map, old, and code spaces.
2887
  map_space_->Shrink();
2888
  old_pointer_space_->Shrink();
2889
  old_data_space_->Shrink();
2890
  code_space_->Shrink();
2891
}
2892

    
2893

    
2894
#ifdef ENABLE_HEAP_PROTECTION
2895

    
2896
void Heap::Protect() {
2897
  if (HasBeenSetup()) {
2898
    new_space_.Protect();
2899
    map_space_->Protect();
2900
    old_pointer_space_->Protect();
2901
    old_data_space_->Protect();
2902
    code_space_->Protect();
2903
    lo_space_->Protect();
2904
  }
2905
}
2906

    
2907

    
2908
void Heap::Unprotect() {
2909
  if (HasBeenSetup()) {
2910
    new_space_.Unprotect();
2911
    map_space_->Unprotect();
2912
    old_pointer_space_->Unprotect();
2913
    old_data_space_->Unprotect();
2914
    code_space_->Unprotect();
2915
    lo_space_->Unprotect();
2916
  }
2917
}
2918

    
2919
#endif
2920

    
2921

    
2922
#ifdef DEBUG
2923

    
2924
class PrintHandleVisitor: public ObjectVisitor {
2925
 public:
2926
  void VisitPointers(Object** start, Object** end) {
2927
    for (Object** p = start; p < end; p++)
2928
      PrintF("  handle %p to %p\n", p, *p);
2929
  }
2930
};
2931

    
2932
void Heap::PrintHandles() {
2933
  PrintF("Handles:\n");
2934
  PrintHandleVisitor v;
2935
  HandleScopeImplementer::Iterate(&v);
2936
}
2937

    
2938
#endif
2939

    
2940

    
2941
Space* AllSpaces::next() {
  switch (counter_++) {
    case NEW_SPACE:
      return Heap::new_space();
    case OLD_POINTER_SPACE:
      return Heap::old_pointer_space();
    case OLD_DATA_SPACE:
      return Heap::old_data_space();
    case CODE_SPACE:
      return Heap::code_space();
    case MAP_SPACE:
      return Heap::map_space();
    case LO_SPACE:
      return Heap::lo_space();
    default:
      return NULL;
  }
}


PagedSpace* PagedSpaces::next() {
  switch (counter_++) {
    case OLD_POINTER_SPACE:
      return Heap::old_pointer_space();
    case OLD_DATA_SPACE:
      return Heap::old_data_space();
    case CODE_SPACE:
      return Heap::code_space();
    case MAP_SPACE:
      return Heap::map_space();
    default:
      return NULL;
  }
}



OldSpace* OldSpaces::next() {
  switch (counter_++) {
    case OLD_POINTER_SPACE:
      return Heap::old_pointer_space();
    case OLD_DATA_SPACE:
      return Heap::old_data_space();
    case CODE_SPACE:
      return Heap::code_space();
    default:
      return NULL;
  }
}


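// Usage sketch (illustrative, not part of the original source): the three
// iterator classes above are simple cursors over the space enumeration, so
// callers drain them by calling next() until it returns NULL. Assuming the
// constructors declared in heap.h start counter_ at the first space handled
// by each switch, shrinking every paged space could look like this:
//
//   PagedSpaces spaces;
//   while (PagedSpace* space = spaces.next()) {
//     space->Shrink();  // any per-space operation goes here
//   }

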
SpaceIterator::SpaceIterator() : current_space_(FIRST_SPACE), iterator_(NULL) {
}


SpaceIterator::~SpaceIterator() {
  // Delete active iterator if any.
  delete iterator_;
}


bool SpaceIterator::has_next() {
  // Iterate until no more spaces.
  return current_space_ != LAST_SPACE;
}


ObjectIterator* SpaceIterator::next() {
  if (iterator_ != NULL) {
    delete iterator_;
    iterator_ = NULL;
    // Move to the next space.
    current_space_++;
    if (current_space_ > LAST_SPACE) {
      return NULL;
    }
  }

  // Return iterator for the new current space.
  return CreateIterator();
}


// Create an iterator for the space to iterate.
ObjectIterator* SpaceIterator::CreateIterator() {
  ASSERT(iterator_ == NULL);

  switch (current_space_) {
    case NEW_SPACE:
      iterator_ = new SemiSpaceIterator(Heap::new_space());
      break;
    case OLD_POINTER_SPACE:
      iterator_ = new HeapObjectIterator(Heap::old_pointer_space());
      break;
    case OLD_DATA_SPACE:
      iterator_ = new HeapObjectIterator(Heap::old_data_space());
      break;
    case CODE_SPACE:
      iterator_ = new HeapObjectIterator(Heap::code_space());
      break;
    case MAP_SPACE:
      iterator_ = new HeapObjectIterator(Heap::map_space());
      break;
    case LO_SPACE:
      iterator_ = new LargeObjectIterator(Heap::lo_space());
      break;
  }

  // Return the newly allocated iterator.
  ASSERT(iterator_ != NULL);
  return iterator_;
}


HeapIterator::HeapIterator() {
  Init();
}


HeapIterator::~HeapIterator() {
  Shutdown();
}


void HeapIterator::Init() {
  // Start the iteration.
  space_iterator_ = new SpaceIterator();
  object_iterator_ = space_iterator_->next();
}


void HeapIterator::Shutdown() {
  // Make sure the last iterator is deallocated.
  delete space_iterator_;
  space_iterator_ = NULL;
  object_iterator_ = NULL;
}


bool HeapIterator::has_next() {
  // No iterator means we are done.
  if (object_iterator_ == NULL) return false;

  if (object_iterator_->has_next_object()) {
    // If the current iterator has more objects we are fine.
    return true;
  } else {
    // Go through the spaces looking for one that has objects.
    while (space_iterator_->has_next()) {
      object_iterator_ = space_iterator_->next();
      if (object_iterator_->has_next_object()) {
        return true;
      }
    }
  }
  // Done with the last space.
  object_iterator_ = NULL;
  return false;
}


HeapObject* HeapIterator::next() {
  if (has_next()) {
    return object_iterator_->next_object();
  } else {
    return NULL;
  }
}


void HeapIterator::reset() {
  // Restart the iterator.
  Shutdown();
  Init();
}


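// Usage sketch (illustrative, not part of the original source): HeapIterator
// composes SpaceIterator with the per-space object iterators so a caller can
// walk every live object in the heap, the same pattern used by
// HeapProfiler::WriteSample below:
//
//   HeapIterator it;
//   while (it.has_next()) {
//     HeapObject* obj = it.next();
//     // inspect obj; call it.reset() to restart from the first space
//   }

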
//
// HeapProfiler class implementation.
//
#ifdef ENABLE_LOGGING_AND_PROFILING
void HeapProfiler::CollectStats(HeapObject* obj, HistogramInfo* info) {
  InstanceType type = obj->map()->instance_type();
  ASSERT(0 <= type && type <= LAST_TYPE);
  info[type].increment_number(1);
  info[type].increment_bytes(obj->Size());
}
#endif


#ifdef ENABLE_LOGGING_AND_PROFILING
void HeapProfiler::WriteSample() {
  LOG(HeapSampleBeginEvent("Heap", "allocated"));

  HistogramInfo info[LAST_TYPE+1];
#define DEF_TYPE_NAME(name) info[name].set_name(#name);
  INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
#undef DEF_TYPE_NAME

  HeapIterator iterator;
  while (iterator.has_next()) {
    CollectStats(iterator.next(), info);
  }

  // Lump all the string types together.
  int string_number = 0;
  int string_bytes = 0;
#define INCREMENT_SIZE(type, size, name)   \
    string_number += info[type].number();  \
    string_bytes += info[type].bytes();
  STRING_TYPE_LIST(INCREMENT_SIZE)
#undef INCREMENT_SIZE
  if (string_bytes > 0) {
    LOG(HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
  }

  for (int i = FIRST_NONSTRING_TYPE; i <= LAST_TYPE; ++i) {
    if (info[i].bytes() > 0) {
      LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
                              info[i].bytes()));
    }
  }

  LOG(HeapSampleEndEvent("Heap", "allocated"));
}


#endif



#ifdef DEBUG

static bool search_for_any_global;
static Object* search_target;
static bool found_target;
static List<Object*> object_stack(20);


// Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
static const int kMarkTag = 2;

static void MarkObjectRecursively(Object** p);
class MarkObjectVisitor : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    // Mark all HeapObject pointers in [start, end)
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject())
        MarkObjectRecursively(p);
    }
  }
};

static MarkObjectVisitor mark_visitor;

static void MarkObjectRecursively(Object** p) {
  if (!(*p)->IsHeapObject()) return;

  HeapObject* obj = HeapObject::cast(*p);

  Object* map = obj->map();

  if (!map->IsHeapObject()) return;  // visited before

  if (found_target) return;  // stop if target found
  object_stack.Add(obj);
  if ((search_for_any_global && obj->IsJSGlobalObject()) ||
      (!search_for_any_global && (obj == search_target))) {
    found_target = true;
    return;
  }

  if (obj->IsCode()) {
    Code::cast(obj)->ConvertICTargetsFromAddressToObject();
  }

  // not visited yet
  Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));

  Address map_addr = map_p->address();

  obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));

  MarkObjectRecursively(&map);

  obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
                   &mark_visitor);

  if (!found_target)  // don't pop if found the target
    object_stack.RemoveLast();
}


static void UnmarkObjectRecursively(Object** p);
class UnmarkObjectVisitor : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    // Unmark all HeapObject pointers in [start, end)
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject())
        UnmarkObjectRecursively(p);
    }
  }
};

static UnmarkObjectVisitor unmark_visitor;

static void UnmarkObjectRecursively(Object** p) {
  if (!(*p)->IsHeapObject()) return;

  HeapObject* obj = HeapObject::cast(*p);

  Object* map = obj->map();

  if (map->IsHeapObject()) return;  // unmarked already

  Address map_addr = reinterpret_cast<Address>(map);

  map_addr -= kMarkTag;

  ASSERT_TAG_ALIGNED(map_addr);

  HeapObject* map_p = HeapObject::FromAddress(map_addr);

  obj->set_map(reinterpret_cast<Map*>(map_p));

  UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));

  obj->IterateBody(Map::cast(map_p)->instance_type(),
                   obj->SizeFromMap(Map::cast(map_p)),
                   &unmark_visitor);

  if (obj->IsCode()) {
    Code::cast(obj)->ConvertICTargetsFromObjectToAddress();
  }
}


static void MarkRootObjectRecursively(Object** root) {
  if (search_for_any_global) {
    ASSERT(search_target == NULL);
  } else {
    ASSERT(search_target->IsHeapObject());
  }
  found_target = false;
  object_stack.Clear();

  MarkObjectRecursively(root);
  UnmarkObjectRecursively(root);

  if (found_target) {
    PrintF("=====================================\n");
    PrintF("====        Path to object       ====\n");
    PrintF("=====================================\n\n");

    ASSERT(!object_stack.is_empty());
    for (int i = 0; i < object_stack.length(); i++) {
      if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
      Object* obj = object_stack[i];
      obj->Print();
    }
    PrintF("=====================================\n");
  }
}


// Helper class for visiting HeapObjects recursively.
class MarkRootVisitor: public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) {
    // Visit all HeapObject pointers in [start, end)
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject())
        MarkRootObjectRecursively(p);
    }
  }
};


// Triggers a depth-first traversal of reachable objects from roots
// and finds a path to a specific heap object and prints it.
void Heap::TracePathToObject() {
  search_target = NULL;
  search_for_any_global = false;

  MarkRootVisitor root_visitor;
  IterateRoots(&root_visitor);
}


// Triggers a depth-first traversal of reachable objects from roots
// and finds a path to any global object and prints it. Useful for
// determining the source for leaks of global objects.
void Heap::TracePathToGlobal() {
  search_target = NULL;
  search_for_any_global = true;

  MarkRootVisitor root_visitor;
  IterateRoots(&root_visitor);
}
#endif


GCTracer::GCTracer()
    : start_time_(0.0),
      start_size_(0.0),
      gc_count_(0),
      full_gc_count_(0),
      is_compacting_(false),
      marked_count_(0) {
  // These two fields reflect the state of the previous full collection.
  // Set them before they are changed by the collector.
  previous_has_compacted_ = MarkCompactCollector::HasCompacted();
  previous_marked_count_ = MarkCompactCollector::previous_marked_count();
  if (!FLAG_trace_gc) return;
  start_time_ = OS::TimeCurrentMillis();
  start_size_ = SizeOfHeapObjects();
}


GCTracer::~GCTracer() {
  if (!FLAG_trace_gc) return;
  // Printf ONE line iff flag is set.
  PrintF("%s %.1f -> %.1f MB, %d ms.\n",
         CollectorString(),
         start_size_, SizeOfHeapObjects(),
         static_cast<int>(OS::TimeCurrentMillis() - start_time_));
}


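// Usage sketch (illustrative, not part of the original source): GCTracer is
// meant to be stack-allocated around a single collection, so the constructor
// snapshots the pre-GC time and size and the destructor prints the one-line
// --trace_gc summary, roughly:
//
//   {
//     GCTracer tracer;
//     // ... select the collector and perform the collection ...
//   }  // destructor prints the "<collector> X -> Y MB, N ms." line
//
// How collector_ gets set is not visible in this excerpt; presumably the
// collection path (Heap::CollectGarbage) wires it up before the tracer goes
// out of scope.

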
const char* GCTracer::CollectorString() {
  switch (collector_) {
    case SCAVENGER:
      return "Scavenge";
    case MARK_COMPACTOR:
      return MarkCompactCollector::HasCompacted() ? "Mark-compact"
                                                  : "Mark-sweep";
  }
  return "Unknown GC";
}


#ifdef DEBUG
bool Heap::GarbageCollectionGreedyCheck() {
  ASSERT(FLAG_gc_greedy);
  if (Bootstrapper::IsActive()) return true;
  if (disallow_allocation_failure()) return true;
  return CollectGarbage(0, NEW_SPACE);
}
#endif

} }  // namespace v8::internal