Revision f230a1cf deps/v8/src/heap.cc

Inline diff (left column: old line number, right column: new line number; "-" marks removed lines, "+" marks added lines):

deps/v8/src/heap.cc
  67 67
  68 68  Heap::Heap()
  69 69      : isolate_(NULL),
+ 70        code_range_size_(kIs64BitArch ? 512 * MB : 0),
  70 71  // semispace_size_ should be a power of 2 and old_generation_size_ should be
  71 72  // a multiple of Page::kPageSize.
- 72  #if V8_TARGET_ARCH_X64
- 73  #define LUMP_OF_MEMORY (2 * MB)
- 74        code_range_size_(512*MB),
- 75  #else
- 76  #define LUMP_OF_MEMORY MB
- 77        code_range_size_(0),
- 78  #endif
- 79  #if defined(ANDROID) || V8_TARGET_ARCH_MIPS
- 80        reserved_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
- 81        max_semispace_size_(4 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
- 82        initial_semispace_size_(Page::kPageSize),
- 83        max_old_generation_size_(192*MB),
- 84        max_executable_size_(max_old_generation_size_),
- 85  #else
- 86        reserved_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
- 87        max_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
+ 73        reserved_semispace_size_(8 * (kPointerSize / 4) * MB),
+ 74        max_semispace_size_(8 * (kPointerSize / 4)  * MB),
  88 75        initial_semispace_size_(Page::kPageSize),
- 89        max_old_generation_size_(700ul * LUMP_OF_MEMORY),
- 90        max_executable_size_(256l * LUMP_OF_MEMORY),
- 91  #endif
- 92
+ 76        max_old_generation_size_(700ul * (kPointerSize / 4) * MB),
+ 77        max_executable_size_(256ul * (kPointerSize / 4) * MB),
  93 78  // Variables set based on semispace_size_ and old_generation_size_ in
  94 79  // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
  95 80  // Will be 4 * reserved_semispace_size_ to ensure that young
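Note: the constructor change above replaces the per-platform LUMP_OF_MEMORY macro with limits derived from the pointer width; kPointerSize / 4 evaluates to 1 on 32-bit targets and 2 on 64-bit targets. A small standalone sketch of what the new expressions work out to (illustration only, not part of the patch; kPointerSize and MB are modeled as plain constants here):

    #include <cstdio>

    // Evaluate the new Heap default limits for both pointer widths.
    // In V8, kPointerSize is 4 on 32-bit and 8 on 64-bit builds; MB is 1024 * 1024.
    int main() {
      const unsigned long MB = 1024 * 1024;
      const int pointer_sizes[] = {4, 8};
      for (int kPointerSize : pointer_sizes) {
        unsigned long scale = kPointerSize / 4;                 // 1 or 2
        unsigned long max_semispace = 8 * scale * MB;           // 8 MB or 16 MB
        unsigned long max_old_generation = 700ul * scale * MB;  // 700 MB or 1400 MB
        unsigned long max_executable = 256ul * scale * MB;      // 256 MB or 512 MB
        std::printf("%d-bit: semispace %lu MB, old gen %lu MB, executable %lu MB\n",
                    kPointerSize * 8, max_semispace / MB,
                    max_old_generation / MB, max_executable / MB);
      }
      return 0;
    }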
......
  101 86        contexts_disposed_(0),
  102 87        global_ic_age_(0),
  103 88        flush_monomorphic_ics_(false),
+ 89        allocation_mementos_found_(0),
  104 90        scan_on_scavenge_pages_(0),
  105 91        new_space_(this),
  106 92        old_pointer_space_(NULL),
......
  129 115        old_gen_exhausted_(false),
  130 116        store_buffer_rebuilder_(store_buffer()),
  131 117        hidden_string_(NULL),
- 132        global_gc_prologue_callback_(NULL),
- 133        global_gc_epilogue_callback_(NULL),
  134 118        gc_safe_size_of_old_object_(NULL),
  135 119        total_regexp_code_generated_(0),
  136 120        tracer_(NULL),
......
  157 141        mark_sweeps_since_idle_round_started_(0),
  158 142        gc_count_at_last_idle_gc_(0),
  159 143        scavenges_since_last_idle_round_(kIdleScavengeThreshold),
+ 144        full_codegen_bytes_generated_(0),
+ 145        crankshaft_codegen_bytes_generated_(0),
  160 146        gcs_since_last_deopt_(0),
  161 147  #ifdef VERIFY_HEAP
- 162        no_weak_embedded_maps_verification_scope_depth_(0),
+ 148        no_weak_object_verification_scope_depth_(0),
  163 149  #endif
  164 150        promotion_queue_(this),
  165 151        configured_(false),
......
  172 158    max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
  173 159  #endif
  174 160
+ 161    // Ensure old_generation_size_ is a multiple of kPageSize.
+ 162    ASSERT(MB >= Page::kPageSize);
+ 163
  175 164    intptr_t max_virtual = OS::MaxVirtualMemory();
  176 165
  177 166    if (max_virtual > 0) {
......
  461 450  #endif  // DEBUG
  462 451
  463 452    store_buffer()->GCPrologue();
+ 453
+ 454    if (FLAG_concurrent_osr) {
+ 455      isolate()->optimizing_compiler_thread()->AgeBufferedOsrJobs();
+ 456    }
  464 457  }
  465 458
  466 459
......
  521 514    isolate_->counters()->number_of_symbols()->Set(
  522 515        string_table()->NumberOfElements());
  523 516
+ 517    if (full_codegen_bytes_generated_ + crankshaft_codegen_bytes_generated_ > 0) {
+ 518      isolate_->counters()->codegen_fraction_crankshaft()->AddSample(
+ 519          static_cast<int>((crankshaft_codegen_bytes_generated_ * 100.0) /
+ 520              (crankshaft_codegen_bytes_generated_
+ 521              + full_codegen_bytes_generated_)));
+ 522    }
+ 523
  524 524    if (CommittedMemory() > 0) {
  525 525      isolate_->counters()->external_fragmentation_total()->AddSample(
  526 526          static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory()));
  527 527
+ 528      isolate_->counters()->heap_fraction_new_space()->
+ 529          AddSample(static_cast<int>(
+ 530              (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));
+ 531      isolate_->counters()->heap_fraction_old_pointer_space()->AddSample(
+ 532          static_cast<int>(
+ 533              (old_pointer_space()->CommittedMemory() * 100.0) /
+ 534              CommittedMemory()));
+ 535      isolate_->counters()->heap_fraction_old_data_space()->AddSample(
+ 536          static_cast<int>(
+ 537              (old_data_space()->CommittedMemory() * 100.0) /
+ 538              CommittedMemory()));
+ 539      isolate_->counters()->heap_fraction_code_space()->
+ 540          AddSample(static_cast<int>(
+ 541              (code_space()->CommittedMemory() * 100.0) / CommittedMemory()));
  528 542      isolate_->counters()->heap_fraction_map_space()->AddSample(
  529 543          static_cast<int>(
  530 544              (map_space()->CommittedMemory() * 100.0) / CommittedMemory()));
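Note: the new codegen_fraction_crankshaft sample above records the share of generated code bytes that came from the optimizing compiler. For example (illustrative numbers only), with 3 MB of Crankshaft output and 1 MB of full-codegen output the recorded sample is 3 / (3 + 1) * 100 = 75.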
......
  535 549          AddSample(static_cast<int>(
  536 550              (property_cell_space()->CommittedMemory() * 100.0) /
  537 551              CommittedMemory()));
+ 552      isolate_->counters()->heap_fraction_lo_space()->
+ 553          AddSample(static_cast<int>(
+ 554              (lo_space()->CommittedMemory() * 100.0) / CommittedMemory()));
  538 555
  539 556      isolate_->counters()->heap_sample_total_committed()->AddSample(
  540 557          static_cast<int>(CommittedMemory() / KB));
......
  548 565          heap_sample_property_cell_space_committed()->
  549 566              AddSample(static_cast<int>(
  550 567                  property_cell_space()->CommittedMemory() / KB));
+ 568      isolate_->counters()->heap_sample_code_space_committed()->AddSample(
+ 569          static_cast<int>(code_space()->CommittedMemory() / KB));
  551 570    }
  552 571
  553 572  #define UPDATE_COUNTERS_FOR_SPACE(space)                                       \
......
  610 629    // Note: as weak callbacks can execute arbitrary code, we cannot
  611 630    // hope that eventually there will be no weak callbacks invocations.
  612 631    // Therefore stop recollecting after several attempts.
+ 632    if (FLAG_concurrent_recompilation) {
+ 633      // The optimizing compiler may be unnecessarily holding on to memory.
+ 634      DisallowHeapAllocation no_recursive_gc;
+ 635      isolate()->optimizing_compiler_thread()->Flush();
+ 636    }
  613 637    mark_compact_collector()->SetFlags(kMakeHeapIterableMask |
  614 638                                       kReduceMemoryFootprintMask);
  615 639    isolate_->compilation_cache()->Clear();
......
  1055 1079
  1056 1080
  1057 1081  void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
- 1058    if (gc_type == kGCTypeMarkSweepCompact && global_gc_prologue_callback_) {
- 1059      global_gc_prologue_callback_();
- 1060    }
  1061 1082    for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
  1062 1083      if (gc_type & gc_prologue_callbacks_[i].gc_type) {
- 1063        gc_prologue_callbacks_[i].callback(gc_type, flags);
+ 1084        if (!gc_prologue_callbacks_[i].pass_isolate_) {
+ 1085          v8::GCPrologueCallback callback =
+ 1086              reinterpret_cast<v8::GCPrologueCallback>(
+ 1087                  gc_prologue_callbacks_[i].callback);
+ 1088          callback(gc_type, flags);
+ 1089        } else {
+ 1090          v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
+ 1091          gc_prologue_callbacks_[i].callback(isolate, gc_type, flags);
+ 1092        }
  1064 1093      }
  1065 1094    }
  1066 1095  }
......
  1069 1098  void Heap::CallGCEpilogueCallbacks(GCType gc_type) {
  1070 1099    for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
  1071 1100      if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
- 1072        gc_epilogue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
+ 1101        if (!gc_epilogue_callbacks_[i].pass_isolate_) {
+ 1102          v8::GCPrologueCallback callback =
+ 1103              reinterpret_cast<v8::GCPrologueCallback>(
+ 1104                  gc_epilogue_callbacks_[i].callback);
+ 1105          callback(gc_type, kNoGCCallbackFlags);
+ 1106        } else {
+ 1107          v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
+ 1108          gc_epilogue_callbacks_[i].callback(
+ 1109              isolate, gc_type, kNoGCCallbackFlags);
+ 1110        }
  1073 1111      }
  1074 1112    }
- 1075    if (gc_type == kGCTypeMarkSweepCompact && global_gc_epilogue_callback_) {
- 1076      global_gc_epilogue_callback_();
- 1077    }
  1078 1113  }
  1079 1114
  1080 1115
......
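Note: the two dispatch loops above now distinguish legacy callbacks (invoked without an isolate) from the newer isolate-passing ones via the pass_isolate_ flag. A minimal sketch of an embedder callback written against the isolate-passing signature used in the "else" branch; the public registration entry point on v8::Isolate is assumed here and is not part of this file:

    #include <v8.h>

    // Embedder-side GC prologue callback matching v8::Isolate::GCPrologueCallback,
    // i.e. the signature dispatched when pass_isolate_ is true above.
    static void OnGCPrologue(v8::Isolate* isolate, v8::GCType type,
                             v8::GCCallbackFlags flags) {
      if (type == v8::kGCTypeMarkSweepCompact) {
        // A full mark-sweep/compact is starting; drop embedder caches, etc.
      }
    }

    void RegisterGCHook(v8::Isolate* isolate) {
      // Assumed registration path: it ends up in
      // Heap::AddGCPrologueCallback(callback, gc_type, pass_isolate = true).
      isolate->AddGCPrologueCallback(OnGCPrologue, v8::kGCTypeAll);
    }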
  1326 1361  void Heap::Scavenge() {
  1327 1362    RelocationLock relocation_lock(this);
  1328 1363
+ 1364    allocation_mementos_found_ = 0;
+ 1365
  1329 1366  #ifdef VERIFY_HEAP
  1330 1367    if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);
  1331 1368  #endif
......
  1473 1510    gc_state_ = NOT_IN_GC;
  1474 1511
  1475 1512    scavenges_since_last_idle_round_++;
+ 1513
+ 1514    if (FLAG_trace_track_allocation_sites && allocation_mementos_found_ > 0) {
+ 1515      PrintF("AllocationMementos found during scavenge = %d\n",
+ 1516             allocation_mementos_found_);
+ 1517    }
  1476 1518  }
  1477 1519
  1478 1520
......
  1947 1989
  1948 1990
  1949 1991  STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);
+ 1992  STATIC_ASSERT((ConstantPoolArray::kHeaderSize & kDoubleAlignmentMask) == 0);
  1950 1993
  1951 1994
  1952 1995  INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
......
  2091 2134      if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
  2092 2135        // Update NewSpace stats if necessary.
  2093 2136        RecordCopiedObject(heap, target);
- 2094        HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
  2095 2137        Isolate* isolate = heap->isolate();
+ 2138        HeapProfiler* heap_profiler = isolate->heap_profiler();
+ 2139        if (heap_profiler->is_profiling()) {
+ 2140          heap_profiler->ObjectMoveEvent(source->address(), target->address(),
+ 2141                                         size);
+ 2142        }
  2096 2143        if (isolate->logger()->is_logging_code_events() ||
  2097 2144            isolate->cpu_profiler()->is_profiling()) {
  2098 2145          if (target->IsSharedFunctionInfo()) {
......
  2129 2176        MaybeObject* maybe_result;
  2130 2177
  2131 2178        if (object_contents == DATA_OBJECT) {
- 2132          // TODO(mstarzinger): Turn this check into a regular assert soon!
- 2133          CHECK(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
+ 2179          ASSERT(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
  2134 2180          maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
  2135 2181        } else {
- 2136          // TODO(mstarzinger): Turn this check into a regular assert soon!
- 2137          CHECK(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
+ 2182          ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
  2138 2183          maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size);
  2139 2184        }
  2140 2185
......
  2165 2210        return;
  2166 2211      }
  2167 2212    }
- 2168    // TODO(mstarzinger): Turn this check into a regular assert soon!
- 2169    CHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
+ 2213    ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE));
  2170 2214    MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
  2171 2215    heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
  2172 2216    Object* result = allocation->ToObjectUnchecked();
......
  2392 2436  MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
  2393 2437                                        int instance_size) {
  2394 2438    Object* result;
- 2395    MaybeObject* maybe_result = AllocateRawMap();
+ 2439    MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
  2396 2440    if (!maybe_result->ToObject(&result)) return maybe_result;
  2397 2441
  2398 2442    // Map::cast cannot be used due to uninitialized map field.
......
  2417 2461                                 int instance_size,
  2418 2462                                 ElementsKind elements_kind) {
  2419 2463    Object* result;
- 2420    MaybeObject* maybe_result = AllocateRawMap();
+ 2464    MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
  2421 2465    if (!maybe_result->To(&result)) return maybe_result;
  2422 2466
  2423 2467    Map* map = reinterpret_cast<Map*>(result);
......
  2650 2694    set_fixed_double_array_map(Map::cast(obj));
  2651 2695
  2652 2696    { MaybeObject* maybe_obj =
+ 2697          AllocateMap(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel);
+ 2698      if (!maybe_obj->ToObject(&obj)) return false;
+ 2699    }
+ 2700    set_constant_pool_array_map(Map::cast(obj));
+ 2701
+ 2702    { MaybeObject* maybe_obj =
  2653 2703          AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel);
  2654 2704      if (!maybe_obj->ToObject(&obj)) return false;
  2655 2705    }
......
  2887 2937  MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
  2888 2938    // Statically ensure that it is safe to allocate heap numbers in paged
  2889 2939    // spaces.
+ 2940    int size = HeapNumber::kSize;
  2890 2941    STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize);
- 2891    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+ 2942    AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
  2892 2943
  2893 2944    Object* result;
- 2894    { MaybeObject* maybe_result =
- 2895          AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
+ 2945    { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
  2896 2946      if (!maybe_result->ToObject(&result)) return maybe_result;
  2897 2947    }
  2898 2948
......
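Note: several allocators in this revision (heap numbers, byte arrays, strings, fixed arrays, JS objects) now call a SelectSpace(size, preferred_old_space, pretenure) helper instead of open-coding the space choice. Its definition is not part of this file; judging from the code it replaces, a plausible reconstruction looks like the sketch below (an assumption, not the actual implementation):

    // Hypothetical reconstruction of the helper used above. The removed code it
    // replaces chose LO_SPACE for oversized objects, the preferred old space for
    // TENURED allocations, and NEW_SPACE otherwise.
    static AllocationSpace SelectSpace(int object_size,
                                       AllocationSpace preferred_old_space,
                                       PretenureFlag pretenure) {
      if (object_size > Page::kMaxNonCodeHeapObjectSize) return LO_SPACE;
      return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
    }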
  2902 2952  }
  2903 2953
  2904 2954
- 2905  MaybeObject* Heap::AllocateHeapNumber(double value) {
- 2906    // Use general version, if we're forced to always allocate.
- 2907    if (always_allocate()) return AllocateHeapNumber(value, TENURED);
- 2908
- 2909    // This version of AllocateHeapNumber is optimized for
- 2910    // allocation in new space.
- 2911    STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxNonCodeHeapObjectSize);
- 2912    Object* result;
- 2913    { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize);
- 2914      if (!maybe_result->ToObject(&result)) return maybe_result;
- 2915    }
- 2916    HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
- 2917    HeapNumber::cast(result)->set_value(value);
- 2918    return result;
- 2919  }
- 2920
- 2921
  2922 2955  MaybeObject* Heap::AllocateCell(Object* value) {
+ 2956    int size = Cell::kSize;
+ 2957    STATIC_ASSERT(Cell::kSize <= Page::kNonCodeObjectAreaSize);
+ 2958
  2923 2959    Object* result;
- 2924    { MaybeObject* maybe_result = AllocateRawCell();
+ 2960    { MaybeObject* maybe_result = AllocateRaw(size, CELL_SPACE, CELL_SPACE);
  2925 2961      if (!maybe_result->ToObject(&result)) return maybe_result;
  2926 2962    }
  2927 2963    HeapObject::cast(result)->set_map_no_write_barrier(cell_map());
......
  2930 2966  }
  2931 2967
  2932 2968
- 2933  MaybeObject* Heap::AllocatePropertyCell(Object* value) {
+ 2969  MaybeObject* Heap::AllocatePropertyCell() {
+ 2970    int size = PropertyCell::kSize;
+ 2971    STATIC_ASSERT(PropertyCell::kSize <= Page::kNonCodeObjectAreaSize);
+ 2972
  2934 2973    Object* result;
- 2935    MaybeObject* maybe_result = AllocateRawPropertyCell();
+ 2974    MaybeObject* maybe_result =
+ 2975        AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE);
  2936 2976    if (!maybe_result->ToObject(&result)) return maybe_result;
  2937 2977
  2938 2978    HeapObject::cast(result)->set_map_no_write_barrier(
......
  2940 2980    PropertyCell* cell = PropertyCell::cast(result);
  2941 2981    cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
  2942 2982                             SKIP_WRITE_BARRIER);
- 2943    cell->set_value(value);
+ 2983    cell->set_value(the_hole_value());
  2944 2984    cell->set_type(Type::None());
- 2945    maybe_result = cell->SetValueInferType(value);
- 2946    if (maybe_result->IsFailure()) return maybe_result;
  2947 2985    return result;
  2948 2986  }
  2949 2987
......
  2958 2996
  2959 2997
  2960 2998  MaybeObject* Heap::AllocateAllocationSite() {
- 2961    Object* result;
+ 2999    AllocationSite* site;
  2962 3000    MaybeObject* maybe_result = Allocate(allocation_site_map(),
  2963 3001                                         OLD_POINTER_SPACE);
- 2964    if (!maybe_result->ToObject(&result)) return maybe_result;
- 2965    AllocationSite* site = AllocationSite::cast(result);
+ 3002    if (!maybe_result->To(&site)) return maybe_result;
  2966 3003    site->Initialize();
  2967 3004
  2968 3005    // Link the site
  2969 3006    site->set_weak_next(allocation_sites_list());
  2970 3007    set_allocation_sites_list(site);
- 2971    return result;
+ 3008    return site;
  2972 3009  }
  2973 3010
  2974 3011
......
  4057 4094    if (length < 0 || length > ByteArray::kMaxLength) {
  4058 4095      return Failure::OutOfMemoryException(0x7);
  4059 4096    }
- 4060    if (pretenure == NOT_TENURED) {
- 4061      return AllocateByteArray(length);
- 4062    }
  4063 4097    int size = ByteArray::SizeFor(length);
- 4064    AllocationSpace space =
- 4065        (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_DATA_SPACE;
- 4066    Object* result;
- 4067    { MaybeObject* maybe_result = AllocateRaw(size, space, space);
- 4068      if (!maybe_result->ToObject(&result)) return maybe_result;
- 4069    }
- 4070
- 4071    reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
- 4072        byte_array_map());
- 4073    reinterpret_cast<ByteArray*>(result)->set_length(length);
- 4074    return result;
- 4075  }
- 4076
- 4077
- 4078  MaybeObject* Heap::AllocateByteArray(int length) {
- 4079    if (length < 0 || length > ByteArray::kMaxLength) {
- 4080      return Failure::OutOfMemoryException(0x8);
- 4081    }
- 4082    int size = ByteArray::SizeFor(length);
- 4083    AllocationSpace space =
- 4084        (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : NEW_SPACE;
+ 4098    AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
  4085 4099    Object* result;
  4086 4100    { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
  4087 4101      if (!maybe_result->ToObject(&result)) return maybe_result;
......
  4112 4126                                           ExternalArrayType array_type,
  4113 4127                                           void* external_pointer,
  4114 4128                                           PretenureFlag pretenure) {
- 4115    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+ 4129    int size = ExternalArray::kAlignedSize;
+ 4130    AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
  4116 4131    Object* result;
- 4117    { MaybeObject* maybe_result = AllocateRaw(ExternalArray::kAlignedSize,
- 4118                                              space,
- 4119                                              OLD_DATA_SPACE);
+ 4132    { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
  4120 4133      if (!maybe_result->ToObject(&result)) return maybe_result;
  4121 4134    }
  4122 4135
......
  4134 4147                                Code::Flags flags,
  4135 4148                                Handle<Object> self_reference,
  4136 4149                                bool immovable,
- 4137                                bool crankshafted) {
+ 4150                                bool crankshafted,
+ 4151                                int prologue_offset) {
  4138 4152    // Allocate ByteArray before the Code object, so that we do not risk
  4139 4153    // leaving uninitialized Code object (and breaking the heap).
  4140 4154    ByteArray* reloc_info;
......
  4184 4198    code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER);
  4185 4199    code->set_gc_metadata(Smi::FromInt(0));
  4186 4200    code->set_ic_age(global_ic_age_);
- 4187    code->set_prologue_offset(kPrologueOffsetNotSet);
+ 4201    code->set_prologue_offset(prologue_offset);
  4188 4202    if (code->kind() == Code::OPTIMIZED_FUNCTION) {
  4189 4203      code->set_marked_for_deoptimization(false);
  4190 4204    }
+ 4205
+ 4206  #ifdef ENABLE_DEBUGGER_SUPPORT
+ 4207    if (code->kind() == Code::FUNCTION) {
+ 4208      code->set_has_debug_break_slots(
+ 4209          isolate_->debugger()->IsDebuggerActive());
+ 4210    }
+ 4211  #endif
+ 4212
  4191 4213    // Allow self references to created code object by patching the handle to
  4192 4214    // point to the newly allocated Code object.
  4193 4215    if (!self_reference.is_null()) {
......
  4310 4332    AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
  4311 4333        reinterpret_cast<Address>(result) + map->instance_size());
  4312 4334    alloc_memento->set_map_no_write_barrier(allocation_memento_map());
+ 4335    ASSERT(allocation_site->map() == allocation_site_map());
  4313 4336    alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER);
  4314 4337    return result;
  4315 4338  }
......
  4414 4437      arguments_object_size = kArgumentsObjectSize;
  4415 4438    }
  4416 4439
- 4417    // This calls Copy directly rather than using Heap::AllocateRaw so we
- 4418    // duplicate the check here.
- 4419    ASSERT(AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);
- 4420
  4421 4440    // Check that the size of the boilerplate matches our
  4422 4441    // expectations. The ArgumentsAccessStub::GenerateNewObject relies
  4423 4442    // on the size being a known constant.
......
  4553 4572    }
  4554 4573
  4555 4574    // Allocate the JSObject.
- 4556    AllocationSpace space =
- 4557        (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
- 4558    if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
+ 4575    int size = map->instance_size();
+ 4576    AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
  4559 4577    Object* obj;
  4560 4578    MaybeObject* maybe_obj = Allocate(map, space);
  4561 4579    if (!maybe_obj->To(&obj)) return maybe_obj;
......
  4588 4606    }
  4589 4607
  4590 4608    // Allocate the JSObject.
- 4591    AllocationSpace space = NEW_SPACE;
- 4592    if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
+ 4609    int size = map->instance_size();
+ 4610    AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, NOT_TENURED);
  4593 4611    Object* obj;
  4594 4612    MaybeObject* maybe_obj =
  4595 4613        AllocateWithAllocationSite(map, space, allocation_site);
......
  4745 4763  }
  4746 4764
  4747 4765
- 4748  MaybeObject* Heap::AllocateJSArrayAndStorageWithAllocationSite(
- 4749      ElementsKind elements_kind,
- 4750      int length,
- 4751      int capacity,
- 4752      Handle<AllocationSite> allocation_site,
- 4753      ArrayStorageAllocationMode mode) {
- 4754    MaybeObject* maybe_array = AllocateJSArrayWithAllocationSite(elements_kind,
- 4755        allocation_site);
- 4756    JSArray* array;
- 4757    if (!maybe_array->To(&array)) return maybe_array;
- 4758    return AllocateJSArrayStorage(array, length, capacity, mode);
- 4759  }
- 4760
- 4761
  4762 4766  MaybeObject* Heap::AllocateJSArrayStorage(
  4763 4767      JSArray* array,
  4764 4768      int length,
......
  4861 4865  }
  4862 4866
  4863 4867
- 4864  MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
- 4865    ASSERT(constructor->has_initial_map());
- 4866    Map* map = constructor->initial_map();
- 4867    ASSERT(map->is_dictionary_map());
- 4868
- 4869    // Make sure no field properties are described in the initial map.
- 4870    // This guarantees us that normalizing the properties does not
- 4871    // require us to change property values to PropertyCells.
- 4872    ASSERT(map->NextFreePropertyIndex() == 0);
- 4873
- 4874    // Make sure we don't have a ton of pre-allocated slots in the
- 4875    // global objects. They will be unused once we normalize the object.
- 4876    ASSERT(map->unused_property_fields() == 0);
- 4877    ASSERT(map->inobject_properties() == 0);
- 4878
- 4879    // Initial size of the backing store to avoid resize of the storage during
- 4880    // bootstrapping. The size differs between the JS global object ad the
- 4881    // builtins object.
- 4882    int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
- 4883
- 4884    // Allocate a dictionary object for backing storage.
- 4885    NameDictionary* dictionary;
- 4886    MaybeObject* maybe_dictionary =
- 4887        NameDictionary::Allocate(
- 4888            this,
- 4889            map->NumberOfOwnDescriptors() * 2 + initial_size);
- 4890    if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
- 4891
- 4892    // The global object might be created from an object template with accessors.
- 4893    // Fill these accessors into the dictionary.
- 4894    DescriptorArray* descs = map->instance_descriptors();
- 4895    for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
- 4896      PropertyDetails details = descs->GetDetails(i);
- 4897      ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
- 4898      PropertyDetails d = PropertyDetails(details.attributes(), CALLBACKS, i + 1);
- 4899      Object* value = descs->GetCallbacksObject(i);
- 4900      MaybeObject* maybe_value = AllocatePropertyCell(value);
- 4901      if (!maybe_value->ToObject(&value)) return maybe_value;
- 4902
- 4903      MaybeObject* maybe_added = dictionary->Add(descs->GetKey(i), value, d);
- 4904      if (!maybe_added->To(&dictionary)) return maybe_added;
- 4905    }
- 4906
- 4907    // Allocate the global object and initialize it with the backing store.
- 4908    JSObject* global;
- 4909    MaybeObject* maybe_global = Allocate(map, OLD_POINTER_SPACE);
- 4910    if (!maybe_global->To(&global)) return maybe_global;
- 4911
- 4912    InitializeJSObjectFromMap(global, dictionary, map);
- 4913
- 4914    // Create a new map for the global object.
- 4915    Map* new_map;
- 4916    MaybeObject* maybe_map = map->CopyDropDescriptors();
- 4917    if (!maybe_map->To(&new_map)) return maybe_map;
- 4918    new_map->set_dictionary_map(true);
- 4919
- 4920    // Set up the global object as a normalized object.
- 4921    global->set_map(new_map);
- 4922    global->set_properties(dictionary);
- 4923
- 4924    // Make sure result is a global object with properties in dictionary.
- 4925    ASSERT(global->IsGlobalObject());
- 4926    ASSERT(!global->HasFastProperties());
- 4927    return global;
- 4928  }
- 4929
- 4930
- 4931  MaybeObject* Heap::CopyJSObject(JSObject* source) {
+ 4868  MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
  4932 4869    // Never used to copy functions.  If functions need to be copied we
  4933 4870    // have to be careful to clear the literals array.
  4934 4871    SLOW_ASSERT(!source->IsJSFunction());
......
  4938 4875    int object_size = map->instance_size();
  4939 4876    Object* clone;
  4940 4877
+ 4878    ASSERT(site == NULL || (AllocationSite::CanTrack(map->instance_type()) &&
+ 4879                            map->instance_type() == JS_ARRAY_TYPE));
+ 4880
  4941 4881    WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
  4942 4882
  4943 4883    // If we're forced to always allocate, we use the general allocation
......
  4958 4898    } else {
  4959 4899      wb_mode = SKIP_WRITE_BARRIER;
  4960 4900
- 4961      { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
+ 4901      { int adjusted_object_size = site != NULL
+ 4902            ? object_size + AllocationMemento::kSize
+ 4903            : object_size;
+ 4904        MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
  4962 4905        if (!maybe_clone->ToObject(&clone)) return maybe_clone;
  4963 4906      }
  4964 4907      SLOW_ASSERT(InNewSpace(clone));
......
  4967 4910      CopyBlock(HeapObject::cast(clone)->address(),
  4968 4911                source->address(),
  4969 4912                object_size);
- 4970    }
- 4971
- 4972    SLOW_ASSERT(
- 4973        JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
- 4974    FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
- 4975    FixedArray* properties = FixedArray::cast(source->properties());
- 4976    // Update elements if necessary.
- 4977    if (elements->length() > 0) {
- 4978      Object* elem;
- 4979      { MaybeObject* maybe_elem;
- 4980        if (elements->map() == fixed_cow_array_map()) {
- 4981          maybe_elem = FixedArray::cast(elements);
- 4982        } else if (source->HasFastDoubleElements()) {
- 4983          maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
- 4984        } else {
- 4985          maybe_elem = CopyFixedArray(FixedArray::cast(elements));
- 4986        }
- 4987        if (!maybe_elem->ToObject(&elem)) return maybe_elem;
- 4988      }
- 4989      JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode);
- 4990    }
- 4991    // Update properties if necessary.
- 4992    if (properties->length() > 0) {
- 4993      Object* prop;
- 4994      { MaybeObject* maybe_prop = CopyFixedArray(properties);
- 4995        if (!maybe_prop->ToObject(&prop)) return maybe_prop;
- 4996      }
- 4997      JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode);
- 4998    }
- 4999    // Return the new clone.
- 5000    return clone;
- 5001  }
- 5002
- 5003
- 5004  MaybeObject* Heap::CopyJSObjectWithAllocationSite(
- 5005      JSObject* source,
- 5006      AllocationSite* site) {
- 5007    // Never used to copy functions.  If functions need to be copied we
- 5008    // have to be careful to clear the literals array.
- 5009    SLOW_ASSERT(!source->IsJSFunction());
- 5010
- 5011    // Make the clone.
- 5012    Map* map = source->map();
- 5013    int object_size = map->instance_size();
- 5014    Object* clone;
- 5015
- 5016    ASSERT(AllocationSite::CanTrack(map->instance_type()));
- 5017    ASSERT(map->instance_type() == JS_ARRAY_TYPE);
- 5018    WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
- 5019
- 5020    // If we're forced to always allocate, we use the general allocation
- 5021    // functions which may leave us with an object in old space.
- 5022    int adjusted_object_size = object_size;
- 5023    if (always_allocate()) {
- 5024      // We'll only track origin if we are certain to allocate in new space
- 5025      const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
- 5026      if ((object_size + AllocationMemento::kSize) < kMinFreeNewSpaceAfterGC) {
- 5027        adjusted_object_size += AllocationMemento::kSize;
- 5028      }
- 5029
- 5030      { MaybeObject* maybe_clone =
- 5031            AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE);
- 5032        if (!maybe_clone->ToObject(&clone)) return maybe_clone;
- 5033      }
- 5034      Address clone_address = HeapObject::cast(clone)->address();
- 5035      CopyBlock(clone_address,
- 5036                source->address(),
- 5037                object_size);
- 5038      // Update write barrier for all fields that lie beyond the header.
- 5039      int write_barrier_offset = adjusted_object_size > object_size
- 5040          ? JSArray::kSize + AllocationMemento::kSize
- 5041          : JSObject::kHeaderSize;
- 5042      if (((object_size - write_barrier_offset) / kPointerSize) > 0) {
- 5043        RecordWrites(clone_address,
- 5044                     write_barrier_offset,
- 5045                     (object_size - write_barrier_offset) / kPointerSize);
- 5046      }
  5047 4913
- 5048      // Track allocation site information, if we failed to allocate it inline.
- 5049      if (InNewSpace(clone) &&
- 5050          adjusted_object_size == object_size) {
- 5051        MaybeObject* maybe_alloc_memento =
- 5052            AllocateStruct(ALLOCATION_MEMENTO_TYPE);
- 5053        AllocationMemento* alloc_memento;
- 5054        if (maybe_alloc_memento->To(&alloc_memento)) {
- 5055          alloc_memento->set_map_no_write_barrier(allocation_memento_map());
- 5056          alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
+ 4914      if (site != NULL) {
+ 4915        AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
+ 4916            reinterpret_cast<Address>(clone) + object_size);
+ 4917        alloc_memento->set_map_no_write_barrier(allocation_memento_map());
+ 4918        ASSERT(site->map() == allocation_site_map());
+ 4919        alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
+ 4920        HeapProfiler* profiler = isolate()->heap_profiler();
+ 4921        if (profiler->is_tracking_allocations()) {
+ 4922          profiler->UpdateObjectSizeEvent(HeapObject::cast(clone)->address(),
+ 4923                                          object_size);
+ 4924          profiler->NewObjectEvent(alloc_memento->address(),
+ 4925                                   AllocationMemento::kSize);
  5057 4926        }
  5058 4927      }
- 5059    } else {
- 5060      wb_mode = SKIP_WRITE_BARRIER;
- 5061      adjusted_object_size += AllocationMemento::kSize;
- 5062
- 5063      { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
- 5064        if (!maybe_clone->ToObject(&clone)) return maybe_clone;
- 5065      }
- 5066      SLOW_ASSERT(InNewSpace(clone));
- 5067      // Since we know the clone is allocated in new space, we can copy
- 5068      // the contents without worrying about updating the write barrier.
- 5069      CopyBlock(HeapObject::cast(clone)->address(),
- 5070                source->address(),
- 5071                object_size);
- 5072    }
- 5073
- 5074    if (adjusted_object_size > object_size) {
- 5075      AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
- 5076          reinterpret_cast<Address>(clone) + object_size);
- 5077      alloc_memento->set_map_no_write_barrier(allocation_memento_map());
- 5078      alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
  5079 4928    }
  5080 4929
  5081 4930    SLOW_ASSERT(
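Note: when a site is supplied, the merged CopyJSObject path above reserves object_size + AllocationMemento::kSize bytes in new space and places the memento directly behind the copied object, which is what the pointer arithmetic clone + object_size expresses. Roughly (illustrative layout, sizes not to scale):

    clone address ->  +--------------------------+
                      | copied JSObject          |  object_size bytes
                      +--------------------------+
                      | AllocationMemento        |  AllocationMemento::kSize bytes
                      |   map: allocation_memento_map()
                      |   allocation_site: site
                      +--------------------------+

When allocation tracking is active, the heap profiler is additionally told about the resized clone and the freshly created memento (UpdateObjectSizeEvent / NewObjectEvent in the added lines above).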
......
  5366 5215      map = internalized_string_map();
  5367 5216      size = SeqTwoByteString::SizeFor(chars);
  5368 5217    }
+ 5218    AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
  5369 5219
  5370 5220    // Allocate string.
  5371 5221    Object* result;
- 5372    { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
- 5373                     ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
- 5374                     : old_data_space_->AllocateRaw(size);
+ 5222    { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
  5375 5223      if (!maybe_result->ToObject(&result)) return maybe_result;
  5376 5224    }
  5377 5225
......
  5410 5258    }
  5411 5259    int size = SeqOneByteString::SizeFor(length);
  5412 5260    ASSERT(size <= SeqOneByteString::kMaxSize);
- 5413    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
- 5414    AllocationSpace retry_space = OLD_DATA_SPACE;
- 5415
- 5416    if (size > Page::kMaxNonCodeHeapObjectSize) {
- 5417      // Allocate in large object space, retry space will be ignored.
- 5418      space = LO_SPACE;
- 5419    }
+ 5261    AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
  5420 5262
  5421 5263    Object* result;
- 5422    { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
+ 5264    { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
  5423 5265      if (!maybe_result->ToObject(&result)) return maybe_result;
  5424 5266    }
  5425 5267
......
  5440 5282    }
  5441 5283    int size = SeqTwoByteString::SizeFor(length);
  5442 5284    ASSERT(size <= SeqTwoByteString::kMaxSize);
- 5443    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
- 5444    AllocationSpace retry_space = OLD_DATA_SPACE;
- 5445
- 5446    if (size > Page::kMaxNonCodeHeapObjectSize) {
- 5447      // Allocate in large object space, retry space will be ignored.
- 5448      space = LO_SPACE;
- 5449    }
+ 5285    AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
  5450 5286
  5451 5287    Object* result;
- 5452    { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
+ 5288    { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
  5453 5289      if (!maybe_result->ToObject(&result)) return maybe_result;
  5454 5290    }
  5455 5291
......
  5474 5310  }
  5475 5311
  5476 5312
- 5477  MaybeObject* Heap::AllocateJSArrayWithAllocationSite(
- 5478      ElementsKind elements_kind,
- 5479      Handle<AllocationSite> allocation_site) {
- 5480    Context* native_context = isolate()->context()->native_context();
- 5481    JSFunction* array_function = native_context->array_function();
- 5482    Map* map = array_function->initial_map();
- 5483    Object* maybe_map_array = native_context->js_array_maps();
- 5484    if (!maybe_map_array->IsUndefined()) {
- 5485      Object* maybe_transitioned_map =
- 5486          FixedArray::cast(maybe_map_array)->get(elements_kind);
- 5487      if (!maybe_transitioned_map->IsUndefined()) {
- 5488        map = Map::cast(maybe_transitioned_map);
- 5489      }
- 5490    }
- 5491    return AllocateJSObjectFromMapWithAllocationSite(map, allocation_site);
- 5492  }
- 5493
- 5494
  5495 5313  MaybeObject* Heap::AllocateEmptyFixedArray() {
  5496 5314    int size = FixedArray::SizeFor(0);
  5497 5315    Object* result;
......
  5512 5330  }
  5513 5331
  5514 5332
- 5515  MaybeObject* Heap::AllocateRawFixedArray(int length) {
- 5516    if (length < 0 || length > FixedArray::kMaxLength) {
- 5517      return Failure::OutOfMemoryException(0xd);
- 5518    }
- 5519    ASSERT(length > 0);
- 5520    // Use the general function if we're forced to always allocate.
- 5521    if (always_allocate()) return AllocateFixedArray(length, TENURED);
- 5522    // Allocate the raw data for a fixed array.
- 5523    int size = FixedArray::SizeFor(length);
- 5524    return size <= Page::kMaxNonCodeHeapObjectSize
- 5525        ? new_space_.AllocateRaw(size)
- 5526        : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
- 5527  }
- 5528
- 5529
  5530 5333  MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
  5531 5334    int len = src->length();
  5532 5335    Object* obj;
- 5533    { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
+ 5336    { MaybeObject* maybe_obj = AllocateRawFixedArray(len, NOT_TENURED);
  5534 5337      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  5535 5338    }
  5536 5339    if (InNewSpace(obj)) {
......
  5570 5373  }
  5571 5374
  5572 5375
- 5573  MaybeObject* Heap::AllocateFixedArray(int length) {
- 5574    ASSERT(length >= 0);
- 5575    if (length == 0) return empty_fixed_array();
- 5576    Object* result;
- 5577    { MaybeObject* maybe_result = AllocateRawFixedArray(length);
- 5578      if (!maybe_result->ToObject(&result)) return maybe_result;
+ 5376  MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src,
+ 5377                                                  Map* map) {
+ 5378    int int64_entries = src->count_of_int64_entries();
+ 5379    int ptr_entries = src->count_of_ptr_entries();
+ 5380    int int32_entries = src->count_of_int32_entries();
+ 5381    Object* obj;
+ 5382    { MaybeObject* maybe_obj =
+ 5383          AllocateConstantPoolArray(int64_entries, ptr_entries, int32_entries);
+ 5384      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  5579 5385    }
- 5580    // Initialize header.
- 5581    FixedArray* array = reinterpret_cast<FixedArray*>(result);
- 5582    array->set_map_no_write_barrier(fixed_array_map());
- 5583    array->set_length(length);
- 5584    // Initialize body.
- 5585    ASSERT(!InNewSpace(undefined_value()));
- 5586    MemsetPointer(array->data_start(), undefined_value(), length);
- 5587    return result;
+ 5386    HeapObject* dst = HeapObject::cast(obj);
+ 5387    dst->set_map_no_write_barrier(map);
+ 5388    CopyBlock(
+ 5389        dst->address() + ConstantPoolArray::kLengthOffset,
+ 5390        src->address() + ConstantPoolArray::kLengthOffset,
+ 5391        ConstantPoolArray::SizeFor(int64_entries, ptr_entries, int32_entries)
+ 5392            - ConstantPoolArray::kLengthOffset);
+ 5393    return obj;
  5588 5394  }
  5589 5395
  5590 5396
......
  5593 5399      return Failure::OutOfMemoryException(0xe);
  5594 5400    }
  5595 5401    int size = FixedArray::SizeFor(length);
- 5596    AllocationSpace space =
- 5597        (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
- 5598    AllocationSpace retry_space = OLD_POINTER_SPACE;
+ 5402    AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
  5599 5403
- 5600    if (size > Page::kMaxNonCodeHeapObjectSize) {
- 5601      // Allocate in large object space, retry space will be ignored.
- 5602      space = LO_SPACE;
- 5603    }
- 5604
- 5605    return AllocateRaw(size, space, retry_space);
+ 5404    return AllocateRaw(size, space, OLD_POINTER_SPACE);
  5606 5405  }
  5607 5406
  5608 5407
- 5609  MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
- 5610      Heap* heap,
- 5611      int length,
- 5612      PretenureFlag pretenure,
- 5613      Object* filler) {
+ 5408  MaybeObject* Heap::AllocateFixedArrayWithFiller(int length,
+ 5409                                                  PretenureFlag pretenure,
+ 5410                                                  Object* filler) {
  5614 5411    ASSERT(length >= 0);
- 5615    ASSERT(heap->empty_fixed_array()->IsFixedArray());
- 5616    if (length == 0) return heap->empty_fixed_array();
+ 5412    ASSERT(empty_fixed_array()->IsFixedArray());
+ 5413    if (length == 0) return empty_fixed_array();
  5617 5414
- 5618    ASSERT(!heap->InNewSpace(filler));
+ 5415    ASSERT(!InNewSpace(filler));
  5619 5416    Object* result;
- 5620    { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
+ 5417    { MaybeObject* maybe_result = AllocateRawFixedArray(length, pretenure);
  5621 5418      if (!maybe_result->ToObject(&result)) return maybe_result;
  5622 5419    }
  5623 5420
- 5624    HeapObject::cast(result)->set_map_no_write_barrier(heap->fixed_array_map());
+ 5421    HeapObject::cast(result)->set_map_no_write_barrier(fixed_array_map());
  5625 5422    FixedArray* array = FixedArray::cast(result);
  5626 5423    array->set_length(length);
  5627 5424    MemsetPointer(array->data_start(), filler, length);
......
  5630 5427
  5631 5428
  5632 5429  MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
- 5633    return AllocateFixedArrayWithFiller(this,
- 5634                                        length,
- 5635                                        pretenure,
- 5636                                        undefined_value());
+ 5430    return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
  5637 5431  }
  5638 5432
  5639 5433
  5640 5434  MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
  5641 5435                                                 PretenureFlag pretenure) {
- 5642    return AllocateFixedArrayWithFiller(this,
- 5643                                        length,
- 5644                                        pretenure,
- 5645                                        the_hole_value());
+ 5436    return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value());
  5646 5437  }
  5647 5438
  5648 5439
......
  5650 5441    if (length == 0) return empty_fixed_array();
  5651 5442
  5652 5443    Object* obj;
- 5653    { MaybeObject* maybe_obj = AllocateRawFixedArray(length);
+ 5444    { MaybeObject* maybe_obj = AllocateRawFixedArray(length, NOT_TENURED);
  5654 5445      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  5655 5446    }
  5656 5447
......
  5720 5511      return Failure::OutOfMemoryException(0xf);
  5721 5512    }
  5722 5513    int size = FixedDoubleArray::SizeFor(length);
- 5723    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
- 5724    AllocationSpace retry_space = OLD_DATA_SPACE;
- 5725
  5726 5514  #ifndef V8_HOST_ARCH_64_BIT
  5727 5515    size += kPointerSize;
  5728 5516  #endif
+ 5517    AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
  5729 5518
- 5730    if (size > Page::kMaxNonCodeHeapObjectSize) {
- 5731      // Allocate in large object space, retry space will be ignored.
- 5732      space = LO_SPACE;
+ 5519    HeapObject* object;
+ 5520    { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE);
+ 5521      if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
  5733 5522    }
  5734 5523
+ 5524    return EnsureDoubleAligned(this, object, size);
+ 5525  }
+ 5526
+ 5527
+ 5528  MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries,
+ 5529                                               int number_of_ptr_entries,
+ 5530                                               int number_of_int32_entries) {
+ 5531    ASSERT(number_of_int64_entries > 0 || number_of_ptr_entries > 0 ||
+ 5532           number_of_int32_entries > 0);
+ 5533    int size = ConstantPoolArray::SizeFor(number_of_int64_entries,
+ 5534                                          number_of_ptr_entries,
+ 5535                                          number_of_int32_entries);
+ 5536  #ifndef V8_HOST_ARCH_64_BIT
+ 5537    size += kPointerSize;
+ 5538  #endif
+ 5539    AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
+ 5540
  5735 5541    HeapObject* object;
- 5736    { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
+ 5542    { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_POINTER_SPACE);
  5737 5543      if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
  5738 5544    }
+ 5545    object = EnsureDoubleAligned(this, object, size);
+ 5546    HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map());
  5739 5547
- 5740    return EnsureDoubleAligned(this, object, size);
+ 5548    ConstantPoolArray* constant_pool =
+ 5549        reinterpret_cast<ConstantPoolArray*>(object);
+ 5550    constant_pool->SetEntryCounts(number_of_int64_entries,
+ 5551                                  number_of_ptr_entries,
+ 5552                                  number_of_int32_entries);
+ 5553    MemsetPointer(
+ 5554        HeapObject::RawField(
+ 5555            constant_pool,
+ 5556            constant_pool->OffsetOfElementAt(constant_pool->first_ptr_index())),
+ 5557        undefined_value(),
+ 5558        number_of_ptr_entries);
+ 5559    return constant_pool;
  5741 5560  }
  5742 5561
  5743 5562
......
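Note: AllocateConstantPoolArray above is new in this revision; it reserves a double-aligned ConstantPoolArray in old pointer space, records the three entry counts, and seeds only the pointer section with undefined_value(). A hedged usage sketch in the surrounding internal style (the helper name and the entry counts below are made up for illustration):

    // Hypothetical internal helper showing the new allocator's contract.
    static MaybeObject* AllocateDemoConstantPool(Heap* heap) {
      ConstantPoolArray* pool;
      // 2 int64 entries, 3 pointer entries, 1 int32 entry (illustrative counts).
      MaybeObject* maybe_pool = heap->AllocateConstantPoolArray(2, 3, 1);
      if (!maybe_pool->To(&pool)) return maybe_pool;  // propagate allocation failure
      // The pointer section now reads undefined_value(); the int64/int32 sections
      // are left for the caller to fill in.
      return pool;
    }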
  5937 5756        return Failure::InternalError();
  5938 5757    }
  5939 5758    int size = map->instance_size();
- 5940    AllocationSpace space =
- 5941        (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_POINTER_SPACE;
+ 5759    AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
  5942 5760    Object* result;
  5943 5761    { MaybeObject* maybe_result = Allocate(map, space);
  5944 5762      if (!maybe_result->ToObject(&result)) return maybe_result;
......
  6965 6783    native_contexts_list_ = undefined_value();
  6966 6784    array_buffers_list_ = undefined_value();
  6967 6785    allocation_sites_list_ = undefined_value();
+ 6786    weak_object_to_code_table_ = undefined_value();
  6968 6787    return true;
  6969 6788  }
  6970 6789
......
  7068 6887  }
  7069 6888
  7070 6889
- 7071  void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) {
+ 6890  void Heap::AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
+ 6891                                   GCType gc_type,
+ 6892                                   bool pass_isolate) {
  7072 6893    ASSERT(callback != NULL);
- 7073    GCPrologueCallbackPair pair(callback, gc_type);
+ 6894    GCPrologueCallbackPair pair(callback, gc_type, pass_isolate);
  7074 6895    ASSERT(!gc_prologue_callbacks_.Contains(pair));
  7075 6896    return gc_prologue_callbacks_.Add(pair);
  7076 6897  }
  7077 6898
  7078 6899
- 7079  void Heap::RemoveGCPrologueCallback(GCPrologueCallback callback) {
+ 6900  void Heap::RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback) {
  7080 6901    ASSERT(callback != NULL);
  7081 6902    for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
  7082 6903      if (gc_prologue_callbacks_[i].callback == callback) {
......
  7088 6909  }
  7089 6910
  7090 6911
- 7091  void Heap::AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type) {
+ 6912  void Heap::AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
+ 6913                                   GCType gc_type,
+ 6914                                   bool pass_isolate) {
  7092 6915    ASSERT(callback != NULL);
- 7093    GCEpilogueCallbackPair pair(callback, gc_type);
+ 6916    GCEpilogueCallbackPair pair(callback, gc_type, pass_isolate);
  7094 6917    ASSERT(!gc_epilogue_callbacks_.Contains(pair));
  7095 6918    return gc_epilogue_callbacks_.Add(pair);
  7096 6919  }
  7097 6920
  7098 6921
- 7099  void Heap::RemoveGCEpilogueCallback(GCEpilogueCallback callback) {
+ 6922  void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback) {
  7100 6923    ASSERT(callback != NULL);
  7101 6924    for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
  7102 6925      if (gc_epilogue_callbacks_[i].callback == callback) {
......
  7108 6931  }
  7109 6932
  7110 6933
+ 6934  MaybeObject* Heap::AddWeakObjectToCodeDependency(Object* obj,
+ 6935                                                   DependentCode* dep) {
+ 6936    ASSERT(!InNewSpace(obj));
+ 6937    ASSERT(!InNewSpace(dep));
+ 6938    MaybeObject* maybe_obj =
+ 6939        WeakHashTable::cast(weak_object_to_code_table_)->Put(obj, dep);
+ 6940    WeakHashTable* table;
+ 6941    if (!maybe_obj->To(&table)) return maybe_obj;
+ 6942    if (ShouldZapGarbage() && weak_object_to_code_table_ != table) {
+ 6943      WeakHashTable::cast(weak_object_to_code_table_)->Zap(the_hole_value());
+ 6944    }
+ 6945    set_weak_object_to_code_table(table);
+ 6946    ASSERT_EQ(dep, WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj));
+ 6947    return weak_object_to_code_table_;
+ 6948  }
+ 6949
+ 6950
+ 6951  DependentCode* Heap::LookupWeakObjectToCodeDependency(Object* obj) {
+ 6952    Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj);
+ 6953    if (dep->IsDependentCode()) return DependentCode::cast(dep);
+ 6954    return DependentCode::cast(empty_fixed_array());
+ 6955  }
+ 6956
+ 6957
+ 6958  void Heap::EnsureWeakObjectToCodeTable() {
+ 6959    if (!weak_object_to_code_table()->IsHashTable()) {
+ 6960      set_weak_object_to_code_table(*isolate()->factory()->NewWeakHashTable(16));
+ 6961    }
+ 6962  }
+ 6963
+ 6964
  7111 6965  #ifdef DEBUG
  7112 6966
  7113 6967  class PrintHandleVisitor: public ObjectVisitor {
......
  8090 7944        static_cast<int>(object_sizes_last_time_[index]));
  8091 7945    FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
  8092 7946  #undef ADJUST_LAST_TIME_OBJECT_COUNT
+ 7947  #define ADJUST_LAST_TIME_OBJECT_COUNT(name)                 \
+ 7948    index = FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge; \
+ 7949    counters->count_of_CODE_AGE_##name()->Increment(          \
+ 7950        static_cast<int>(object_counts_[index]));             \
+ 7951    counters->count_of_CODE_AGE_##name()->Decrement(          \
+ 7952        static_cast<int>(object_counts_last_time_[index]));   \
+ 7953    counters->size_of_CODE_AGE_##name()->Increment(           \
+ 7954        static_cast<int>(object_sizes_[index]));              \
+ 7955    counters->size_of_CODE_AGE_##name()->Decrement(          \
+ 7956        static_cast<int>(object_sizes_last_time_[index]));
+ 7957    CODE_AGE_LIST_WITH_NO_AGE(ADJUST_LAST_TIME_OBJECT_COUNT)
+ 7958  #undef ADJUST_LAST_TIME_OBJECT_COUNT
  8093 7959
  8094 7960    OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
  8095 7961    OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
