Revision f230a1cf deps/v8/src/heap-inl.h


deps/v8/src/heap-inl.h

@@ -140,12 +140,11 @@
   // Compute map and object size.
   Map* map = ascii_internalized_string_map();
   int size = SeqOneByteString::SizeFor(str.length());
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
 
   // Allocate string.
   Object* result;
-  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
-                   ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
-                   : old_data_space_->AllocateRaw(size);
+  { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
@@ -174,12 +173,11 @@
   // Compute map and object size.
   Map* map = internalized_string_map();
   int size = SeqTwoByteString::SizeFor(str.length());
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
 
   // Allocate string.
   Object* result;
-  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
-                   ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
-                   : old_data_space_->AllocateRaw(size);
+  { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
 
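Both string allocators above previously open-coded the same large-object test; this revision centralizes it behind SelectSpace plus the general AllocateRaw entry point. A rough sketch of the selection logic being factored out (the body below is inferred from the deleted ternary, not the actual helper):

// Sketch inferred from the removed code, not the real SelectSpace:
// oversized objects must go to the large-object space; otherwise the
// pretenure decision picks the requested old space or new space.
static inline AllocationSpace SelectSpace(int object_size,
                                          AllocationSpace old_space,
                                          PretenureFlag pretenure) {
  if (object_size > Page::kMaxNonCodeHeapObjectSize) return LO_SPACE;
  return (pretenure == TENURED) ? old_space : NEW_SPACE;
}

AllocateRaw(size, space, OLD_DATA_SPACE) then handles the retry-on-failure policy in one place instead of at every call site.
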
@@ -208,10 +206,17 @@
 }
 
 
+MaybeObject* Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
+  return CopyConstantPoolArrayWithMap(src, src->map());
+}
+
+
 MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                                AllocationSpace space,
                                AllocationSpace retry_space) {
-  ASSERT(AllowHandleAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);
+  ASSERT(AllowHandleAllocation::IsAllowed());
+  ASSERT(AllowHeapAllocation::IsAllowed());
+  ASSERT(gc_state_ == NOT_IN_GC);
   ASSERT(space != NEW_SPACE ||
          retry_space == OLD_POINTER_SPACE ||
          retry_space == OLD_DATA_SPACE ||
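The combined assertion in AllocateRaw is split into three separate ASSERTs, so a failure pinpoints which precondition broke: handle allocation disallowed, heap allocation disallowed, or allocation attempted during GC. Such Allow*/Disallow* checks are typically RAII scopes over per-thread state; a simplified sketch of the idea (hypothetical, not V8's actual assert-scope machinery):

// Hypothetical guard, not V8's assert-scope machinery: a thread-local
// flag that scopes set on construction and restore on destruction.
class AllocationAssertGuard {
 public:
  explicit AllocationAssertGuard(bool allow) : previous_(allowed_) {
    allowed_ = allow;
  }
  ~AllocationAssertGuard() { allowed_ = previous_; }  // restore outer state
  static bool IsAllowed() { return allowed_; }
 private:
  bool previous_;
  static thread_local bool allowed_;
};
thread_local bool AllocationAssertGuard::allowed_ = true;  // allowed by default
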
@@ -291,40 +296,6 @@
 }
 
 
-MaybeObject* Heap::AllocateRawMap() {
-#ifdef DEBUG
-  isolate_->counters()->objs_since_last_full()->Increment();
-  isolate_->counters()->objs_since_last_young()->Increment();
-#endif
-  MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
-  if (result->IsFailure()) old_gen_exhausted_ = true;
-  return result;
-}
-
-
-MaybeObject* Heap::AllocateRawCell() {
-#ifdef DEBUG
-  isolate_->counters()->objs_since_last_full()->Increment();
-  isolate_->counters()->objs_since_last_young()->Increment();
-#endif
-  MaybeObject* result = cell_space_->AllocateRaw(Cell::kSize);
-  if (result->IsFailure()) old_gen_exhausted_ = true;
-  return result;
-}
-
-
-MaybeObject* Heap::AllocateRawPropertyCell() {
-#ifdef DEBUG
-  isolate_->counters()->objs_since_last_full()->Increment();
-  isolate_->counters()->objs_since_last_young()->Increment();
-#endif
-  MaybeObject* result =
-      property_cell_space_->AllocateRaw(PropertyCell::kSize);
-  if (result->IsFailure()) old_gen_exhausted_ = true;
-  return result;
-}
-
-
 bool Heap::InNewSpace(Object* object) {
   bool result = new_space_.Contains(object);
   ASSERT(!result ||                  // Either not in new space
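The three deleted fixed-size allocators differed only in target space and object size, which suggests their callers were migrated to the general AllocateRaw(size, space, retry_space) path shown earlier in this revision. Hypothetical equivalents (illustrative only; the real call sites live elsewhere and may differ):

// Hypothetical replacements for the deleted helpers; illustrative only.
MaybeObject* raw_map = heap->AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
MaybeObject* raw_cell = heap->AllocateRaw(Cell::kSize, CELL_SPACE, CELL_SPACE);
MaybeObject* raw_property_cell =
    heap->AllocateRaw(PropertyCell::kSize, PROPERTY_CELL_SPACE,
                      PROPERTY_CELL_SPACE);

Note that the deleted versions also set old_gen_exhausted_ on failure, so the general path presumably takes over that bookkeeping as well.
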
@@ -525,6 +496,13 @@
     return;
   }
 
+  if (FLAG_trace_track_allocation_sites && object->IsJSObject()) {
+    if (AllocationMemento::FindForJSObject(JSObject::cast(object), true) !=
+        NULL) {
+      object->GetIsolate()->heap()->allocation_mementos_found_++;
+    }
+  }
+
   // AllocationMementos are unrooted and shouldn't survive a scavenge
   ASSERT(object->map() != object->GetHeap()->allocation_memento_map());
   // Call the slow part of scavenge object.
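The new block counts, under --trace_track_allocation_sites, how many scavenged JSObjects still carry an AllocationMemento. A memento, when present, is laid out immediately after its object; a sketch of the lookup idea (not the real AllocationMemento::FindForJSObject):

// Sketch of the lookup: probe the word right after the object for the
// memento map. The real code must also confirm the candidate address
// still lies within the same page before dereferencing it.
static AllocationMemento* FindMementoSketch(Heap* heap, JSObject* object) {
  Address candidate_address = object->address() + object->Size();
  HeapObject* candidate = HeapObject::FromAddress(candidate_address);
  if (candidate->map() == heap->allocation_memento_map()) {
    return AllocationMemento::cast(candidate);
  }
  return NULL;
}
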
@@ -532,14 +510,6 @@
 }
 
 
-MaybeObject* Heap::AllocateEmptyJSArrayWithAllocationSite(
-      ElementsKind elements_kind,
-      Handle<AllocationSite> allocation_site) {
-  return AllocateJSArrayAndStorageWithAllocationSite(elements_kind, 0, 0,
-      allocation_site, DONT_INITIALIZE_ARRAY_ELEMENTS);
-}
-
-
 bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
   const char* collector_reason = NULL;
   GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
@@ -847,15 +817,15 @@
 
 
 #ifdef VERIFY_HEAP
-NoWeakEmbeddedMapsVerificationScope::NoWeakEmbeddedMapsVerificationScope() {
+NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
   Isolate* isolate = Isolate::Current();
-  isolate->heap()->no_weak_embedded_maps_verification_scope_depth_++;
+  isolate->heap()->no_weak_object_verification_scope_depth_++;
 }
 
 
-NoWeakEmbeddedMapsVerificationScope::~NoWeakEmbeddedMapsVerificationScope() {
+NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
   Isolate* isolate = Isolate::Current();
-  isolate->heap()->no_weak_embedded_maps_verification_scope_depth_--;
+  isolate->heap()->no_weak_object_verification_scope_depth_--;
 }
 #endif
 
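The verification scope is renamed from NoWeakEmbeddedMapsVerificationScope to NoWeakObjectVerificationScope, broadening it from embedded maps to weak objects in general. Because it bumps a depth counter rather than setting a flag, the scope nests; a usage sketch, assuming the verifier skips weak-object checks whenever the depth is nonzero:

// Usage sketch: nesting works because each scope increments the depth
// counter on construction and decrements it on destruction.
{
  NoWeakObjectVerificationScope outer;    // depth 0 -> 1
  {
    NoWeakObjectVerificationScope inner;  // depth 1 -> 2
    // heap verification skips weak-object checks while depth > 0
  }                                       // depth 2 -> 1, still suppressed
}                                         // depth 1 -> 0, checks re-enabled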