Revision f230a1cf deps/v8/src/spaces-inl.h

deps/v8/src/spaces-inl.h
@@ -28,6 +28,7 @@
 #ifndef V8_SPACES_INL_H_
 #define V8_SPACES_INL_H_
 
+#include "heap-profiler.h"
 #include "isolate.h"
 #include "spaces.h"
 #include "v8memory.h"
@@ -263,22 +264,28 @@
 // allocation) so it can be used by all the allocation functions and for all
 // the paged spaces.
 HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
-  Address current_top = allocation_info_.top;
+  Address current_top = allocation_info_.top();
   Address new_top = current_top + size_in_bytes;
-  if (new_top > allocation_info_.limit) return NULL;
+  if (new_top > allocation_info_.limit()) return NULL;
 
-  allocation_info_.top = new_top;
+  allocation_info_.set_top(new_top);
   return HeapObject::FromAddress(current_top);
 }
 
 
 // Raw allocation.
-MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) {
+MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes,
+                                     AllocationType event) {
+  HeapProfiler* profiler = heap()->isolate()->heap_profiler();
+
   HeapObject* object = AllocateLinearly(size_in_bytes);
   if (object != NULL) {
     if (identity() == CODE_SPACE) {
       SkipList::Update(object->address(), size_in_bytes);
     }
+    if (event == NEW_OBJECT && profiler->is_tracking_allocations()) {
+      profiler->NewObjectEvent(object->address(), size_in_bytes);
+    }
     return object;
   }
 
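Note: the fast path above is classic bump-pointer allocation: read the current top, compute top plus the requested size, bail out to a slow path if that crosses the limit, and otherwise publish the new top and return the old one. A minimal standalone sketch of the same technique, using simplified types that are illustrative only (not V8's PagedSpace or AllocationInfo):

    #include <cstddef>
    #include <cstdint>

    // Sketch of bump-pointer allocation over a fixed linear area.
    class BumpAllocator {
     public:
      BumpAllocator(uintptr_t start, uintptr_t limit)
          : top_(start), limit_(limit) {}

      // Fast path: advance top_ by size bytes. Returns nullptr when the
      // linear area is exhausted so the caller can fall back to a slow
      // path (fetch a new page, trigger a GC, ...).
      void* Allocate(size_t size) {
        uintptr_t current_top = top_;
        uintptr_t new_top = current_top + size;
        if (new_top > limit_) return nullptr;
        top_ = new_top;
        return reinterpret_cast<void*>(current_top);
      }

     private:
      uintptr_t top_;    // next free address
      uintptr_t limit_;  // end of the linear allocation area
    };

As in AllocateLinearly, the limit check happens before top is updated, so a failed attempt leaves the allocation state untouched for the slow path.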
@@ -291,6 +298,9 @@
     if (identity() == CODE_SPACE) {
       SkipList::Update(object->address(), size_in_bytes);
     }
+    if (event == NEW_OBJECT && profiler->is_tracking_allocations()) {
+      profiler->NewObjectEvent(object->address(), size_in_bytes);
+    }
     return object;
   }
 
@@ -299,6 +309,9 @@
     if (identity() == CODE_SPACE) {
       SkipList::Update(object->address(), size_in_bytes);
     }
+    if (event == NEW_OBJECT && profiler->is_tracking_allocations()) {
+      profiler->NewObjectEvent(object->address(), size_in_bytes);
+    }
     return object;
   }
 
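Note: the same three added lines reappear after each of the three "return object;" exits of PagedSpace::AllocateRaw shown in this diff, so every way an object can be handed out reports through the same hook. The shape is a cheap boolean guard in front of the event call, which costs one predictable branch per allocation while tracking is disabled. A hedged sketch of that pattern with a hypothetical AllocationTracker (names are illustrative, not V8's HeapProfiler interface):

    #include <cstddef>
    #include <cstdint>
    #include <new>

    // Hypothetical tracker illustrating the guard-then-notify pattern.
    class AllocationTracker {
     public:
      void set_tracking(bool on) { tracking_ = on; }
      bool is_tracking_allocations() const { return tracking_; }

      // A real tracker would record (address, size) plus a stack trace;
      // this sketch only counts tracked allocations.
      void NewObjectEvent(uintptr_t address, size_t size) {
        (void)address;
        (void)size;
        ++tracked_count_;
      }

     private:
      bool tracking_ = false;
      size_t tracked_count_ = 0;
    };

    void* AllocateWithHook(AllocationTracker* tracker, size_t size) {
      void* address = ::operator new(size);  // stand-in for the real fast path
      if (tracker != nullptr && tracker->is_tracking_allocations()) {
        tracker->NewObjectEvent(reinterpret_cast<uintptr_t>(address), size);
      }
      return address;
    }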
@@ -311,31 +324,36 @@
 
 
 MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) {
-  Address old_top = allocation_info_.top;
+  Address old_top = allocation_info_.top();
 #ifdef DEBUG
   // If we are stressing compaction we waste some memory in new space
   // in order to get more frequent GCs.
   if (FLAG_stress_compaction && !heap()->linear_allocation()) {
-    if (allocation_info_.limit - old_top >= size_in_bytes * 4) {
+    if (allocation_info_.limit() - old_top >= size_in_bytes * 4) {
       int filler_size = size_in_bytes * 4;
       for (int i = 0; i < filler_size; i += kPointerSize) {
         *(reinterpret_cast<Object**>(old_top + i)) =
             heap()->one_pointer_filler_map();
       }
       old_top += filler_size;
-      allocation_info_.top += filler_size;
+      allocation_info_.set_top(allocation_info_.top() + filler_size);
     }
   }
 #endif
 
-  if (allocation_info_.limit - old_top < size_in_bytes) {
+  if (allocation_info_.limit() - old_top < size_in_bytes) {
     return SlowAllocateRaw(size_in_bytes);
   }
 
-  Object* obj = HeapObject::FromAddress(old_top);
-  allocation_info_.top += size_in_bytes;
+  HeapObject* obj = HeapObject::FromAddress(old_top);
+  allocation_info_.set_top(allocation_info_.top() + size_in_bytes);
   ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
 
+  HeapProfiler* profiler = heap()->isolate()->heap_profiler();
+  if (profiler != NULL && profiler->is_tracking_allocations()) {
+    profiler->NewObjectEvent(obj->address(), size_in_bytes);
+  }
+
   return obj;
 }
 
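Note: apart from the profiler hooks, the recurring change in this revision is mechanical: direct field access on allocation_info_ (.top, .limit) becomes accessor calls (top(), limit(), set_top()). Funneling every write through a setter creates a single choke point where invariants can be checked. A minimal sketch of such a wrapper, assuming a simplified shape (V8's actual AllocationInfo differs):

    #include <cassert>
    #include <cstdint>

    typedef uintptr_t Address;

    // Sketch of an accessor-based allocation info record.
    class AllocationInfo {
     public:
      AllocationInfo(Address top, Address limit)
          : top_(top), limit_(limit) {}

      Address top() const { return top_; }
      Address limit() const { return limit_; }

      // Every write goes through set_top(), so bounds or alignment
      // invariants can be asserted in one place.
      void set_top(Address top) {
        assert(top <= limit_);
        top_ = top;
      }

      void set_limit(Address limit) { limit_ = limit; }

     private:
      Address top_;    // current allocation point
      Address limit_;  // end of the linear allocation area
    };

Usage then mirrors the diff: info.set_top(info.top() + size_in_bytes) in place of info.top += size_in_bytes.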