// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include "heap.h"
#include "isolate.h"
#include "list-inl.h"
#include "objects.h"
#include "platform.h"
#include "v8-counters.h"
#include "store-buffer.h"
#include "store-buffer-inl.h"

namespace v8 {
namespace internal {

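// Pushes a promoted object onto the promotion queue. Entries are written
// in front of rear_, two words per entry (target and size); once the queue
// has overflowed onto the emergency stack, new entries go there instead.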
void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    ASSERT(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
    ASSERT(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


void PromotionQueue::ActivateGuardIfOnTheSamePage() {
  guard_ = guard_ ||
      heap_->new_space()->active_space()->current_page()->address() ==
      GetHeadPage()->address();
}


MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
                                          PretenureFlag pretenure) {
  // Check for ASCII first since this is the common case.
  const char* start = str.start();
  int length = str.length();
  int non_ascii_start = String::NonAsciiStart(start, length);
  if (non_ascii_start >= length) {
    // If the string is ASCII, we do not need to convert the characters
    // since UTF8 is backwards compatible with ASCII.
    return AllocateStringFromOneByte(str, pretenure);
  }
  // Non-ASCII and we need to decode.
  return AllocateStringFromUtf8Slow(str, non_ascii_start, pretenure);
}


template<>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  // ASCII only check.
  return chars == str.length();
}


template<>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


MaybeObject* Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(
        Vector<const uint8_t>::cast(str), hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template<typename T>
MaybeObject* Heap::AllocateInternalizedStringImpl(
    T t, int chars, uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


MaybeObject* Heap::AllocateOneByteInternalizedString(Vector<const uint8_t> str,
                                                     uint32_t hash_field) {
  if (str.length() > SeqOneByteString::kMaxLength) {
    return Failure::OutOfMemoryException(0x2);
  }
  // Compute map and object size.
  Map* map = ascii_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  // String maps are all immortal immovable objects.
  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  OS::MemCopy(answer->address() + SeqOneByteString::kHeaderSize,
              str.start(), str.length());

  return answer;
}


MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                     uint32_t hash_field) {
  if (str.length() > SeqTwoByteString::kMaxLength) {
    return Failure::OutOfMemoryException(0x3);
  }
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  OS::MemCopy(answer->address() + SeqTwoByteString::kHeaderSize,
              str.start(), str.length() * kUC16Size);

  return answer;
}

MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
  return CopyFixedArrayWithMap(src, src->map());
}


MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


MaybeObject* Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  return CopyConstantPoolArrayWithMap(src, src->map());
}


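// Central allocation routine: tries to allocate size_in_bytes in the
// requested space. A failed new-space allocation falls back to retry_space
// when allocation must not fail (always_allocate()); a failure in any old
// generation space marks the old generation as exhausted.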
MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                               AllocationSpace space,
                               AllocationSpace retry_space) {
  ASSERT(AllowHandleAllocation::IsAllowed());
  ASSERT(AllowHeapAllocation::IsAllowed());
  ASSERT(gc_state_ == NOT_IN_GC);
  ASSERT(space != NEW_SPACE ||
         retry_space == OLD_POINTER_SPACE ||
         retry_space == OLD_DATA_SPACE ||
         retry_space == LO_SPACE);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure()) {
      space = retry_space;
    } else {
      return result;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    result = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    result = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


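// Number construction: return a Smi when the value fits in Smi range,
// otherwise allocate a HeapNumber (applies to this and the Uint32 variant
// below).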
MaybeObject* Heap::NumberFromInt32(
    int32_t value, PretenureFlag pretenure) {
  if (Smi::IsValid(value)) return Smi::FromInt(value);
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastI2D(value), pretenure);
}


MaybeObject* Heap::NumberFromUint32(
    uint32_t value, PretenureFlag pretenure) {
  if (static_cast<int32_t>(value) >= 0 &&
      Smi::IsValid(static_cast<int32_t>(value))) {
    return Smi::FromInt(static_cast<int32_t>(value));
  }
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastUI2D(value), pretenure);
}


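// Releases the embedder-supplied resource backing an external string, if it
// has not been disposed already.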
void Heap::FinalizeExternalString(String* string) {
  ASSERT(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                  // Either not in new space
         gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
         InToSpace(object));         // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) {
  return new_space_.Contains(address);
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldPointerSpace(Address address) {
  return old_pointer_space_->Contains(address);
}


bool Heap::InOldPointerSpace(Object* object) {
  return InOldPointerSpace(reinterpret_cast<Address>(object));
}


bool Heap::InOldDataSpace(Address address) {
  return old_data_space_->Contains(address);
}


bool Heap::InOldDataSpace(Object* object) {
  return InOldDataSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation or
  // - to space is already 25% full.
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
      (!page->ContainsLimit(age_mark) || old_address < age_mark);
  return below_mark || (new_space_.Size() + object_size) >=
                        (new_space_.EffectiveCapacity() >> 2);
}


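// Write-barrier bookkeeping: record the written slot(s) in the store buffer
// unless the object written to lives in new space (only old-to-new pointers
// need to be remembered).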
void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


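// Maps an object (via its instance type) to the old generation space it
// would be promoted into: old pointer space or old data space.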
OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  ASSERT(type != MAP_TYPE);
  ASSERT(type != CODE_TYPE);
  ASSERT(type != ODDBALL_TYPE);
  ASSERT(type != CELL_TYPE);
  ASSERT(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
    ASSERT(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}


bool Heap::AllowedToBeMigrated(HeapObject* object, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to one of the old spaces
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space, old-data-space and old-pointer-space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent invalid pattern.
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (object->map() == one_pointer_filler_map()) return false;
  InstanceType type = object->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == TargetSpaceId(type);
    case OLD_POINTER_SPACE:
      return dst == src && (dst == TargetSpaceId(type) || object->IsFiller());
    case OLD_DATA_SPACE:
      return dst == src && dst == TargetSpaceId(type);
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case CELL_SPACE:
    case PROPERTY_CELL_SPACE:
    case LO_SPACE:
      return false;
  }
  UNREACHABLE();
  return false;
}


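// Word-wise copy of byte_size bytes from src to dst; byte_size is expected
// to be pointer-size aligned. MoveBlock below additionally tolerates
// overlapping ranges.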
void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    OS::MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}


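// Updates the slot *p to point at the scavenged (copied or promoted) version
// of the object it currently references.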
void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    ASSERT(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  if (FLAG_trace_track_allocation_sites && object->IsJSObject()) {
    if (AllocationMemento::FindForJSObject(JSObject::cast(object), true) !=
        NULL) {
      object->GetIsolate()->heap()->allocation_mementos_found_++;
    }
  }

  // AllocationMementos are unrooted and shouldn't survive a scavenge
  ASSERT(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(space, collector, gc_reason, collector_reason);
}


MaybeObject* Heap::PrepareForCompare(String* str) {
  // Always flatten small strings and force flattening of long strings
  // after we have accumulated a certain amount we failed to flatten.
  static const int kMaxAlwaysFlattenLength = 32;
  static const int kFlattenLongThreshold = 16*KB;

  const int length = str->length();
  MaybeObject* obj = str->TryFlatten();
  if (length <= kMaxAlwaysFlattenLength ||
      unflattened_strings_length_ >= kFlattenLongThreshold) {
    return obj;
  }
  if (obj->IsFailure()) {
    unflattened_strings_length_ += length;
  }
  return str;
}


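// Accounts for memory allocated outside the V8 heap by the embedder. The
// running total is reset on over/underflow, and a full GC is requested when
// the amount allocated since the last global GC exceeds
// external_allocation_limit_.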
intptr_t Heap::AdjustAmountOfExternalAllocatedMemory(
    intptr_t change_in_bytes) {
  ASSERT(HasBeenSetUp());
  intptr_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes > 0) {
    // Avoid overflow.
    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;
    } else {
      // Give up and reset the counters in case of an overflow.
      amount_of_external_allocated_memory_ = 0;
      amount_of_external_allocated_memory_at_last_global_gc_ = 0;
    }
    intptr_t amount_since_last_global_gc = PromotedExternalMemorySize();
    if (amount_since_last_global_gc > external_allocation_limit_) {
      CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
    }
  } else {
    // Avoid underflow.
    if (amount >= 0) {
      amount_of_external_allocated_memory_ = amount;
    } else {
      // Give up and reset the counters in case of an underflow.
      amount_of_external_allocated_memory_ = 0;
      amount_of_external_allocated_memory_at_last_global_gc_ = 0;
    }
  }
  if (FLAG_trace_external_memory) {
    PrintPID("%8.0f ms: ", isolate()->time_millis_since_init());
    PrintF("Adjust amount of external memory: delta=%6" V8_PTR_PREFIX "d KB, "
           "amount=%6" V8_PTR_PREFIX "d KB, since_gc=%6" V8_PTR_PREFIX "d KB, "
           "isolate=0x%08" V8PRIxPTR ".\n",
           change_in_bytes / KB,
           amount_of_external_allocated_memory_ / KB,
           PromotedExternalMemorySize() / KB,
           reinterpret_cast<intptr_t>(isolate()));
  }
  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
}


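// Recovers the owning Isolate from the Heap address alone: the expression
// computes the offset of the heap_ field inside Isolate (using a fake
// Isolate* at address 4) and subtracts it from 'this'.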
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}


#ifdef DEBUG
#define GC_GREEDY_CHECK(ISOLATE) \
  if (FLAG_gc_greedy) (ISOLATE)->heap()->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK(ISOLATE) { }
#endif

// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY, OOM)\
  do {                                                                         \
    GC_GREEDY_CHECK(ISOLATE);                                                  \
    MaybeObject* __maybe_object__ = FUNCTION_CALL;                             \
    Object* __object__ = NULL;                                                 \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                 \
    if (__maybe_object__->IsOutOfMemory()) {                                   \
      OOM;                                                                     \
    }                                                                          \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                     \
    (ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)->        \
                                    allocation_space(),                        \
                                    "allocation failure");                     \
    __maybe_object__ = FUNCTION_CALL;                                          \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                 \
    if (__maybe_object__->IsOutOfMemory()) {                                   \
      OOM;                                                                     \
    }                                                                          \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                     \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();         \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");           \
    {                                                                          \
      AlwaysAllocateScope __scope__;                                           \
      __maybe_object__ = FUNCTION_CALL;                                        \
    }                                                                          \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                 \
    if (__maybe_object__->IsOutOfMemory()) {                                   \
      OOM;                                                                     \
    }                                                                          \
    if (__maybe_object__->IsRetryAfterGC()) {                                  \
      /* TODO(1181417): Fix this. */                                           \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);  \
    }                                                                          \
    RETURN_EMPTY;                                                              \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(                                             \
     ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)                   \
  CALL_AND_RETRY(                                                          \
      ISOLATE,                                                             \
      FUNCTION_CALL,                                                       \
      RETURN_VALUE,                                                        \
      RETURN_EMPTY,                                                        \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY", true))

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE,                                              \
                        FUNCTION_CALL,                                        \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())                                \


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL)  \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)


#define CALL_HEAP_FUNCTION_PASS_EXCEPTION(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY(ISOLATE,                                         \
                 FUNCTION_CALL,                                   \
                 return __object__,                               \
                 return __maybe_object__,                         \
                 return __maybe_object__)


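// Registers an external string, keeping strings that live in new space and
// old space on separate lists.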
void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    ASSERT(heap_->InNewSpace(obj));
    ASSERT(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    ASSERT(!heap_->InNewSpace(obj));
    ASSERT(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}


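// Returns a HeapNumber holding the cached result of the given transcendental
// operation for 'input'; the per-operation SubCache is created lazily on
// first use.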
MaybeObject* TranscendentalCache::Get(Type type, double input) {
  SubCache* cache = caches_[type];
  if (cache == NULL) {
    caches_[type] = cache = new SubCache(isolate_, type);
  }
  return cache->Get(input);
}


Address TranscendentalCache::cache_array_address() {
  return reinterpret_cast<Address>(caches_);
}


double TranscendentalCache::SubCache::Calculate(double input) {
  switch (type_) {
    case ACOS:
      return acos(input);
    case ASIN:
      return asin(input);
    case ATAN:
      return atan(input);
    case COS:
      return fast_cos(input);
    case EXP:
      return exp(input);
    case LOG:
      return fast_log(input);
    case SIN:
      return fast_sin(input);
    case TAN:
      return fast_tan(input);
    default:
      return 0.0;  // Never happens.
  }
}


MaybeObject* TranscendentalCache::SubCache::Get(double input) {
  Converter c;
  c.dbl = input;
  int hash = Hash(c);
  Element e = elements_[hash];
  if (e.in[0] == c.integers[0] &&
      e.in[1] == c.integers[1]) {
    ASSERT(e.output != NULL);
    isolate_->counters()->transcendental_cache_hit()->Increment();
    return e.output;
  }
  double answer = Calculate(input);
  isolate_->counters()->transcendental_cache_miss()->Increment();
  Object* heap_number;
  { MaybeObject* maybe_heap_number =
        isolate_->heap()->AllocateHeapNumber(answer);
    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
  }
  elements_[hash].in[0] = c.integers[0];
  elements_[hash].in[1] = c.integers[1];
  elements_[hash].output = heap_number;
  return heap_number;
}


AlwaysAllocateScope::AlwaysAllocateScope() {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  Isolate* isolate = Isolate::Current();
  ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0);
  isolate->heap()->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->always_allocate_scope_depth_--;
  ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0);
}


#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
#endif


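// Verification helper: checks that every heap object pointer in
// [start, end) points into the heap and has a valid map.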
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


double GCTracer::SizeOfHeapObjects() {
  return (static_cast<double>(heap_->SizeOfObjects())) / MB;
}


DisallowAllocationFailure::DisallowAllocationFailure() {
#ifdef DEBUG
  Isolate* isolate = Isolate::Current();
  old_state_ = isolate->heap()->disallow_allocation_failure_;
  isolate->heap()->disallow_allocation_failure_ = true;
#endif
}


DisallowAllocationFailure::~DisallowAllocationFailure() {
#ifdef DEBUG
  Isolate* isolate = Isolate::Current();
  isolate->heap()->disallow_allocation_failure_ = old_state_;
#endif
}


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_