main_repo / deps / v8 / src / heap.h @ f230a1cf

// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_H_
#define V8_HEAP_H_

#include <cmath>

#include "allocation.h"
#include "assert-scope.h"
#include "globals.h"
#include "incremental-marking.h"
#include "list.h"
#include "mark-compact.h"
#include "objects-visiting.h"
#include "spaces.h"
#include "splay-tree-inl.h"
#include "store-buffer.h"
#include "v8-counters.h"
#include "v8globals.h"

namespace v8 {
namespace internal {

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V)                                                    \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, free_space_map, FreeSpaceMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top.    */ \
  V(Smi, store_buffer_top, StoreBufferTop)                                     \
  V(Oddball, undefined_value, UndefinedValue)                                  \
  V(Oddball, the_hole_value, TheHoleValue)                                     \
  V(Oddball, null_value, NullValue)                                            \
  V(Oddball, true_value, TrueValue)                                            \
  V(Oddball, false_value, FalseValue)                                          \
  V(Oddball, uninitialized_value, UninitializedValue)                          \
  V(Map, cell_map, CellMap)                                                    \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, meta_map, MetaMap)                                                    \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, native_context_map, NativeContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, scope_info_map, ScopeInfoMap)                                         \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
  V(Map, constant_pool_array_map, ConstantPoolArrayMap)                        \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Oddball, arguments_marker, ArgumentsMarker)                                \
  /* The roots above this line should be boring from a GC point of view.    */ \
  /* This means they are never in new space and never on a page that is     */ \
  /* being compacted.                                                       */ \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, string_split_cache, StringSplitCache)                          \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache)                    \
  V(Object, termination_exception, TerminationException)                       \
  V(Smi, hash_seed, HashSeed)                                                  \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, string_map, StringMap)                                                \
  V(Map, ascii_string_map, AsciiStringMap)                                     \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
  V(Map, sliced_string_map, SlicedStringMap)                                   \
  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap)                        \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map,                                                                       \
    external_string_with_one_byte_data_map,                                    \
    ExternalStringWithOneByteDataMap)                                          \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
  V(Map, short_external_string_map, ShortExternalStringMap)                    \
  V(Map,                                                                       \
    short_external_string_with_one_byte_data_map,                              \
    ShortExternalStringWithOneByteDataMap)                                     \
  V(Map, internalized_string_map, InternalizedStringMap)                       \
  V(Map, ascii_internalized_string_map, AsciiInternalizedStringMap)            \
  V(Map, cons_internalized_string_map, ConsInternalizedStringMap)              \
  V(Map, cons_ascii_internalized_string_map, ConsAsciiInternalizedStringMap)   \
  V(Map,                                                                       \
    external_internalized_string_map,                                          \
    ExternalInternalizedStringMap)                                             \
  V(Map,                                                                       \
    external_internalized_string_with_one_byte_data_map,                       \
    ExternalInternalizedStringWithOneByteDataMap)                              \
  V(Map,                                                                       \
    external_ascii_internalized_string_map,                                    \
    ExternalAsciiInternalizedStringMap)                                        \
  V(Map,                                                                       \
    short_external_internalized_string_map,                                    \
    ShortExternalInternalizedStringMap)                                        \
  V(Map,                                                                       \
    short_external_internalized_string_with_one_byte_data_map,                 \
    ShortExternalInternalizedStringWithOneByteDataMap)                         \
  V(Map,                                                                       \
    short_external_ascii_internalized_string_map,                              \
    ShortExternalAsciiInternalizedStringMap)                                   \
  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap)         \
  V(Map, undetectable_string_map, UndetectableStringMap)                       \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
  V(Map, external_byte_array_map, ExternalByteArrayMap)                        \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)       \
  V(Map, external_short_array_map, ExternalShortArrayMap)                      \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap)     \
  V(Map, external_int_array_map, ExternalIntArrayMap)                          \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)         \
  V(Map, external_float_array_map, ExternalFloatArrayMap)                      \
  V(Map, external_double_array_map, ExternalDoubleArrayMap)                    \
  V(Map, external_pixel_array_map, ExternalPixelArrayMap)                      \
  V(ExternalArray, empty_external_byte_array,                                  \
      EmptyExternalByteArray)                                                  \
  V(ExternalArray, empty_external_unsigned_byte_array,                         \
      EmptyExternalUnsignedByteArray)                                          \
  V(ExternalArray, empty_external_short_array, EmptyExternalShortArray)        \
  V(ExternalArray, empty_external_unsigned_short_array,                        \
      EmptyExternalUnsignedShortArray)                                         \
  V(ExternalArray, empty_external_int_array, EmptyExternalIntArray)            \
  V(ExternalArray, empty_external_unsigned_int_array,                          \
      EmptyExternalUnsignedIntArray)                                           \
  V(ExternalArray, empty_external_float_array, EmptyExternalFloatArray)        \
  V(ExternalArray, empty_external_double_array, EmptyExternalDoubleArray)      \
  V(ExternalArray, empty_external_pixel_array,                                 \
      EmptyExternalPixelArray)                                                 \
  V(Map, non_strict_arguments_elements_map, NonStrictArgumentsElementsMap)     \
  V(Map, function_context_map, FunctionContextMap)                             \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, with_context_map, WithContextMap)                                     \
  V(Map, block_context_map, BlockContextMap)                                   \
  V(Map, module_context_map, ModuleContextMap)                                 \
  V(Map, global_context_map, GlobalContextMap)                                 \
  V(Map, oddball_map, OddballMap)                                              \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, foreign_map, ForeignMap)                                              \
  V(HeapNumber, nan_value, NanValue)                                           \
  V(HeapNumber, infinity_value, InfinityValue)                                 \
  V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
  V(Map, neander_map, NeanderMap)                                              \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache)      \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache)        \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(Smi, last_script_id, LastScriptId)                                         \
  V(Script, empty_script, EmptyScript)                                         \
  V(Smi, real_stack_limit, RealStackLimit)                                     \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames)          \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)           \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)                 \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)                 \
  V(JSObject, observation_state, ObservationState)                             \
  V(Map, external_map, ExternalMap)                                            \
  V(Symbol, frozen_symbol, FrozenSymbol)                                       \
  V(Symbol, elements_transition_symbol, ElementsTransitionSymbol)              \
  V(SeededNumberDictionary, empty_slow_element_dictionary,                     \
      EmptySlowElementDictionary)                                              \
  V(Symbol, observed_symbol, ObservedSymbol)

#define ROOT_LIST(V)                                  \
  STRONG_ROOT_LIST(V)                                 \
  V(StringTable, string_table, StringTable)
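
// Usage note (illustrative sketch, not part of the original header): lists
// like ROOT_LIST(V) follow the X-macro idiom, where a client redefines V,
// applies the list, and undefines V again. Assuming the usual pattern, a
// typed accessor for every root could be generated like this:
//
//   #define ROOT_ACCESSOR(type, name, camel_name)              \
//     inline type* name() {                                    \
//       return type::cast(roots_[k##camel_name##RootIndex]);   \
//     }
//   ROOT_LIST(ROOT_ACCESSOR)
//   #undef ROOT_ACCESSOR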

#define INTERNALIZED_STRING_LIST(V)                                      \
  V(Array_string, "Array")                                               \
  V(Object_string, "Object")                                             \
  V(proto_string, "__proto__")                                           \
  V(arguments_string, "arguments")                                       \
  V(Arguments_string, "Arguments")                                       \
  V(call_string, "call")                                                 \
  V(apply_string, "apply")                                               \
  V(caller_string, "caller")                                             \
  V(boolean_string, "boolean")                                           \
  V(Boolean_string, "Boolean")                                           \
  V(callee_string, "callee")                                             \
  V(constructor_string, "constructor")                                   \
  V(result_string, ".result")                                            \
  V(dot_for_string, ".for.")                                             \
  V(eval_string, "eval")                                                 \
  V(empty_string, "")                                                    \
  V(function_string, "function")                                         \
  V(length_string, "length")                                             \
  V(module_string, "module")                                             \
  V(name_string, "name")                                                 \
  V(native_string, "native")                                             \
  V(null_string, "null")                                                 \
  V(number_string, "number")                                             \
  V(Number_string, "Number")                                             \
  V(nan_string, "NaN")                                                   \
  V(RegExp_string, "RegExp")                                             \
  V(source_string, "source")                                             \
  V(global_string, "global")                                             \
  V(ignore_case_string, "ignoreCase")                                    \
  V(multiline_string, "multiline")                                       \
  V(input_string, "input")                                               \
  V(index_string, "index")                                               \
  V(last_index_string, "lastIndex")                                      \
  V(object_string, "object")                                             \
  V(literals_string, "literals")                                         \
  V(prototype_string, "prototype")                                       \
  V(string_string, "string")                                             \
  V(String_string, "String")                                             \
  V(symbol_string, "symbol")                                             \
  V(Symbol_string, "Symbol")                                             \
  V(Date_string, "Date")                                                 \
  V(this_string, "this")                                                 \
  V(to_string_string, "toString")                                        \
  V(char_at_string, "CharAt")                                            \
  V(undefined_string, "undefined")                                       \
  V(value_of_string, "valueOf")                                          \
  V(stack_string, "stack")                                               \
  V(toJSON_string, "toJSON")                                             \
  V(InitializeVarGlobal_string, "InitializeVarGlobal")                   \
  V(InitializeConstGlobal_string, "InitializeConstGlobal")               \
  V(KeyedLoadElementMonomorphic_string,                                  \
    "KeyedLoadElementMonomorphic")                                       \
  V(KeyedStoreElementMonomorphic_string,                                 \
    "KeyedStoreElementMonomorphic")                                      \
  V(stack_overflow_string, "kStackOverflowBoilerplate")                  \
  V(illegal_access_string, "illegal access")                             \
  V(illegal_execution_state_string, "illegal execution state")           \
  V(get_string, "get")                                                   \
  V(set_string, "set")                                                   \
  V(map_field_string, "%map")                                            \
  V(elements_field_string, "%elements")                                  \
  V(length_field_string, "%length")                                      \
  V(cell_value_string, "%cell_value")                                    \
  V(function_class_string, "Function")                                   \
  V(illegal_argument_string, "illegal argument")                         \
  V(MakeReferenceError_string, "MakeReferenceError")                     \
  V(MakeSyntaxError_string, "MakeSyntaxError")                           \
  V(MakeTypeError_string, "MakeTypeError")                               \
  V(invalid_lhs_in_assignment_string, "invalid_lhs_in_assignment")       \
  V(invalid_lhs_in_for_in_string, "invalid_lhs_in_for_in")               \
  V(invalid_lhs_in_postfix_op_string, "invalid_lhs_in_postfix_op")       \
  V(invalid_lhs_in_prefix_op_string, "invalid_lhs_in_prefix_op")         \
  V(illegal_return_string, "illegal_return")                             \
  V(illegal_break_string, "illegal_break")                               \
  V(illegal_continue_string, "illegal_continue")                         \
  V(unknown_label_string, "unknown_label")                               \
  V(redeclaration_string, "redeclaration")                               \
  V(space_string, " ")                                                   \
  V(exec_string, "exec")                                                 \
  V(zero_string, "0")                                                    \
  V(global_eval_string, "GlobalEval")                                    \
  V(identity_hash_string, "v8::IdentityHash")                            \
  V(closure_string, "(closure)")                                         \
  V(use_strict_string, "use strict")                                     \
  V(dot_string, ".")                                                     \
  V(anonymous_function_string, "(anonymous function)")                   \
  V(compare_ic_string, "==")                                             \
  V(strict_compare_ic_string, "===")                                     \
  V(infinity_string, "Infinity")                                         \
  V(minus_infinity_string, "-Infinity")                                  \
  V(hidden_stack_trace_string, "v8::hidden_stack_trace")                 \
  V(query_colon_string, "(?:)")                                          \
  V(Generator_string, "Generator")                                       \
  V(throw_string, "throw")                                               \
  V(done_string, "done")                                                 \
  V(value_string, "value")                                               \
  V(next_string, "next")                                                 \
  V(byte_length_string, "byteLength")                                    \
  V(byte_offset_string, "byteOffset")                                    \
  V(buffer_string, "buffer")

// Forward declarations.
class GCTracer;
class HeapStats;
class Isolate;
class WeakObjectRetainer;


typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);
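
// Illustrative sketch (an assumption, not from this header): during a
// scavenge, an ExternalStringTableUpdaterCallback is handed the location of a
// pointer to an external string and returns the string's new address, or NULL
// if the string did not survive the collection:
//
//   static String* UpdateReferenceSketch(Heap* heap, Object** p) {
//     MapWord first_word = HeapObject::cast(*p)->map_word();
//     if (!first_word.IsForwardingAddress()) return NULL;  // Not copied: dead.
//     return String::cast(first_word.ToForwardingAddress());
//   }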

class StoreBufferRebuilder {
 public:
  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer) {
  }

  void Callback(MemoryChunk* page, StoreBufferEvent event);

 private:
  StoreBuffer* store_buffer_;

  // We record in this variable how full the store buffer was when we started
  // iterating over the current page, finding pointers to new space.  If the
  // store buffer overflows again we can exempt the page from the store buffer
  // by rewinding to this point instead of having to search the store buffer.
  Object*** start_of_current_page_;
  // The current page we are scanning in the store buffer iterator.
  MemoryChunk* current_page_;
};



// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(NULL),
        rear_(NULL),
        limit_(NULL),
        emergency_stack_(0),
        heap_(heap) { }

  void Initialize();

  void Destroy() {
    ASSERT(is_empty());
    delete emergency_stack_;
    emergency_stack_ = NULL;
  }

  inline void ActivateGuardIfOnTheSamePage();

  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  }

  void SetNewLimit(Address limit) {
    if (!guard_) {
      return;
    }

    ASSERT(GetHeadPage() == Page::FromAllocationTop(limit));
    limit_ = reinterpret_cast<intptr_t*>(limit);

    if (limit_ <= rear_) {
      return;
    }

    RelocateQueueHead();
  }

  bool is_empty() {
    return (front_ == rear_) &&
        (emergency_stack_ == NULL || emergency_stack_->length() == 0);
  }

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {
    ASSERT(!is_empty());
    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();
      *target = e.obj_;
      *size = e.size_;
      return;
    }

    if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
      NewSpacePage* front_page =
          NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
      ASSERT(!front_page->prev_page()->is_anchor());
      front_ =
          reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
    }
    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));
  }

 private:
  // The front of the queue is higher in the memory page chain than the rear.
  intptr_t* front_;
  intptr_t* rear_;
  intptr_t* limit_;

  bool guard_;

  static const int kEntrySizeInWords = 2;

  struct Entry {
    Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }

    HeapObject* obj_;
    int size_;
  };
  List<Entry>* emergency_stack_;

  Heap* heap_;

  void RelocateQueueHead();

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
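
// Illustrative note (an assumption, not part of the original header): each
// queue entry occupies kEntrySizeInWords (= 2) words, and the queue grows
// downward in memory, so insert() conceptually performs
//
//   *(--rear_) = reinterpret_cast<intptr_t>(target);
//   *(--rear_) = size;
//
// while remove() above pops the same two words back through *(--front_).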


typedef void (*ScavengingCallback)(Map* map,
                                   HeapObject** slot,
                                   HeapObject* object);


// The external strings table is where all external strings are
// registered.  We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable {
 public:
  // Registers an external string.
  inline void AddString(String* string);

  inline void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  void CleanUp();

  // Destroys all allocated memory.
  void TearDown();

 private:
  ExternalStringTable() { }

  friend class Heap;

  inline void Verify();

  inline void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline void ShrinkNewStrings(int position);

  // To speed up scavenge collections new space strings are kept
  // separate from old space strings.
  List<Object*> new_space_strings_;
  List<Object*> old_space_strings_;

  Heap* heap_;

  DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
};
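
// Illustrative sketch (an assumption, not from this header): AddString would
// dispatch on the string's location so that scavenges only have to visit the
// new-space list, roughly:
//
//   void ExternalStringTable::AddString(String* string) {
//     ASSERT(string->IsExternalString());
//     if (heap_->InNewSpace(string)) {
//       new_space_strings_.Add(string);
//     } else {
//       old_space_strings_.Add(string);
//     }
//   }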


enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};


class Heap {
 public:
  // Configure heap size before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(int max_semispace_size,
                     intptr_t max_old_gen_size,
                     intptr_t max_executable_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.
  bool SetUp();

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();

  // Returns the maximum amount of memory reserved for the heap.  For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space.  The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
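  // Worked example (illustrative numbers only): with an 8 MB reserved
  // semispace and a 700 MB old generation limit, MaxReserved() returns
  // 4 * 8 MB + 700 MB = 732 MB.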
  int MaxSemiSpaceSize() { return max_semispace_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }
  int MaxRegularSpaceAllocationSize() { return InitialSemiSpaceSize() * 4/5; }

  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();

  // Return the starting address and a mask for the new space.  And-masking an
  // address with the mask will result in the start address of the new space
  // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_pointer_space() { return old_pointer_space_; }
  OldSpace* old_data_space() { return old_data_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  CellSpace* cell_space() { return cell_space_; }
  PropertyCellSpace* property_cell_space() {
    return property_cell_space_;
  }
  LargeObjectSpace* lo_space() { return lo_space_; }
  PagedSpace* paged_space(int idx) {
    switch (idx) {
      case OLD_POINTER_SPACE:
        return old_pointer_space();
      case OLD_DATA_SPACE:
        return old_data_space();
      case MAP_SPACE:
        return map_space();
      case CELL_SPACE:
        return cell_space();
      case PROPERTY_CELL_SPACE:
        return property_cell_space();
      case CODE_SPACE:
        return code_space();
      case NEW_SPACE:
      case LO_SPACE:
        UNREACHABLE();
    }
    return NULL;
  }

  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  Address always_allocate_scope_depth_address() {
    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
  }
  bool linear_allocation() {
    return linear_allocation_scope_depth_ != 0;
  }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  Address* OldPointerSpaceAllocationTopAddress() {
    return old_pointer_space_->allocation_top_address();
  }
  Address* OldPointerSpaceAllocationLimitAddress() {
    return old_pointer_space_->allocation_limit_address();
  }

  Address* OldDataSpaceAllocationTopAddress() {
    return old_data_space_->allocation_top_address();
  }
  Address* OldDataSpaceAllocationLimitAddress() {
    return old_data_space_->allocation_limit_address();
  }

  // Uncommit unused semi space.
  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSObject(
      JSFunction* constructor,
      PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT MaybeObject* AllocateJSObjectWithAllocationSite(
      JSFunction* constructor,
      Handle<AllocationSite> allocation_site);

  MUST_USE_RESULT MaybeObject* AllocateJSGeneratorObject(
      JSFunction* function);

  MUST_USE_RESULT MaybeObject* AllocateJSModule(Context* context,
                                                ScopeInfo* scope_info);

  // Allocate a JSArray with no elements
  MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray(
      ElementsKind elements_kind,
      PretenureFlag pretenure = NOT_TENURED) {
    return AllocateJSArrayAndStorage(elements_kind, 0, 0,
                                     DONT_INITIALIZE_ARRAY_ELEMENTS,
                                     pretenure);
  }

  // Allocate a JSArray with a specified length but elements that are left
  // uninitialized.
  MUST_USE_RESULT MaybeObject* AllocateJSArrayAndStorage(
      ElementsKind elements_kind,
      int length,
      int capacity,
      ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS,
      PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT MaybeObject* AllocateJSArrayStorage(
      JSArray* array,
      int length,
      int capacity,
      ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS);

  // Allocate a JSArray with the given elements
  MUST_USE_RESULT MaybeObject* AllocateJSArrayWithElements(
      FixedArrayBase* array_base,
      ElementsKind elements_kind,
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Returns failure if allocation failed.
  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source,
                                            AllocationSite* site = NULL);

  // Allocates the function prototype.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);

  // Allocates a JS ArrayBuffer object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSArrayBuffer();

  // Allocates a Harmony proxy or function proxy.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSProxy(Object* handler,
                                               Object* prototype);

  MUST_USE_RESULT MaybeObject* AllocateJSFunctionProxy(Object* handler,
                                                       Object* call_trap,
                                                       Object* construct_trap,
                                                       Object* prototype);

  // Reinitialize a JSReceiver into an (empty) JS object of respective type and
  // size, but keeping the original prototype.  The receiver must have at least
  // the size of the new object.  The object is reinitialized and behaves as an
  // object that has been freshly allocated.
  // Returns failure if an error occurred, otherwise object.
  MUST_USE_RESULT MaybeObject* ReinitializeJSReceiver(JSReceiver* object,
                                                      InstanceType type,
                                                      int size);

  // Reinitialize a JSGlobalProxy based on a constructor.  The object
  // must have the same size as objects allocated using the
  // constructor.  The object is reinitialized and behaves as an
  // object that has been freshly allocated using the constructor.
  MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
      JSFunction* constructor, JSGlobalProxy* global);

  // Allocates and initializes a new JavaScript object based on a map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
      Map* map, PretenureFlag pretenure = NOT_TENURED, bool alloc_props = true);

  MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMapWithAllocationSite(
      Map* map, Handle<AllocationSite> allocation_site);

  // Allocates a heap object based on the map.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);

  MUST_USE_RESULT MaybeObject* AllocateWithAllocationSite(Map* map,
      AllocationSpace space, Handle<AllocationSite> allocation_site);

  // Allocates a JS Map in the heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateMap(
      InstanceType instance_type,
      int instance_size,
      ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
                                                  int instance_size);

  // Allocate a map for the specified function
  MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);

  // Allocates an empty code cache.
  MUST_USE_RESULT MaybeObject* AllocateCodeCache();

  // Allocates a serialized scope info.
  MUST_USE_RESULT MaybeObject* AllocateScopeInfo(int length);

  // Allocates an External object for v8's external API.
  MUST_USE_RESULT MaybeObject* AllocateExternal(void* value);

  // Allocates an empty PolymorphicCodeCache.
  MUST_USE_RESULT MaybeObject* AllocatePolymorphicCodeCache();

  // Allocates a pre-tenured empty AccessorPair.
  MUST_USE_RESULT MaybeObject* AllocateAccessorPair();

  // Allocates an empty TypeFeedbackInfo.
  MUST_USE_RESULT MaybeObject* AllocateTypeFeedbackInfo();

  // Allocates an AliasedArgumentsEntry.
  MUST_USE_RESULT MaybeObject* AllocateAliasedArgumentsEntry(int slot);

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // For use during bootup.
  void RepairFreeListsAfterBoot();

  // Allocates and fully initializes a String.  There are two String
  // encodings: ASCII and two byte. One should choose between the three string
  // allocation functions based on the encoding of the string buffer used to
  // initialize the string.
  //   - ...FromAscii initializes the string from a buffer that is ASCII
  //     encoded (it does not check that the buffer is ASCII encoded) and the
  //     result will be ASCII encoded.
  //   - ...FromUTF8 initializes the string from a buffer that is UTF-8
  //     encoded.  If the characters are all single-byte characters, the
  //     result will be ASCII encoded, otherwise it will be converted to two
  //     byte.
  //   - ...FromTwoByte initializes the string from a buffer that is two-byte
  //     encoded.  If the characters are all single-byte characters, the
  //     result will be converted to ASCII, otherwise it will be left as
  //     two-byte.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateStringFromOneByte(
      Vector<const uint8_t> str,
      PretenureFlag pretenure = NOT_TENURED);
  // TODO(dcarney): remove this function.
  MUST_USE_RESULT inline MaybeObject* AllocateStringFromOneByte(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED) {
    return AllocateStringFromOneByte(Vector<const uint8_t>::cast(str),
                                     pretenure);
  }
  MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
      Vector<const char> str,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
      Vector<const char> str,
      int non_ascii_start,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
      Vector<const uc16> str,
      PretenureFlag pretenure = NOT_TENURED);
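
  // Caller-side sketch (an assumed idiom, not part of this header): these
  // allocators return a MaybeObject* wrapping either the new object or a
  // retry-after-GC failure, which callers typically unwrap and propagate:
  //
  //   Object* result;
  //   { MaybeObject* maybe = heap->AllocateStringFromOneByte(str);
  //     if (!maybe->ToObject(&result)) return maybe;  // Retry after GC.
  //   }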
811

    
812
  // Allocates an internalized string in old space based on the character
813
  // stream. Returns Failure::RetryAfterGC(requested_bytes, space) if the
814
  // allocation failed.
815
  // Please note this function does not perform a garbage collection.
816
  MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringFromUtf8(
817
      Vector<const char> str,
818
      int chars,
819
      uint32_t hash_field);
820

    
821
  MUST_USE_RESULT inline MaybeObject* AllocateOneByteInternalizedString(
822
        Vector<const uint8_t> str,
823
        uint32_t hash_field);
824

    
825
  MUST_USE_RESULT inline MaybeObject* AllocateTwoByteInternalizedString(
826
        Vector<const uc16> str,
827
        uint32_t hash_field);
828

    
829
  template<typename T>
830
  static inline bool IsOneByte(T t, int chars);
831

    
832
  template<typename T>
833
  MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringImpl(
834
      T t, int chars, uint32_t hash_field);
835

    
836
  template<bool is_one_byte, typename T>
837
  MUST_USE_RESULT MaybeObject* AllocateInternalizedStringImpl(
838
      T t, int chars, uint32_t hash_field);
839

    
840
  // Allocates and partially initializes a String.  There are two String
841
  // encodings: ASCII and two byte.  These functions allocate a string of the
842
  // given length and set its map and length fields.  The characters of the
843
  // string are uninitialized.
844
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
845
  // failed.
846
  // Please note this does not perform a garbage collection.
847
  MUST_USE_RESULT MaybeObject* AllocateRawOneByteString(
848
      int length,
849
      PretenureFlag pretenure = NOT_TENURED);
850
  MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString(
851
      int length,
852
      PretenureFlag pretenure = NOT_TENURED);
853

    
854
  // Computes a single character string where the character has code.
855
  // A cache is used for ASCII codes.
856
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
857
  // failed. Please note this does not perform a garbage collection.
858
  MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
859
      uint16_t code);
860

    
861
  // Allocate a byte array of the specified length
862
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
863
  // failed.
864
  // Please note this does not perform a garbage collection.
865
  MUST_USE_RESULT MaybeObject* AllocateByteArray(
866
      int length,
867
      PretenureFlag pretenure = NOT_TENURED);
868

    
869
  // Allocates an external array of the specified length and type.
870
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
871
  // failed.
872
  // Please note this does not perform a garbage collection.
873
  MUST_USE_RESULT MaybeObject* AllocateExternalArray(
874
      int length,
875
      ExternalArrayType array_type,
876
      void* external_pointer,
877
      PretenureFlag pretenure);
878

    
879
  // Allocate a symbol in old space.
880
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
881
  // failed.
882
  // Please note this does not perform a garbage collection.
883
  MUST_USE_RESULT MaybeObject* AllocateSymbol();
884

    
885
  // Allocate a tenured AllocationSite. It's payload is null
886
  MUST_USE_RESULT MaybeObject* AllocateAllocationSite();
887

    
888
  // Allocates a fixed array initialized with undefined values
889
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
890
  // failed.
891
  // Please note this does not perform a garbage collection.
892
  MUST_USE_RESULT MaybeObject* AllocateFixedArray(
893
      int length,
894
      PretenureFlag pretenure = NOT_TENURED);
895

    
896
  // Allocates an uninitialized fixed array. It must be filled by the caller.
897
  //
898
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
899
  // failed.
900
  // Please note this does not perform a garbage collection.
901
  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);
902

    
903
  // Move len elements within a given array from src_index index to dst_index
904
  // index.
905
  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);
906

    
907
  // Make a copy of src and return it. Returns
908
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
909
  MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);
910

    
911
  // Make a copy of src, set the map, and return the copy. Returns
912
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
913
  MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
914

    
915
  // Make a copy of src and return it. Returns
916
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
917
  MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray(
918
      FixedDoubleArray* src);
919

    
920
  // Make a copy of src, set the map, and return the copy. Returns
921
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
922
  MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap(
923
      FixedDoubleArray* src, Map* map);
924

    
925
  // Make a copy of src and return it. Returns
926
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
927
  MUST_USE_RESULT inline MaybeObject* CopyConstantPoolArray(
928
      ConstantPoolArray* src);
929

    
930
  // Make a copy of src, set the map, and return the copy. Returns
931
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
932
  MUST_USE_RESULT MaybeObject* CopyConstantPoolArrayWithMap(
933
      ConstantPoolArray* src, Map* map);
934

    
935
  // Allocates a fixed array initialized with the hole values.
936
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
937
  // failed.
938
  // Please note this does not perform a garbage collection.
939
  MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles(
940
      int length,
941
      PretenureFlag pretenure = NOT_TENURED);
942

    
943
  MUST_USE_RESULT MaybeObject* AllocateConstantPoolArray(
944
      int first_int64_index,
945
      int first_ptr_index,
946
      int first_int32_index);
947

    
948
  // Allocates a fixed double array with uninitialized values. Returns
949
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
950
  // Please note this does not perform a garbage collection.
951
  MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedDoubleArray(
952
      int length,
953
      PretenureFlag pretenure = NOT_TENURED);
954

    
955
  // Allocates a fixed double array with hole values. Returns
956
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
957
  // Please note this does not perform a garbage collection.
958
  MUST_USE_RESULT MaybeObject* AllocateFixedDoubleArrayWithHoles(
959
      int length,
960
      PretenureFlag pretenure = NOT_TENURED);
961

    
962
  // AllocateHashTable is identical to AllocateFixedArray except
963
  // that the resulting object has hash_table_map as map.
964
  MUST_USE_RESULT MaybeObject* AllocateHashTable(
965
      int length, PretenureFlag pretenure = NOT_TENURED);
966

    
967
  // Allocate a native (but otherwise uninitialized) context.
968
  MUST_USE_RESULT MaybeObject* AllocateNativeContext();
969

    
970
  // Allocate a global context.
971
  MUST_USE_RESULT MaybeObject* AllocateGlobalContext(JSFunction* function,
972
                                                     ScopeInfo* scope_info);
973

    
974
  // Allocate a module context.
975
  MUST_USE_RESULT MaybeObject* AllocateModuleContext(ScopeInfo* scope_info);
976

    
977
  // Allocate a function context.
978
  MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
979
                                                       JSFunction* function);
980

    
981
  // Allocate a catch context.
982
  MUST_USE_RESULT MaybeObject* AllocateCatchContext(JSFunction* function,
983
                                                    Context* previous,
984
                                                    String* name,
985
                                                    Object* thrown_object);
986
  // Allocate a 'with' context.
987
  MUST_USE_RESULT MaybeObject* AllocateWithContext(JSFunction* function,
988
                                                   Context* previous,
989
                                                   JSReceiver* extension);
990

    
991
  // Allocate a block context.
992
  MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function,
993
                                                    Context* previous,
994
                                                    ScopeInfo* info);
995

    
996
  // Allocates a new utility object in the old generation.
997
  MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);
998

    
999
  // Allocates a function initialized with a shared part.
1000
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1001
  // failed.
1002
  // Please note this does not perform a garbage collection.
1003
  MUST_USE_RESULT MaybeObject* AllocateFunction(
1004
      Map* function_map,
1005
      SharedFunctionInfo* shared,
1006
      Object* prototype,
1007
      PretenureFlag pretenure = TENURED);
1008

    
1009
  // Arguments object size.
1010
  static const int kArgumentsObjectSize =
1011
      JSObject::kHeaderSize + 2 * kPointerSize;
1012
  // Strict mode arguments has no callee so it is smaller.
1013
  static const int kArgumentsObjectSizeStrict =
1014
      JSObject::kHeaderSize + 1 * kPointerSize;
1015
  // Indicies for direct access into argument objects.
1016
  static const int kArgumentsLengthIndex = 0;
1017
  // callee is only valid in non-strict mode.
1018
  static const int kArgumentsCalleeIndex = 1;
1019

    
1020
  // Allocates an arguments object - optionally with an elements array.
1021
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1022
  // failed.
1023
  // Please note this does not perform a garbage collection.
1024
  MUST_USE_RESULT MaybeObject* AllocateArgumentsObject(
1025
      Object* callee, int length);
1026

    
1027
  // Same as NewNumberFromDouble, but may return a preallocated/immutable
1028
  // number object (e.g., minus_zero_value_, nan_value_)
1029
  MUST_USE_RESULT MaybeObject* NumberFromDouble(
1030
      double value, PretenureFlag pretenure = NOT_TENURED);
1031

    
1032
  // Allocated a HeapNumber from value.
1033
  MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
1034
      double value, PretenureFlag pretenure = NOT_TENURED);

  // Converts an int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* NumberFromInt32(
      int32_t value, PretenureFlag pretenure = NOT_TENURED);

  // Converts an unsigned int into either a Smi or a HeapNumber object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* NumberFromUint32(
      uint32_t value, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a new foreign object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateForeign(
      Address address, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a new SharedFunctionInfo object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name);

  // Allocates a new JSMessageObject.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note that this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateJSMessageObject(
      String* type,
      JSArray* arguments,
      int start_position,
      int end_position,
      Object* script,
      Object* stack_trace,
      Object* stack_frames);

  // Allocates a new cons string object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
                                                  String* second);

  // Allocates a new sub string object which is a substring of an underlying
  // string buffer stretching from the index start (inclusive) to the index
  // end (exclusive).
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateSubString(
      String* buffer,
      int start,
      int end,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocate a new external string object, which is backed by a string
  // resource that resides outside the V8 heap.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
      const ExternalAsciiString::Resource* resource);
  MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
      const ExternalTwoByteString::Resource* resource);

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);

  // Allocates an uninitialized object.  The memory is non-executable if the
  // hardware and OS allow.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
                                                  AllocationSpace space,
                                                  AllocationSpace retry_space);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when shortening objects.
  void CreateFillerObjectAt(Address addr, int size);

  // Makes a new native code object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed. On success, the pointer to the Code object is stored in
  // self_reference. This allows generated code to reference its own Code
  // object by containing this pointer.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* CreateCode(
      const CodeDesc& desc,
      Code::Flags flags,
      Handle<Object> self_reference,
      bool immovable = false,
      bool crankshafted = false,
      int prologue_offset = Code::kPrologueOffsetNotSet);

  MUST_USE_RESULT MaybeObject* CopyCode(Code* code);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);

  // Finds the internalized copy for string in the string table.
  // If not found, a new string is added to the table and returned.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* InternalizeUtf8String(Vector<const char> str);
  MUST_USE_RESULT MaybeObject* InternalizeUtf8String(const char* str) {
    return InternalizeUtf8String(CStrVector(str));
  }
  MUST_USE_RESULT MaybeObject* InternalizeOneByteString(
      Vector<const uint8_t> str);
  MUST_USE_RESULT MaybeObject* InternalizeTwoByteString(Vector<const uc16> str);
  MUST_USE_RESULT MaybeObject* InternalizeString(String* str);
  MUST_USE_RESULT MaybeObject* InternalizeOneByteString(
      Handle<SeqOneByteString> string, int from, int length);

  bool InternalizeStringIfExists(String* str, String** result);
  bool InternalizeTwoCharsStringIfExists(String* str, String** result);

  // Compute the matching internalized string map for a string if possible.
  // NULL is returned if string is in new space or not flattened.
  Map* InternalizedStringMapForString(String* str);

  // Tries to flatten a string before compare operation.
  //
  // Returns a failure in case it was decided that flattening was
  // necessary and failed.  Note, if flattening is not necessary the
  // string might stay non-flat even when a failure is not returned.
  //
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);

  // Converts the given boolean condition to a JavaScript boolean value.
  inline Object* ToBoolean(bool condition);

  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(AllocationSpace space,
                      GarbageCollector collector,
                      const char* gc_reason,
                      const char* collector_reason);

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(AllocationSpace space,
                             const char* gc_reason = NULL);

  static const int kNoGCFlags = 0;
  static const int kSweepPreciselyMask = 1;
  static const int kReduceMemoryFootprintMask = 2;
  static const int kAbortIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to sweep precisely and abort any
  // incremental marking as well.
  static const int kMakeHeapIterableMask =
      kSweepPreciselyMask | kAbortIncrementalMarkingMask;

  // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(int flags, const char* gc_reason = NULL);
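
  // Illustrative use of the flag masks above (the call site is hypothetical,
  // not part of this header): a caller that needs to walk every object
  // forces a precise-sweeping full GC first, e.g.
  //
  //   heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "heap snapshot");
  //
  // whereas kNoGCFlags requests an ordinary full collection.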

  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Ensure that we have swept all spaces in such a way that we can iterate
  // over all objects.  May cause a GC.
  void EnsureHeapIsIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed();

  // Utility to invoke the scavenger. This is needed in test code to
  // ensure correct callback for weak global handles.
  void PerformScavenge();

  inline void increment_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  inline void decrement_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

#ifdef DEBUG
  // Utility used with flag gc-greedy.
  void GarbageCollectionGreedyCheck();
#endif

  void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
                             GCType gc_type_filter,
                             bool pass_isolate = true);
  void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback);

  void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
                             GCType gc_type_filter,
                             bool pass_isolate = true);
  void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback);
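
  // Illustrative registration sketch (the callback below is hypothetical,
  // and its signature is assumed to follow v8::Isolate::GCPrologueCallback):
  //
  //   static void OnMarkSweep(v8::Isolate* isolate, v8::GCType type,
  //                           v8::GCCallbackFlags flags) { /* ... */ }
  //   ...
  //   heap->AddGCPrologueCallback(OnMarkSweep, kGCTypeMarkSweepCompact);
  //
  // The gc_type_filter restricts the callback to the given GC kinds.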

  // Heap root getters.  We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
  // TODO(1490): Try removing the unchecked accessors, now that GC marking does
  // not corrupt the map.
#define ROOT_ACCESSOR(type, name, camel_name)                                  \
  type* name() {                                                               \
    return type::cast(roots_[k##camel_name##RootIndex]);                       \
  }                                                                            \
  type* raw_unchecked_##name() {                                               \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]);          \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
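
  // For example, a root list entry V(Map, foo_map, FooMap) (hypothetical
  // name, for illustration only) expands to:
  //
  //   Map* foo_map() {
  //     return Map::cast(roots_[kFooMapRootIndex]);
  //   }
  //   Map* raw_unchecked_foo_map() {
  //     return reinterpret_cast<Map*>(roots_[kFooMapRootIndex]);
  //   }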

// Utility type maps
#define STRUCT_MAP_ACCESSOR(NAME, Name, name)                                  \
    Map* name##_map() {                                                        \
      return Map::cast(roots_[k##Name##MapRootIndex]);                         \
    }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) String* name() {                            \
    return String::cast(roots_[k##name##RootIndex]);                           \
  }
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

  // The hidden_string is special because it is the empty string, but does
  // not match the empty string.
  String* hidden_string() { return hidden_string_; }

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() { return native_contexts_list_; }

  void set_array_buffers_list(Object* object) {
    array_buffers_list_ = object;
  }
  Object* array_buffers_list() { return array_buffers_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  Object* weak_object_to_code_table() { return weak_object_to_code_table_; }

  // Number of mark-sweeps.
  unsigned int ms_count() { return ms_count_; }

  // Iterates over all roots in the heap.
  void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all the other roots in the heap.
  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterate pointers to from semispace of new space found in memory interval
  // from start to end.
  void IterateAndMarkPointersToFromSpace(Address start,
                                         Address end,
                                         ObjectSlotCallback callback);

  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InNewSpace(Address address);
  inline bool InNewSpacePage(Address address);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Returns whether the object resides in old pointer space.
  inline bool InOldPointerSpace(Address address);
  inline bool InOldPointerSpace(Object* object);

  // Returns whether the object resides in old data space.
  inline bool InOldDataSpace(Address address);
  inline bool InOldDataSpace(Object* object);

  // Checks whether an address/object is in the heap (including the auxiliary
  // area and unused area).
  bool Contains(Address addr);
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(Address addr, AllocationSpace space);
  bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void public_set_code_stubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Support for computing object sizes for old objects during GCs. Returns
  // a function that is guaranteed to be safe for computing object sizes in
  // the current GC phase.
  HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
    return gc_safe_size_of_old_object_;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  void public_set_store_buffer_top(Address* top) {
    roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  Address* store_buffer_top_address() {
    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
  }

  // Get address of native contexts list for serialization support.
  Object** native_contexts_list_address() {
    return &native_contexts_list_;
  }

#ifdef VERIFY_HEAP
  // Verify the heap is in its normal state before or after a GC.
  void Verify();

  bool weak_embedded_objects_verification_enabled() {
    return no_weak_object_verification_scope_depth_ == 0;
  }
#endif

#ifdef DEBUG
  void Print();
  void PrintHandles();

  void OldPointerSpaceCheckStoreBuffer();
  void MapSpaceCheckStoreBuffer();
  void LargeObjectSpaceCheckStoreBuffer();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);
#endif

  // Zapping is needed for verify heap, and always done in debug builds.
  static inline bool ShouldZapGarbage() {
#ifdef DEBUG
    return true;
#else
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
#endif
  }

  // Fill in bogus values in from space.
  void ZapFromSpace();

  // Print short heap statistics.
  void PrintShortHeapStatistics();

  // Makes a new internalized string object.
  // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
  // failed.
  // Please note this function does not perform a garbage collection.
  MUST_USE_RESULT MaybeObject* CreateInternalizedString(
      const char* str, int length, int hash);
  MUST_USE_RESULT MaybeObject* CreateInternalizedString(String* str);

  // Write barrier support for address[offset] = o.
  INLINE(void RecordWrite(Address address, int offset));

  // Write barrier support for address[start : start + len[ = o.
  INLINE(void RecordWrites(Address address, int start, int len));
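
  // Illustrative write-barrier call (the object layout is hypothetical):
  // after storing a pointer into a field at a known offset, record the slot
  // so the store buffer can find old-to-new pointers, e.g.
  //
  //   obj->set_foo(new_value);                        // hypothetical setter
  //   heap->RecordWrite(obj->address(), kFooOffset);  // hypothetical offset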

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
  inline HeapState gc_state() { return gc_state_; }

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

#ifdef DEBUG
  void set_allocation_timeout(int timeout) {
    allocation_timeout_ = timeout;
  }

  bool disallow_allocation_failure() {
    return disallow_allocation_failure_;
  }

  void TracePathToObjectFrom(Object* target, Object* root);
  void TracePathToObject(Object* target);
  void TracePathToGlobal();
#endif

  // Callback function passed to Heap::Iterate etc.  Copies an object if
  // necessary; the object might be promoted to an old space.  The caller must
  // ensure the precondition that the object is (a) a heap object and (b) in
  // the heap's from space.
  static inline void ScavengePointer(HeapObject** p);
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

  // Commits from space if it is uncommitted.
  void EnsureFromSpaceIsCommitted();

  // Support for partial snapshots.  After calling this we have a linear
  // space to write objects in each space.
  void ReserveSpace(int* sizes, Address* addresses);

  //
  // Support for the API.
  //

  bool CreateApiObjects();

  // Attempt to find the number in a small cache.  If we find it, return
  // the string representation of the number.  Otherwise return undefined.
  Object* GetNumberStringCache(Object* number);

  // Update the cache with a new number-string pair.
  void SetNumberStringCache(Object* number, String* str);

  // Adjusts the amount of registered external memory.
  // Returns the adjusted value.
  inline intptr_t AdjustAmountOfExternalAllocatedMemory(
      intptr_t change_in_bytes);

  // This is only needed for testing high promotion mode.
  void SetNewSpaceHighPromotionModeActive(bool mode) {
    new_space_high_promotion_mode_active_ = mode;
  }

  // Returns the allocation mode (pre-tenuring) based on observed promotion
  // rates of previous collections.
  inline PretenureFlag GetPretenureMode() {
    return FLAG_pretenuring && new_space_high_promotion_mode_active_
        ? TENURED : NOT_TENURED;
  }

  inline Address* NewSpaceHighPromotionModeActiveAddress() {
    return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
  }

  inline intptr_t PromotedTotalSize() {
    return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
  }

  inline intptr_t OldGenerationSpaceAvailable() {
    return old_generation_allocation_limit_ - PromotedTotalSize();
  }

  inline intptr_t OldGenerationCapacityAvailable() {
    return max_old_generation_size_ - PromotedTotalSize();
  }

  static const intptr_t kMinimumOldGenerationAllocationLimit =
      8 * (Page::kPageSize > MB ? Page::kPageSize : MB);

  intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size) {
    const int divisor = FLAG_stress_compaction ? 10 :
        new_space_high_promotion_mode_active_ ? 1 : 3;
    intptr_t limit =
        Max(old_gen_size + old_gen_size / divisor,
            kMinimumOldGenerationAllocationLimit);
    limit += new_space_.Capacity();
    // TODO(hpayer): Can be removed when pretenuring is supported for all
    // allocation sites.
    if (IsHighSurvivalRate() && IsStableOrIncreasingSurvivalTrend()) {
      limit *= 2;
    }
    intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
    return Min(limit, halfway_to_the_max);
  }
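
  // Worked example (all sizes assumed for illustration): with a 1 MB page,
  // kMinimumOldGenerationAllocationLimit is 8 MB. For old_gen_size = 90 MB,
  // no stress-compaction and no high-promotion mode, divisor = 3, so
  // limit = max(90 + 30, 8) = 120 MB; adding (say) a 16 MB new-space
  // capacity gives 136 MB; a high, stable survival rate doubles that to
  // 272 MB, and the result is finally capped halfway between old_gen_size
  // and max_old_generation_size_.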

  // Implements the corresponding V8 API function.
  bool IdleNotification(int hint);

  // Declare all the root indices.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
    INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

    // Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
    STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP

    kStringTableRootIndex,
    kStrongRootListLength = kStringTableRootIndex,
    kRootListLength
  };

  STATIC_CHECK(kUndefinedValueRootIndex == Internals::kUndefinedValueRootIndex);
  STATIC_CHECK(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_CHECK(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_CHECK(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);

  // Generated code can embed direct references to non-writable roots if
  // they are in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  MUST_USE_RESULT MaybeObject* NumberToString(
      Object* number, bool check_number_string_cache = true,
      PretenureFlag pretenure = NOT_TENURED);
  MUST_USE_RESULT MaybeObject* Uint32ToString(
      uint32_t value, bool check_number_string_cache = true);

  Map* MapForExternalArrayType(ExternalArrayType array_type);
  RootListIndex RootIndexForExternalArrayType(
      ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind);
  ExternalArray* EmptyExternalArrayForMap(Map* map);

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Copy block of memory from src to dst. Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes
  // and pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  // Check the new space expansion criteria and expand semispaces if they
  // were hit.
  void CheckNewSpaceExpansionCriteria();

  inline void IncrementYoungSurvivorsCounter(int survived) {
    ASSERT(survived >= 0);
    young_survivors_after_last_gc_ = survived;
    survived_since_last_expansion_ += survived;
  }

  inline bool NextGCIsLikelyToBeFull() {
    if (FLAG_gc_global) return true;

    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;

    intptr_t adjusted_allocation_limit =
        old_generation_allocation_limit_ - new_space_.Capacity();

    if (PromotedTotalSize() >= adjusted_allocation_limit) return true;

    return false;
  }

  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void UpdateReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessWeakReferences(WeakObjectRetainer* retainer);

  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  // Helper function that governs the promotion policy from new space to
  // old.  If the object's old address lies below the new space's age
  // mark or if we've already filled the bottom 1/16th of the to space,
  // we try to promote this object.
  inline bool ShouldBePromoted(Address old_address, int object_size);

  void ClearJSFunctionResultCaches();

  void ClearNormalizedMapCaches();

  GCTracer* tracer() { return tracer_; }

  // Returns the size of objects residing in non new spaces.
  intptr_t PromotedSpaceSizeOfObjects();

  double total_regexp_code_generated() { return total_regexp_code_generated_; }
  void IncreaseTotalRegexpCodeGenerated(int size) {
    total_regexp_code_generated_ += size;
  }

  void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
    if (is_crankshafted) {
      crankshaft_codegen_bytes_generated_ += size;
    } else {
      full_codegen_bytes_generated_ += size;
    }
  }

  // Returns maximum GC pause.
  double get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  double get_min_in_mutator() { return min_in_mutator_; }

  // TODO(hpayer): remove, should be handled by GCTracer
  void AddMarkingTime(double marking_time) {
    marking_time_ += marking_time;
  }

  double marking_time() const {
    return marking_time_;
  }

  // TODO(hpayer): remove, should be handled by GCTracer
  void AddSweepingTime(double sweeping_time) {
    sweeping_time_ += sweeping_time;
  }

  double sweeping_time() const {
    return sweeping_time_;
  }

  MarkCompactCollector* mark_compact_collector() {
    return &mark_compact_collector_;
  }

  StoreBuffer* store_buffer() {
    return &store_buffer_;
  }

  Marking* marking() {
    return &marking_;
  }

  IncrementalMarking* incremental_marking() {
    return &incremental_marking_;
  }

  bool IsSweepingComplete() {
    return !mark_compact_collector()->IsConcurrentSweepingInProgress() &&
           old_data_space()->IsLazySweepingComplete() &&
           old_pointer_space()->IsLazySweepingComplete();
  }

  bool AdvanceSweepers(int step_size) {
    ASSERT(!FLAG_parallel_sweeping && !FLAG_concurrent_sweeping);
    bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size);
    sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size);
    return sweeping_complete;
  }

  bool EnsureSweepersProgressed(int step_size) {
    bool sweeping_complete = old_data_space()->EnsureSweeperProgress(step_size);
    sweeping_complete &= old_pointer_space()->EnsureSweeperProgress(step_size);
    return sweeping_complete;
  }

  ExternalStringTable* external_string_table() {
    return &external_string_table_;
  }

  // Returns the current sweep generation.
  int sweep_generation() {
    return sweep_generation_;
  }

  inline Isolate* isolate();

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type);

  inline bool OldGenerationAllocationLimitReached();

  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
    scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
  }

  void QueueMemoryChunkForFree(MemoryChunk* chunk);
  void FreeQueuedChunks();

  int gc_count() const { return gc_count_; }

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();

  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  uint32_t HashSeed() {
    uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
    ASSERT(FLAG_randomize_hashes || seed == 0);
    return seed;
  }

  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
    ASSERT(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
    set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetConstructStubDeoptPCOffset(int pc_offset) {
    ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetGetterStubDeoptPCOffset(int pc_offset) {
    ASSERT(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetSetterStubDeoptPCOffset(int pc_offset) {
    ASSERT(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  // For post mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

  // Global inline caching age: it is incremented on some GCs after context
  // disposal. We use it to flush inline caches.
  int global_ic_age() {
    return global_ic_age_;
  }

  void AgeInlineCaches() {
    global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
  }

  bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }

  intptr_t amount_of_external_allocated_memory() {
    return amount_of_external_allocated_memory_;
  }

  // ObjectStats are kept in two arrays, counts and sizes. Related stats are
  // stored in a contiguous linear buffer. Stats groups are stored one after
  // another; see the index sketch after this enum.
  enum {
    FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
    FIRST_FIXED_ARRAY_SUB_TYPE =
        FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS,
    FIRST_CODE_AGE_SUB_TYPE =
        FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1,
    OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kLastCodeAge + 1
  };
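
  // Index layout sketch (values follow from the enum above): instance types
  // occupy [0, LAST_TYPE]; a code kind k is counted at
  // object_counts_[FIRST_CODE_KIND_SUB_TYPE + k]; a fixed-array sub type s
  // at object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + s]; and a code age a at
  // object_counts_[FIRST_CODE_AGE_SUB_TYPE + a], matching the Record*Stats
  // helpers below.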

  void RecordObjectStats(InstanceType type, size_t size) {
    ASSERT(type <= LAST_TYPE);
    object_counts_[type]++;
    object_sizes_[type] += size;
  }

  void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
    ASSERT(code_sub_type < Code::NUMBER_OF_KINDS);
    ASSERT(code_age < Code::kLastCodeAge);
    object_counts_[FIRST_CODE_KIND_SUB_TYPE + code_sub_type]++;
    object_sizes_[FIRST_CODE_KIND_SUB_TYPE + code_sub_type] += size;
    object_counts_[FIRST_CODE_AGE_SUB_TYPE + code_age]++;
    object_sizes_[FIRST_CODE_AGE_SUB_TYPE + code_age] += size;
  }

  void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
    ASSERT(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
    object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++;
    object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size;
  }

  void CheckpointObjectStats();

  // We don't use a LockGuard here since we want to lock the heap
  // only when FLAG_concurrent_recompilation is true.
  class RelocationLock {
   public:
    explicit RelocationLock(Heap* heap);

    ~RelocationLock() {
      if (FLAG_concurrent_recompilation) {
#ifdef DEBUG
        heap_->relocation_mutex_locked_by_optimizer_thread_ = false;
#endif  // DEBUG
        heap_->relocation_mutex_->Unlock();
      }
    }

#ifdef DEBUG
    static bool IsLockedByOptimizerThread(Heap* heap) {
      return heap->relocation_mutex_locked_by_optimizer_thread_;
    }
#endif  // DEBUG

   private:
    Heap* heap_;
  };
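
  // RelocationLock is an RAII scope; a minimal usage sketch (the work inside
  // the braces is hypothetical):
  //
  //   {
  //     Heap::RelocationLock lock(heap);
  //     // ... touch code objects that concurrent recompilation might
  //     // otherwise relocate ...
  //   }  // destructor unlocks when FLAG_concurrent_recompilation is on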

  MaybeObject* AddWeakObjectToCodeDependency(Object* obj, DependentCode* dep);

  DependentCode* LookupWeakObjectToCodeDependency(Object* obj);

  void InitializeWeakObjectToCodeTable() {
    set_weak_object_to_code_table(undefined_value());
  }

  void EnsureWeakObjectToCodeTable();

 private:
  Heap();

  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;

  Object* roots_[kRootListLength];

  intptr_t code_range_size_;
  int reserved_semispace_size_;
  int max_semispace_size_;
  int initial_semispace_size_;
  intptr_t max_old_generation_size_;
  intptr_t max_executable_size_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  int survived_since_last_expansion_;

  // For keeping track of when to flush RegExp code.
  int sweep_generation_;

  int always_allocate_scope_depth_;
  int linear_allocation_scope_depth_;

  // For keeping track of context disposals.
  int contexts_disposed_;

  int global_ic_age_;

  bool flush_monomorphic_ics_;

  // AllocationMementos found in new space.
  int allocation_mementos_found_;

  int scan_on_scavenge_pages_;

  NewSpace new_space_;
  OldSpace* old_pointer_space_;
  OldSpace* old_data_space_;
  OldSpace* code_space_;
  MapSpace* map_space_;
  CellSpace* cell_space_;
  PropertyCellSpace* property_cell_space_;
  LargeObjectSpace* lo_space_;
  HeapState gc_state_;
  int gc_post_processing_depth_;

  // Returns the amount of external memory registered since last global gc.
  intptr_t PromotedExternalMemorySize();

  unsigned int ms_count_;  // how many mark-sweep collections happened
  unsigned int gc_count_;  // how many gc happened

  // For post mortem debugging.
  static const int kRememberedUnmappedPages = 128;
  int remembered_unmapped_pages_index_;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

  // Total length of the strings we failed to flatten since the last GC.
  int unflattened_strings_length_;

#define ROOT_ACCESSOR(type, name, camel_name)                                  \
  inline void set_##name(type* value) {                                        \
    /* The deserializer makes use of the fact that these common roots are */   \
    /* never in new space and never on a page that is being compacted.    */   \
    ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value));  \
    roots_[k##camel_name##RootIndex] = value;                                  \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#ifdef DEBUG
  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // remaining until the next failure and garbage collection.
  int allocation_timeout_;

  // Do we expect to be able to handle allocation failure at this
  // time?
  bool disallow_allocation_failure_;
#endif  // DEBUG

  // Indicates that the new space should be kept small due to high promotion
  // rates caused by the mutator allocating a lot of long-lived objects.
  // TODO(hpayer): change to bool if no longer accessed from generated code
  intptr_t new_space_high_promotion_mode_active_;

  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke, before expanding a paged space in the old
  // generation and on every allocation in large object space.
  intptr_t old_generation_allocation_limit_;

  // Used to adjust the limits that control the timing of the next GC.
  intptr_t size_of_old_gen_at_last_old_space_gc_;

  // Limit on the amount of externally allocated memory allowed
  // between global GCs. If reached, a global GC is forced.
  intptr_t external_allocation_limit_;

  // The amount of external memory registered through the API kept alive
  // by global handles.
  intptr_t amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  intptr_t amount_of_external_allocated_memory_at_last_global_gc_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  bool old_gen_exhausted_;

  // Weak list heads, threaded through the objects.
  // List heads are initialized lazily and contain the undefined_value at
  // start.
  Object* native_contexts_list_;
  Object* array_buffers_list_;
  Object* allocation_sites_list_;

  // WeakHashTable that maps objects embedded in optimized code to dependent
  // code list. It is initialized lazily and contains the undefined_value at
  // start.
  Object* weak_object_to_code_table_;

  StoreBufferRebuilder store_buffer_rebuilder_;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  static const StringTypeTable string_type_table[];
  static const ConstantStringTable constant_string_table[];
  static const StructTable struct_table[];

  // The special hidden string which is an empty string, but does not match
  // any string when looked up in properties.
  String* hidden_string_;

  // GC callback function, called before and after mark-compact GC.
  // Allocations in the callback function are disallowed.
  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(v8::Isolate::GCPrologueCallback callback,
                           GCType gc_type,
                           bool pass_isolate)
        : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
    }
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    v8::Isolate::GCPrologueCallback callback;
    GCType gc_type;
    // TODO(dcarney): remove variable
    bool pass_isolate_;
  };
  List<GCPrologueCallbackPair> gc_prologue_callbacks_;

  struct GCEpilogueCallbackPair {
    GCEpilogueCallbackPair(v8::Isolate::GCEpilogueCallback callback,
                           GCType gc_type,
                           bool pass_isolate)
        : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
    }
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    v8::Isolate::GCEpilogueCallback callback;
    GCType gc_type;
    // TODO(dcarney): remove variable
    bool pass_isolate_;
  };
  List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;

  // Support for computing object sizes during GC.
  HeapObjectCallback gc_safe_size_of_old_object_;
  static int GcSafeSizeOfOldObject(HeapObject* object);

  // Update the GC state. Called from the mark-compact collector.
  void MarkMapPointersAsEncoded(bool encoded) {
    ASSERT(!encoded);
    gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
  }

  // Checks whether a global GC is necessary.
  GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                          const char** reason);

  // Performs a garbage collection.
  // Returns whether there is a chance another major GC could
  // collect more garbage.
  bool PerformGarbageCollection(GarbageCollector collector,
                                GCTracer* tracer);

  inline void UpdateOldSpaceLimits();

  // Selects the proper allocation space depending on the given object
  // size, pretenuring decision, and preferred old-space.
  static AllocationSpace SelectSpace(int object_size,
                                     AllocationSpace preferred_old_space,
                                     PretenureFlag pretenure) {
    ASSERT(preferred_old_space == OLD_POINTER_SPACE ||
           preferred_old_space == OLD_DATA_SPACE);
    if (object_size > Page::kMaxNonCodeHeapObjectSize) return LO_SPACE;
    return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
  }
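
  // For instance: an object larger than Page::kMaxNonCodeHeapObjectSize
  // always lands in LO_SPACE; otherwise (sizes illustrative)
  // SelectSpace(64, OLD_DATA_SPACE, TENURED) yields OLD_DATA_SPACE, while
  // SelectSpace(64, OLD_DATA_SPACE, NOT_TENURED) yields NEW_SPACE.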

  // Allocate an uninitialized fixed array.
  MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(
      int length, PretenureFlag pretenure);

  // Allocate an uninitialized fixed double array.
  MUST_USE_RESULT MaybeObject* AllocateRawFixedDoubleArray(
      int length, PretenureFlag pretenure);

  // Allocate an initialized fixed array with the given filler value.
  MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithFiller(
      int length, PretenureFlag pretenure, Object* filler);

  // Initializes a JSObject based on its map.
  void InitializeJSObjectFromMap(JSObject* obj,
                                 FixedArray* properties,
                                 Map* map);

  bool CreateInitialMaps();
  bool CreateInitialObjects();

  // These two Create*EntryStub functions are here and forced to not be inlined
  // because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(void CreateJSEntryStub());
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();

  MUST_USE_RESULT MaybeObject* CreateOddball(const char* to_string,
                                             Object* to_number,
                                             byte kind);

  // Allocate a JSArray with no elements.
  MUST_USE_RESULT MaybeObject* AllocateJSArray(
      ElementsKind elements_kind,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocate an empty fixed array.
  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();

  // Allocate an empty external array of the given type.
  MUST_USE_RESULT MaybeObject* AllocateEmptyExternalArray(
      ExternalArrayType array_type);

  // Allocate an empty fixed double array.
  MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();

  // Allocate a tenured simple cell.
  MUST_USE_RESULT MaybeObject* AllocateCell(Object* value);

  // Allocate a tenured JS global property cell initialized with the hole.
  MUST_USE_RESULT MaybeObject* AllocatePropertyCell();

  // Allocate a Box.
  MUST_USE_RESULT MaybeObject* AllocateBox(Object* value,
                                           PretenureFlag pretenure);

  // Performs a minor collection in the new generation.
  void Scavenge();

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Heap* heap,
      Object** pointer);

  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
  static void ScavengeStoreBufferCallback(Heap* heap,
                                          MemoryChunk* page,
                                          StoreBufferEvent event);

  // Performs a major collection in the whole heap.
  void MarkCompact(GCTracer* tracer);

  // Code to be run before and after mark-compact.
  void MarkCompactPrologue();

  void ProcessNativeContexts(WeakObjectRetainer* retainer, bool record_slots);
  void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
  void ProcessAllocationSites(WeakObjectRetainer* retainer, bool record_slots);

  // Called on heap tear-down.
  void TearDownArrayBuffers();

  // Record statistics before and after garbage collection.
  void ReportStatisticsBeforeGC();
  void ReportStatisticsAfterGC();

  // Slow part of scavenge object.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Initializes a function with a shared part and prototype.
  // Note: this code was factored out of AllocateFunction such that
  // other parts of the VM could use it. Specifically, code that creates
  // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
  // Please note this does not perform a garbage collection.
  inline void InitializeFunction(
      JSFunction* function,
      SharedFunctionInfo* shared,
      Object* prototype);

  // Total RegExp code ever generated.
  double total_regexp_code_generated_;

  GCTracer* tracer_;

  // Allocates a small number to string cache.
  MUST_USE_RESULT MaybeObject* AllocateInitialNumberStringCache();
  // Creates and installs the full-sized number string cache.
  void AllocateFullSizeNumberStringCache();
  // Get the length of the number to string cache based on the max semispace
  // size.
  int FullSizeNumberStringCacheLength();
  // Flush the number to string cache.
  void FlushNumberStringCache();

  void UpdateSurvivalRateTrend(int start_new_space_size);

  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateLowThreshold = 10;
  static const int kYoungSurvivalRateAllowedDeviation = 15;

  int young_survivors_after_last_gc_;
  int high_survival_rate_period_length_;
  int low_survival_rate_period_length_;
  double survival_rate_;
  SurvivalRateTrend previous_survival_rate_trend_;
  SurvivalRateTrend survival_rate_trend_;

  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
    ASSERT(survival_rate_trend != FLUCTUATING);
    previous_survival_rate_trend_ = survival_rate_trend_;
    survival_rate_trend_ = survival_rate_trend;
  }

  SurvivalRateTrend survival_rate_trend() {
    if (survival_rate_trend_ == STABLE) {
      return STABLE;
    } else if (previous_survival_rate_trend_ == STABLE) {
      return survival_rate_trend_;
    } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
      return FLUCTUATING;
    } else {
      return survival_rate_trend_;
    }
  }

  bool IsStableOrIncreasingSurvivalTrend() {
    switch (survival_rate_trend()) {
      case STABLE:
      case INCREASING:
        return true;
      default:
        return false;
    }
  }

  bool IsStableOrDecreasingSurvivalTrend() {
    switch (survival_rate_trend()) {
      case STABLE:
      case DECREASING:
        return true;
      default:
        return false;
    }
  }

  bool IsIncreasingSurvivalTrend() {
    return survival_rate_trend() == INCREASING;
  }

  bool IsHighSurvivalRate() {
    return high_survival_rate_period_length_ > 0;
  }

  bool IsLowSurvivalRate() {
    return low_survival_rate_period_length_ > 0;
  }

  void SelectScavengingVisitorsTable();

  void StartIdleRound() {
    mark_sweeps_since_idle_round_started_ = 0;
  }

  void FinishIdleRound() {
    mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
    scavenges_since_last_idle_round_ = 0;
  }

  bool EnoughGarbageSinceLastIdleRound() {
    return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
  }

  // Estimates how many milliseconds a Mark-Sweep would take to complete.
  // In the idle notification handler we assume that this function will return:
  // - a number less than 10 for small heaps, which are less than 8Mb.
  // - a number greater than 10 for large heaps, which are greater than 32Mb.
  int TimeMarkSweepWouldTakeInMs() {
    // Rough estimate of how many megabytes of heap can be processed in 1 ms.
    static const int kMbPerMs = 2;

    int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
    return heap_size_mb / kMbPerMs;
  }
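
  // Sanity check of the estimate above (numbers follow directly from
  // kMbPerMs = 2): an 8 Mb heap yields 8 / 2 = 4 ms (< 10), and a 32 Mb
  // heap yields 32 / 2 = 16 ms (> 10), matching the comment's assumptions.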
2298

    
2299
  // Returns true if no more GC work is left.
2300
  bool IdleGlobalGC();
2301

    
2302
  void AdvanceIdleIncrementalMarking(intptr_t step_size);
2303

    
2304
  void ClearObjectStats(bool clear_last_time_stats = false);
2305

    
2306
  void set_weak_object_to_code_table(Object* value) {
2307
    ASSERT(!InNewSpace(value));
2308
    weak_object_to_code_table_ = value;
2309
  }
2310

    
2311
  Object** weak_object_to_code_table_address() {
2312
    return &weak_object_to_code_table_;
2313
  }
2314

    
2315
  static const int kInitialStringTableSize = 2048;
2316
  static const int kInitialEvalCacheSize = 64;
2317
  static const int kInitialNumberStringCacheSize = 256;
2318

    
2319
  // Object counts and used memory by InstanceType
2320
  size_t object_counts_[OBJECT_STATS_COUNT];
2321
  size_t object_counts_last_time_[OBJECT_STATS_COUNT];
2322
  size_t object_sizes_[OBJECT_STATS_COUNT];
2323
  size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
2324

    
2325
  // Maximum GC pause.
2326
  double max_gc_pause_;
2327

    
2328
  // Total time spent in GC.
2329
  double total_gc_time_ms_;
2330

    
2331
  // Maximum size of objects alive after GC.
2332
  intptr_t max_alive_after_gc_;
2333

    
2334
  // Minimal interval between two subsequent collections.
2335
  double min_in_mutator_;
2336

    
2337
  // Size of objects alive after last GC.
2338
  intptr_t alive_after_last_gc_;
2339

    
2340
  double last_gc_end_timestamp_;
2341

    
2342
  // Cumulative GC time spent in marking
2343
  double marking_time_;
2344

    
2345
  // Cumulative GC time spent in sweeping
2346
  double sweeping_time_;
2347

    
2348
  MarkCompactCollector mark_compact_collector_;
2349

    
2350
  StoreBuffer store_buffer_;
2351

    
2352
  Marking marking_;
2353

    
2354
  IncrementalMarking incremental_marking_;
2355

    
2356
  int number_idle_notifications_;
2357
  unsigned int last_idle_notification_gc_count_;
2358
  bool last_idle_notification_gc_count_init_;
2359

    
2360
  int mark_sweeps_since_idle_round_started_;
2361
  unsigned int gc_count_at_last_idle_gc_;
2362
  int scavenges_since_last_idle_round_;
2363

    
2364
  // These two counters are monotomically increasing and never reset.
2365
  size_t full_codegen_bytes_generated_;
2366
  size_t crankshaft_codegen_bytes_generated_;
2367

    
2368
  // If the --deopt_every_n_garbage_collections flag is set to a positive value,
2369
  // this variable holds the number of garbage collections since the last
2370
  // deoptimization triggered by garbage collection.
2371
  int gcs_since_last_deopt_;
2372

    
2373
#ifdef VERIFY_HEAP
2374
  int no_weak_object_verification_scope_depth_;
2375
#endif
2376

    
2377
  static const int kMaxMarkSweepsInIdleRound = 7;
2378
  static const int kIdleScavengeThreshold = 5;
2379

    
2380
  // Shared state read by the scavenge collector and set by ScavengeObject.
2381
  PromotionQueue promotion_queue_;
2382

    
2383
  // Flag is set when the heap has been configured.  The heap can be repeatedly
2384
  // configured through the API until it is set up.
2385
  bool configured_;
2386

    
2387
  ExternalStringTable external_string_table_;
2388

    
2389
  VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
2390

    
2391
  MemoryChunk* chunks_queued_for_free_;
2392

    
2393
  Mutex* relocation_mutex_;
2394
#ifdef DEBUG
2395
  bool relocation_mutex_locked_by_optimizer_thread_;
2396
#endif  // DEBUG;
2397

    
2398
  friend class Factory;
2399
  friend class GCTracer;
2400
  friend class DisallowAllocationFailure;
2401
  friend class AlwaysAllocateScope;
2402
  friend class Page;
2403
  friend class Isolate;
2404
  friend class MarkCompactCollector;
2405
  friend class MarkCompactMarkingVisitor;
2406
  friend class MapCompact;
2407
#ifdef VERIFY_HEAP
2408
  friend class NoWeakObjectVerificationScope;
2409
#endif
2410

    
2411
  DISALLOW_COPY_AND_ASSIGN(Heap);
2412
};
2413

    
2414

    
2415
class HeapStats {
2416
 public:
2417
  static const int kStartMarker = 0xDECADE00;
2418
  static const int kEndMarker = 0xDECADE01;
2419

    
2420
  int* start_marker;                    //  0
2421
  int* new_space_size;                  //  1
2422
  int* new_space_capacity;              //  2
2423
  intptr_t* old_pointer_space_size;          //  3
2424
  intptr_t* old_pointer_space_capacity;      //  4
2425
  intptr_t* old_data_space_size;             //  5
2426
  intptr_t* old_data_space_capacity;         //  6
2427
  intptr_t* code_space_size;                 //  7
2428
  intptr_t* code_space_capacity;             //  8
2429
  intptr_t* map_space_size;                  //  9
2430
  intptr_t* map_space_capacity;              // 10
2431
  intptr_t* cell_space_size;                 // 11
2432
  intptr_t* cell_space_capacity;             // 12
2433
  intptr_t* lo_space_size;                   // 13
2434
  int* global_handle_count;             // 14
2435
  int* weak_global_handle_count;        // 15
2436
  int* pending_global_handle_count;     // 16
2437
  int* near_death_global_handle_count;  // 17
2438
  int* free_global_handle_count;        // 18
2439
  intptr_t* memory_allocator_size;           // 19
2440
  intptr_t* memory_allocator_capacity;       // 20
2441
  int* objects_per_type;                // 21
2442
  int* size_per_type;                   // 22
2443
  int* os_error;                        // 23
2444
  int* end_marker;                      // 24
2445
  intptr_t* property_cell_space_size;   // 25
2446
  intptr_t* property_cell_space_capacity;    // 26
2447
};
2448

    
2449

    
2450
class DisallowAllocationFailure {
2451
 public:
2452
  inline DisallowAllocationFailure();
2453
  inline ~DisallowAllocationFailure();
2454

    
2455
#ifdef DEBUG
2456
 private:
2457
  bool old_state_;
2458
#endif
2459
};
2460

    
2461

    
2462
class AlwaysAllocateScope {
2463
 public:
2464
  inline AlwaysAllocateScope();
2465
  inline ~AlwaysAllocateScope();
2466

    
2467
 private:
2468
  // Implicitly disable artificial allocation failures.
2469
  DisallowAllocationFailure disallow_allocation_failure_;
2470
};

#ifdef VERIFY_HEAP
class NoWeakObjectVerificationScope {
 public:
  inline NoWeakObjectVerificationScope();
  inline ~NoWeakObjectVerificationScope();
};
#endif


// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
// Caveat: Heap::Contains is an approximation because it can return true for
// objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor: public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end);
};


// Space iterator for iterating over all spaces of the heap.  Returns each
// space in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
  Space* next();
 private:
  Heap* heap_;
  int counter_;
};
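
// Usage sketch (editor's illustration; assumes a Heap* named |heap| is in
// scope).  OldSpaces and PagedSpaces below follow the same protocol:
//
//   AllSpaces spaces(heap);
//   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
//     // ... inspect |space| ...
//   }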


// Space iterator for iterating over all old spaces of the heap: Old pointer
// space, old data space and code space.  Returns each space in turn, and null
// when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
  OldSpace* next();
 private:
  Heap* heap_;
  int counter_;
};


// Space iterator for iterating over all the paged spaces of the heap: Map
// space, old pointer space, old data space, code space and cell space.
// Returns each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
  PagedSpace* next();
 private:
  Heap* heap_;
  int counter_;
};


// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  explicit SpaceIterator(Heap* heap);
  SpaceIterator(Heap* heap, HeapObjectCallback size_func);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  ObjectIterator* CreateIterator();

  Heap* heap_;
  int current_space_;  // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
  HeapObjectCallback size_func_;
};
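
// Usage sketch (editor's illustration; assumes a Heap* named |heap| and the
// ObjectIterator protocol from spaces.h, where next_object() returns NULL
// when the space is exhausted).  The per-space iterators handed out here
// are deallocated by the SpaceIterator itself:
//
//   SpaceIterator it(heap);
//   while (it.has_next()) {
//     ObjectIterator* objects = it.next();
//     for (HeapObject* obj = objects->next_object();
//          obj != NULL;
//          obj = objects->next_object()) {
//       // ... visit |obj| ...
//     }
//   }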


// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces as
// each of those can only iterate over one space.
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). As the implementation of free
// node filtering uses GC marks, it can't be used during MS/MC GC
// phases. Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
class HeapObjectsFilter;

class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering {
    kNoFiltering,
    kFilterUnreachable
  };

  explicit HeapIterator(Heap* heap);
  HeapIterator(Heap* heap, HeapObjectsFiltering filtering);
  ~HeapIterator();

  HeapObject* next();
  void reset();

 private:
  // Perform the initialization.
  void Init();
  // Perform all necessary shutdown (destruction) work.
  void Shutdown();
  HeapObject* NextObject();

  Heap* heap_;
  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
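
// Usage sketch (editor's illustration; assumes a Heap* named |heap|):
//
//   HeapIterator iterator(heap);
//   for (HeapObject* obj = iterator.next();
//        obj != NULL;
//        obj = iterator.next()) {
//     // ... visit every object; construct with kFilterUnreachable to skip
//     // free-list nodes, but then run the loop to completion, since
//     // interrupting it would leave objects marked ...
//   }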


// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  int Lookup(Map* map, Name* name);

  // Update an element in the cache.
  void Update(Map* map, Name* name, int field_offset);

  // Clear the cache.
  void Clear();

  static const int kLength = 256;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 5;
  static const int kHashMask = -4;  // Zero the last two bits.
  static const int kEntriesPerBucket = 4;
  static const int kNotFound = -1;

  // kEntriesPerBucket should be a power of 2.
  STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
  STATIC_ASSERT(kEntriesPerBucket == -kHashMask);

 private:
  KeyedLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].map = NULL;
      keys_[i].name = NULL;
      field_offsets_[i] = kNotFound;
    }
  }

  static inline int Hash(Map* map, Name* name);

  // Get the address of the keys and field_offsets arrays.  Used in
  // generated code to perform cache lookups.
  Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    Name* name;
  };

  Key keys_[kLength];
  int field_offsets_[kLength];

  friend class ExternalReference;
  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
};
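
// Usage sketch (editor's illustration; |map| and |name| are hypothetical
// placeholders, and the isolate accessor is assumed from isolate.h):
//
//   KeyedLookupCache* cache = isolate->keyed_lookup_cache();
//   int offset = cache->Lookup(map, name);
//   if (offset == KeyedLookupCache::kNotFound) {
//     // ... perform the full property lookup to compute |offset| ...
//     cache->Update(map, name, offset);
//   }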


// Cache for mapping (map, property name) into descriptor index.
// The cache contains both positive and negative results.
// A descriptor index equal to kAbsent means the property is absent.
// Cleared at startup and prior to any gc.
class DescriptorLookupCache {
 public:
  // Lookup descriptor index for (map, name).
  // If absent, kAbsent is returned.
  int Lookup(Map* source, Name* name) {
    if (!name->IsUniqueName()) return kAbsent;
    int index = Hash(source, name);
    Key& key = keys_[index];
    if ((key.source == source) && (key.name == name)) return results_[index];
    return kAbsent;
  }

  // Update an element in the cache.
  void Update(Map* source, Name* name, int result) {
    ASSERT(result != kAbsent);
    if (name->IsUniqueName()) {
      int index = Hash(source, name);
      Key& key = keys_[index];
      key.source = source;
      key.name = name;
      results_[index] = result;
    }
  }

  // Clear the cache.
  void Clear();

  static const int kAbsent = -2;

 private:
  DescriptorLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].source = NULL;
      keys_[i].name = NULL;
      results_[i] = kAbsent;
    }
  }

  static int Hash(Object* source, Name* name) {
    // Uses only lower 32 bits if pointers are larger.
    uint32_t source_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
            >> kPointerSizeLog2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
            >> kPointerSizeLog2;
    return (source_hash ^ name_hash) % kLength;
  }

  static const int kLength = 64;
  struct Key {
    Map* source;
    Name* name;
  };

  Key keys_[kLength];
  int results_[kLength];

  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
};
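
// Usage sketch (editor's illustration; |map| and |name| are hypothetical
// placeholders, and the isolate accessor is assumed from isolate.h).  Note
// that Update() asserts the result is not kAbsent:
//
//   DescriptorLookupCache* cache = isolate->descriptor_lookup_cache();
//   int index = cache->Lookup(map, name);
//   if (index == DescriptorLookupCache::kAbsent) {
//     // ... search the map's descriptor array for |name| ...
//     if (index != DescriptorLookupCache::kAbsent) {
//       cache->Update(map, name, index);
//     }
//   }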


// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  class Scope BASE_EMBEDDED {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_MARK,
      MC_SWEEP,
      MC_SWEEP_NEWSPACE,
      MC_EVACUATE_PAGES,
      MC_UPDATE_NEW_TO_NEW_POINTERS,
      MC_UPDATE_ROOT_TO_NEW_POINTERS,
      MC_UPDATE_OLD_TO_NEW_POINTERS,
      MC_UPDATE_POINTERS_TO_EVACUATED,
      MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
      MC_UPDATE_MISC_POINTERS,
      MC_WEAKCOLLECTION_PROCESS,
      MC_WEAKCOLLECTION_CLEAR,
      MC_FLUSH_CODE,
      kNumberOfScopes
    };

    Scope(GCTracer* tracer, ScopeId scope)
        : tracer_(tracer),
          scope_(scope) {
      start_time_ = OS::TimeCurrentMillis();
    }

    ~Scope() {
      ASSERT(scope_ < kNumberOfScopes);  // scope_ is unsigned.
      tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;
    ScopeId scope_;
    double start_time_;
  };
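
  // Usage sketch (editor's illustration): a Scope is a stack-allocated
  // timer that adds its own lifetime to the matching bucket in scopes_[]:
  //
  //   {
  //     GCTracer::Scope gc_scope(tracer, GCTracer::Scope::MC_MARK);
  //     // ... marking work; elapsed time is recorded on scope exit ...
  //   }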

  explicit GCTracer(Heap* heap,
                    const char* gc_reason,
                    const char* collector_reason);
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(unsigned int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  void increment_promoted_objects_size(int object_size) {
    promoted_objects_size_ += object_size;
  }

  void increment_nodes_died_in_new_space() {
    nodes_died_in_new_space_++;
  }

  void increment_nodes_copied_in_new_space() {
    nodes_copied_in_new_space_++;
  }

  void increment_nodes_promoted() {
    nodes_promoted_++;
  }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();

  // Returns the size of objects in the heap (in MB).
  inline double SizeOfHeapObjects();

  // Timestamp set in the constructor.
  double start_time_;

  // Size of objects in the heap, set in the constructor.
  intptr_t start_object_size_;

  // Size of memory allocated from the OS, set in the constructor.
  intptr_t start_memory_size_;

  // Type of collector.
  GarbageCollector collector_;

  // A count (including this one, e.g. the first collection is 1) of the
  // number of garbage collections.
  unsigned int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // Amounts of time spent in different scopes during GC.
  double scopes_[Scope::kNumberOfScopes];

  // Total amount of space either wasted or contained in one of the free
  // lists before the current GC.
  intptr_t in_free_list_or_wasted_before_gc_;

  // Difference between space used in the heap at the beginning of the current
  // collection and the end of the previous collection.
  intptr_t allocated_since_last_gc_;

  // Amount of time spent in the mutator, i.e. the time elapsed between the
  // end of the previous collection and the beginning of the current one.
  double spent_in_mutator_;

  // Size of objects promoted during the current collection.
  intptr_t promoted_objects_size_;

  // Number of nodes that died in new space.
  int nodes_died_in_new_space_;

  // Number of nodes copied within new space.
  int nodes_copied_in_new_space_;

  // Number of nodes promoted to old space.
  int nodes_promoted_;

  // Incremental marking step counters.
  int steps_count_;
  double steps_took_;
  double longest_step_;
  int steps_count_since_last_gc_;
  double steps_took_since_last_gc_;

  Heap* heap_;

  const char* gc_reason_;
  const char* collector_reason_;
};


class RegExpResultsCache {
 public:
  enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };

  // Attempt to retrieve a cached result.  On failure, 0 is returned as a Smi.
  // On success, the returned result is guaranteed to be a COW-array.
  static Object* Lookup(Heap* heap,
                        String* key_string,
                        Object* key_pattern,
                        ResultsCacheType type);
  // Attempt to add value_array to the cache specified by type.  On success,
  // value_array is turned into a COW-array.
  static void Enter(Heap* heap,
                    String* key_string,
                    Object* key_pattern,
                    FixedArray* value_array,
                    ResultsCacheType type);
  static void Clear(FixedArray* cache);
  static const int kRegExpResultsCacheSize = 0x100;

 private:
  static const int kArrayEntriesPerCacheEntry = 4;
  static const int kStringOffset = 0;
  static const int kPatternOffset = 1;
  static const int kArrayOffset = 2;
};
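
// Usage sketch (editor's illustration; |subject|, |pattern| and |results|
// are hypothetical placeholders).  Since a miss is signalled by Smi zero,
// a non-Smi result is the cached COW-array:
//
//   Object* cached = RegExpResultsCache::Lookup(
//       heap, subject, pattern, RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
//   if (cached->IsSmi()) {  // Cache miss.
//     // ... compute |results| as a FixedArray ...
//     RegExpResultsCache::Enter(
//         heap, subject, pattern, results,
//         RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
//   }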


class TranscendentalCache {
 public:
  enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
  static const int kTranscendentalTypeBits = 3;
  STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);

  // Returns a heap number with f(input), where f is a math function specified
  // by the 'type' argument.
  MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);

  // The cache contains raw Object pointers.  This method disposes of
  // them before a garbage collection.
  void Clear();

 private:
  class SubCache {
    static const int kCacheSize = 512;

    explicit SubCache(Isolate* isolate, Type t);

    MUST_USE_RESULT inline MaybeObject* Get(double input);

    inline double Calculate(double input);

    struct Element {
      uint32_t in[2];
      Object* output;
    };

    union Converter {
      double dbl;
      uint32_t integers[2];
    };

    inline static int Hash(const Converter& c) {
      uint32_t hash = (c.integers[0] ^ c.integers[1]);
      hash ^= static_cast<int32_t>(hash) >> 16;
      hash ^= static_cast<int32_t>(hash) >> 8;
      return (hash & (kCacheSize - 1));
    }

    Element elements_[kCacheSize];
    Type type_;
    Isolate* isolate_;

    // Allow access to the caches_ array as an ExternalReference.
    friend class ExternalReference;
    // Inline implementation of the cache.
    friend class TranscendentalCacheStub;
    // For evaluating value.
    friend class TranscendentalCache;

    DISALLOW_COPY_AND_ASSIGN(SubCache);
  };

  explicit TranscendentalCache(Isolate* isolate) : isolate_(isolate) {
    for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
  }

  ~TranscendentalCache() {
    for (int i = 0; i < kNumberOfCaches; ++i) delete caches_[i];
  }

  // Used to create an external reference.
  inline Address cache_array_address();

  // Instantiation
  friend class Isolate;
  // Inline implementation of the caching.
  friend class TranscendentalCacheStub;
  // Allow access to the caches_ array as an ExternalReference.
  friend class ExternalReference;

  Isolate* isolate_;
  SubCache* caches_[kNumberOfCaches];
  DISALLOW_COPY_AND_ASSIGN(TranscendentalCache);
};
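
// Usage sketch (editor's illustration; the isolate accessor is assumed from
// isolate.h).  Get() may need to allocate the resulting heap number, so the
// MaybeObject must be unwrapped:
//
//   MaybeObject* maybe =
//       isolate->transcendental_cache()->Get(TranscendentalCache::LOG, 2.0);
//   Object* result;
//   if (!maybe->ToObject(&result)) {
//     // ... allocation failed; handle retry/GC as usual ...
//   }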


// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Return whether this object should be retained. If NULL is returned, the
  // object has no references. Otherwise the address of the retained object
  // should be returned, as in some GC situations the object has been moved.
  virtual Object* RetainAs(Object* object) = 0;
};
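
// Sketch of a trivial retainer (editor's illustration, not a class from
// this header):
//
//   class KeepAliveRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) {
//       return object;  // Returning NULL would drop the object instead.
//     }
//   };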


// Intrusive object marking uses the least significant bit of a
// heap object's map word to mark objects.
// Normally all map words have the least significant bit set
// because they contain a tagged map pointer.
// If the bit is not set, the object is marked.
// All objects should be unmarked before resuming
// JavaScript execution.
class IntrusiveMarking {
 public:
  static bool IsMarked(HeapObject* object) {
    return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
  }

  static void ClearMark(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
    ASSERT(!IsMarked(object));
  }

  static void SetMark(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
    ASSERT(IsMarked(object));
  }

  static Map* MapOfMarkedObject(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
  }

  static int SizeOfMarkedObject(HeapObject* object) {
    return object->SizeFromMap(MapOfMarkedObject(object));
  }

 private:
  static const uintptr_t kNotMarkedBit = 0x1;
  STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);
};
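
// Worked example (editor's illustration, with an assumed address): an
// unmarked object's map word is a tagged map pointer with the low bit set,
// say 0x12345679.  SetMark clears that bit, yielding 0x12345678;
// MapOfMarkedObject ors the bit back in before converting to a Map*, so
// the map remains reachable while the mark is in place, and ClearMark
// restores the original tagged value.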


#ifdef DEBUG
// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after the first match.
  };

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match.  If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target,
             WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20),
        no_allocation() {}

  virtual void VisitPointers(Object** start, Object** end);

  void Reset();
  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
  static const int kMarkTag = 2;

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;

  DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
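
// Usage sketch (editor's illustration; debug builds only, and |root| is an
// assumed Object** slot to start the trace from):
//
//   PathTracer tracer(search_target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(root);
//   if (tracer.found()) {
//     // ... a retaining path to |search_target| was traced ...
//   }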
#endif  // DEBUG

} }  // namespace v8::internal

#endif  // V8_HEAP_H_