The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.

Please select the desired protocol below to get the URL.

This URL has Read-Only access.

Statistics
| Branch: | Revision:

main_repo / deps / v8 / src / serialize.cc @ f230a1cf

History | View | Annotate | Download (67.5 KB)

1
// Copyright 2012 the V8 project authors. All rights reserved.
2
// Redistribution and use in source and binary forms, with or without
3
// modification, are permitted provided that the following conditions are
4
// met:
5
//
6
//     * Redistributions of source code must retain the above copyright
7
//       notice, this list of conditions and the following disclaimer.
8
//     * Redistributions in binary form must reproduce the above
9
//       copyright notice, this list of conditions and the following
10
//       disclaimer in the documentation and/or other materials provided
11
//       with the distribution.
12
//     * Neither the name of Google Inc. nor the names of its
13
//       contributors may be used to endorse or promote products derived
14
//       from this software without specific prior written permission.
15
//
16
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27

    
28
#include "v8.h"
29

    
30
#include "accessors.h"
31
#include "api.h"
32
#include "bootstrapper.h"
33
#include "deoptimizer.h"
34
#include "execution.h"
35
#include "global-handles.h"
36
#include "ic-inl.h"
37
#include "natives.h"
38
#include "platform.h"
39
#include "runtime.h"
40
#include "serialize.h"
41
#include "snapshot.h"
42
#include "stub-cache.h"
43
#include "v8threads.h"
44

    
45
namespace v8 {
46
namespace internal {
47

    
48

    
49
// -----------------------------------------------------------------------------
50
// Coding of external references.
51

    
52
// The encoding of an external reference. The type is in the high word.
53
// The id is in the low word.
54
// Packs an external-reference type (high half-word) and id (low half-word)
// into a single 32-bit code.
static uint32_t EncodeExternal(TypeCode type, uint16_t id) {
  uint32_t type_bits = static_cast<uint32_t>(type) << 16;
  return type_bits | id;
}
57

    
58

    
59
// Returns the counter's backing int cell.  Counters that were never set up
// (e.g. when deserializing without counter support) all share one dummy cell.
static int* GetInternalPointer(StatsCounter* counter) {
  static int dummy_counter = 0;
  if (counter->Enabled()) {
    return counter->GetInternalPointer();
  }
  return &dummy_counter;
}
65

    
66

    
67
// Lazily creates the per-isolate external reference table on first use and
// caches it on the isolate.
ExternalReferenceTable* ExternalReferenceTable::instance(Isolate* isolate) {
  ExternalReferenceTable* table = isolate->external_reference_table();
  if (table != NULL) return table;
  table = new ExternalReferenceTable(isolate);
  isolate->set_external_reference_table(table);
  return table;
}
76

    
77

    
78
void ExternalReferenceTable::AddFromId(TypeCode type,
79
                                       uint16_t id,
80
                                       const char* name,
81
                                       Isolate* isolate) {
82
  Address address;
83
  switch (type) {
84
    case C_BUILTIN: {
85
      ExternalReference ref(static_cast<Builtins::CFunctionId>(id), isolate);
86
      address = ref.address();
87
      break;
88
    }
89
    case BUILTIN: {
90
      ExternalReference ref(static_cast<Builtins::Name>(id), isolate);
91
      address = ref.address();
92
      break;
93
    }
94
    case RUNTIME_FUNCTION: {
95
      ExternalReference ref(static_cast<Runtime::FunctionId>(id), isolate);
96
      address = ref.address();
97
      break;
98
    }
99
    case IC_UTILITY: {
100
      ExternalReference ref(IC_Utility(static_cast<IC::UtilityId>(id)),
101
                            isolate);
102
      address = ref.address();
103
      break;
104
    }
105
    default:
106
      UNREACHABLE();
107
      return;
108
  }
109
  Add(address, type, id, name);
110
}
111

    
112

    
113
void ExternalReferenceTable::Add(Address address,
114
                                 TypeCode type,
115
                                 uint16_t id,
116
                                 const char* name) {
117
  ASSERT_NE(NULL, address);
118
  ExternalReferenceEntry entry;
119
  entry.address = address;
120
  entry.code = EncodeExternal(type, id);
121
  entry.name = name;
122
  ASSERT_NE(0, entry.code);
123
  refs_.Add(entry);
124
  if (id > max_id_[type]) max_id_[type] = id;
125
}
126

    
127

    
128
// Fills the table with every address that serialized code may refer to:
// builtins, runtime functions, IC utilities, debugger hooks, stats counters,
// isolate addresses, accessors, stub cache cells, runtime entries and a long
// list of "unclassified" addresses.  The literal ids below become the low
// half-word of the encoding produced by EncodeExternal (see Add), so their
// values matter, not just their uniqueness within a type code.
void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
  // Reset per-type max-id bookkeeping before repopulating.
  for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
    max_id_[type_code] = 0;
  }

  // The following populates all of the different type of external references
  // into the ExternalReferenceTable.
  //
  // NOTE: This function was originally 100k of code.  It has since been
  // rewritten to be mostly table driven, as the callback macro style tends to
  // very easily cause code bloat.  Please be careful in the future when adding
  // new references.

  struct RefTableEntry {
    TypeCode type;
    uint16_t id;
    const char* name;
  };

  static const RefTableEntry ref_table[] = {
  // Builtins
#define DEF_ENTRY_C(name, ignored) \
  { C_BUILTIN, \
    Builtins::c_##name, \
    "Builtins::" #name },

  BUILTIN_LIST_C(DEF_ENTRY_C)
#undef DEF_ENTRY_C

#define DEF_ENTRY_C(name, ignored) \
  { BUILTIN, \
    Builtins::k##name, \
    "Builtins::" #name },
#define DEF_ENTRY_A(name, kind, state, extra) DEF_ENTRY_C(name, ignored)

  BUILTIN_LIST_C(DEF_ENTRY_C)
  BUILTIN_LIST_A(DEF_ENTRY_A)
  BUILTIN_LIST_DEBUG_A(DEF_ENTRY_A)
#undef DEF_ENTRY_C
#undef DEF_ENTRY_A

  // Runtime functions
#define RUNTIME_ENTRY(name, nargs, ressize) \
  { RUNTIME_FUNCTION, \
    Runtime::k##name, \
    "Runtime::" #name },

  RUNTIME_FUNCTION_LIST(RUNTIME_ENTRY)
#undef RUNTIME_ENTRY

  // IC utilities
#define IC_ENTRY(name) \
  { IC_UTILITY, \
    IC::k##name, \
    "IC::" #name },

  IC_UTIL_LIST(IC_ENTRY)
#undef IC_ENTRY
  };  // end of ref_table[].

  // Register everything collected by the macro expansions above.
  for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) {
    AddFromId(ref_table[i].type,
              ref_table[i].id,
              ref_table[i].name,
              isolate);
  }

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Debug addresses.  The id is the Debug_Address kind shifted into the
  // debug-id field.
  Add(Debug_Address(Debug::k_after_break_target_address).address(isolate),
      DEBUG_ADDRESS,
      Debug::k_after_break_target_address << kDebugIdShift,
      "Debug::after_break_target_address()");
  Add(Debug_Address(Debug::k_debug_break_slot_address).address(isolate),
      DEBUG_ADDRESS,
      Debug::k_debug_break_slot_address << kDebugIdShift,
      "Debug::debug_break_slot_address()");
  Add(Debug_Address(Debug::k_debug_break_return_address).address(isolate),
      DEBUG_ADDRESS,
      Debug::k_debug_break_return_address << kDebugIdShift,
      "Debug::debug_break_return_address()");
  Add(Debug_Address(Debug::k_restarter_frame_function_pointer).address(isolate),
      DEBUG_ADDRESS,
      Debug::k_restarter_frame_function_pointer << kDebugIdShift,
      "Debug::restarter_frame_function_pointer_address()");
#endif

  // Stat counters
  struct StatsRefTableEntry {
    StatsCounter* (Counters::*counter)();  // pointer-to-member accessor
    uint16_t id;
    const char* name;
  };

  const StatsRefTableEntry stats_ref_table[] = {
#define COUNTER_ENTRY(name, caption) \
  { &Counters::name,    \
    Counters::k_##name, \
    "Counters::" #name },

  STATS_COUNTER_LIST_1(COUNTER_ENTRY)
  STATS_COUNTER_LIST_2(COUNTER_ENTRY)
#undef COUNTER_ENTRY
  };  // end of stats_ref_table[].

  // Resolve each counter to its internal int cell (or the shared dummy cell
  // when the counter is not enabled -- see GetInternalPointer above).
  Counters* counters = isolate->counters();
  for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) {
    Add(reinterpret_cast<Address>(GetInternalPointer(
            (counters->*(stats_ref_table[i].counter))())),
        STATS_COUNTER,
        stats_ref_table[i].id,
        stats_ref_table[i].name);
  }

  // Top addresses

  const char* AddressNames[] = {
#define BUILD_NAME_LITERAL(CamelName, hacker_name)      \
    "Isolate::" #hacker_name "_address",
    FOR_EACH_ISOLATE_ADDRESS_NAME(BUILD_NAME_LITERAL)
    NULL
#undef BUILD_NAME_LITERAL
  };

  for (uint16_t i = 0; i < Isolate::kIsolateAddressCount; ++i) {
    Add(isolate->get_address_from_id((Isolate::AddressId)i),
        TOP_ADDRESS, i, AddressNames[i]);
  }

  // Accessors
#define ACCESSOR_DESCRIPTOR_DECLARATION(name) \
  Add((Address)&Accessors::name, \
      ACCESSOR, \
      Accessors::k##name, \
      "Accessors::" #name);

  ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION)
#undef ACCESSOR_DESCRIPTOR_DECLARATION

  StubCache* stub_cache = isolate->stub_cache();

  // Stub cache tables
  Add(stub_cache->key_reference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE,
      1,
      "StubCache::primary_->key");
  Add(stub_cache->value_reference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE,
      2,
      "StubCache::primary_->value");
  Add(stub_cache->map_reference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE,
      3,
      "StubCache::primary_->map");
  Add(stub_cache->key_reference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE,
      4,
      "StubCache::secondary_->key");
  Add(stub_cache->value_reference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE,
      5,
      "StubCache::secondary_->value");
  Add(stub_cache->map_reference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE,
      6,
      "StubCache::secondary_->map");

  // Runtime entries
  Add(ExternalReference::perform_gc_function(isolate).address(),
      RUNTIME_ENTRY,
      1,
      "Runtime::PerformGC");
  Add(ExternalReference::fill_heap_number_with_random_function(
          isolate).address(),
      RUNTIME_ENTRY,
      2,
      "V8::FillHeapNumberWithRandom");
  Add(ExternalReference::random_uint32_function(isolate).address(),
      RUNTIME_ENTRY,
      3,
      "V8::Random");
  Add(ExternalReference::delete_handle_scope_extensions(isolate).address(),
      RUNTIME_ENTRY,
      4,
      "HandleScope::DeleteExtensions");
  Add(ExternalReference::
          incremental_marking_record_write_function(isolate).address(),
      RUNTIME_ENTRY,
      5,
      "IncrementalMarking::RecordWrite");
  Add(ExternalReference::store_buffer_overflow_function(isolate).address(),
      RUNTIME_ENTRY,
      6,
      "StoreBuffer::StoreBufferOverflow");
  Add(ExternalReference::
          incremental_evacuation_record_write_function(isolate).address(),
      RUNTIME_ENTRY,
      7,
      "IncrementalMarking::RecordWrite");

  // Miscellaneous.  Ids within UNCLASSIFIED need not be contiguous (e.g. 13
  // is unused below); only uniqueness per type code matters.
  Add(ExternalReference::roots_array_start(isolate).address(),
      UNCLASSIFIED,
      3,
      "Heap::roots_array_start()");
  Add(ExternalReference::address_of_stack_limit(isolate).address(),
      UNCLASSIFIED,
      4,
      "StackGuard::address_of_jslimit()");
  Add(ExternalReference::address_of_real_stack_limit(isolate).address(),
      UNCLASSIFIED,
      5,
      "StackGuard::address_of_real_jslimit()");
#ifndef V8_INTERPRETED_REGEXP
  Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(),
      UNCLASSIFIED,
      6,
      "RegExpStack::limit_address()");
  Add(ExternalReference::address_of_regexp_stack_memory_address(
          isolate).address(),
      UNCLASSIFIED,
      7,
      "RegExpStack::memory_address()");
  Add(ExternalReference::address_of_regexp_stack_memory_size(isolate).address(),
      UNCLASSIFIED,
      8,
      "RegExpStack::memory_size()");
  Add(ExternalReference::address_of_static_offsets_vector(isolate).address(),
      UNCLASSIFIED,
      9,
      "OffsetsVector::static_offsets_vector");
#endif  // V8_INTERPRETED_REGEXP
  Add(ExternalReference::new_space_start(isolate).address(),
      UNCLASSIFIED,
      10,
      "Heap::NewSpaceStart()");
  Add(ExternalReference::new_space_mask(isolate).address(),
      UNCLASSIFIED,
      11,
      "Heap::NewSpaceMask()");
  Add(ExternalReference::heap_always_allocate_scope_depth(isolate).address(),
      UNCLASSIFIED,
      12,
      "Heap::always_allocate_scope_depth()");
  Add(ExternalReference::new_space_allocation_limit_address(isolate).address(),
      UNCLASSIFIED,
      14,
      "Heap::NewSpaceAllocationLimitAddress()");
  Add(ExternalReference::new_space_allocation_top_address(isolate).address(),
      UNCLASSIFIED,
      15,
      "Heap::NewSpaceAllocationTopAddress()");
#ifdef ENABLE_DEBUGGER_SUPPORT
  Add(ExternalReference::debug_break(isolate).address(),
      UNCLASSIFIED,
      16,
      "Debug::Break()");
  Add(ExternalReference::debug_step_in_fp_address(isolate).address(),
      UNCLASSIFIED,
      17,
      "Debug::step_in_fp_addr()");
#endif
  Add(ExternalReference::double_fp_operation(Token::ADD, isolate).address(),
      UNCLASSIFIED,
      18,
      "add_two_doubles");
  Add(ExternalReference::double_fp_operation(Token::SUB, isolate).address(),
      UNCLASSIFIED,
      19,
      "sub_two_doubles");
  Add(ExternalReference::double_fp_operation(Token::MUL, isolate).address(),
      UNCLASSIFIED,
      20,
      "mul_two_doubles");
  Add(ExternalReference::double_fp_operation(Token::DIV, isolate).address(),
      UNCLASSIFIED,
      21,
      "div_two_doubles");
  Add(ExternalReference::double_fp_operation(Token::MOD, isolate).address(),
      UNCLASSIFIED,
      22,
      "mod_two_doubles");
  Add(ExternalReference::compare_doubles(isolate).address(),
      UNCLASSIFIED,
      23,
      "compare_doubles");
#ifndef V8_INTERPRETED_REGEXP
  Add(ExternalReference::re_case_insensitive_compare_uc16(isolate).address(),
      UNCLASSIFIED,
      24,
      "NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()");
  Add(ExternalReference::re_check_stack_guard_state(isolate).address(),
      UNCLASSIFIED,
      25,
      "RegExpMacroAssembler*::CheckStackGuardState()");
  Add(ExternalReference::re_grow_stack(isolate).address(),
      UNCLASSIFIED,
      26,
      "NativeRegExpMacroAssembler::GrowStack()");
  Add(ExternalReference::re_word_character_map().address(),
      UNCLASSIFIED,
      27,
      "NativeRegExpMacroAssembler::word_character_map");
#endif  // V8_INTERPRETED_REGEXP
  // Keyed lookup cache.
  Add(ExternalReference::keyed_lookup_cache_keys(isolate).address(),
      UNCLASSIFIED,
      28,
      "KeyedLookupCache::keys()");
  Add(ExternalReference::keyed_lookup_cache_field_offsets(isolate).address(),
      UNCLASSIFIED,
      29,
      "KeyedLookupCache::field_offsets()");
  Add(ExternalReference::transcendental_cache_array_address(isolate).address(),
      UNCLASSIFIED,
      30,
      "TranscendentalCache::caches()");
  Add(ExternalReference::handle_scope_next_address(isolate).address(),
      UNCLASSIFIED,
      31,
      "HandleScope::next");
  Add(ExternalReference::handle_scope_limit_address(isolate).address(),
      UNCLASSIFIED,
      32,
      "HandleScope::limit");
  Add(ExternalReference::handle_scope_level_address(isolate).address(),
      UNCLASSIFIED,
      33,
      "HandleScope::level");
  Add(ExternalReference::new_deoptimizer_function(isolate).address(),
      UNCLASSIFIED,
      34,
      "Deoptimizer::New()");
  Add(ExternalReference::compute_output_frames_function(isolate).address(),
      UNCLASSIFIED,
      35,
      "Deoptimizer::ComputeOutputFrames()");
  Add(ExternalReference::address_of_min_int().address(),
      UNCLASSIFIED,
      36,
      "LDoubleConstant::min_int");
  Add(ExternalReference::address_of_one_half().address(),
      UNCLASSIFIED,
      37,
      "LDoubleConstant::one_half");
  Add(ExternalReference::isolate_address(isolate).address(),
      UNCLASSIFIED,
      38,
      "isolate");
  Add(ExternalReference::address_of_minus_zero().address(),
      UNCLASSIFIED,
      39,
      "LDoubleConstant::minus_zero");
  Add(ExternalReference::address_of_negative_infinity().address(),
      UNCLASSIFIED,
      40,
      "LDoubleConstant::negative_infinity");
  Add(ExternalReference::power_double_double_function(isolate).address(),
      UNCLASSIFIED,
      41,
      "power_double_double_function");
  Add(ExternalReference::power_double_int_function(isolate).address(),
      UNCLASSIFIED,
      42,
      "power_double_int_function");
  Add(ExternalReference::store_buffer_top(isolate).address(),
      UNCLASSIFIED,
      43,
      "store_buffer_top");
  Add(ExternalReference::address_of_canonical_non_hole_nan().address(),
      UNCLASSIFIED,
      44,
      "canonical_nan");
  Add(ExternalReference::address_of_the_hole_nan().address(),
      UNCLASSIFIED,
      45,
      "the_hole_nan");
  Add(ExternalReference::get_date_field_function(isolate).address(),
      UNCLASSIFIED,
      46,
      "JSDate::GetField");
  Add(ExternalReference::date_cache_stamp(isolate).address(),
      UNCLASSIFIED,
      47,
      "date_cache_stamp");
  Add(ExternalReference::address_of_pending_message_obj(isolate).address(),
      UNCLASSIFIED,
      48,
      "address_of_pending_message_obj");
  Add(ExternalReference::address_of_has_pending_message(isolate).address(),
      UNCLASSIFIED,
      49,
      "address_of_has_pending_message");
  Add(ExternalReference::address_of_pending_message_script(isolate).address(),
      UNCLASSIFIED,
      50,
      "pending_message_script");
  Add(ExternalReference::get_make_code_young_function(isolate).address(),
      UNCLASSIFIED,
      51,
      "Code::MakeCodeYoung");
  Add(ExternalReference::cpu_features().address(),
      UNCLASSIFIED,
      52,
      "cpu_features");
  Add(ExternalReference(Runtime::kAllocateInNewSpace, isolate).address(),
      UNCLASSIFIED,
      53,
      "Runtime::AllocateInNewSpace");
  Add(ExternalReference::old_pointer_space_allocation_top_address(
      isolate).address(),
      UNCLASSIFIED,
      54,
      "Heap::OldPointerSpaceAllocationTopAddress");
  Add(ExternalReference::old_pointer_space_allocation_limit_address(
      isolate).address(),
      UNCLASSIFIED,
      55,
      "Heap::OldPointerSpaceAllocationLimitAddress");
  Add(ExternalReference(Runtime::kAllocateInOldPointerSpace, isolate).address(),
      UNCLASSIFIED,
      56,
      "Runtime::AllocateInOldPointerSpace");
  Add(ExternalReference::old_data_space_allocation_top_address(
      isolate).address(),
      UNCLASSIFIED,
      57,
      "Heap::OldDataSpaceAllocationTopAddress");
  Add(ExternalReference::old_data_space_allocation_limit_address(
      isolate).address(),
      UNCLASSIFIED,
      58,
      "Heap::OldDataSpaceAllocationLimitAddress");
  Add(ExternalReference(Runtime::kAllocateInOldDataSpace, isolate).address(),
      UNCLASSIFIED,
      59,
      "Runtime::AllocateInOldDataSpace");
  // NOTE(review): the name below duplicates the label used for id 15 even
  // though the address is new_space_high_promotion_mode_active -- looks like
  // a copy/paste label; confirm the intended name before relying on it.
  Add(ExternalReference::new_space_high_promotion_mode_active_address(isolate).
      address(),
      UNCLASSIFIED,
      60,
      "Heap::NewSpaceAllocationLimitAddress");
  Add(ExternalReference::allocation_sites_list_address(isolate).address(),
      UNCLASSIFIED,
      61,
      "Heap::allocation_sites_list_address()");
  Add(ExternalReference::record_object_allocation_function(isolate).address(),
      UNCLASSIFIED,
      62,
      "HeapProfiler::RecordObjectAllocationFromMasm");
  Add(ExternalReference::address_of_uint32_bias().address(),
      UNCLASSIFIED,
      63,
      "uint32_bias");
  Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(),
      UNCLASSIFIED,
      64,
      "Code::MarkCodeAsExecuted");

  // Add a small set of deopt entry addresses to encoder without generating the
  // deopt table code, which isn't possible at deserialization time.
  // These use the LAZY_DEOPTIMIZATION type code, so the 64+ ids here do not
  // collide with UNCLASSIFIED id 64 above.
  HandleScope scope(isolate);
  for (int entry = 0; entry < kDeoptTableSerializeEntryCount; ++entry) {
    Address address = Deoptimizer::GetDeoptimizationEntry(
        isolate,
        entry,
        Deoptimizer::LAZY,
        Deoptimizer::CALCULATE_ENTRY_ADDRESS);
    Add(address, LAZY_DEOPTIMIZATION, 64 + entry, "lazy_deopt");
  }
}
601

    
602

    
603
// Builds the address -> table-index hash map from the isolate's external
// reference table so Encode/NameOfAddress can do reverse lookups.
ExternalReferenceEncoder::ExternalReferenceEncoder(Isolate* isolate)
    : encodings_(Match),
      isolate_(isolate) {
  ExternalReferenceTable* table = ExternalReferenceTable::instance(isolate_);
  int count = table->size();
  for (int index = 0; index < count; ++index) {
    Put(table->address(index), index);
  }
}
612

    
613

    
614
// Maps a known external address to its (type, id) encoding.  NULL encodes
// to 0; any other unknown address trips the assert in debug builds.
uint32_t ExternalReferenceEncoder::Encode(Address key) const {
  int index = IndexOf(key);
  ASSERT(key == NULL || index >= 0);
  if (index < 0) return 0;
  return ExternalReferenceTable::instance(isolate_)->code(index);
}
620

    
621

    
622
// Returns the human-readable name registered for |key|, or NULL when the
// address is not in the table.
const char* ExternalReferenceEncoder::NameOfAddress(Address key) const {
  int index = IndexOf(key);
  if (index < 0) return NULL;
  return ExternalReferenceTable::instance(isolate_)->name(index);
}
627

    
628

    
629
// Looks up |key| in the encodings map; -1 means not present.  The
// const_cast is needed because HashMap::Lookup is non-const even for a
// read-only (insert = false) probe.
int ExternalReferenceEncoder::IndexOf(Address key) const {
  if (key == NULL) return -1;
  HashMap& map = const_cast<HashMap&>(encodings_);
  HashMap::Entry* entry = map.Lookup(key, Hash(key), false);
  if (entry == NULL) return -1;
  return static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
}
637

    
638

    
639
// Inserts (or overwrites) the mapping key -> index, storing the index
// directly in the entry's value pointer.
void ExternalReferenceEncoder::Put(Address key, int index) {
  HashMap::Entry* slot = encodings_.Lookup(key, Hash(key), true);
  slot->value = reinterpret_cast<void*>(index);
}
643

    
644

    
645
// Builds the reverse mapping: for each type code, an array indexed by id
// that yields the raw address.
ExternalReferenceDecoder::ExternalReferenceDecoder(Isolate* isolate)
    : encodings_(NewArray<Address*>(kTypeCodeCount)),
      isolate_(isolate) {
  ExternalReferenceTable* table = ExternalReferenceTable::instance(isolate_);
  // Size each per-type array by the largest id registered for that type.
  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
    int max = table->max_id(type) + 1;
    encodings_[type] = NewArray<Address>(max + 1);
  }
  // Populate id -> address for every table entry.
  for (int index = 0; index < table->size(); ++index) {
    Put(table->code(index), table->address(index));
  }
}
658

    
659

    
660
// Releases the per-type id arrays first, then the outer array of arrays.
ExternalReferenceDecoder::~ExternalReferenceDecoder() {
  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
    DeleteArray(encodings_[type]);
  }
  DeleteArray(encodings_);
}
666

    
667

    
668
// Global serializer state.  serialization_enabled_ is toggled by
// Serializer::Enable/Disable below; too_late_to_enable_now_ is only read
// here (in the ASSERT inside Enable) -- presumably set once state exists
// that cannot be serialized; confirm where it is written.
bool Serializer::serialization_enabled_ = false;
bool Serializer::too_late_to_enable_now_ = false;
670

    
671

    
672
// Tracks the name of every logged code object by address, following moves
// and deletions, so the (de)serializer can look names up later.  Registers
// itself as a code-event listener on construction and unregisters on
// destruction.
class CodeAddressMap: public CodeEventLogger {
 public:
  explicit CodeAddressMap(Isolate* isolate)
      : isolate_(isolate) {
    isolate->logger()->addCodeEventListener(this);
  }

  virtual ~CodeAddressMap() {
    isolate_->logger()->removeCodeEventListener(this);
  }

  // Code object relocated: carry its name over to the new address.
  virtual void CodeMoveEvent(Address from, Address to) {
    address_to_name_map_.Move(from, to);
  }

  // Code object freed: drop (and deallocate) its name.
  virtual void CodeDeleteEvent(Address from) {
    address_to_name_map_.Remove(from);
  }

  // Returns the recorded name for a code address, or NULL if unknown.
  const char* Lookup(Address address) {
    return address_to_name_map_.Lookup(address);
  }

 private:
  // Address -> heap-allocated name string.  Owns the stored strings.
  class NameMap {
   public:
    NameMap() : impl_(&PointerEquals) {}

    ~NameMap() {
      // Free every owned name copy.
      for (HashMap::Entry* p = impl_.Start(); p != NULL; p = impl_.Next(p)) {
        DeleteArray(static_cast<const char*>(p->value));
      }
    }

    // First name recorded for an address wins; later inserts are ignored.
    void Insert(Address code_address, const char* name, int name_size) {
      HashMap::Entry* entry = FindOrCreateEntry(code_address);
      if (entry->value == NULL) {
        entry->value = CopyName(name, name_size);
      }
    }

    const char* Lookup(Address code_address) {
      HashMap::Entry* entry = FindEntry(code_address);
      return (entry != NULL) ? static_cast<const char*>(entry->value) : NULL;
    }

    void Remove(Address code_address) {
      HashMap::Entry* entry = FindEntry(code_address);
      if (entry != NULL) {
        DeleteArray(static_cast<char*>(entry->value));
        RemoveEntry(entry);
      }
    }

    // Transfers ownership of the name from |from| to |to|; the source entry
    // must exist and the destination must not already have a name.
    void Move(Address from, Address to) {
      if (from == to) return;
      HashMap::Entry* from_entry = FindEntry(from);
      ASSERT(from_entry != NULL);
      void* value = from_entry->value;
      RemoveEntry(from_entry);
      HashMap::Entry* to_entry = FindOrCreateEntry(to);
      ASSERT(to_entry->value == NULL);
      to_entry->value = value;
    }

   private:
    static bool PointerEquals(void* lhs, void* rhs) {
      return lhs == rhs;
    }

    // Makes an owned, NUL-terminated copy of |name|, replacing any embedded
    // NULs with spaces so the result is a plain C string.
    static char* CopyName(const char* name, int name_size) {
      char* result = NewArray<char>(name_size + 1);
      for (int i = 0; i < name_size; ++i) {
        char c = name[i];
        if (c == '\0') c = ' ';
        result[i] = c;
      }
      result[name_size] = '\0';
      return result;
    }

    HashMap::Entry* FindOrCreateEntry(Address code_address) {
      return impl_.Lookup(code_address, ComputePointerHash(code_address), true);
    }

    HashMap::Entry* FindEntry(Address code_address) {
      return impl_.Lookup(code_address,
                          ComputePointerHash(code_address),
                          false);
    }

    void RemoveEntry(HashMap::Entry* entry) {
      impl_.Remove(entry->key, entry->hash);
    }

    HashMap impl_;

    DISALLOW_COPY_AND_ASSIGN(NameMap);
  };

  // CodeEventLogger hook: record the name under the code object's address.
  virtual void LogRecordedBuffer(Code* code,
                                 SharedFunctionInfo*,
                                 const char* name,
                                 int length) {
    address_to_name_map_.Insert(code->address(), name, length);
  }

  NameMap address_to_name_map_;
  Isolate* isolate_;
};
782

    
783

    
784
// Created in Serializer::Enable() and deleted in Serializer::Disable().
CodeAddressMap* Serializer::code_address_map_ = NULL;
785

    
786

    
787
// Turns serialization support on (idempotent).  Must be called before it is
// too late to enable -- asserted below -- and sets up logging/counters plus
// the code-address bookkeeping the serializer needs.
void Serializer::Enable(Isolate* isolate) {
  if (serialization_enabled_) return;
  ASSERT(!too_late_to_enable_now_);
  serialization_enabled_ = true;
  isolate->InitializeLoggingAndCounters();
  code_address_map_ = new CodeAddressMap(isolate);
}
796

    
797

    
798
void Serializer::Disable() {
799
  if (!serialization_enabled_) return;
800
  serialization_enabled_ = false;
801
  delete code_address_map_;
802
  code_address_map_ = NULL;
803
}
804

    
805

    
806
// A deserializer starts with no isolate, no decoder, and every space
// reservation marked as not-yet-set.
Deserializer::Deserializer(SnapshotByteSource* source)
    : isolate_(NULL),
      source_(source),
      external_reference_decoder_(NULL) {
  for (int space = 0; space <= LAST_SPACE; space++) {
    reservations_[space] = kUninitializedReservation;
  }
}
814

    
815

    
816
// Deserializes a full startup snapshot into |isolate|'s heap: reserves
// space, walks strong then weak roots through this visitor, and then fixes
// up heap lists, natives source strings and code-event logging.  The order
// of the calls below matters.
void Deserializer::Deserialize(Isolate* isolate) {
  isolate_ = isolate;
  ASSERT(isolate_ != NULL);
  isolate_->heap()->ReserveSpace(reservations_, &high_water_[0]);
  // No active threads.
  ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
  // No active handles.
  ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty());
  ASSERT_EQ(NULL, external_reference_decoder_);
  external_reference_decoder_ = new ExternalReferenceDecoder(isolate);
  isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
  isolate_->heap()->RepairFreeListsAfterBoot();
  isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);

  // Reset the heap-managed object lists to undefined rather than whatever
  // the snapshot left in them.
  isolate_->heap()->set_native_contexts_list(
      isolate_->heap()->undefined_value());
  isolate_->heap()->set_array_buffers_list(
      isolate_->heap()->undefined_value());

  // The allocation site list is built during root iteration, but if no sites
  // were encountered then it needs to be initialized to undefined.
  if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
    isolate_->heap()->set_allocation_sites_list(
        isolate_->heap()->undefined_value());
  }

  isolate_->heap()->InitializeWeakObjectToCodeTable();

  // Update data pointers to the external strings containing natives sources.
  for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
    Object* source = isolate_->heap()->natives_source_cache()->get(i);
    if (!source->IsUndefined()) {
      ExternalAsciiString::cast(source)->update_data_cache();
    }
  }

  // Issue code events for newly deserialized code objects.
  LOG_CODE_EVENT(isolate_, LogCodeObjects());
  LOG_CODE_EVENT(isolate_, LogCompiledFunctions());
}
856

    
857

    
858
// Deserializes a partial (context) snapshot, writing the resulting root
// object into |*root|.  Unlike Deserialize() this assumes the heap already
// exists; every space reservation must have been supplied beforehand.
void Deserializer::DeserializePartial(Isolate* isolate, Object** root) {
  isolate_ = isolate;
  for (int i = NEW_SPACE; i < kNumberOfSpaces; i++) {
    ASSERT(reservations_[i] != kUninitializedReservation);
  }
  isolate_->heap()->ReserveSpace(reservations_, &high_water_[0]);
  if (external_reference_decoder_ == NULL) {
    external_reference_decoder_ = new ExternalReferenceDecoder(isolate);
  }

  // Keep track of the code space start and end pointers in case new
  // code objects were deserialized.
  OldSpace* code_space = isolate_->heap()->code_space();
  Address start_address = code_space->top();
  VisitPointer(root);

  // There's no code deserialized here. If this assert fires
  // then that's changed and logging should be added to notify
  // the profiler et al of the new code.
  CHECK_EQ(start_address, code_space->top());
}
879

    
880

    
881
Deserializer::~Deserializer() {
  // The entire snapshot byte stream must have been consumed by now.
  ASSERT(source_->AtEOF());
  // Deleting a NULL pointer is a no-op, so no guard is needed.
  delete external_reference_decoder_;
  external_reference_decoder_ = NULL;
}
888

    
889

    
890
// This is called on the roots.  It is the driver of the deserialization
891
// process.  It is also called on the body of each function.
892
void Deserializer::VisitPointers(Object** start, Object** end) {
893
  // The space must be new space.  Any other space would cause ReadChunk to try
894
  // to update the remembered using NULL as the address.
895
  ReadChunk(start, end, NEW_SPACE, NULL);
896
}
897

    
898

    
899
// Prepends |site| to the heap's allocation site list.  An empty list is
// encoded as the Smi zero; in that case the new tail is set to undefined.
void Deserializer::RelinkAllocationSite(AllocationSite* site) {
  Heap* heap = isolate_->heap();
  Object* head = heap->allocation_sites_list();
  if (head == Smi::FromInt(0)) {
    site->set_weak_next(heap->undefined_value());
  } else {
    site->set_weak_next(head);
  }
  heap->set_allocation_sites_list(site);
}
907

    
908

    
909
// This routine writes the new object into the pointer provided (instead of
// simply returning it).  The reason for this strange interface is that
// otherwise the object is written very late, which means the FreeSpace map
// is not set up by the time we need to use it to mark the space at the end
// of a page free.
void Deserializer::ReadObject(int space_number,
                              Object** write_back) {
  // Sizes are serialized in words; restore the byte size.
  int size = source_->GetInt() << kObjectAlignmentBits;
  Address address = Allocate(space_number, size);
  HeapObject* obj = HeapObject::FromAddress(address);
  // Publish the object before reading its body (see comment above).
  *write_back = obj;
  Object** current = reinterpret_cast<Object**>(address);
  Object** limit = current + (size >> kPointerSizeLog2);
  if (FLAG_log_snapshot_positions) {
    LOG(isolate_, SnapshotPositionEvent(address, source_->position()));
  }
  ReadChunk(current, limit, space_number, address);

  // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
  // as a (weak) root. If this root is relocated correctly,
  // RelinkAllocationSite() isn't necessary.
  if (obj->IsAllocationSite()) {
    RelinkAllocationSite(AllocationSite::cast(obj));
  }

#ifdef DEBUG
  // Only code space may (and must) contain Code objects.
  bool is_codespace = (space_number == CODE_SPACE);
  ASSERT(obj->IsCode() == is_codespace);
#endif
}
939

    
940
// Decodes the snapshot byte stream into the slot range [current, limit).
// Each byte code either copies raw data, materializes a new object, or
// patches in a reference (root, back reference, external reference).
// |current_object_address| is the start of the object being filled in, or
// NULL when writing root slots; it is used for write-barrier bookkeeping.
void Deserializer::ReadChunk(Object** current,
                             Object** limit,
                             int source_space,
                             Address current_object_address) {
  Isolate* const isolate = isolate_;
  // Write barrier support costs around 1% in startup time.  In fact there
  // are no new space objects in current boot snapshots, so it's not needed,
  // but that may change.
  bool write_barrier_needed = (current_object_address != NULL &&
                               source_space != NEW_SPACE &&
                               source_space != CELL_SPACE &&
                               source_space != PROPERTY_CELL_SPACE &&
                               source_space != CODE_SPACE &&
                               source_space != OLD_DATA_SPACE);
  while (current < limit) {
    int data = source_->Get();
    switch (data) {
// Byte codes pack four fields (where/how/within/space); the asserts check
// that each argument stays inside its bit mask.
#define CASE_STATEMENT(where, how, within, space_number)                       \
      case where + how + within + space_number:                                \
      ASSERT((where & ~kPointedToMask) == 0);                                  \
      ASSERT((how & ~kHowToCodeMask) == 0);                                    \
      ASSERT((within & ~kWhereToPointMask) == 0);                              \
      ASSERT((space_number & ~kSpaceMask) == 0);

#define CASE_BODY(where, how, within, space_number_if_any)                     \
      {                                                                        \
        bool emit_write_barrier = false;                                       \
        bool current_was_incremented = false;                                  \
        int space_number =  space_number_if_any == kAnyOldSpace ?              \
                            (data & kSpaceMask) : space_number_if_any;         \
        if (where == kNewObject && how == kPlain && within == kStartOfObject) {\
          ReadObject(space_number, current);                                   \
          emit_write_barrier = (space_number == NEW_SPACE);                    \
        } else {                                                               \
          Object* new_object = NULL;  /* May not be a real Object pointer. */  \
          if (where == kNewObject) {                                           \
            ReadObject(space_number, &new_object);                             \
          } else if (where == kRootArray) {                                    \
            int root_id = source_->GetInt();                                   \
            new_object = isolate->heap()->roots_array_start()[root_id];        \
            emit_write_barrier = isolate->heap()->InNewSpace(new_object);      \
          } else if (where == kPartialSnapshotCache) {                         \
            int cache_index = source_->GetInt();                               \
            new_object = isolate->serialize_partial_snapshot_cache()           \
                [cache_index];                                                 \
            emit_write_barrier = isolate->heap()->InNewSpace(new_object);      \
          } else if (where == kExternalReference) {                            \
            int skip = source_->GetInt();                                      \
            current = reinterpret_cast<Object**>(reinterpret_cast<Address>(    \
                current) + skip);                                              \
            int reference_id = source_->GetInt();                              \
            Address address = external_reference_decoder_->                    \
                Decode(reference_id);                                          \
            new_object = reinterpret_cast<Object*>(address);                   \
          } else if (where == kBackref) {                                      \
            emit_write_barrier = (space_number == NEW_SPACE);                  \
            new_object = GetAddressFromEnd(data & kSpaceMask);                 \
          } else {                                                             \
            ASSERT(where == kBackrefWithSkip);                                 \
            int skip = source_->GetInt();                                      \
            current = reinterpret_cast<Object**>(                              \
                reinterpret_cast<Address>(current) + skip);                    \
            emit_write_barrier = (space_number == NEW_SPACE);                  \
            new_object = GetAddressFromEnd(data & kSpaceMask);                 \
          }                                                                    \
          if (within == kInnerPointer) {                                       \
            if (space_number != CODE_SPACE || new_object->IsCode()) {          \
              Code* new_code_object = reinterpret_cast<Code*>(new_object);     \
              new_object = reinterpret_cast<Object*>(                          \
                  new_code_object->instruction_start());                       \
            } else {                                                           \
              ASSERT(space_number == CODE_SPACE);                              \
              Cell* cell = Cell::cast(new_object);                             \
              new_object = reinterpret_cast<Object*>(                          \
                  cell->ValueAddress());                                       \
            }                                                                  \
          }                                                                    \
          if (how == kFromCode) {                                              \
            Address location_of_branch_data =                                  \
                reinterpret_cast<Address>(current);                            \
            Assembler::deserialization_set_special_target_at(                  \
                location_of_branch_data,                                       \
                reinterpret_cast<Address>(new_object));                        \
            location_of_branch_data += Assembler::kSpecialTargetSize;          \
            current = reinterpret_cast<Object**>(location_of_branch_data);     \
            current_was_incremented = true;                                    \
          } else {                                                             \
            *current = new_object;                                             \
          }                                                                    \
        }                                                                      \
        if (emit_write_barrier && write_barrier_needed) {                      \
          Address current_address = reinterpret_cast<Address>(current);        \
          isolate->heap()->RecordWrite(                                        \
              current_object_address,                                          \
              static_cast<int>(current_address - current_object_address));     \
        }                                                                      \
        if (!current_was_incremented) {                                        \
          current++;                                                           \
        }                                                                      \
        break;                                                                 \
      }                                                                        \

// This generates a case and a body for the new space (which has to do extra
// write barrier handling) and handles the other spaces with 8 fall-through
// cases and one body.
#define ALL_SPACES(where, how, within)                                         \
  CASE_STATEMENT(where, how, within, NEW_SPACE)                                \
  CASE_BODY(where, how, within, NEW_SPACE)                                     \
  CASE_STATEMENT(where, how, within, OLD_DATA_SPACE)                           \
  CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE)                        \
  CASE_STATEMENT(where, how, within, CODE_SPACE)                               \
  CASE_STATEMENT(where, how, within, CELL_SPACE)                               \
  CASE_STATEMENT(where, how, within, PROPERTY_CELL_SPACE)                      \
  CASE_STATEMENT(where, how, within, MAP_SPACE)                                \
  CASE_BODY(where, how, within, kAnyOldSpace)

#define FOUR_CASES(byte_code)             \
  case byte_code:                         \
  case byte_code + 1:                     \
  case byte_code + 2:                     \
  case byte_code + 3:

#define SIXTEEN_CASES(byte_code)          \
  FOUR_CASES(byte_code)                   \
  FOUR_CASES(byte_code + 4)               \
  FOUR_CASES(byte_code + 8)               \
  FOUR_CASES(byte_code + 12)

#define COMMON_RAW_LENGTHS(f)        \
  f(1)  \
  f(2)  \
  f(3)  \
  f(4)  \
  f(5)  \
  f(6)  \
  f(7)  \
  f(8)  \
  f(9)  \
  f(10) \
  f(11) \
  f(12) \
  f(13) \
  f(14) \
  f(15) \
  f(16) \
  f(17) \
  f(18) \
  f(19) \
  f(20) \
  f(21) \
  f(22) \
  f(23) \
  f(24) \
  f(25) \
  f(26) \
  f(27) \
  f(28) \
  f(29) \
  f(30) \
  f(31)

      // We generate 31 cases and bodies that process special tags that combine
      // the raw data tag and the length into one byte.
#define RAW_CASE(index)                                                      \
      case kRawData + index: {                                               \
        byte* raw_data_out = reinterpret_cast<byte*>(current);               \
        source_->CopyRaw(raw_data_out, index * kPointerSize);                \
        current =                                                            \
            reinterpret_cast<Object**>(raw_data_out + index * kPointerSize); \
        break;                                                               \
      }
      COMMON_RAW_LENGTHS(RAW_CASE)
#undef RAW_CASE

      // Deserialize a chunk of raw data that doesn't have one of the popular
      // lengths.
      // NOTE(review): unlike RAW_CASE above, |current| is not advanced past
      // the copied bytes here — presumably the serializer follows such runs
      // with an explicit skip; confirm against the serializer side.
      case kRawData: {
        int size = source_->GetInt();
        byte* raw_data_out = reinterpret_cast<byte*>(current);
        source_->CopyRaw(raw_data_out, size);
        break;
      }

      // One-byte encodings of frequently used root constants (no skip).
      SIXTEEN_CASES(kRootArrayConstants + kNoSkipDistance)
      SIXTEEN_CASES(kRootArrayConstants + kNoSkipDistance + 16) {
        int root_id = RootArrayConstantFromByteCode(data);
        Object* object = isolate->heap()->roots_array_start()[root_id];
        ASSERT(!isolate->heap()->InNewSpace(object));
        *current++ = object;
        break;
      }

      // Root constants preceded by an explicit skip distance.
      SIXTEEN_CASES(kRootArrayConstants + kHasSkipDistance)
      SIXTEEN_CASES(kRootArrayConstants + kHasSkipDistance + 16) {
        int root_id = RootArrayConstantFromByteCode(data);
        int skip = source_->GetInt();
        current = reinterpret_cast<Object**>(
            reinterpret_cast<intptr_t>(current) + skip);
        Object* object = isolate->heap()->roots_array_start()[root_id];
        ASSERT(!isolate->heap()->InNewSpace(object));
        *current++ = object;
        break;
      }

      // Repeat the previously written value |repeats| times.
      case kRepeat: {
        int repeats = source_->GetInt();
        Object* object = current[-1];
        ASSERT(!isolate->heap()->InNewSpace(object));
        for (int i = 0; i < repeats; i++) current[i] = object;
        current += repeats;
        break;
      }

      STATIC_ASSERT(kRootArrayNumberOfConstantEncodings ==
                    Heap::kOldSpaceRoots);
      STATIC_ASSERT(kMaxRepeats == 13);
      // Like kRepeat, but the repeat count is encoded in the byte code.
      case kConstantRepeat:
      FOUR_CASES(kConstantRepeat + 1)
      FOUR_CASES(kConstantRepeat + 5)
      FOUR_CASES(kConstantRepeat + 9) {
        int repeats = RepeatsForCode(data);
        Object* object = current[-1];
        ASSERT(!isolate->heap()->InNewSpace(object));
        for (int i = 0; i < repeats; i++) current[i] = object;
        current += repeats;
        break;
      }

      // Deserialize a new object and write a pointer to it to the current
      // object.
      ALL_SPACES(kNewObject, kPlain, kStartOfObject)
      // Support for direct instruction pointers in functions.  It's an inner
      // pointer because it points at the entry point, not at the start of the
      // code object.
      CASE_STATEMENT(kNewObject, kPlain, kInnerPointer, CODE_SPACE)
      CASE_BODY(kNewObject, kPlain, kInnerPointer, CODE_SPACE)
      // Deserialize a new code object and write a pointer to its first
      // instruction to the current code object.
      ALL_SPACES(kNewObject, kFromCode, kInnerPointer)
      // Find a recently deserialized object using its offset from the current
      // allocation point and write a pointer to it to the current object.
      ALL_SPACES(kBackref, kPlain, kStartOfObject)
      ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
#if V8_TARGET_ARCH_MIPS
      // Deserialize a new object from pointer found in code and write
      // a pointer to it to the current object. Required only for MIPS, and
      // omitted on the other architectures because it is fully unrolled and
      // would cause bloat.
      ALL_SPACES(kNewObject, kFromCode, kStartOfObject)
      // Find a recently deserialized code object using its offset from the
      // current allocation point and write a pointer to it to the current
      // object. Required only for MIPS.
      ALL_SPACES(kBackref, kFromCode, kStartOfObject)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kStartOfObject)
#endif
      // Find a recently deserialized code object using its offset from the
      // current allocation point and write a pointer to its first instruction
      // to the current code object or the instruction pointer in a function
      // object.
      ALL_SPACES(kBackref, kFromCode, kInnerPointer)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
      ALL_SPACES(kBackref, kPlain, kInnerPointer)
      ALL_SPACES(kBackrefWithSkip, kPlain, kInnerPointer)
      // Find an object in the roots array and write a pointer to it to the
      // current object.
      CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0)
      CASE_BODY(kRootArray, kPlain, kStartOfObject, 0)
      // Find an object in the partial snapshots cache and write a pointer to it
      // to the current object.
      CASE_STATEMENT(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
      CASE_BODY(kPartialSnapshotCache,
                kPlain,
                kStartOfObject,
                0)
      // Find a code entry in the partial snapshots cache and
      // write a pointer to it to the current object.
      CASE_STATEMENT(kPartialSnapshotCache, kPlain, kInnerPointer, 0)
      CASE_BODY(kPartialSnapshotCache,
                kPlain,
                kInnerPointer,
                0)
      // Find an external reference and write a pointer to it to the current
      // object.
      CASE_STATEMENT(kExternalReference, kPlain, kStartOfObject, 0)
      CASE_BODY(kExternalReference,
                kPlain,
                kStartOfObject,
                0)
      // Find an external reference and write a pointer to it in the current
      // code object.
      CASE_STATEMENT(kExternalReference, kFromCode, kStartOfObject, 0)
      CASE_BODY(kExternalReference,
                kFromCode,
                kStartOfObject,
                0)

#undef CASE_STATEMENT
#undef CASE_BODY
#undef ALL_SPACES

      // Advance |current| by an explicit byte distance without writing.
      case kSkip: {
        int size = source_->GetInt();
        current = reinterpret_cast<Object**>(
            reinterpret_cast<intptr_t>(current) + size);
        break;
      }

      // Recreate the external string resource for a natives source string.
      case kNativesStringResource: {
        int index = source_->Get();
        Vector<const char> source_vector = Natives::GetRawScriptSource(index);
        NativesExternalStringResource* resource =
            new NativesExternalStringResource(isolate->bootstrapper(),
                                              source_vector.start(),
                                              source_vector.length());
        *current++ = reinterpret_cast<Object*>(resource);
        break;
      }

      case kSynchronize: {
        // If we get here then that indicates that you have a mismatch between
        // the number of GC roots when serializing and deserializing.
        UNREACHABLE();
      }

      default:
        UNREACHABLE();
    }
  }
  ASSERT_EQ(limit, current);
}
1270

    
1271

    
1272
// Emits |integer| as a variable-length (1-3 byte) little-endian encoding.
// The value is shifted left by two and the low two bits carry the byte
// count, so at most 22 significant bits can be represented.
void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) {
  ASSERT(integer < 1 << 22);
  uintptr_t encoded = integer << 2;
  int byte_count;
  if (encoded > 0xffff) {
    byte_count = 3;
  } else if (encoded > 0xff) {
    byte_count = 2;
  } else {
    byte_count = 1;
  }
  encoded |= byte_count;
  Put(static_cast<int>(encoded & 0xff), "IntPart1");
  if (byte_count > 1) Put(static_cast<int>((encoded >> 8) & 0xff), "IntPart2");
  if (byte_count > 2) Put(static_cast<int>((encoded >> 16) & 0xff), "IntPart3");
}
1283

    
1284

    
1285
// Constructs a serializer writing to |sink|.  Takes ownership of a freshly
// allocated ExternalReferenceEncoder (released in ~Serializer).
Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink)
    : isolate_(isolate),
      sink_(sink),
      current_root_index_(0),
      external_reference_encoder_(new ExternalReferenceEncoder(isolate)),
      root_index_wave_front_(0) {
  // The serializer is meant to be used only to generate initial heap images
  // from a context in which there is only one isolate.
  // Nothing has been allocated in any space yet.
  for (int i = 0; i <= LAST_SPACE; i++) {
    fullness_[i] = 0;
  }
}
1297

    
1298

    
1299
Serializer::~Serializer() {
  // The encoder was allocated in the constructor and is owned here.
  delete external_reference_encoder_;
}
1302

    
1303

    
1304
// Serializes all strong roots into the startup snapshot.  The isolate must
// be quiescent: no threads, no handles, no weak or eternal handles, and no
// installed extensions, since none of those can be serialized.
void StartupSerializer::SerializeStrongReferences() {
  Isolate* isolate = this->isolate();
  // No active threads.
  CHECK_EQ(NULL, isolate->thread_manager()->FirstThreadStateInUse());
  // No active or weak handles.
  CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
  CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
  CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles());
  // We don't support serializing installed extensions.
  CHECK(!isolate->has_installed_extensions());

  isolate->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
}
1317

    
1318

    
1319
// Serializes the object graph reachable from |*object|, then pads the sink
// so the stream ends on a full word.
void PartialSerializer::Serialize(Object** object) {
  VisitPointer(object);
  Pad();
}
1323

    
1324

    
1325
// Returns true for root slots whose values must not be serialized: the
// store buffer top and the (per-process) stack limits are rewritten at
// startup anyway.
bool Serializer::ShouldBeSkipped(Object** current) {
  Object** roots = isolate()->heap()->roots_array_start();
  if (current == &roots[Heap::kStoreBufferTopRootIndex]) return true;
  if (current == &roots[Heap::kStackLimitRootIndex]) return true;
  return current == &roots[Heap::kRealStackLimitRootIndex];
}
1331

    
1332

    
1333
// Visits the pointer slots in [start, end), emitting each one to the sink.
// When invoked on the root list this also advances root_index_wave_front_,
// so RootIndex() only recognizes roots that have already been visited.
// Smis are written as one word of raw data; skipped roots become kSkip.
void Serializer::VisitPointers(Object** start, Object** end) {
  Isolate* isolate = this->isolate();  // Fixed: stray second semicolon removed.

  for (Object** current = start; current < end; current++) {
    if (start == isolate->heap()->roots_array_start()) {
      root_index_wave_front_ =
          Max(root_index_wave_front_, static_cast<intptr_t>(current - start));
    }
    if (ShouldBeSkipped(current)) {
      sink_->Put(kSkip, "Skip");
      sink_->PutInt(kPointerSize, "SkipOneWord");
    } else if ((*current)->IsSmi()) {
      // A Smi is a single pointer-sized run of raw bytes.
      sink_->Put(kRawData + 1, "Smi");
      for (int i = 0; i < kPointerSize; i++) {
        sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte");
      }
    } else {
      SerializeObject(*current, kPlain, kStartOfObject, 0);
    }
  }
}
1354

    
1355

    
1356
// This ensures that the partial snapshot cache keeps things alive during GC and
1357
// tracks their movement.  When it is called during serialization of the startup
1358
// snapshot nothing happens.  When the partial (context) snapshot is created,
1359
// this array is populated with the pointers that the partial snapshot will
1360
// need. As that happens we emit serialized objects to the startup snapshot
1361
// that correspond to the elements of this cache array.  On deserialization we
1362
// therefore need to visit the cache array.  This fills it up with pointers to
1363
// deserialized objects.
1364
void SerializerDeserializer::Iterate(Isolate* isolate,
1365
                                     ObjectVisitor* visitor) {
1366
  if (Serializer::enabled()) return;
1367
  for (int i = 0; ; i++) {
1368
    if (isolate->serialize_partial_snapshot_cache_length() <= i) {
1369
      // Extend the array ready to get a value from the visitor when
1370
      // deserializing.
1371
      isolate->PushToPartialSnapshotCache(Smi::FromInt(0));
1372
    }
1373
    Object** cache = isolate->serialize_partial_snapshot_cache();
1374
    visitor->VisitPointers(&cache[i], &cache[i + 1]);
1375
    // Sentinel is the undefined object, which is a root so it will not normally
1376
    // be found in the cache.
1377
    if (cache[i] == isolate->heap()->undefined_value()) {
1378
      break;
1379
    }
1380
  }
1381
}
1382

    
1383

    
1384
// Returns the index of |heap_object| in the partial snapshot cache, adding
// it — and emitting it into the startup snapshot — if it is not there yet.
int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
  Isolate* isolate = this->isolate();

  for (int i = 0;
       i < isolate->serialize_partial_snapshot_cache_length();
       i++) {
    Object* entry = isolate->serialize_partial_snapshot_cache()[i];
    if (entry == heap_object) return i;
  }

  // We didn't find the object in the cache.  So we add it to the cache and
  // then visit the pointer so that it becomes part of the startup snapshot
  // and we can refer to it from the partial snapshot.
  int length = isolate->serialize_partial_snapshot_cache_length();
  isolate->PushToPartialSnapshotCache(heap_object);
  startup_serializer_->VisitPointer(reinterpret_cast<Object**>(&heap_object));
  // We don't recurse from the startup snapshot generator into the partial
  // snapshot generator.
  ASSERT(length == isolate->serialize_partial_snapshot_cache_length() - 1);
  return length;
}
1405

    
1406

    
1407
// Returns the root-list index of |heap_object| if it is a non-Smi root that
// lies below the current wave front (i.e. has already been visited), or
// kInvalidRootIndex otherwise.  New-space objects are never matched.
int Serializer::RootIndex(HeapObject* heap_object, HowToCode from) {
  Heap* heap = isolate()->heap();
  if (heap->InNewSpace(heap_object)) return kInvalidRootIndex;
  for (int i = 0; i < root_index_wave_front_; i++) {
    Object* root = heap->roots_array_start()[i];
    if (!root->IsSmi() && root == heap_object) {
#if V8_TARGET_ARCH_MIPS
      if (from == kFromCode) {
        // In order to avoid code bloat in the deserializer we don't have
        // support for the encoding that specifies a particular root should
        // be written into the lui/ori instructions on MIPS.  Therefore we
        // should not generate such serialization data for MIPS.
        return kInvalidRootIndex;
      }
#endif
      return i;
    }
  }
  return kInvalidRootIndex;
}
1427

    
1428

    
1429
// Encode the location of an already deserialized object in order to write its
1430
// location into a later object.  We can encode the location as an offset from
1431
// the start of the deserialized objects or as an offset backwards from the
1432
// current allocation pointer.
1433
void Serializer::SerializeReferenceToPreviousObject(
1434
    int space,
1435
    int address,
1436
    HowToCode how_to_code,
1437
    WhereToPoint where_to_point,
1438
    int skip) {
1439
  int offset = CurrentAllocationAddress(space) - address;
1440
  // Shift out the bits that are always 0.
1441
  offset >>= kObjectAlignmentBits;
1442
  if (skip == 0) {
1443
    sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer");
1444
  } else {
1445
    sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space,
1446
               "BackRefSerWithSkip");
1447
    sink_->PutInt(skip, "BackRefSkipDistance");
1448
  }
1449
  sink_->PutInt(offset, "offset");
1450
}
1451

    
1452

    
1453
// Serializes |o| into the startup snapshot: roots become compact root
// references, previously serialized objects become back references, and
// anything else is serialized in full by a fresh ObjectSerializer.
void StartupSerializer::SerializeObject(
    Object* o,
    HowToCode how_to_code,
    WhereToPoint where_to_point,
    int skip) {
  CHECK(o->IsHeapObject());
  HeapObject* heap_object = HeapObject::cast(o);

  int root_index;
  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
    PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
    return;
  }

  if (address_mapper_.IsMapped(heap_object)) {
    // Already serialized: emit a back reference to its recorded address.
    int space = SpaceOfObject(heap_object);
    int address = address_mapper_.MappedTo(heap_object);
    SerializeReferenceToPreviousObject(space,
                                       address,
                                       how_to_code,
                                       where_to_point,
                                       skip);
  } else {
    if (skip != 0) {
      // A pending skip cannot be folded into a full object; flush it first.
      sink_->Put(kSkip, "FlushPendingSkip");
      sink_->PutInt(skip, "SkipDistance");
    }

    // Object has not yet been serialized.  Serialize it here.
    ObjectSerializer object_serializer(this,
                                       heap_object,
                                       sink_,
                                       how_to_code,
                                       where_to_point);
    object_serializer.Serialize();
  }
}
1490

    
1491

    
1492
// Serializes the weak roots plus the cache's end-of-array sentinel.
void StartupSerializer::SerializeWeakReferences() {
  // This phase comes right after the partial serialization (of the snapshot).
  // After we have done the partial serialization the partial snapshot cache
  // will contain some references needed to decode the partial snapshot.  We
  // add one entry with 'undefined' which is the sentinel that the deserializer
  // uses to know it is done deserializing the array.
  Object* undefined = isolate()->heap()->undefined_value();
  VisitPointer(&undefined);
  isolate()->heap()->IterateWeakRoots(this, VISIT_ALL);
  Pad();
}
1503

    
1504

    
1505
void Serializer::PutRoot(int root_index,
1506
                         HeapObject* object,
1507
                         SerializerDeserializer::HowToCode how_to_code,
1508
                         SerializerDeserializer::WhereToPoint where_to_point,
1509
                         int skip) {
1510
  if (how_to_code == kPlain &&
1511
      where_to_point == kStartOfObject &&
1512
      root_index < kRootArrayNumberOfConstantEncodings &&
1513
      !isolate()->heap()->InNewSpace(object)) {
1514
    if (skip == 0) {
1515
      sink_->Put(kRootArrayConstants + kNoSkipDistance + root_index,
1516
                 "RootConstant");
1517
    } else {
1518
      sink_->Put(kRootArrayConstants + kHasSkipDistance + root_index,
1519
                 "RootConstant");
1520
      sink_->PutInt(skip, "SkipInPutRoot");
1521
    }
1522
  } else {
1523
    if (skip != 0) {
1524
      sink_->Put(kSkip, "SkipFromPutRoot");
1525
      sink_->PutInt(skip, "SkipFromPutRootDistance");
1526
    }
1527
    sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
1528
    sink_->PutInt(root_index, "root_index");
1529
  }
1530
}
1531

    
1532

    
1533
// Serializes one object reachable from a partial (context) snapshot.  Each
// branch writes a complete record to the sink, so emission order matters.
//   o:              object to serialize (must be a heap object).
//   how_to_code:    whether the reference is embedded in code or plain.
//   where_to_point: point at the object start or at an inner pointer.
//   skip:           pending raw-byte skip distance to flush before the
//                   reference is emitted.
void PartialSerializer::SerializeObject(
    Object* o,
    HowToCode how_to_code,
    WhereToPoint where_to_point,
    int skip) {
  CHECK(o->IsHeapObject());
  HeapObject* heap_object = HeapObject::cast(o);

  if (heap_object->IsMap()) {
    // The code-caches link to context-specific code objects, which
    // the startup and context serializes cannot currently handle.
    ASSERT(Map::cast(heap_object)->code_cache() ==
           heap_object->GetHeap()->empty_fixed_array());
  }

  // Roots are emitted as compact root-array references.
  int root_index;
  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
    PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
    return;
  }

  // Objects shared with the startup snapshot are referenced by their index
  // in the partial snapshot cache rather than re-serialized.
  if (ShouldBeInThePartialSnapshotCache(heap_object)) {
    if (skip != 0) {
      sink_->Put(kSkip, "SkipFromSerializeObject");
      sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
    }

    int cache_index = PartialSnapshotCacheIndex(heap_object);
    sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point,
               "PartialSnapshotCache");
    sink_->PutInt(cache_index, "partial_snapshot_cache_index");
    return;
  }

  // Pointers from the partial snapshot to the objects in the startup snapshot
  // should go through the root array or through the partial snapshot cache.
  // If this is not the case you may have to add something to the root array.
  ASSERT(!startup_serializer_->address_mapper()->IsMapped(heap_object));
  // All the internalized strings that the partial snapshot needs should be
  // either in the root table or in the partial snapshot cache.
  ASSERT(!heap_object->IsInternalizedString());

  // Already-serialized objects become back-references to their recorded
  // space/offset; new objects are serialized in full.
  if (address_mapper_.IsMapped(heap_object)) {
    int space = SpaceOfObject(heap_object);
    int address = address_mapper_.MappedTo(heap_object);
    SerializeReferenceToPreviousObject(space,
                                       address,
                                       how_to_code,
                                       where_to_point,
                                       skip);
  } else {
    if (skip != 0) {
      sink_->Put(kSkip, "SkipFromSerializeObject");
      sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
    }
    // Object has not yet been serialized.  Serialize it here.
    ObjectSerializer serializer(this,
                                heap_object,
                                sink_,
                                how_to_code,
                                where_to_point);
    serializer.Serialize();
  }
}
1597

    
1598

    
1599
// Writes the object's full contents to the sink: a kNewObject tag carrying
// the target space and word-aligned size, then the map pointer, then the
// body.  Also registers the object in the address mapper so later references
// to it can be emitted as back-references.
void Serializer::ObjectSerializer::Serialize() {
  int space = Serializer::SpaceOfObject(object_);
  int size = object_->Size();

  sink_->Put(kNewObject + reference_representation_ + space,
             "ObjectSerialization");
  sink_->PutInt(size >> kObjectAlignmentBits, "Size in words");

  // Log where this object lands in the snapshot stream (used by profiling /
  // snapshot tooling).
  ASSERT(code_address_map_);
  const char* code_name = code_address_map_->Lookup(object_->address());
  LOG(serializer_->isolate_,
      CodeNameEvent(object_->address(), sink_->Position(), code_name));
  LOG(serializer_->isolate_,
      SnapshotPositionEvent(object_->address(), sink_->Position()));

  // Mark this object as already serialized.
  int offset = serializer_->Allocate(space, size);
  serializer_->address_mapper()->AddMapping(object_, offset);

  // Serialize the map (first word of the object).
  serializer_->SerializeObject(object_->map(), kPlain, kStartOfObject, 0);

  // Serialize the rest of the object.
  CHECK_EQ(0, bytes_processed_so_far_);
  bytes_processed_so_far_ = kPointerSize;
  object_->IterateBody(object_->map()->instance_type(), size, this);
  OutputRawData(object_->address() + size);
}
1627

    
1628

    
1629
void Serializer::ObjectSerializer::VisitPointers(Object** start,
1630
                                                 Object** end) {
1631
  Object** current = start;
1632
  while (current < end) {
1633
    while (current < end && (*current)->IsSmi()) current++;
1634
    if (current < end) OutputRawData(reinterpret_cast<Address>(current));
1635

    
1636
    while (current < end && !(*current)->IsSmi()) {
1637
      HeapObject* current_contents = HeapObject::cast(*current);
1638
      int root_index = serializer_->RootIndex(current_contents, kPlain);
1639
      // Repeats are not subject to the write barrier so there are only some
1640
      // objects that can be used in a repeat encoding.  These are the early
1641
      // ones in the root array that are never in new space.
1642
      if (current != start &&
1643
          root_index != kInvalidRootIndex &&
1644
          root_index < kRootArrayNumberOfConstantEncodings &&
1645
          current_contents == current[-1]) {
1646
        ASSERT(!serializer_->isolate()->heap()->InNewSpace(current_contents));
1647
        int repeat_count = 1;
1648
        while (current < end - 1 && current[repeat_count] == current_contents) {
1649
          repeat_count++;
1650
        }
1651
        current += repeat_count;
1652
        bytes_processed_so_far_ += repeat_count * kPointerSize;
1653
        if (repeat_count > kMaxRepeats) {
1654
          sink_->Put(kRepeat, "SerializeRepeats");
1655
          sink_->PutInt(repeat_count, "SerializeRepeats");
1656
        } else {
1657
          sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats");
1658
        }
1659
      } else {
1660
        serializer_->SerializeObject(
1661
                current_contents, kPlain, kStartOfObject, 0);
1662
        bytes_processed_so_far_ += kPointerSize;
1663
        current++;
1664
      }
1665
    }
1666
  }
1667
}
1668

    
1669

    
1670
// Serializes an object pointer embedded in the code's reloc info.
void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
  // Flush raw bytes up to the pointer, deferring the skip so it can be
  // folded into the reference record below.
  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);

  // Pointers embedded in code may use a target-specific encoding.
  HowToCode how_to_code;
  if (rinfo->IsCodedSpecially()) {
    how_to_code = kFromCode;
  } else {
    how_to_code = kPlain;
  }

  Object** slot = rinfo->target_object_address();
  serializer_->SerializeObject(*slot, how_to_code, kStartOfObject, skip);
  bytes_processed_so_far_ += rinfo->target_address_size();
}
1679

    
1680

    
1681
// Serializes an external (outside-the-heap) reference stored in a plain slot.
void Serializer::ObjectSerializer::VisitExternalReference(Address* p) {
  // Emit everything up to the reference slot first.
  int skip = OutputRawData(reinterpret_cast<Address>(p),
                           kCanReturnSkipInsteadOfSkipping);

  // External references are recorded as an id into the external reference
  // table rather than as a raw address.
  sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
  sink_->PutInt(skip, "SkipB4ExternalRef");
  sink_->PutInt(serializer_->EncodeExternalReference(*p), "reference id");
  bytes_processed_so_far_ += kPointerSize;
}
1691

    
1692

    
1693
// Serializes an external reference that lives in the code's reloc info.
void Serializer::ObjectSerializer::VisitExternalReference(RelocInfo* rinfo) {
  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);

  // Pick the encoding that matches how the reference is embedded in code.
  int representation = kPlain + kStartOfObject;
  if (rinfo->IsCodedSpecially()) representation = kFromCode + kStartOfObject;

  sink_->Put(kExternalReference + representation, "ExternalRef");
  sink_->PutInt(skip, "SkipB4ExternalRef");
  Address* reference_slot = rinfo->target_reference_address();
  sink_->PutInt(serializer_->EncodeExternalReference(*reference_slot),
                "reference id");
  bytes_processed_so_far_ += rinfo->target_address_size();
}
1706

    
1707

    
1708
// Serializes a runtime-entry target (an external address in code) as an
// external-reference record.
void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) {
  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);
  Address target = rinfo->target_address();
  uint32_t encoding = serializer_->EncodeExternalReference(target);
  // Only a NULL target may (and must) encode to zero.
  CHECK(target == NULL ? encoding == 0 : encoding != 0);
  // Can't use a ternary operator because of gcc.
  int representation;
  if (rinfo->IsCodedSpecially()) {
    representation = kStartOfObject + kFromCode;
  } else {
    representation = kStartOfObject + kPlain;
  }
  sink_->Put(kExternalReference + representation, "ExternalReference");
  sink_->PutInt(skip, "SkipB4ExternalRef");
  sink_->PutInt(encoding, "reference id");
  bytes_processed_so_far_ += rinfo->target_address_size();
}
1726

    
1727

    
1728
// Serializes a call/jump target inside another Code object.
void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) {
  CHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  // Flush raw data preceding the target address slot.
  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);
  // Code targets point into the body of a Code object, hence kInnerPointer.
  Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
  serializer_->SerializeObject(code, kFromCode, kInnerPointer, skip);
  bytes_processed_so_far_ += rinfo->target_address_size();
}
1736

    
1737

    
1738
// Serializes a code-entry slot (a pointer to a Code object's entry point).
void Serializer::ObjectSerializer::VisitCodeEntry(Address entry_address) {
  // Map the entry address back to the Code object that owns it.
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  int skip = OutputRawData(entry_address, kCanReturnSkipInsteadOfSkipping);
  // The entry point is inside the Code object, hence kInnerPointer.
  serializer_->SerializeObject(code, kPlain, kInnerPointer, skip);
  bytes_processed_so_far_ += kPointerSize;
}
1744

    
1745

    
1746
// Serializes a Cell referenced from reloc info.
void Serializer::ObjectSerializer::VisitCell(RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::CELL);
  // Output everything up to the cell reference, then serialize the cell
  // itself as an inner pointer.
  int skip = OutputRawData(rinfo->pc(), kCanReturnSkipInsteadOfSkipping);
  Cell* target_cell = Cell::cast(rinfo->target_cell());
  serializer_->SerializeObject(target_cell, kPlain, kInnerPointer, skip);
}
1752

    
1753

    
1754
void Serializer::ObjectSerializer::VisitExternalAsciiString(
1755
    v8::String::ExternalAsciiStringResource** resource_pointer) {
1756
  Address references_start = reinterpret_cast<Address>(resource_pointer);
1757
  OutputRawData(references_start);
1758
  for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
1759
    Object* source =
1760
        serializer_->isolate()->heap()->natives_source_cache()->get(i);
1761
    if (!source->IsUndefined()) {
1762
      ExternalAsciiString* string = ExternalAsciiString::cast(source);
1763
      typedef v8::String::ExternalAsciiStringResource Resource;
1764
      const Resource* resource = string->resource();
1765
      if (resource == *resource_pointer) {
1766
        sink_->Put(kNativesStringResource, "NativesStringResource");
1767
        sink_->PutSection(i, "NativesStringResourceEnd");
1768
        bytes_processed_so_far_ += sizeof(resource);
1769
        return;
1770
      }
1771
    }
1772
  }
1773
  // One of the strings in the natives cache should match the resource.  We
1774
  // can't serialize any other kinds of external strings.
1775
  UNREACHABLE();
1776
}
1777

    
1778

    
1779
// Copies the object's raw bytes from bytes_processed_so_far_ up to |up_to|
// into the sink.  For code objects the entire body is emitted at once the
// first time any raw output is needed (and fixed up later).  When
// |return_skip| is kCanReturnSkipInsteadOfSkipping, any remaining skip
// distance is returned so the caller can fold it into the next reference;
// otherwise the skip is flushed here and 0 is returned.
int Serializer::ObjectSerializer::OutputRawData(
    Address up_to, Serializer::ObjectSerializer::ReturnSkip return_skip) {
  Address object_start = object_->address();
  Address base = object_start + bytes_processed_so_far_;
  int up_to_offset = static_cast<int>(up_to - object_start);
  int to_skip = up_to_offset - bytes_processed_so_far_;
  int bytes_to_output = to_skip;
  bytes_processed_so_far_ +=  to_skip;
  // This assert will fail if the reloc info gives us the target_address_address
  // locations in a non-ascending order.  Luckily that doesn't happen.
  ASSERT(to_skip >= 0);
  bool outputting_code = false;
  if (to_skip != 0 && code_object_ && !code_has_been_output_) {
    // Output the code all at once and fix later.
    bytes_to_output = object_->Size() + to_skip - bytes_processed_so_far_;
    outputting_code = true;
    code_has_been_output_ = true;
  }
  if (bytes_to_output != 0 &&
      (!code_object_ || outputting_code)) {
    // Common short lengths get a one-byte opcode that both carries the data
    // length and performs the skip; other lengths fall through to the
    // generic kRawData record with an explicit length.
#define RAW_CASE(index)                                                        \
    if (!outputting_code && bytes_to_output == index * kPointerSize &&         \
        index * kPointerSize == to_skip) {                                     \
      sink_->PutSection(kRawData + index, "RawDataFixed");                     \
      to_skip = 0;  /* This insn already skips. */                             \
    } else  /* NOLINT */
    COMMON_RAW_LENGTHS(RAW_CASE)
#undef RAW_CASE
    {  /* NOLINT */
      // We always end up here if we are outputting the code of a code object.
      sink_->Put(kRawData, "RawData");
      sink_->PutInt(bytes_to_output, "length");
    }
    for (int i = 0; i < bytes_to_output; i++) {
      unsigned int data = base[i];
      sink_->PutSection(data, "Byte");
    }
  }
  if (to_skip != 0 && return_skip == kIgnoringReturn) {
    // Caller does not want the skip back: flush it into the stream now.
    sink_->Put(kSkip, "Skip");
    sink_->PutInt(to_skip, "SkipDistance");
    to_skip = 0;
  }
  return to_skip;
}
1824

    
1825

    
1826
// Returns the allocation-space index the object lives in.
int Serializer::SpaceOfObject(HeapObject* object) {
  // Probe each allocation space until the one containing the object is found.
  for (int space = FIRST_SPACE; space <= LAST_SPACE; space++) {
    if (object->GetHeap()->InSpace(object,
                                   static_cast<AllocationSpace>(space))) {
      ASSERT(space < kNumberOfSpaces);
      return space;
    }
  }
  // Every heap object lives in exactly one space.
  UNREACHABLE();
  return 0;
}
1837

    
1838

    
1839
int Serializer::Allocate(int space, int size) {
1840
  CHECK(space >= 0 && space < kNumberOfSpaces);
1841
  int allocation_address = fullness_[space];
1842
  fullness_[space] = allocation_address + size;
1843
  return allocation_address;
1844
}
1845

    
1846

    
1847
int Serializer::SpaceAreaSize(int space) {
1848
  if (space == CODE_SPACE) {
1849
    return isolate_->memory_allocator()->CodePageAreaSize();
1850
  } else {
1851
    return Page::kPageSize - Page::kObjectStartOffset;
1852
  }
1853
}
1854

    
1855

    
1856
void Serializer::Pad() {
1857
  // The non-branching GetInt will read up to 3 bytes too far, so we need
1858
  // to pad the snapshot to make sure we don't read over the end.
1859
  for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
1860
    sink_->Put(kNop, "Padding");
1861
  }
1862
}
1863

    
1864

    
1865
bool SnapshotByteSource::AtEOF() {
1866
  if (0u + length_ - position_ > 2 * sizeof(uint32_t)) return false;
1867
  for (int x = position_; x < length_; x++) {
1868
    if (data_[x] != SerializerDeserializer::nop()) return false;
1869
  }
1870
  return true;
1871
}
1872

    
1873
} }  // namespace v8::internal