The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.

Please select the desired protocol below to get the URL.

This URL has Read-Only access.

Statistics
| Branch: | Revision:

main_repo / deps / v8 / src / deoptimizer.cc @ f230a1cf

History | View | Annotate | Download (108 KB)

1
// Copyright 2013 the V8 project authors. All rights reserved.
2
// Redistribution and use in source and binary forms, with or without
3
// modification, are permitted provided that the following conditions are
4
// met:
5
//
6
//     * Redistributions of source code must retain the above copyright
7
//       notice, this list of conditions and the following disclaimer.
8
//     * Redistributions in binary form must reproduce the above
9
//       copyright notice, this list of conditions and the following
10
//       disclaimer in the documentation and/or other materials provided
11
//       with the distribution.
12
//     * Neither the name of Google Inc. nor the names of its
13
//       contributors may be used to endorse or promote products derived
14
//       from this software without specific prior written permission.
15
//
16
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27

    
28
#include "v8.h"
29

    
30
#include "accessors.h"
31
#include "codegen.h"
32
#include "deoptimizer.h"
33
#include "disasm.h"
34
#include "full-codegen.h"
35
#include "global-handles.h"
36
#include "macro-assembler.h"
37
#include "prettyprinter.h"
38

    
39

    
40
namespace v8 {
41
namespace internal {
42

    
43
static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
44
  return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
45
                                  OS::CommitPageSize(),
46
#if defined(__native_client__)
47
  // The Native Client port of V8 uses an interpreter,
48
  // so code pages don't need PROT_EXEC.
49
                                  NOT_EXECUTABLE,
50
#else
51
                                  EXECUTABLE,
52
#endif
53
                                  NULL);
54
}
55

    
56

    
57
DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
58
    : allocator_(allocator),
59
#ifdef ENABLE_DEBUGGER_SUPPORT
60
      deoptimized_frame_info_(NULL),
61
#endif
62
      current_(NULL) {
63
  for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
64
    deopt_entry_code_entries_[i] = -1;
65
    deopt_entry_code_[i] = AllocateCodeChunk(allocator);
66
  }
67
}
68

    
69

    
70
DeoptimizerData::~DeoptimizerData() {
71
  for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
72
    allocator_->Free(deopt_entry_code_[i]);
73
    deopt_entry_code_[i] = NULL;
74
  }
75
}
76

    
77

    
78
#ifdef ENABLE_DEBUGGER_SUPPORT
79
void DeoptimizerData::Iterate(ObjectVisitor* v) {
80
  if (deoptimized_frame_info_ != NULL) {
81
    deoptimized_frame_info_->Iterate(v);
82
  }
83
}
84
#endif
85

    
86

    
87
Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
88
  if (function_->IsHeapObject()) {
89
    // Search all deoptimizing code in the native context of the function.
90
    Context* native_context = function_->context()->native_context();
91
    Object* element = native_context->DeoptimizedCodeListHead();
92
    while (!element->IsUndefined()) {
93
      Code* code = Code::cast(element);
94
      ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
95
      if (code->contains(addr)) return code;
96
      element = code->next_code_link();
97
    }
98
  }
99
  return NULL;
100
}
101

    
102

    
103
// We rely on this function not causing a GC.  It is called from generated code
104
// without having a real stack frame in place.
105
Deoptimizer* Deoptimizer::New(JSFunction* function,
106
                              BailoutType type,
107
                              unsigned bailout_id,
108
                              Address from,
109
                              int fp_to_sp_delta,
110
                              Isolate* isolate) {
111
  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
112
                                             function,
113
                                             type,
114
                                             bailout_id,
115
                                             from,
116
                                             fp_to_sp_delta,
117
                                             NULL);
118
  ASSERT(isolate->deoptimizer_data()->current_ == NULL);
119
  isolate->deoptimizer_data()->current_ = deoptimizer;
120
  return deoptimizer;
121
}
122

    
123

    
124
// No larger than 2K on all platforms
125
static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;
126

    
127

    
128
size_t Deoptimizer::GetMaxDeoptTableSize() {
129
  int entries_size =
130
      Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
131
  int commit_page_size = static_cast<int>(OS::CommitPageSize());
132
  int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
133
                    commit_page_size) + 1;
134
  return static_cast<size_t>(commit_page_size * page_count);
135
}
136

    
137

    
138
Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
139
  Deoptimizer* result = isolate->deoptimizer_data()->current_;
140
  ASSERT(result != NULL);
141
  result->DeleteFrameDescriptions();
142
  isolate->deoptimizer_data()->current_ = NULL;
143
  return result;
144
}
145

    
146

    
147
int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
148
  if (jsframe_index == 0) return 0;
149

    
150
  int frame_index = 0;
151
  while (jsframe_index >= 0) {
152
    FrameDescription* frame = output_[frame_index];
153
    if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
154
      jsframe_index--;
155
    }
156
    frame_index++;
157
  }
158

    
159
  return frame_index - 1;
160
}
161

    
162

    
163
#ifdef ENABLE_DEBUGGER_SUPPORT
164
DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
165
    JavaScriptFrame* frame,
166
    int jsframe_index,
167
    Isolate* isolate) {
168
  ASSERT(frame->is_optimized());
169
  ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);
170

    
171
  // Get the function and code from the frame.
172
  JSFunction* function = frame->function();
173
  Code* code = frame->LookupCode();
174

    
175
  // Locate the deoptimization point in the code. As we are at a call the
176
  // return address must be at a place in the code with deoptimization support.
177
  SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc());
178
  int deoptimization_index = safepoint_entry.deoptimization_index();
179
  ASSERT(deoptimization_index != Safepoint::kNoDeoptimizationIndex);
180

    
181
  // Always use the actual stack slots when calculating the fp to sp
182
  // delta adding two for the function and context.
183
  unsigned stack_slots = code->stack_slots();
184
  unsigned fp_to_sp_delta = ((stack_slots + 2) * kPointerSize);
185

    
186
  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
187
                                             function,
188
                                             Deoptimizer::DEBUGGER,
189
                                             deoptimization_index,
190
                                             frame->pc(),
191
                                             fp_to_sp_delta,
192
                                             code);
193
  Address tos = frame->fp() - fp_to_sp_delta;
194
  deoptimizer->FillInputFrame(tos, frame);
195

    
196
  // Calculate the output frames.
197
  Deoptimizer::ComputeOutputFrames(deoptimizer);
198

    
199
  // Create the GC safe output frame information and register it for GC
200
  // handling.
201
  ASSERT_LT(jsframe_index, deoptimizer->jsframe_count());
202

    
203
  // Convert JS frame index into frame index.
204
  int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index);
205

    
206
  bool has_arguments_adaptor =
207
      frame_index > 0 &&
208
      deoptimizer->output_[frame_index - 1]->GetFrameType() ==
209
      StackFrame::ARGUMENTS_ADAPTOR;
210

    
211
  int construct_offset = has_arguments_adaptor ? 2 : 1;
212
  bool has_construct_stub =
213
      frame_index >= construct_offset &&
214
      deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
215
      StackFrame::CONSTRUCT;
216

    
217
  DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
218
                                                        frame_index,
219
                                                        has_arguments_adaptor,
220
                                                        has_construct_stub);
221
  isolate->deoptimizer_data()->deoptimized_frame_info_ = info;
222

    
223
  // Get the "simulated" top and size for the requested frame.
224
  FrameDescription* parameters_frame =
225
      deoptimizer->output_[
226
          has_arguments_adaptor ? (frame_index - 1) : frame_index];
227

    
228
  uint32_t parameters_size = (info->parameters_count() + 1) * kPointerSize;
229
  Address parameters_top = reinterpret_cast<Address>(
230
      parameters_frame->GetTop() + (parameters_frame->GetFrameSize() -
231
                                    parameters_size));
232

    
233
  uint32_t expressions_size = info->expression_count() * kPointerSize;
234
  Address expressions_top = reinterpret_cast<Address>(
235
      deoptimizer->output_[frame_index]->GetTop());
236

    
237
  // Done with the GC-unsafe frame descriptions. This re-enables allocation.
238
  deoptimizer->DeleteFrameDescriptions();
239

    
240
  // Allocate a heap number for the doubles belonging to this frame.
241
  deoptimizer->MaterializeHeapNumbersForDebuggerInspectableFrame(
242
      parameters_top, parameters_size, expressions_top, expressions_size, info);
243

    
244
  // Finished using the deoptimizer instance.
245
  delete deoptimizer;
246

    
247
  return info;
248
}
249

    
250

    
251
void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
252
                                                 Isolate* isolate) {
253
  ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == info);
254
  delete info;
255
  isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL;
256
}
257
#endif
258

    
259
void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
260
                                                int count,
261
                                                BailoutType type) {
262
  TableEntryGenerator generator(masm, type, count);
263
  generator.Generate();
264
}
265

    
266

    
267
void Deoptimizer::VisitAllOptimizedFunctionsForContext(
268
    Context* context, OptimizedFunctionVisitor* visitor) {
269
  DisallowHeapAllocation no_allocation;
270

    
271
  ASSERT(context->IsNativeContext());
272

    
273
  visitor->EnterContext(context);
274

    
275
  // Visit the list of optimized functions, removing elements that
276
  // no longer refer to optimized code.
277
  JSFunction* prev = NULL;
278
  Object* element = context->OptimizedFunctionsListHead();
279
  while (!element->IsUndefined()) {
280
    JSFunction* function = JSFunction::cast(element);
281
    Object* next = function->next_function_link();
282
    if (function->code()->kind() != Code::OPTIMIZED_FUNCTION ||
283
        (visitor->VisitFunction(function),
284
         function->code()->kind() != Code::OPTIMIZED_FUNCTION)) {
285
      // The function no longer refers to optimized code, or the visitor
286
      // changed the code to which it refers to no longer be optimized code.
287
      // Remove the function from this list.
288
      if (prev != NULL) {
289
        prev->set_next_function_link(next);
290
      } else {
291
        context->SetOptimizedFunctionsListHead(next);
292
      }
293
      // The visitor should not alter the link directly.
294
      ASSERT(function->next_function_link() == next);
295
      // Set the next function link to undefined to indicate it is no longer
296
      // in the optimized functions list.
297
      function->set_next_function_link(context->GetHeap()->undefined_value());
298
    } else {
299
      // The visitor should not alter the link directly.
300
      ASSERT(function->next_function_link() == next);
301
      // preserve this element.
302
      prev = function;
303
    }
304
    element = next;
305
  }
306

    
307
  visitor->LeaveContext(context);
308
}
309

    
310

    
311
void Deoptimizer::VisitAllOptimizedFunctions(
312
    Isolate* isolate,
313
    OptimizedFunctionVisitor* visitor) {
314
  DisallowHeapAllocation no_allocation;
315

    
316
  // Run through the list of all native contexts.
317
  Object* context = isolate->heap()->native_contexts_list();
318
  while (!context->IsUndefined()) {
319
    VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
320
    context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
321
  }
322
}
323

    
324

    
325
// Unlink functions referring to code marked for deoptimization, then move
326
// marked code from the optimized code list to the deoptimized code list,
327
// and patch code for lazy deopt.
328
void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
329
  DisallowHeapAllocation no_allocation;
330

    
331
  // A "closure" that unlinks optimized code that is going to be
332
  // deoptimized from the functions that refer to it.
333
  class SelectedCodeUnlinker: public OptimizedFunctionVisitor {
334
   public:
335
    virtual void EnterContext(Context* context) { }  // Don't care.
336
    virtual void LeaveContext(Context* context)  { }  // Don't care.
337
    virtual void VisitFunction(JSFunction* function) {
338
      Code* code = function->code();
339
      if (!code->marked_for_deoptimization()) return;
340

    
341
      // Unlink this function and evict from optimized code map.
342
      SharedFunctionInfo* shared = function->shared();
343
      function->set_code(shared->code());
344
      shared->EvictFromOptimizedCodeMap(code, "deoptimized function");
345

    
346
      if (FLAG_trace_deopt) {
347
        PrintF("[deoptimizer unlinked: ");
348
        function->PrintName();
349
        PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
350
      }
351
    }
352
  };
353

    
354
  // Unlink all functions that refer to marked code.
355
  SelectedCodeUnlinker unlinker;
356
  VisitAllOptimizedFunctionsForContext(context, &unlinker);
357

    
358
  // Move marked code from the optimized code list to the deoptimized
359
  // code list, collecting them into a ZoneList.
360
  Isolate* isolate = context->GetHeap()->isolate();
361
  Zone zone(isolate);
362
  ZoneList<Code*> codes(10, &zone);
363

    
364
  // Walk over all optimized code objects in this native context.
365
  Code* prev = NULL;
366
  Object* element = context->OptimizedCodeListHead();
367
  while (!element->IsUndefined()) {
368
    Code* code = Code::cast(element);
369
    ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
370
    Object* next = code->next_code_link();
371
    if (code->marked_for_deoptimization()) {
372
      // Put the code into the list for later patching.
373
      codes.Add(code, &zone);
374

    
375
      if (prev != NULL) {
376
        // Skip this code in the optimized code list.
377
        prev->set_next_code_link(next);
378
      } else {
379
        // There was no previous node, the next node is the new head.
380
        context->SetOptimizedCodeListHead(next);
381
      }
382

    
383
      // Move the code to the _deoptimized_ code list.
384
      code->set_next_code_link(context->DeoptimizedCodeListHead());
385
      context->SetDeoptimizedCodeListHead(code);
386
    } else {
387
      // Not marked; preserve this element.
388
      prev = code;
389
    }
390
    element = next;
391
  }
392

    
393
  // TODO(titzer): we need a handle scope only because of the macro assembler,
394
  // which is only used in EnsureCodeForDeoptimizationEntry.
395
  HandleScope scope(isolate);
396
  // Now patch all the codes for deoptimization.
397
  for (int i = 0; i < codes.length(); i++) {
398
    // It is finally time to die, code object.
399
    // Do platform-specific patching to force any activations to lazy deopt.
400
    PatchCodeForDeoptimization(isolate, codes[i]);
401

    
402
    // We might be in the middle of incremental marking with compaction.
403
    // Tell collector to treat this code object in a special way and
404
    // ignore all slots that might have been recorded on it.
405
    isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]);
406
  }
407
}
408

    
409

    
410
void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
411
  if (FLAG_trace_deopt) {
412
    PrintF("[deoptimize all code in all contexts]\n");
413
  }
414
  DisallowHeapAllocation no_allocation;
415
  // For all contexts, mark all code, then deoptimize.
416
  Object* context = isolate->heap()->native_contexts_list();
417
  while (!context->IsUndefined()) {
418
    Context* native_context = Context::cast(context);
419
    MarkAllCodeForContext(native_context);
420
    DeoptimizeMarkedCodeForContext(native_context);
421
    context = native_context->get(Context::NEXT_CONTEXT_LINK);
422
  }
423
}
424

    
425

    
426
void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
427
  if (FLAG_trace_deopt) {
428
    PrintF("[deoptimize marked code in all contexts]\n");
429
  }
430
  DisallowHeapAllocation no_allocation;
431
  // For all contexts, deoptimize code already marked.
432
  Object* context = isolate->heap()->native_contexts_list();
433
  while (!context->IsUndefined()) {
434
    Context* native_context = Context::cast(context);
435
    DeoptimizeMarkedCodeForContext(native_context);
436
    context = native_context->get(Context::NEXT_CONTEXT_LINK);
437
  }
438
}
439

    
440

    
441
void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
442
  if (FLAG_trace_deopt) {
443
    PrintF("[deoptimize global object @ 0x%08" V8PRIxPTR "]\n",
444
        reinterpret_cast<intptr_t>(object));
445
  }
446
  if (object->IsJSGlobalProxy()) {
447
    Object* proto = object->GetPrototype();
448
    ASSERT(proto->IsJSGlobalObject());
449
    Context* native_context = GlobalObject::cast(proto)->native_context();
450
    MarkAllCodeForContext(native_context);
451
    DeoptimizeMarkedCodeForContext(native_context);
452
  } else if (object->IsGlobalObject()) {
453
    Context* native_context = GlobalObject::cast(object)->native_context();
454
    MarkAllCodeForContext(native_context);
455
    DeoptimizeMarkedCodeForContext(native_context);
456
  }
457
}
458

    
459

    
460
void Deoptimizer::MarkAllCodeForContext(Context* context) {
461
  Object* element = context->OptimizedCodeListHead();
462
  while (!element->IsUndefined()) {
463
    Code* code = Code::cast(element);
464
    ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
465
    code->set_marked_for_deoptimization(true);
466
    element = code->next_code_link();
467
  }
468
}
469

    
470

    
471
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
472
  Code* code = function->code();
473
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
474
    // Mark the code for deoptimization and unlink any functions that also
475
    // refer to that code. The code cannot be shared across native contexts,
476
    // so we only need to search one.
477
    code->set_marked_for_deoptimization(true);
478
    DeoptimizeMarkedCodeForContext(function->context()->native_context());
479
  }
480
}
481

    
482

    
483
void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
484
  deoptimizer->DoComputeOutputFrames();
485
}
486

    
487

    
488
bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
489
                                  StackFrame::Type frame_type) {
490
  switch (deopt_type) {
491
    case EAGER:
492
    case SOFT:
493
    case LAZY:
494
    case DEBUGGER:
495
      return (frame_type == StackFrame::STUB)
496
          ? FLAG_trace_stub_failures
497
          : FLAG_trace_deopt;
498
  }
499
  UNREACHABLE();
500
  return false;
501
}
502

    
503

    
504
const char* Deoptimizer::MessageFor(BailoutType type) {
505
  switch (type) {
506
    case EAGER: return "eager";
507
    case SOFT: return "soft";
508
    case LAZY: return "lazy";
509
    case DEBUGGER: return "debugger";
510
  }
511
  UNREACHABLE();
512
  return NULL;
513
}
514

    
515

    
516
Deoptimizer::Deoptimizer(Isolate* isolate,
517
                         JSFunction* function,
518
                         BailoutType type,
519
                         unsigned bailout_id,
520
                         Address from,
521
                         int fp_to_sp_delta,
522
                         Code* optimized_code)
523
    : isolate_(isolate),
524
      function_(function),
525
      bailout_id_(bailout_id),
526
      bailout_type_(type),
527
      from_(from),
528
      fp_to_sp_delta_(fp_to_sp_delta),
529
      has_alignment_padding_(0),
530
      input_(NULL),
531
      output_count_(0),
532
      jsframe_count_(0),
533
      output_(NULL),
534
      deferred_objects_tagged_values_(0),
535
      deferred_objects_double_values_(0),
536
      deferred_objects_(0),
537
      deferred_heap_numbers_(0),
538
      jsframe_functions_(0),
539
      jsframe_has_adapted_arguments_(0),
540
      materialized_values_(NULL),
541
      materialized_objects_(NULL),
542
      materialization_value_index_(0),
543
      materialization_object_index_(0),
544
      trace_(false) {
545
  // For COMPILED_STUBs called from builtins, the function pointer is a SMI
546
  // indicating an internal frame.
547
  if (function->IsSmi()) {
548
    function = NULL;
549
  }
550
  ASSERT(from != NULL);
551
  if (function != NULL && function->IsOptimized()) {
552
    function->shared()->increment_deopt_count();
553
    if (bailout_type_ == Deoptimizer::SOFT) {
554
      isolate->counters()->soft_deopts_executed()->Increment();
555
      // Soft deopts shouldn't count against the overall re-optimization count
556
      // that can eventually lead to disabling optimization for a function.
557
      int opt_count = function->shared()->opt_count();
558
      if (opt_count > 0) opt_count--;
559
      function->shared()->set_opt_count(opt_count);
560
    }
561
  }
562
  compiled_code_ = FindOptimizedCode(function, optimized_code);
563

    
564
#if DEBUG
565
  ASSERT(compiled_code_ != NULL);
566
  if (type == EAGER || type == SOFT || type == LAZY) {
567
    ASSERT(compiled_code_->kind() != Code::FUNCTION);
568
  }
569
#endif
570

    
571
  StackFrame::Type frame_type = function == NULL
572
      ? StackFrame::STUB
573
      : StackFrame::JAVA_SCRIPT;
574
  trace_ = TraceEnabledFor(type, frame_type);
575
#ifdef DEBUG
576
  CHECK(AllowHeapAllocation::IsAllowed());
577
  disallow_heap_allocation_ = new DisallowHeapAllocation();
578
#endif  // DEBUG
579
  unsigned size = ComputeInputFrameSize();
580
  input_ = new(size) FrameDescription(size, function);
581
  input_->SetFrameType(frame_type);
582
}
583

    
584

    
585
Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
586
                                     Code* optimized_code) {
587
  switch (bailout_type_) {
588
    case Deoptimizer::SOFT:
589
    case Deoptimizer::EAGER:
590
    case Deoptimizer::LAZY: {
591
      Code* compiled_code = FindDeoptimizingCode(from_);
592
      return (compiled_code == NULL)
593
          ? static_cast<Code*>(isolate_->FindCodeObject(from_))
594
          : compiled_code;
595
    }
596
    case Deoptimizer::DEBUGGER:
597
      ASSERT(optimized_code->contains(from_));
598
      return optimized_code;
599
  }
600
  UNREACHABLE();
601
  return NULL;
602
}
603

    
604

    
605
void Deoptimizer::PrintFunctionName() {
606
  if (function_->IsJSFunction()) {
607
    function_->PrintName();
608
  } else {
609
    PrintF("%s", Code::Kind2String(compiled_code_->kind()));
610
  }
611
}
612

    
613

    
614
Deoptimizer::~Deoptimizer() {
615
  ASSERT(input_ == NULL && output_ == NULL);
616
  ASSERT(disallow_heap_allocation_ == NULL);
617
}
618

    
619

    
620
void Deoptimizer::DeleteFrameDescriptions() {
621
  delete input_;
622
  for (int i = 0; i < output_count_; ++i) {
623
    if (output_[i] != input_) delete output_[i];
624
  }
625
  delete[] output_;
626
  input_ = NULL;
627
  output_ = NULL;
628
#ifdef DEBUG
629
  CHECK(!AllowHeapAllocation::IsAllowed());
630
  CHECK(disallow_heap_allocation_ != NULL);
631
  delete disallow_heap_allocation_;
632
  disallow_heap_allocation_ = NULL;
633
#endif  // DEBUG
634
}
635

    
636

    
637
Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
638
                                            int id,
639
                                            BailoutType type,
640
                                            GetEntryMode mode) {
641
  ASSERT(id >= 0);
642
  if (id >= kMaxNumberOfEntries) return NULL;
643
  if (mode == ENSURE_ENTRY_CODE) {
644
    EnsureCodeForDeoptimizationEntry(isolate, type, id);
645
  } else {
646
    ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
647
  }
648
  DeoptimizerData* data = isolate->deoptimizer_data();
649
  ASSERT(type < kBailoutTypesWithCodeEntry);
650
  MemoryChunk* base = data->deopt_entry_code_[type];
651
  return base->area_start() + (id * table_entry_size_);
652
}
653

    
654

    
655
int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
656
                                     Address addr,
657
                                     BailoutType type) {
658
  DeoptimizerData* data = isolate->deoptimizer_data();
659
  MemoryChunk* base = data->deopt_entry_code_[type];
660
  Address start = base->area_start();
661
  if (base == NULL ||
662
      addr < start ||
663
      addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
664
    return kNotDeoptimizationEntry;
665
  }
666
  ASSERT_EQ(0,
667
            static_cast<int>(addr - start) % table_entry_size_);
668
  return static_cast<int>(addr - start) / table_entry_size_;
669
}
670

    
671

    
672
int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
673
                               BailoutId id,
674
                               SharedFunctionInfo* shared) {
675
  // TODO(kasperl): For now, we do a simple linear search for the PC
676
  // offset associated with the given node id. This should probably be
677
  // changed to a binary search.
678
  int length = data->DeoptPoints();
679
  for (int i = 0; i < length; i++) {
680
    if (data->AstId(i) == id) {
681
      return data->PcAndState(i)->value();
682
    }
683
  }
684
  PrintF("[couldn't find pc offset for node=%d]\n", id.ToInt());
685
  PrintF("[method: %s]\n", *shared->DebugName()->ToCString());
686
  // Print the source code if available.
687
  HeapStringAllocator string_allocator;
688
  StringStream stream(&string_allocator);
689
  shared->SourceCodePrint(&stream, -1);
690
  PrintF("[source:\n%s\n]", *stream.ToCString());
691

    
692
  FATAL("unable to find pc offset during deoptimization");
693
  return -1;
694
}
695

    
696

    
697
int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
698
  int length = 0;
699
  // Count all entries in the deoptimizing code list of every context.
700
  Object* context = isolate->heap()->native_contexts_list();
701
  while (!context->IsUndefined()) {
702
    Context* native_context = Context::cast(context);
703
    Object* element = native_context->DeoptimizedCodeListHead();
704
    while (!element->IsUndefined()) {
705
      Code* code = Code::cast(element);
706
      ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
707
      length++;
708
      element = code->next_code_link();
709
    }
710
    context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
711
  }
712
  return length;
713
}
714

    
715

    
716
// We rely on this function not causing a GC.  It is called from generated code
717
// without having a real stack frame in place.
718
void Deoptimizer::DoComputeOutputFrames() {
719
  // Print some helpful diagnostic information.
720
  if (FLAG_log_timer_events &&
721
      compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
722
    LOG(isolate(), CodeDeoptEvent(compiled_code_));
723
  }
724
  ElapsedTimer timer;
725
  if (trace_) {
726
    timer.Start();
727
    PrintF("[deoptimizing (DEOPT %s): begin 0x%08" V8PRIxPTR " ",
728
           MessageFor(bailout_type_),
729
           reinterpret_cast<intptr_t>(function_));
730
    PrintFunctionName();
731
    PrintF(" @%d, FP to SP delta: %d]\n", bailout_id_, fp_to_sp_delta_);
732
    if (bailout_type_ == EAGER || bailout_type_ == SOFT) {
733
      compiled_code_->PrintDeoptLocation(bailout_id_);
734
    }
735
  }
736

    
737
  // Determine basic deoptimization information.  The optimized frame is
738
  // described by the input data.
739
  DeoptimizationInputData* input_data =
740
      DeoptimizationInputData::cast(compiled_code_->deoptimization_data());
741
  BailoutId node_id = input_data->AstId(bailout_id_);
742
  ByteArray* translations = input_data->TranslationByteArray();
743
  unsigned translation_index =
744
      input_data->TranslationIndex(bailout_id_)->value();
745

    
746
  // Do the input frame to output frame(s) translation.
747
  TranslationIterator iterator(translations, translation_index);
748
  Translation::Opcode opcode =
749
      static_cast<Translation::Opcode>(iterator.Next());
750
  ASSERT(Translation::BEGIN == opcode);
751
  USE(opcode);
752
  // Read the number of output frames and allocate an array for their
753
  // descriptions.
754
  int count = iterator.Next();
755
  iterator.Next();  // Drop JS frames count.
756
  ASSERT(output_ == NULL);
757
  output_ = new FrameDescription*[count];
758
  for (int i = 0; i < count; ++i) {
759
    output_[i] = NULL;
760
  }
761
  output_count_ = count;
762

    
763
  // Translate each output frame.
764
  for (int i = 0; i < count; ++i) {
765
    // Read the ast node id, function, and frame height for this output frame.
766
    Translation::Opcode opcode =
767
        static_cast<Translation::Opcode>(iterator.Next());
768
    switch (opcode) {
769
      case Translation::JS_FRAME:
770
        DoComputeJSFrame(&iterator, i);
771
        jsframe_count_++;
772
        break;
773
      case Translation::ARGUMENTS_ADAPTOR_FRAME:
774
        DoComputeArgumentsAdaptorFrame(&iterator, i);
775
        break;
776
      case Translation::CONSTRUCT_STUB_FRAME:
777
        DoComputeConstructStubFrame(&iterator, i);
778
        break;
779
      case Translation::GETTER_STUB_FRAME:
780
        DoComputeAccessorStubFrame(&iterator, i, false);
781
        break;
782
      case Translation::SETTER_STUB_FRAME:
783
        DoComputeAccessorStubFrame(&iterator, i, true);
784
        break;
785
      case Translation::COMPILED_STUB_FRAME:
786
        DoComputeCompiledStubFrame(&iterator, i);
787
        break;
788
      case Translation::BEGIN:
789
      case Translation::REGISTER:
790
      case Translation::INT32_REGISTER:
791
      case Translation::UINT32_REGISTER:
792
      case Translation::DOUBLE_REGISTER:
793
      case Translation::STACK_SLOT:
794
      case Translation::INT32_STACK_SLOT:
795
      case Translation::UINT32_STACK_SLOT:
796
      case Translation::DOUBLE_STACK_SLOT:
797
      case Translation::LITERAL:
798
      case Translation::ARGUMENTS_OBJECT:
799
      default:
800
        UNREACHABLE();
801
        break;
802
    }
803
  }
804

    
805
  // Print some helpful diagnostic information.
806
  if (trace_) {
807
    double ms = timer.Elapsed().InMillisecondsF();
808
    int index = output_count_ - 1;  // Index of the topmost frame.
809
    JSFunction* function = output_[index]->GetFunction();
810
    PrintF("[deoptimizing (%s): end 0x%08" V8PRIxPTR " ",
811
           MessageFor(bailout_type_),
812
           reinterpret_cast<intptr_t>(function));
813
    PrintFunctionName();
814
    PrintF(" @%d => node=%d, pc=0x%08" V8PRIxPTR ", state=%s, alignment=%s,"
815
           " took %0.3f ms]\n",
816
           bailout_id_,
817
           node_id.ToInt(),
818
           output_[index]->GetPc(),
819
           FullCodeGenerator::State2String(
820
               static_cast<FullCodeGenerator::State>(
821
                   output_[index]->GetState()->value())),
822
           has_alignment_padding_ ? "with padding" : "no padding",
823
           ms);
824
  }
825
}
826

    
827

    
828
void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
829
                                   int frame_index) {
830
  BailoutId node_id = BailoutId(iterator->Next());
831
  JSFunction* function;
832
  if (frame_index != 0) {
833
    function = JSFunction::cast(ComputeLiteral(iterator->Next()));
834
  } else {
835
    int closure_id = iterator->Next();
836
    USE(closure_id);
837
    ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
838
    function = function_;
839
  }
840
  unsigned height = iterator->Next();
841
  unsigned height_in_bytes = height * kPointerSize;
842
  if (trace_) {
843
    PrintF("  translating ");
844
    function->PrintName();
845
    PrintF(" => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
846
  }
847

    
848
  // The 'fixed' part of the frame consists of the incoming parameters and
849
  // the part described by JavaScriptFrameConstants.
850
  unsigned fixed_frame_size = ComputeFixedSize(function);
851
  unsigned input_frame_size = input_->GetFrameSize();
852
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
853

    
854
  // Allocate and store the output frame description.
855
  FrameDescription* output_frame =
856
      new(output_frame_size) FrameDescription(output_frame_size, function);
857
  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
858

    
859
  bool is_bottommost = (0 == frame_index);
860
  bool is_topmost = (output_count_ - 1 == frame_index);
861
  ASSERT(frame_index >= 0 && frame_index < output_count_);
862
  ASSERT(output_[frame_index] == NULL);
863
  output_[frame_index] = output_frame;
864

    
865
  // The top address for the bottommost output frame can be computed from
866
  // the input frame pointer and the output frame's height.  For all
867
  // subsequent output frames, it can be computed from the previous one's
868
  // top address and the current frame's size.
869
  Register fp_reg = JavaScriptFrame::fp_register();
870
  intptr_t top_address;
871
  if (is_bottommost) {
872
    // Determine whether the input frame contains alignment padding.
873
    has_alignment_padding_ = HasAlignmentPadding(function) ? 1 : 0;
874
    // 2 = context and function in the frame.
875
    // If the optimized frame had alignment padding, adjust the frame pointer
876
    // to point to the new position of the old frame pointer after padding
877
    // is removed. Subtract 2 * kPointerSize for the context and function slots.
878
    top_address = input_->GetRegister(fp_reg.code()) - (2 * kPointerSize) -
879
        height_in_bytes + has_alignment_padding_ * kPointerSize;
880
  } else {
881
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
882
  }
883
  output_frame->SetTop(top_address);
884

    
885
  // Compute the incoming parameter translation.
886
  int parameter_count = function->shared()->formal_parameter_count() + 1;
887
  unsigned output_offset = output_frame_size;
888
  unsigned input_offset = input_frame_size;
889
  for (int i = 0; i < parameter_count; ++i) {
890
    output_offset -= kPointerSize;
891
    DoTranslateCommand(iterator, frame_index, output_offset);
892
  }
893
  input_offset -= (parameter_count * kPointerSize);
894

    
895
  // There are no translation commands for the caller's pc and fp, the
896
  // context, and the function.  Synthesize their values and set them up
897
  // explicitly.
898
  //
899
  // The caller's pc for the bottommost output frame is the same as in the
900
  // input frame.  For all subsequent output frames, it can be read from the
901
  // previous one.  This frame's pc can be computed from the non-optimized
902
  // function code and AST id of the bailout.
903
  output_offset -= kPCOnStackSize;
904
  input_offset -= kPCOnStackSize;
905
  intptr_t value;
906
  if (is_bottommost) {
907
    value = input_->GetFrameSlot(input_offset);
908
  } else {
909
    value = output_[frame_index - 1]->GetPc();
910
  }
911
  output_frame->SetCallerPc(output_offset, value);
912
  if (trace_) {
913
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
914
           V8PRIxPTR  " ; caller's pc\n",
915
           top_address + output_offset, output_offset, value);
916
  }
917

    
918
  // The caller's frame pointer for the bottommost output frame is the same
919
  // as in the input frame.  For all subsequent output frames, it can be
920
  // read from the previous one.  Also compute and set this frame's frame
921
  // pointer.
922
  output_offset -= kFPOnStackSize;
923
  input_offset -= kFPOnStackSize;
924
  if (is_bottommost) {
925
    value = input_->GetFrameSlot(input_offset);
926
  } else {
927
    value = output_[frame_index - 1]->GetFp();
928
  }
929
  output_frame->SetCallerFp(output_offset, value);
930
  intptr_t fp_value = top_address + output_offset;
931
  ASSERT(!is_bottommost || (input_->GetRegister(fp_reg.code()) +
932
      has_alignment_padding_ * kPointerSize) == fp_value);
933
  output_frame->SetFp(fp_value);
934
  if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value);
935
  if (trace_) {
936
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
937
           V8PRIxPTR " ; caller's fp\n",
938
           fp_value, output_offset, value);
939
  }
940
  ASSERT(!is_bottommost || !has_alignment_padding_ ||
941
         (fp_value & kPointerSize) != 0);
942

    
943
  // For the bottommost output frame the context can be gotten from the input
944
  // frame. For all subsequent output frames it can be gotten from the function
945
  // so long as we don't inline functions that need local contexts.
946
  Register context_reg = JavaScriptFrame::context_register();
947
  output_offset -= kPointerSize;
948
  input_offset -= kPointerSize;
949
  if (is_bottommost) {
950
    value = input_->GetFrameSlot(input_offset);
951
  } else {
952
    value = reinterpret_cast<intptr_t>(function->context());
953
  }
954
  output_frame->SetFrameSlot(output_offset, value);
955
  output_frame->SetContext(value);
956
  if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
957
  if (trace_) {
958
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
959
           V8PRIxPTR "; context\n",
960
           top_address + output_offset, output_offset, value);
961
  }
962

    
963
  // The function was mentioned explicitly in the BEGIN_FRAME.
964
  output_offset -= kPointerSize;
965
  input_offset -= kPointerSize;
966
  value = reinterpret_cast<intptr_t>(function);
967
  // The function for the bottommost output frame should also agree with the
968
  // input frame.
969
  ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
970
  output_frame->SetFrameSlot(output_offset, value);
971
  if (trace_) {
972
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
973
           V8PRIxPTR "; function\n",
974
           top_address + output_offset, output_offset, value);
975
  }
976

    
977
  // Translate the rest of the frame.
978
  for (unsigned i = 0; i < height; ++i) {
979
    output_offset -= kPointerSize;
980
    DoTranslateCommand(iterator, frame_index, output_offset);
981
  }
982
  ASSERT(0 == output_offset);
983

    
984
  // Compute this frame's PC, state, and continuation.
985
  Code* non_optimized_code = function->shared()->code();
986
  FixedArray* raw_data = non_optimized_code->deoptimization_data();
987
  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
988
  Address start = non_optimized_code->instruction_start();
989
  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
990
  unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
991
  intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
992
  output_frame->SetPc(pc_value);
993

    
994
  FullCodeGenerator::State state =
995
      FullCodeGenerator::StateField::decode(pc_and_state);
996
  output_frame->SetState(Smi::FromInt(state));
997

    
998
  // Set the continuation for the topmost frame.
999
  if (is_topmost && bailout_type_ != DEBUGGER) {
1000
    Builtins* builtins = isolate_->builtins();
1001
    Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
1002
    if (bailout_type_ == LAZY) {
1003
      continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
1004
    } else if (bailout_type_ == SOFT) {
1005
      continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
1006
    } else {
1007
      ASSERT(bailout_type_ == EAGER);
1008
    }
1009
    output_frame->SetContinuation(
1010
        reinterpret_cast<intptr_t>(continuation->entry()));
1011
  }
1012
}
1013

    
1014

    
1015
void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
1016
                                                 int frame_index) {
1017
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
1018
  unsigned height = iterator->Next();
1019
  unsigned height_in_bytes = height * kPointerSize;
1020
  if (trace_) {
1021
    PrintF("  translating arguments adaptor => height=%d\n", height_in_bytes);
1022
  }
1023

    
1024
  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
1025
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1026

    
1027
  // Allocate and store the output frame description.
1028
  FrameDescription* output_frame =
1029
      new(output_frame_size) FrameDescription(output_frame_size, function);
1030
  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
1031

    
1032
  // Arguments adaptor can not be topmost or bottommost.
1033
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
1034
  ASSERT(output_[frame_index] == NULL);
1035
  output_[frame_index] = output_frame;
1036

    
1037
  // The top address of the frame is computed from the previous
1038
  // frame's top and this frame's size.
1039
  intptr_t top_address;
1040
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1041
  output_frame->SetTop(top_address);
1042

    
1043
  // Compute the incoming parameter translation.
1044
  int parameter_count = height;
1045
  unsigned output_offset = output_frame_size;
1046
  for (int i = 0; i < parameter_count; ++i) {
1047
    output_offset -= kPointerSize;
1048
    DoTranslateCommand(iterator, frame_index, output_offset);
1049
  }
1050

    
1051
  // Read caller's PC from the previous frame.
1052
  output_offset -= kPCOnStackSize;
1053
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
1054
  output_frame->SetCallerPc(output_offset, callers_pc);
1055
  if (trace_) {
1056
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1057
           V8PRIxPTR " ; caller's pc\n",
1058
           top_address + output_offset, output_offset, callers_pc);
1059
  }
1060

    
1061
  // Read caller's FP from the previous frame, and set this frame's FP.
1062
  output_offset -= kFPOnStackSize;
1063
  intptr_t value = output_[frame_index - 1]->GetFp();
1064
  output_frame->SetCallerFp(output_offset, value);
1065
  intptr_t fp_value = top_address + output_offset;
1066
  output_frame->SetFp(fp_value);
1067
  if (trace_) {
1068
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1069
           V8PRIxPTR " ; caller's fp\n",
1070
           fp_value, output_offset, value);
1071
  }
1072

    
1073
  // A marker value is used in place of the context.
1074
  output_offset -= kPointerSize;
1075
  intptr_t context = reinterpret_cast<intptr_t>(
1076
      Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1077
  output_frame->SetFrameSlot(output_offset, context);
1078
  if (trace_) {
1079
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1080
           V8PRIxPTR " ; context (adaptor sentinel)\n",
1081
           top_address + output_offset, output_offset, context);
1082
  }
1083

    
1084
  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
1085
  output_offset -= kPointerSize;
1086
  value = reinterpret_cast<intptr_t>(function);
1087
  output_frame->SetFrameSlot(output_offset, value);
1088
  if (trace_) {
1089
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1090
           V8PRIxPTR " ; function\n",
1091
           top_address + output_offset, output_offset, value);
1092
  }
1093

    
1094
  // Number of incoming arguments.
1095
  output_offset -= kPointerSize;
1096
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
1097
  output_frame->SetFrameSlot(output_offset, value);
1098
  if (trace_) {
1099
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1100
           V8PRIxPTR " ; argc (%d)\n",
1101
           top_address + output_offset, output_offset, value, height - 1);
1102
  }
1103

    
1104
  ASSERT(0 == output_offset);
1105

    
1106
  Builtins* builtins = isolate_->builtins();
1107
  Code* adaptor_trampoline =
1108
      builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
1109
  intptr_t pc_value = reinterpret_cast<intptr_t>(
1110
      adaptor_trampoline->instruction_start() +
1111
      isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
1112
  output_frame->SetPc(pc_value);
1113
}
1114

    
1115

    
1116
void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
1117
                                              int frame_index) {
1118
  Builtins* builtins = isolate_->builtins();
1119
  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
1120
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
1121
  unsigned height = iterator->Next();
1122
  unsigned height_in_bytes = height * kPointerSize;
1123
  if (trace_) {
1124
    PrintF("  translating construct stub => height=%d\n", height_in_bytes);
1125
  }
1126

    
1127
  unsigned fixed_frame_size = ConstructFrameConstants::kFrameSize;
1128
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1129

    
1130
  // Allocate and store the output frame description.
1131
  FrameDescription* output_frame =
1132
      new(output_frame_size) FrameDescription(output_frame_size, function);
1133
  output_frame->SetFrameType(StackFrame::CONSTRUCT);
1134

    
1135
  // Construct stub can not be topmost or bottommost.
1136
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
1137
  ASSERT(output_[frame_index] == NULL);
1138
  output_[frame_index] = output_frame;
1139

    
1140
  // The top address of the frame is computed from the previous
1141
  // frame's top and this frame's size.
1142
  intptr_t top_address;
1143
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1144
  output_frame->SetTop(top_address);
1145

    
1146
  // Compute the incoming parameter translation.
1147
  int parameter_count = height;
1148
  unsigned output_offset = output_frame_size;
1149
  for (int i = 0; i < parameter_count; ++i) {
1150
    output_offset -= kPointerSize;
1151
    int deferred_object_index = deferred_objects_.length();
1152
    DoTranslateCommand(iterator, frame_index, output_offset);
1153
    // The allocated receiver of a construct stub frame is passed as the
1154
    // receiver parameter through the translation. It might be encoding
1155
    // a captured object, patch the slot address for a captured object.
1156
    if (i == 0 && deferred_objects_.length() > deferred_object_index) {
1157
      ASSERT(!deferred_objects_[deferred_object_index].is_arguments());
1158
      deferred_objects_[deferred_object_index].patch_slot_address(top_address);
1159
    }
1160
  }
1161

    
1162
  // Read caller's PC from the previous frame.
1163
  output_offset -= kPCOnStackSize;
1164
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
1165
  output_frame->SetCallerPc(output_offset, callers_pc);
1166
  if (trace_) {
1167
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1168
           V8PRIxPTR " ; caller's pc\n",
1169
           top_address + output_offset, output_offset, callers_pc);
1170
  }
1171

    
1172
  // Read caller's FP from the previous frame, and set this frame's FP.
1173
  output_offset -= kFPOnStackSize;
1174
  intptr_t value = output_[frame_index - 1]->GetFp();
1175
  output_frame->SetCallerFp(output_offset, value);
1176
  intptr_t fp_value = top_address + output_offset;
1177
  output_frame->SetFp(fp_value);
1178
  if (trace_) {
1179
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1180
           V8PRIxPTR " ; caller's fp\n",
1181
           fp_value, output_offset, value);
1182
  }
1183

    
1184
  // The context can be gotten from the previous frame.
1185
  output_offset -= kPointerSize;
1186
  value = output_[frame_index - 1]->GetContext();
1187
  output_frame->SetFrameSlot(output_offset, value);
1188
  if (trace_) {
1189
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1190
           V8PRIxPTR " ; context\n",
1191
           top_address + output_offset, output_offset, value);
1192
  }
1193

    
1194
  // A marker value is used in place of the function.
1195
  output_offset -= kPointerSize;
1196
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
1197
  output_frame->SetFrameSlot(output_offset, value);
1198
  if (trace_) {
1199
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1200
           V8PRIxPTR " ; function (construct sentinel)\n",
1201
           top_address + output_offset, output_offset, value);
1202
  }
1203

    
1204
  // The output frame reflects a JSConstructStubGeneric frame.
1205
  output_offset -= kPointerSize;
1206
  value = reinterpret_cast<intptr_t>(construct_stub);
1207
  output_frame->SetFrameSlot(output_offset, value);
1208
  if (trace_) {
1209
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1210
           V8PRIxPTR " ; code object\n",
1211
           top_address + output_offset, output_offset, value);
1212
  }
1213

    
1214
  // Number of incoming arguments.
1215
  output_offset -= kPointerSize;
1216
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
1217
  output_frame->SetFrameSlot(output_offset, value);
1218
  if (trace_) {
1219
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1220
           V8PRIxPTR " ; argc (%d)\n",
1221
           top_address + output_offset, output_offset, value, height - 1);
1222
  }
1223

    
1224
  // Constructor function being invoked by the stub (only present on some
1225
  // architectures, indicated by kConstructorOffset).
1226
  if (ConstructFrameConstants::kConstructorOffset != kMinInt) {
1227
    output_offset -= kPointerSize;
1228
    value = reinterpret_cast<intptr_t>(function);
1229
    output_frame->SetFrameSlot(output_offset, value);
1230
    if (trace_) {
1231
      PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1232
             V8PRIxPTR " ; constructor function\n",
1233
             top_address + output_offset, output_offset, value);
1234
    }
1235
  }
1236

    
1237
  // The newly allocated object was passed as receiver in the artificial
1238
  // constructor stub environment created by HEnvironment::CopyForInlining().
1239
  output_offset -= kPointerSize;
1240
  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
1241
  output_frame->SetFrameSlot(output_offset, value);
1242
  if (trace_) {
1243
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1244
           V8PRIxPTR " ; allocated receiver\n",
1245
           top_address + output_offset, output_offset, value);
1246
  }
1247

    
1248
  ASSERT(0 == output_offset);
1249

    
1250
  intptr_t pc = reinterpret_cast<intptr_t>(
1251
      construct_stub->instruction_start() +
1252
      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
1253
  output_frame->SetPc(pc);
1254
}
1255

    
1256

    
1257
void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
1258
                                             int frame_index,
1259
                                             bool is_setter_stub_frame) {
1260
  JSFunction* accessor = JSFunction::cast(ComputeLiteral(iterator->Next()));
1261
  // The receiver (and the implicit return value, if any) are expected in
1262
  // registers by the LoadIC/StoreIC, so they don't belong to the output stack
1263
  // frame. This means that we have to use a height of 0.
1264
  unsigned height = 0;
1265
  unsigned height_in_bytes = height * kPointerSize;
1266
  const char* kind = is_setter_stub_frame ? "setter" : "getter";
1267
  if (trace_) {
1268
    PrintF("  translating %s stub => height=%u\n", kind, height_in_bytes);
1269
  }
1270

    
1271
  // We need 1 stack entry for the return address + 4 stack entries from
1272
  // StackFrame::INTERNAL (FP, context, frame type, code object, see
1273
  // MacroAssembler::EnterFrame). For a setter stub frame we need one additional
1274
  // entry for the implicit return value, see
1275
  // StoreStubCompiler::CompileStoreViaSetter.
1276
  unsigned fixed_frame_entries = (kPCOnStackSize / kPointerSize) +
1277
                                 (kFPOnStackSize / kPointerSize) + 3 +
1278
                                 (is_setter_stub_frame ? 1 : 0);
1279
  unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
1280
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1281

    
1282
  // Allocate and store the output frame description.
1283
  FrameDescription* output_frame =
1284
      new(output_frame_size) FrameDescription(output_frame_size, accessor);
1285
  output_frame->SetFrameType(StackFrame::INTERNAL);
1286

    
1287
  // A frame for an accessor stub can not be the topmost or bottommost one.
1288
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
1289
  ASSERT(output_[frame_index] == NULL);
1290
  output_[frame_index] = output_frame;
1291

    
1292
  // The top address of the frame is computed from the previous frame's top and
1293
  // this frame's size.
1294
  intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1295
  output_frame->SetTop(top_address);
1296

    
1297
  unsigned output_offset = output_frame_size;
1298

    
1299
  // Read caller's PC from the previous frame.
1300
  output_offset -= kPCOnStackSize;
1301
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
1302
  output_frame->SetCallerPc(output_offset, callers_pc);
1303
  if (trace_) {
1304
    PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1305
           " ; caller's pc\n",
1306
           top_address + output_offset, output_offset, callers_pc);
1307
  }
1308

    
1309
  // Read caller's FP from the previous frame, and set this frame's FP.
1310
  output_offset -= kFPOnStackSize;
1311
  intptr_t value = output_[frame_index - 1]->GetFp();
1312
  output_frame->SetCallerFp(output_offset, value);
1313
  intptr_t fp_value = top_address + output_offset;
1314
  output_frame->SetFp(fp_value);
1315
  if (trace_) {
1316
    PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1317
           " ; caller's fp\n",
1318
           fp_value, output_offset, value);
1319
  }
1320

    
1321
  // The context can be gotten from the previous frame.
1322
  output_offset -= kPointerSize;
1323
  value = output_[frame_index - 1]->GetContext();
1324
  output_frame->SetFrameSlot(output_offset, value);
1325
  if (trace_) {
1326
    PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1327
           " ; context\n",
1328
           top_address + output_offset, output_offset, value);
1329
  }
1330

    
1331
  // A marker value is used in place of the function.
1332
  output_offset -= kPointerSize;
1333
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
1334
  output_frame->SetFrameSlot(output_offset, value);
1335
  if (trace_) {
1336
    PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1337
           " ; function (%s sentinel)\n",
1338
           top_address + output_offset, output_offset, value, kind);
1339
  }
1340

    
1341
  // Get Code object from accessor stub.
1342
  output_offset -= kPointerSize;
1343
  Builtins::Name name = is_setter_stub_frame ?
1344
      Builtins::kStoreIC_Setter_ForDeopt :
1345
      Builtins::kLoadIC_Getter_ForDeopt;
1346
  Code* accessor_stub = isolate_->builtins()->builtin(name);
1347
  value = reinterpret_cast<intptr_t>(accessor_stub);
1348
  output_frame->SetFrameSlot(output_offset, value);
1349
  if (trace_) {
1350
    PrintF("    0x%08" V8PRIxPTR ": [top + %u] <- 0x%08" V8PRIxPTR
1351
           " ; code object\n",
1352
           top_address + output_offset, output_offset, value);
1353
  }
1354

    
1355
  // Skip receiver.
1356
  Translation::Opcode opcode =
1357
      static_cast<Translation::Opcode>(iterator->Next());
1358
  iterator->Skip(Translation::NumberOfOperandsFor(opcode));
1359

    
1360
  if (is_setter_stub_frame) {
1361
    // The implicit return value was part of the artificial setter stub
1362
    // environment.
1363
    output_offset -= kPointerSize;
1364
    DoTranslateCommand(iterator, frame_index, output_offset);
1365
  }
1366

    
1367
  ASSERT(0 == output_offset);
1368

    
1369
  Smi* offset = is_setter_stub_frame ?
1370
      isolate_->heap()->setter_stub_deopt_pc_offset() :
1371
      isolate_->heap()->getter_stub_deopt_pc_offset();
1372
  intptr_t pc = reinterpret_cast<intptr_t>(
1373
      accessor_stub->instruction_start() + offset->value());
1374
  output_frame->SetPc(pc);
1375
}
1376

    
1377

    
1378
void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
1379
                                             int frame_index) {
1380
  //
1381
  //               FROM                                  TO
1382
  //    |          ....           |          |          ....           |
1383
  //    +-------------------------+          +-------------------------+
1384
  //    | JSFunction continuation |          | JSFunction continuation |
1385
  //    +-------------------------+          +-------------------------+
1386
  // |  |    saved frame (FP)     |          |    saved frame (FP)     |
1387
  // |  +=========================+<-fpreg   +=========================+<-fpreg
1388
  // |  |   JSFunction context    |          |   JSFunction context    |
1389
  // v  +-------------------------+          +-------------------------|
1390
  //    |   COMPILED_STUB marker  |          |   STUB_FAILURE marker   |
1391
  //    +-------------------------+          +-------------------------+
1392
  //    |                         |          |  caller args.arguments_ |
1393
  //    | ...                     |          +-------------------------+
1394
  //    |                         |          |  caller args.length_    |
1395
  //    |-------------------------|<-spreg   +-------------------------+
1396
  //                                         |  caller args pointer    |
1397
  //                                         +-------------------------+
1398
  //                                         |  caller stack param 1   |
1399
  //      parameters in registers            +-------------------------+
1400
  //       and spilled to stack              |           ....          |
1401
  //                                         +-------------------------+
1402
  //                                         |  caller stack param n   |
1403
  //                                         +-------------------------+<-spreg
1404
  //                                         reg = number of parameters
1405
  //                                         reg = failure handler address
1406
  //                                         reg = saved frame
1407
  //                                         reg = JSFunction context
1408
  //
1409

    
1410
  ASSERT(compiled_code_->is_crankshafted() &&
1411
         compiled_code_->kind() != Code::OPTIMIZED_FUNCTION);
1412
  int major_key = compiled_code_->major_key();
1413
  CodeStubInterfaceDescriptor* descriptor =
1414
      isolate_->code_stub_interface_descriptor(major_key);
1415

    
1416
  // The output frame must have room for all pushed register parameters
1417
  // and the standard stack frame slots.  Include space for an argument
1418
  // object to the callee and optionally the space to pass the argument
1419
  // object to the stub failure handler.
1420
  ASSERT(descriptor->register_param_count_ >= 0);
1421
  int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
1422
      sizeof(Arguments) + kPointerSize;
1423
  int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
1424
  int input_frame_size = input_->GetFrameSize();
1425
  int output_frame_size = height_in_bytes + fixed_frame_size;
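  // The height covers one word per register parameter, the fields of the
  // Arguments object (args.length_ and args.arguments_, filled in below) and
  // one extra word for the pointer to that Arguments object which is passed
  // to the stub failure handler.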
1426
  if (trace_) {
1427
    PrintF("  translating %s => StubFailureTrampolineStub, height=%d\n",
1428
           CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
1429
           height_in_bytes);
1430
  }
1431

    
1432
  // The stub failure trampoline is a single frame.
1433
  FrameDescription* output_frame =
1434
      new(output_frame_size) FrameDescription(output_frame_size, NULL);
1435
  output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
1436
  ASSERT(frame_index == 0);
1437
  output_[frame_index] = output_frame;
1438

    
1439
  // The top address for the output frame can be computed from the input
1440
  // frame pointer and the output frame's height. Subtract space for the
1441
  // context and function slots.
1442
  Register fp_reg = StubFailureTrampolineFrame::fp_register();
1443
  intptr_t top_address = input_->GetRegister(fp_reg.code()) -
1444
      (2 * kPointerSize) - height_in_bytes;
1445
  output_frame->SetTop(top_address);
1446

    
1447
  // Read caller's PC (JSFunction continuation) from the input frame.
1448
  unsigned input_frame_offset = input_frame_size - kPCOnStackSize;
1449
  unsigned output_frame_offset = output_frame_size - kFPOnStackSize;
1450
  intptr_t value = input_->GetFrameSlot(input_frame_offset);
1451
  output_frame->SetCallerPc(output_frame_offset, value);
1452
  if (trace_) {
1453
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1454
           V8PRIxPTR " ; caller's pc\n",
1455
           top_address + output_frame_offset, output_frame_offset, value);
1456
  }
1457

    
1458
  // Read caller's FP from the input frame, and set this frame's FP.
1459
  input_frame_offset -= kFPOnStackSize;
1460
  value = input_->GetFrameSlot(input_frame_offset);
1461
  output_frame_offset -= kFPOnStackSize;
1462
  output_frame->SetCallerFp(output_frame_offset, value);
1463
  intptr_t frame_ptr = input_->GetRegister(fp_reg.code());
1464
  output_frame->SetRegister(fp_reg.code(), frame_ptr);
1465
  output_frame->SetFp(frame_ptr);
1466
  if (trace_) {
1467
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1468
           V8PRIxPTR " ; caller's fp\n",
1469
           top_address + output_frame_offset, output_frame_offset, value);
1470
  }
1471

    
1472
  // The context can be gotten from the input frame.
1473
  Register context_reg = StubFailureTrampolineFrame::context_register();
1474
  input_frame_offset -= kPointerSize;
1475
  value = input_->GetFrameSlot(input_frame_offset);
1476
  output_frame->SetRegister(context_reg.code(), value);
1477
  output_frame_offset -= kPointerSize;
1478
  output_frame->SetFrameSlot(output_frame_offset, value);
1479
  if (trace_) {
1480
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1481
           V8PRIxPTR " ; context\n",
1482
           top_address + output_frame_offset, output_frame_offset, value);
1483
  }
1484

    
1485
  // A marker value is used in place of the function.
1486
  output_frame_offset -= kPointerSize;
1487
  value = reinterpret_cast<intptr_t>(
1488
      Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
1489
  output_frame->SetFrameSlot(output_frame_offset, value);
1490
  if (trace_) {
1491
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1492
           V8PRIxPTR " ; function (stub failure sentinel)\n",
1493
           top_address + output_frame_offset, output_frame_offset, value);
1494
  }
1495

    
1496
  intptr_t caller_arg_count = 0;
1497
  bool arg_count_known = !descriptor->stack_parameter_count_.is_valid();
1498

    
1499
  // Build the Arguments object for the caller's parameters and a pointer to it.
1500
  output_frame_offset -= kPointerSize;
1501
  int args_arguments_offset = output_frame_offset;
1502
  intptr_t the_hole = reinterpret_cast<intptr_t>(
1503
      isolate_->heap()->the_hole_value());
1504
  if (arg_count_known) {
1505
    value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
1506
        (caller_arg_count - 1) * kPointerSize;
1507
  } else {
1508
    value = the_hole;
1509
  }
1510

    
1511
  output_frame->SetFrameSlot(args_arguments_offset, value);
1512
  if (trace_) {
1513
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1514
           V8PRIxPTR " ; args.arguments %s\n",
1515
           top_address + args_arguments_offset, args_arguments_offset, value,
1516
           arg_count_known ? "" : "(the hole)");
1517
  }
1518

    
1519
  output_frame_offset -= kPointerSize;
1520
  int length_frame_offset = output_frame_offset;
1521
  value = arg_count_known ? caller_arg_count : the_hole;
1522
  output_frame->SetFrameSlot(length_frame_offset, value);
1523
  if (trace_) {
1524
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1525
           V8PRIxPTR " ; args.length %s\n",
1526
           top_address + length_frame_offset, length_frame_offset, value,
1527
           arg_count_known ? "" : "(the hole)");
1528
  }
1529

    
1530
  output_frame_offset -= kPointerSize;
1531
  value = frame_ptr + StandardFrameConstants::kCallerSPOffset -
1532
      (output_frame_size - output_frame_offset) + kPointerSize;
1533
  output_frame->SetFrameSlot(output_frame_offset, value);
1534
  if (trace_) {
1535
    PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1536
           V8PRIxPTR " ; args*\n",
1537
           top_address + output_frame_offset, output_frame_offset, value);
1538
  }
1539

    
1540
  // Copy the register parameters to the failure frame.
1541
  for (int i = 0; i < descriptor->register_param_count_; ++i) {
1542
    output_frame_offset -= kPointerSize;
1543
    DoTranslateCommand(iterator, 0, output_frame_offset);
1544
  }
1545

    
1546
  if (!arg_count_known) {
1547
    DoTranslateCommand(iterator, 0, length_frame_offset,
1548
                       TRANSLATED_VALUE_IS_NATIVE);
1549
    caller_arg_count = output_frame->GetFrameSlot(length_frame_offset);
1550
    value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
1551
        (caller_arg_count - 1) * kPointerSize;
1552
    output_frame->SetFrameSlot(args_arguments_offset, value);
1553
    if (trace_) {
1554
      PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
1555
             V8PRIxPTR " ; args.arguments\n",
1556
             top_address + args_arguments_offset, args_arguments_offset, value);
1557
    }
1558
  }
1559

    
1560
  ASSERT(0 == output_frame_offset);
1561

    
1562
  // Copy the double registers from the input into the output frame.
1563
  CopyDoubleRegisters(output_frame);
1564

    
1565
  // Fill registers containing handler and number of parameters.
1566
  SetPlatformCompiledStubRegisters(output_frame, descriptor);
1567

    
1568
  // Compute this frame's PC, state, and continuation.
1569
  Code* trampoline = NULL;
1570
  StubFunctionMode function_mode = descriptor->function_mode_;
1571
  StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline,
1572
                                                           isolate_);
1573
  ASSERT(trampoline != NULL);
1574
  output_frame->SetPc(reinterpret_cast<intptr_t>(
1575
      trampoline->instruction_start()));
1576
  output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
1577
  Code* notify_failure =
1578
      isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
1579
  output_frame->SetContinuation(
1580
      reinterpret_cast<intptr_t>(notify_failure->entry()));
1581
}
1582

    
1583

    
1584
Handle<Object> Deoptimizer::MaterializeNextHeapObject() {
1585
  int object_index = materialization_object_index_++;
1586
  ObjectMaterializationDescriptor desc = deferred_objects_[object_index];
1587
  const int length = desc.object_length();
1588

    
1589
  if (desc.duplicate_object() >= 0) {
1590
    // Found a previously materialized object by de-duplication.
1591
    object_index = desc.duplicate_object();
1592
    materialized_objects_->Add(Handle<Object>());
1593
  } else if (desc.is_arguments() && ArgumentsObjectIsAdapted(object_index)) {
1594
    // Use the arguments adapter frame we just built to materialize the
1595
    // arguments object. FunctionGetArguments can't throw an exception.
1596
    Handle<JSFunction> function = ArgumentsObjectFunction(object_index);
1597
    Handle<JSObject> arguments = Handle<JSObject>::cast(
1598
        Accessors::FunctionGetArguments(function));
1599
    materialized_objects_->Add(arguments);
1600
    materialization_value_index_ += length;
1601
  } else if (desc.is_arguments()) {
1602
    // Construct an arguments object and copy the parameters to a newly
1603
    // allocated arguments object backing store.
1604
    Handle<JSFunction> function = ArgumentsObjectFunction(object_index);
1605
    Handle<JSObject> arguments =
1606
        isolate_->factory()->NewArgumentsObject(function, length);
1607
    Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
1608
    ASSERT(array->length() == length);
1609
    arguments->set_elements(*array);
1610
    materialized_objects_->Add(arguments);
1611
    for (int i = 0; i < length; ++i) {
1612
      Handle<Object> value = MaterializeNextValue();
1613
      array->set(i, *value);
1614
    }
1615
  } else {
1616
    // Dispatch on the instance type of the object to be materialized.
1617
    // We also need to make sure that the representation of all fields
1618
    // in the given object are general enough to hold a tagged value.
1619
    Handle<Map> map = Map::GeneralizeAllFieldRepresentations(
1620
        Handle<Map>::cast(MaterializeNextValue()), Representation::Tagged());
1621
    switch (map->instance_type()) {
1622
      case HEAP_NUMBER_TYPE: {
1623
        Handle<HeapNumber> object = isolate_->factory()->NewHeapNumber(0.0);
1624
        materialized_objects_->Add(object);
1625
        Handle<Object> number = MaterializeNextValue();
1626
        object->set_value(number->Number());
1627
        materialization_value_index_ += kDoubleSize / kPointerSize - 1;
1628
        break;
1629
      }
1630
      case JS_OBJECT_TYPE: {
1631
        Handle<JSObject> object =
1632
            isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED, false);
1633
        materialized_objects_->Add(object);
1634
        Handle<Object> properties = MaterializeNextValue();
1635
        Handle<Object> elements = MaterializeNextValue();
1636
        object->set_properties(FixedArray::cast(*properties));
1637
        object->set_elements(FixedArrayBase::cast(*elements));
1638
        for (int i = 0; i < length - 3; ++i) {
1639
          Handle<Object> value = MaterializeNextValue();
1640
          object->FastPropertyAtPut(i, *value);
1641
        }
1642
        break;
1643
      }
1644
      case JS_ARRAY_TYPE: {
1645
        Handle<JSArray> object =
1646
            isolate_->factory()->NewJSArray(0, map->elements_kind());
1647
        materialized_objects_->Add(object);
1648
        Handle<Object> properties = MaterializeNextValue();
1649
        Handle<Object> elements = MaterializeNextValue();
1650
        Handle<Object> length = MaterializeNextValue();
1651
        object->set_properties(FixedArray::cast(*properties));
1652
        object->set_elements(FixedArrayBase::cast(*elements));
1653
        object->set_length(*length);
1654
        break;
1655
      }
1656
      default:
1657
        PrintF("[couldn't handle instance type %d]\n", map->instance_type());
1658
        UNREACHABLE();
1659
    }
1660
  }
1661

    
1662
  return materialized_objects_->at(object_index);
1663
}
1664

    
1665

    
1666
Handle<Object> Deoptimizer::MaterializeNextValue() {
  int value_index = materialization_value_index_++;
  Handle<Object> value = materialized_values_->at(value_index);
  if (*value == isolate_->heap()->arguments_marker()) {
    value = MaterializeNextHeapObject();
  }
  return value;
}
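// Note on the sentinel check above: DoTranslateCommand() and
// DoTranslateObject() store the arguments marker wherever a nested object
// occurs, so hitting it here means the next deferred object must be
// materialized recursively before the enclosing value can be used.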
1674

    
1675

    
1676
void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
1677
  ASSERT_NE(DEBUGGER, bailout_type_);
1678

    
1679
  // Walk all JavaScript output frames with the given frame iterator.
1680
  for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) {
1681
    if (frame_index != 0) it->Advance();
1682
    JavaScriptFrame* frame = it->frame();
1683
    jsframe_functions_.Add(handle(frame->function(), isolate_));
1684
    jsframe_has_adapted_arguments_.Add(frame->has_adapted_arguments());
1685
  }
1686

    
1687
  // Handlify all tagged object values before triggering any allocation.
1688
  List<Handle<Object> > values(deferred_objects_tagged_values_.length());
1689
  for (int i = 0; i < deferred_objects_tagged_values_.length(); ++i) {
1690
    values.Add(Handle<Object>(deferred_objects_tagged_values_[i], isolate_));
1691
  }
1692

    
1693
  // Play it safe and clear all unhandlified values before we continue.
1694
  deferred_objects_tagged_values_.Clear();
1695

    
1696
  // Materialize all heap numbers before looking at arguments because when the
1697
  // output frames are used to materialize arguments objects later on they need
1698
  // to already contain valid heap numbers.
1699
  for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
1700
    HeapNumberMaterializationDescriptor<Address> d = deferred_heap_numbers_[i];
1701
    Handle<Object> num = isolate_->factory()->NewNumber(d.value());
1702
    if (trace_) {
1703
      PrintF("Materialized a new heap number %p [%e] in slot %p\n",
1704
             reinterpret_cast<void*>(*num),
1705
             d.value(),
1706
             d.destination());
1707
    }
1708
    Memory::Object_at(d.destination()) = *num;
1709
  }
1710

    
1711
  // Materialize all heap numbers required for arguments/captured objects.
1712
  for (int i = 0; i < deferred_objects_double_values_.length(); i++) {
1713
    HeapNumberMaterializationDescriptor<int> d =
1714
        deferred_objects_double_values_[i];
1715
    Handle<Object> num = isolate_->factory()->NewNumber(d.value());
1716
    if (trace_) {
1717
      PrintF("Materialized a new heap number %p [%e] for object at %d\n",
1718
             reinterpret_cast<void*>(*num),
1719
             d.value(),
1720
             d.destination());
1721
    }
1722
    ASSERT(values.at(d.destination())->IsTheHole());
1723
    values.Set(d.destination(), num);
1724
  }
1725

    
1726
  // Play it safe and clear all object double values before we continue.
1727
  deferred_objects_double_values_.Clear();
1728

    
1729
  // Materialize arguments/captured objects.
1730
  if (!deferred_objects_.is_empty()) {
1731
    List<Handle<Object> > materialized_objects(deferred_objects_.length());
1732
    materialized_objects_ = &materialized_objects;
1733
    materialized_values_ = &values;
1734

    
1735
    while (materialization_object_index_ < deferred_objects_.length()) {
1736
      int object_index = materialization_object_index_;
1737
      ObjectMaterializationDescriptor descriptor =
1738
          deferred_objects_.at(object_index);
1739

    
1740
      // Find a previously materialized object by de-duplication or
1741
      // materialize a new instance of the object if necessary. Store
1742
      // the materialized object into the frame slot.
1743
      Handle<Object> object = MaterializeNextHeapObject();
1744
      Memory::Object_at(descriptor.slot_address()) = *object;
1745
      if (trace_) {
1746
        if (descriptor.is_arguments()) {
1747
          PrintF("Materialized %sarguments object of length %d for %p: ",
1748
                 ArgumentsObjectIsAdapted(object_index) ? "(adapted) " : "",
1749
                 Handle<JSObject>::cast(object)->elements()->length(),
1750
                 reinterpret_cast<void*>(descriptor.slot_address()));
1751
        } else {
1752
          PrintF("Materialized captured object of size %d for %p: ",
1753
                 Handle<HeapObject>::cast(object)->Size(),
1754
                 reinterpret_cast<void*>(descriptor.slot_address()));
1755
        }
1756
        object->ShortPrint();
1757
        PrintF("\n");
1758
      }
1759
    }
1760

    
1761
    ASSERT(materialization_object_index_ == materialized_objects_->length());
1762
    ASSERT(materialization_value_index_ == materialized_values_->length());
1763
  }
1764
}
1765

    
1766

    
1767
#ifdef ENABLE_DEBUGGER_SUPPORT
1768
void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
1769
    Address parameters_top,
1770
    uint32_t parameters_size,
1771
    Address expressions_top,
1772
    uint32_t expressions_size,
1773
    DeoptimizedFrameInfo* info) {
1774
  ASSERT_EQ(DEBUGGER, bailout_type_);
1775
  Address parameters_bottom = parameters_top + parameters_size;
1776
  Address expressions_bottom = expressions_top + expressions_size;
1777
  for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
1778
    HeapNumberMaterializationDescriptor<Address> d = deferred_heap_numbers_[i];
1779

    
1780
    // Check if the heap number to materialize actually belongs to the frame
    // being extracted.
1782
    Address slot = d.destination();
1783
    if (parameters_top <= slot && slot < parameters_bottom) {
1784
      Handle<Object> num = isolate_->factory()->NewNumber(d.value());
1785

    
1786
      int index = (info->parameters_count() - 1) -
1787
          static_cast<int>(slot - parameters_top) / kPointerSize;
1788

    
1789
      if (trace_) {
1790
        PrintF("Materializing a new heap number %p [%e] in slot %p"
1791
               "for parameter slot #%d\n",
1792
               reinterpret_cast<void*>(*num),
1793
               d.value(),
1794
               d.destination(),
1795
               index);
1796
      }
1797

    
1798
      info->SetParameter(index, *num);
1799
    } else if (expressions_top <= slot && slot < expressions_bottom) {
1800
      Handle<Object> num = isolate_->factory()->NewNumber(d.value());
1801

    
1802
      int index = info->expression_count() - 1 -
1803
          static_cast<int>(slot - expressions_top) / kPointerSize;
1804

    
1805
      if (trace_) {
1806
        PrintF("Materializing a new heap number %p [%e] in slot %p"
1807
               "for expression slot #%d\n",
1808
               reinterpret_cast<void*>(*num),
1809
               d.value(),
1810
               d.destination(),
1811
               index);
1812
      }
1813

    
1814
      info->SetExpression(index, *num);
1815
    }
1816
  }
1817
}
1818
#endif
1819

    
1820

    
1821
static const char* TraceValueType(bool is_smi, bool is_native = false) {
1822
  if (is_native) {
1823
    return "native";
1824
  } else if (is_smi) {
1825
    return "smi";
1826
  }
1827

    
1828
  return "heap number";
1829
}
1830

    
1831

    
1832
void Deoptimizer::DoTranslateObject(TranslationIterator* iterator,
1833
                                    int object_index,
1834
                                    int field_index) {
1835
  disasm::NameConverter converter;
1836
  Address object_slot = deferred_objects_[object_index].slot_address();
1837

    
1838
  Translation::Opcode opcode =
1839
      static_cast<Translation::Opcode>(iterator->Next());
1840

    
1841
  switch (opcode) {
1842
    case Translation::BEGIN:
1843
    case Translation::JS_FRAME:
1844
    case Translation::ARGUMENTS_ADAPTOR_FRAME:
1845
    case Translation::CONSTRUCT_STUB_FRAME:
1846
    case Translation::GETTER_STUB_FRAME:
1847
    case Translation::SETTER_STUB_FRAME:
1848
    case Translation::COMPILED_STUB_FRAME:
1849
      UNREACHABLE();
1850
      return;
1851

    
1852
    case Translation::REGISTER: {
1853
      int input_reg = iterator->Next();
1854
      intptr_t input_value = input_->GetRegister(input_reg);
1855
      if (trace_) {
1856
        PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
1857
               reinterpret_cast<intptr_t>(object_slot),
1858
               field_index);
1859
        PrintF("0x%08" V8PRIxPTR " ; %s ", input_value,
1860
               converter.NameOfCPURegister(input_reg));
1861
        reinterpret_cast<Object*>(input_value)->ShortPrint();
1862
        PrintF("\n");
1863
      }
1864
      AddObjectTaggedValue(input_value);
1865
      return;
1866
    }
1867

    
1868
    case Translation::INT32_REGISTER: {
1869
      int input_reg = iterator->Next();
1870
      intptr_t value = input_->GetRegister(input_reg);
1871
      bool is_smi = Smi::IsValid(value);
1872
      if (trace_) {
1873
        PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
1874
               reinterpret_cast<intptr_t>(object_slot),
1875
               field_index);
1876
        PrintF("%" V8PRIdPTR " ; %s (%s)\n", value,
1877
               converter.NameOfCPURegister(input_reg),
1878
               TraceValueType(is_smi));
1879
      }
1880
      if (is_smi) {
1881
        intptr_t tagged_value =
1882
            reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
1883
        AddObjectTaggedValue(tagged_value);
1884
      } else {
1885
        double double_value = static_cast<double>(static_cast<int32_t>(value));
1886
        AddObjectDoubleValue(double_value);
1887
      }
1888
      return;
1889
    }
1890

    
1891
    case Translation::UINT32_REGISTER: {
1892
      int input_reg = iterator->Next();
1893
      uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
1894
      bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
1895
      if (trace_) {
1896
        PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
1897
               reinterpret_cast<intptr_t>(object_slot),
1898
               field_index);
1899
        PrintF("%" V8PRIdPTR " ; uint %s (%s)\n", value,
1900
               converter.NameOfCPURegister(input_reg),
1901
               TraceValueType(is_smi));
1902
      }
1903
      if (is_smi) {
1904
        intptr_t tagged_value =
1905
            reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
1906
        AddObjectTaggedValue(tagged_value);
1907
      } else {
1908
        double double_value = static_cast<double>(static_cast<uint32_t>(value));
1909
        AddObjectDoubleValue(double_value);
1910
      }
1911
      return;
1912
    }
1913

    
1914
    case Translation::DOUBLE_REGISTER: {
1915
      int input_reg = iterator->Next();
1916
      double value = input_->GetDoubleRegister(input_reg);
1917
      if (trace_) {
1918
        PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
1919
               reinterpret_cast<intptr_t>(object_slot),
1920
               field_index);
1921
        PrintF("%e ; %s\n", value,
1922
               DoubleRegister::AllocationIndexToString(input_reg));
1923
      }
1924
      AddObjectDoubleValue(value);
1925
      return;
1926
    }
1927

    
1928
    case Translation::STACK_SLOT: {
1929
      int input_slot_index = iterator->Next();
1930
      unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
1931
      intptr_t input_value = input_->GetFrameSlot(input_offset);
1932
      if (trace_) {
1933
        PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
1934
               reinterpret_cast<intptr_t>(object_slot),
1935
               field_index);
1936
        PrintF("0x%08" V8PRIxPTR " ; [sp + %d] ", input_value, input_offset);
1937
        reinterpret_cast<Object*>(input_value)->ShortPrint();
1938
        PrintF("\n");
1939
      }
1940
      AddObjectTaggedValue(input_value);
1941
      return;
1942
    }
1943

    
1944
    case Translation::INT32_STACK_SLOT: {
1945
      int input_slot_index = iterator->Next();
1946
      unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
1947
      intptr_t value = input_->GetFrameSlot(input_offset);
1948
      bool is_smi = Smi::IsValid(value);
1949
      if (trace_) {
1950
        PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
1951
               reinterpret_cast<intptr_t>(object_slot),
1952
               field_index);
1953
        PrintF("%" V8PRIdPTR " ; [sp + %d] (%s)\n",
1954
               value, input_offset, TraceValueType(is_smi));
1955
      }
1956
      if (is_smi) {
1957
        intptr_t tagged_value =
1958
            reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
1959
        AddObjectTaggedValue(tagged_value);
1960
      } else {
1961
        double double_value = static_cast<double>(static_cast<int32_t>(value));
1962
        AddObjectDoubleValue(double_value);
1963
      }
1964
      return;
1965
    }
1966

    
1967
    case Translation::UINT32_STACK_SLOT: {
1968
      int input_slot_index = iterator->Next();
1969
      unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
1970
      uintptr_t value =
1971
          static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
1972
      bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
1973
      if (trace_) {
1974
        PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
1975
               reinterpret_cast<intptr_t>(object_slot),
1976
               field_index);
1977
        PrintF("%" V8PRIdPTR " ; [sp + %d] (uint %s)\n",
1978
               value, input_offset, TraceValueType(is_smi));
1979
      }
1980
      if (is_smi) {
1981
        intptr_t tagged_value =
1982
            reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
1983
        AddObjectTaggedValue(tagged_value);
1984
      } else {
1985
        double double_value = static_cast<double>(static_cast<uint32_t>(value));
1986
        AddObjectDoubleValue(double_value);
1987
      }
1988
      return;
1989
    }
1990

    
1991
    case Translation::DOUBLE_STACK_SLOT: {
1992
      int input_slot_index = iterator->Next();
1993
      unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
1994
      double value = input_->GetDoubleFrameSlot(input_offset);
1995
      if (trace_) {
1996
        PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
1997
               reinterpret_cast<intptr_t>(object_slot),
1998
               field_index);
1999
        PrintF("%e ; [sp + %d]\n", value, input_offset);
2000
      }
2001
      AddObjectDoubleValue(value);
2002
      return;
2003
    }
2004

    
2005
    case Translation::LITERAL: {
2006
      Object* literal = ComputeLiteral(iterator->Next());
2007
      if (trace_) {
2008
        PrintF("      object @0x%08" V8PRIxPTR ": [field #%d] <- ",
2009
               reinterpret_cast<intptr_t>(object_slot),
2010
               field_index);
2011
        literal->ShortPrint();
2012
        PrintF(" ; literal\n");
2013
      }
2014
      intptr_t value = reinterpret_cast<intptr_t>(literal);
2015
      AddObjectTaggedValue(value);
2016
      return;
2017
    }
2018

    
2019
    case Translation::DUPLICATED_OBJECT: {
2020
      int object_index = iterator->Next();
2021
      if (trace_) {
2022
        PrintF("      nested @0x%08" V8PRIxPTR ": [field #%d] <- ",
2023
               reinterpret_cast<intptr_t>(object_slot),
2024
               field_index);
2025
        isolate_->heap()->arguments_marker()->ShortPrint();
2026
        PrintF(" ; duplicate of object #%d\n", object_index);
2027
      }
2028
      // Use the materialization marker value as a sentinel and fill in
2029
      // the object after the deoptimized frame is built.
2030
      intptr_t value = reinterpret_cast<intptr_t>(
2031
          isolate_->heap()->arguments_marker());
2032
      AddObjectDuplication(0, object_index);
2033
      AddObjectTaggedValue(value);
2034
      return;
2035
    }
2036

    
2037
    case Translation::ARGUMENTS_OBJECT:
2038
    case Translation::CAPTURED_OBJECT: {
2039
      int length = iterator->Next();
2040
      bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2041
      if (trace_) {
2042
        PrintF("      nested @0x%08" V8PRIxPTR ": [field #%d] <- ",
2043
               reinterpret_cast<intptr_t>(object_slot),
2044
               field_index);
2045
        isolate_->heap()->arguments_marker()->ShortPrint();
2046
        PrintF(" ; object (length = %d, is_args = %d)\n", length, is_args);
2047
      }
2048
      // Use the materialization marker value as a sentinel and fill in
2049
      // the object after the deoptimized frame is built.
2050
      intptr_t value = reinterpret_cast<intptr_t>(
2051
          isolate_->heap()->arguments_marker());
2052
      AddObjectStart(0, length, is_args);
2053
      AddObjectTaggedValue(value);
2054
      // We save the object values on the side and materialize the actual
2055
      // object after the deoptimized frame is built.
2056
      int object_index = deferred_objects_.length() - 1;
2057
      for (int i = 0; i < length; i++) {
2058
        DoTranslateObject(iterator, object_index, i);
2059
      }
2060
      return;
2061
    }
2062
  }
2063
}
2064

    
2065

    
2066
void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
2067
    int frame_index,
2068
    unsigned output_offset,
2069
    DeoptimizerTranslatedValueType value_type) {
2070
  disasm::NameConverter converter;
2071
  // A GC-safe temporary placeholder that we can put in the output frame.
2072
  const intptr_t kPlaceholder = reinterpret_cast<intptr_t>(Smi::FromInt(0));
2073
  bool is_native = value_type == TRANSLATED_VALUE_IS_NATIVE;
2074

    
2075
  Translation::Opcode opcode =
2076
      static_cast<Translation::Opcode>(iterator->Next());
2077

    
2078
  switch (opcode) {
2079
    case Translation::BEGIN:
2080
    case Translation::JS_FRAME:
2081
    case Translation::ARGUMENTS_ADAPTOR_FRAME:
2082
    case Translation::CONSTRUCT_STUB_FRAME:
2083
    case Translation::GETTER_STUB_FRAME:
2084
    case Translation::SETTER_STUB_FRAME:
2085
    case Translation::COMPILED_STUB_FRAME:
2086
      UNREACHABLE();
2087
      return;
2088

    
2089
    case Translation::REGISTER: {
2090
      int input_reg = iterator->Next();
2091
      intptr_t input_value = input_->GetRegister(input_reg);
2092
      if (trace_) {
2093
        PrintF(
2094
            "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s ",
2095
            output_[frame_index]->GetTop() + output_offset,
2096
            output_offset,
2097
            input_value,
2098
            converter.NameOfCPURegister(input_reg));
2099
        reinterpret_cast<Object*>(input_value)->ShortPrint();
2100
        PrintF("\n");
2101
      }
2102
      output_[frame_index]->SetFrameSlot(output_offset, input_value);
2103
      return;
2104
    }
2105

    
2106
    case Translation::INT32_REGISTER: {
2107
      int input_reg = iterator->Next();
2108
      intptr_t value = input_->GetRegister(input_reg);
2109
      bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
2110
          Smi::IsValid(value);
2111
      if (trace_) {
2112
        PrintF(
2113
            "    0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIdPTR " ; %s (%s)\n",
2114
            output_[frame_index]->GetTop() + output_offset,
2115
            output_offset,
2116
            value,
2117
            converter.NameOfCPURegister(input_reg),
2118
            TraceValueType(is_smi, is_native));
2119
      }
2120
      if (is_smi) {
2121
        intptr_t tagged_value =
2122
            reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2123
        output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2124
      } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
2125
        output_[frame_index]->SetFrameSlot(output_offset, value);
2126
      } else {
2127
        // We save the untagged value on the side and store a GC-safe
2128
        // temporary placeholder in the frame.
2129
        ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
2130
        AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2131
                       static_cast<double>(static_cast<int32_t>(value)));
2132
        output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2133
      }
2134
      return;
2135
    }
2136

    
2137
    case Translation::UINT32_REGISTER: {
2138
      int input_reg = iterator->Next();
2139
      uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
2140
      bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
2141
          (value <= static_cast<uintptr_t>(Smi::kMaxValue));
2142
      if (trace_) {
2143
        PrintF(
2144
            "    0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIuPTR
2145
            " ; uint %s (%s)\n",
2146
            output_[frame_index]->GetTop() + output_offset,
2147
            output_offset,
2148
            value,
2149
            converter.NameOfCPURegister(input_reg),
2150
            TraceValueType(is_smi, is_native));
2151
      }
2152
      if (is_smi) {
2153
        intptr_t tagged_value =
2154
            reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2155
        output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2156
      } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
2157
        output_[frame_index]->SetFrameSlot(output_offset, value);
2158
      } else {
2159
        // We save the untagged value on the side and store a GC-safe
2160
        // temporary placeholder in the frame.
2161
        ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
2162
        AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2163
                       static_cast<double>(static_cast<uint32_t>(value)));
2164
        output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2165
      }
2166
      return;
2167
    }
2168

    
2169
    case Translation::DOUBLE_REGISTER: {
2170
      int input_reg = iterator->Next();
2171
      double value = input_->GetDoubleRegister(input_reg);
2172
      if (trace_) {
2173
        PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- %e ; %s\n",
2174
               output_[frame_index]->GetTop() + output_offset,
2175
               output_offset,
2176
               value,
2177
               DoubleRegister::AllocationIndexToString(input_reg));
2178
      }
2179
      // We save the untagged value on the side and store a GC-safe
2180
      // temporary placeholder in the frame.
2181
      AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
2182
      output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2183
      return;
2184
    }
2185

    
2186
    case Translation::STACK_SLOT: {
2187
      int input_slot_index = iterator->Next();
2188
      unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2189
      intptr_t input_value = input_->GetFrameSlot(input_offset);
2190
      if (trace_) {
2191
        PrintF("    0x%08" V8PRIxPTR ": ",
2192
               output_[frame_index]->GetTop() + output_offset);
2193
        PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
2194
               output_offset,
2195
               input_value,
2196
               input_offset);
2197
        reinterpret_cast<Object*>(input_value)->ShortPrint();
2198
        PrintF("\n");
2199
      }
2200
      output_[frame_index]->SetFrameSlot(output_offset, input_value);
2201
      return;
2202
    }
2203

    
2204
    case Translation::INT32_STACK_SLOT: {
2205
      int input_slot_index = iterator->Next();
2206
      unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2207
      intptr_t value = input_->GetFrameSlot(input_offset);
2208
      bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
2209
          Smi::IsValid(value);
2210
      if (trace_) {
2211
        PrintF("    0x%08" V8PRIxPTR ": ",
2212
               output_[frame_index]->GetTop() + output_offset);
2213
        PrintF("[top + %d] <- %" V8PRIdPTR " ; [sp + %d] (%s)\n",
2214
               output_offset,
2215
               value,
2216
               input_offset,
2217
               TraceValueType(is_smi, is_native));
2218
      }
2219
      if (is_smi) {
2220
        intptr_t tagged_value =
2221
            reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2222
        output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2223
      } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
2224
        output_[frame_index]->SetFrameSlot(output_offset, value);
2225
      } else {
2226
        // We save the untagged value on the side and store a GC-safe
2227
        // temporary placeholder in the frame.
2228
        ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
2229
        AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2230
                       static_cast<double>(static_cast<int32_t>(value)));
2231
        output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2232
      }
2233
      return;
2234
    }
2235

    
2236
    case Translation::UINT32_STACK_SLOT: {
2237
      int input_slot_index = iterator->Next();
2238
      unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2239
      uintptr_t value =
2240
          static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
2241
      bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
2242
          (value <= static_cast<uintptr_t>(Smi::kMaxValue));
2243
      if (trace_) {
2244
        PrintF("    0x%08" V8PRIxPTR ": ",
2245
               output_[frame_index]->GetTop() + output_offset);
2246
        PrintF("[top + %d] <- %" V8PRIuPTR " ; [sp + %d] (uint32 %s)\n",
2247
               output_offset,
2248
               value,
2249
               input_offset,
2250
               TraceValueType(is_smi, is_native));
2251
      }
2252
      if (is_smi) {
2253
        intptr_t tagged_value =
2254
            reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
2255
        output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
2256
      } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
2257
        output_[frame_index]->SetFrameSlot(output_offset, value);
2258
      } else {
2259
        // We save the untagged value on the side and store a GC-safe
2260
        // temporary placeholder in the frame.
2261
        ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
2262
        AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
2263
                       static_cast<double>(static_cast<uint32_t>(value)));
2264
        output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2265
      }
2266
      return;
2267
    }
2268

    
2269
    case Translation::DOUBLE_STACK_SLOT: {
2270
      int input_slot_index = iterator->Next();
2271
      unsigned input_offset = input_->GetOffsetFromSlotIndex(input_slot_index);
2272
      double value = input_->GetDoubleFrameSlot(input_offset);
2273
      if (trace_) {
2274
        PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- %e ; [sp + %d]\n",
2275
               output_[frame_index]->GetTop() + output_offset,
2276
               output_offset,
2277
               value,
2278
               input_offset);
2279
      }
2280
      // We save the untagged value on the side and store a GC-safe
2281
      // temporary placeholder in the frame.
2282
      AddDoubleValue(output_[frame_index]->GetTop() + output_offset, value);
2283
      output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
2284
      return;
2285
    }
2286

    
2287
    case Translation::LITERAL: {
2288
      Object* literal = ComputeLiteral(iterator->Next());
2289
      if (trace_) {
2290
        PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- ",
2291
               output_[frame_index]->GetTop() + output_offset,
2292
               output_offset);
2293
        literal->ShortPrint();
2294
        PrintF(" ; literal\n");
2295
      }
2296
      intptr_t value = reinterpret_cast<intptr_t>(literal);
2297
      output_[frame_index]->SetFrameSlot(output_offset, value);
2298
      return;
2299
    }
2300

    
2301
    case Translation::DUPLICATED_OBJECT: {
2302
      int object_index = iterator->Next();
2303
      if (trace_) {
2304
        PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- ",
2305
               output_[frame_index]->GetTop() + output_offset,
2306
               output_offset);
2307
        isolate_->heap()->arguments_marker()->ShortPrint();
2308
        PrintF(" ; duplicate of object #%d\n", object_index);
2309
      }
2310
      // Use the materialization marker value as a sentinel and fill in
2311
      // the object after the deoptimized frame is built.
2312
      intptr_t value = reinterpret_cast<intptr_t>(
2313
          isolate_->heap()->arguments_marker());
2314
      AddObjectDuplication(output_[frame_index]->GetTop() + output_offset,
2315
                           object_index);
2316
      output_[frame_index]->SetFrameSlot(output_offset, value);
2317
      return;
2318
    }
2319

    
2320
    case Translation::ARGUMENTS_OBJECT:
2321
    case Translation::CAPTURED_OBJECT: {
2322
      int length = iterator->Next();
2323
      bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
2324
      if (trace_) {
2325
        PrintF("    0x%08" V8PRIxPTR ": [top + %d] <- ",
2326
               output_[frame_index]->GetTop() + output_offset,
2327
               output_offset);
2328
        isolate_->heap()->arguments_marker()->ShortPrint();
2329
        PrintF(" ; object (length = %d, is_args = %d)\n", length, is_args);
2330
      }
2331
      // Use the materialization marker value as a sentinel and fill in
2332
      // the object after the deoptimized frame is built.
2333
      intptr_t value = reinterpret_cast<intptr_t>(
2334
          isolate_->heap()->arguments_marker());
2335
      AddObjectStart(output_[frame_index]->GetTop() + output_offset,
2336
                     length, is_args);
2337
      output_[frame_index]->SetFrameSlot(output_offset, value);
2338
      // We save the object values on the side and materialize the actual
2339
      // object after the deoptimized frame is built.
2340
      int object_index = deferred_objects_.length() - 1;
2341
      for (int i = 0; i < length; i++) {
2342
        DoTranslateObject(iterator, object_index, i);
2343
      }
2344
      return;
2345
    }
2346
  }
2347
}
2348

    
2349

    
2350
unsigned Deoptimizer::ComputeInputFrameSize() const {
2351
  unsigned fixed_size = ComputeFixedSize(function_);
2352
  // The fp-to-sp delta already takes the context and the function
2353
  // into account so we have to avoid double counting them (-2).
2354
  unsigned result = fixed_size + fp_to_sp_delta_ - (2 * kPointerSize);
2355
#ifdef DEBUG
2356
  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
2357
    unsigned stack_slots = compiled_code_->stack_slots();
2358
    unsigned outgoing_size = ComputeOutgoingArgumentSize();
2359
    ASSERT(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size);
2360
  }
2361
#endif
2362
  return result;
2363
}
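// Put differently (cf. the DEBUG check above): for optimized function code
// the fp-to-sp delta is 2 * kPointerSize (context and function) plus the
// spill slot area plus the outgoing argument area, so the result equals
// fixed_size + stack_slots * kPointerSize + outgoing_size.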
2364

    
2365

    
2366
unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const {
2367
  // The fixed part of the frame consists of the return address, frame
2368
  // pointer, function, context, and all the incoming arguments.
2369
  return ComputeIncomingArgumentSize(function) +
2370
      StandardFrameConstants::kFixedFrameSize;
2371
}
2372

    
2373

    
2374
unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const {
2375
  // The incoming arguments are the values of the formal parameters plus
  // the receiver. Every slot contains a pointer.
2377
  if (function->IsSmi()) {
2378
    ASSERT(Smi::cast(function) == Smi::FromInt(StackFrame::STUB));
2379
    return 0;
2380
  }
2381
  unsigned arguments = function->shared()->formal_parameter_count() + 1;
2382
  return arguments * kPointerSize;
2383
}
2384

    
2385

    
2386
unsigned Deoptimizer::ComputeOutgoingArgumentSize() const {
2387
  DeoptimizationInputData* data = DeoptimizationInputData::cast(
2388
      compiled_code_->deoptimization_data());
2389
  unsigned height = data->ArgumentsStackHeight(bailout_id_)->value();
2390
  return height * kPointerSize;
2391
}
2392

    
2393

    
2394
Object* Deoptimizer::ComputeLiteral(int index) const {
2395
  DeoptimizationInputData* data = DeoptimizationInputData::cast(
2396
      compiled_code_->deoptimization_data());
2397
  FixedArray* literals = data->LiteralArray();
2398
  return literals->get(index);
2399
}
2400

    
2401

    
2402
void Deoptimizer::AddObjectStart(intptr_t slot, int length, bool is_args) {
2403
  ObjectMaterializationDescriptor object_desc(
2404
      reinterpret_cast<Address>(slot), jsframe_count_, length, -1, is_args);
2405
  deferred_objects_.Add(object_desc);
2406
}
2407

    
2408

    
2409
void Deoptimizer::AddObjectDuplication(intptr_t slot, int object_index) {
2410
  ObjectMaterializationDescriptor object_desc(
2411
      reinterpret_cast<Address>(slot), jsframe_count_, -1, object_index, false);
2412
  deferred_objects_.Add(object_desc);
2413
}
2414

    
2415

    
2416
void Deoptimizer::AddObjectTaggedValue(intptr_t value) {
2417
  deferred_objects_tagged_values_.Add(reinterpret_cast<Object*>(value));
2418
}
2419

    
2420

    
2421
void Deoptimizer::AddObjectDoubleValue(double value) {
  deferred_objects_tagged_values_.Add(isolate()->heap()->the_hole_value());
  HeapNumberMaterializationDescriptor<int> value_desc(
      deferred_objects_tagged_values_.length() - 1, value);
  deferred_objects_double_values_.Add(value_desc);
}
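// Note: the hole pushed above reserves an index in the tagged value list;
// MaterializeHeapObjects() asserts that the slot still holds the hole and
// then replaces it with a freshly allocated heap number for this value.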
2427

    
2428

    
2429
void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) {
2430
  HeapNumberMaterializationDescriptor<Address> value_desc(
2431
      reinterpret_cast<Address>(slot_address), value);
2432
  deferred_heap_numbers_.Add(value_desc);
2433
}
2434

    
2435

    
2436
void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
2437
                                                   BailoutType type,
2438
                                                   int max_entry_id) {
2439
  // We cannot run this if the serializer is enabled because this will
2440
  // cause us to emit relocation information for the external
2441
  // references. This is fine because the deoptimizer's code section
2442
  // isn't meant to be serialized at all.
2443
  ASSERT(type == EAGER || type == SOFT || type == LAZY);
2444
  DeoptimizerData* data = isolate->deoptimizer_data();
2445
  int entry_count = data->deopt_entry_code_entries_[type];
2446
  if (max_entry_id < entry_count) return;
2447
  entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
2448
  while (max_entry_id >= entry_count) entry_count *= 2;
2449
  ASSERT(entry_count <= Deoptimizer::kMaxNumberOfEntries);
2450

    
2451
  MacroAssembler masm(isolate, NULL, 16 * KB);
2452
  masm.set_emit_debug_code(false);
2453
  GenerateDeoptimizationEntries(&masm, entry_count, type);
2454
  CodeDesc desc;
2455
  masm.GetCode(&desc);
2456
  ASSERT(!RelocInfo::RequiresRelocation(desc));
2457

    
2458
  MemoryChunk* chunk = data->deopt_entry_code_[type];
2459
  ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
2460
         desc.instr_size);
2461
  chunk->CommitArea(desc.instr_size);
2462
  CopyBytes(chunk->area_start(), desc.buffer,
2463
      static_cast<size_t>(desc.instr_size));
2464
  CPU::FlushICache(chunk->area_start(), desc.instr_size);
2465

    
2466
  data->deopt_entry_code_entries_[type] = entry_count;
2467
}
2468

    
2469

    
2470
FrameDescription::FrameDescription(uint32_t frame_size,
2471
                                   JSFunction* function)
2472
    : frame_size_(frame_size),
2473
      function_(function),
2474
      top_(kZapUint32),
2475
      pc_(kZapUint32),
2476
      fp_(kZapUint32),
2477
      context_(kZapUint32) {
2478
  // Zap all the registers.
2479
  for (int r = 0; r < Register::kNumRegisters; r++) {
2480
    SetRegister(r, kZapUint32);
2481
  }
2482

    
2483
  // Zap all the slots.
2484
  for (unsigned o = 0; o < frame_size; o += kPointerSize) {
2485
    SetFrameSlot(o, kZapUint32);
2486
  }
2487
}
2488

    
2489

    
2490
int FrameDescription::ComputeFixedSize() {
2491
  return StandardFrameConstants::kFixedFrameSize +
2492
      (ComputeParametersCount() + 1) * kPointerSize;
2493
}
2494

    
2495

    
2496
unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
  if (slot_index >= 0) {
    // Local or spill slots. Skip the fixed part of the frame
    // including all arguments.
    unsigned base = GetFrameSize() - ComputeFixedSize();
    return base - ((slot_index + 1) * kPointerSize);
  } else {
    // Incoming parameter.
    int arg_size = (ComputeParametersCount() + 1) * kPointerSize;
    unsigned base = GetFrameSize() - arg_size;
    return base - ((slot_index + 1) * kPointerSize);
  }
}
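// Worked example of the mapping above: local/spill slot 0 maps to offset
// GetFrameSize() - ComputeFixedSize() - kPointerSize, and each higher slot
// index is one word lower (a smaller offset); incoming parameter slot -1
// maps to offset GetFrameSize() - arg_size.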
2509

    
2510

    
2511
int FrameDescription::ComputeParametersCount() {
2512
  switch (type_) {
2513
    case StackFrame::JAVA_SCRIPT:
2514
      return function_->shared()->formal_parameter_count();
2515
    case StackFrame::ARGUMENTS_ADAPTOR: {
2516
      // Last slot contains the number of incoming arguments as a smi.
2517
      // Can't use GetExpression(0) because it would cause infinite recursion.
2518
      return reinterpret_cast<Smi*>(*GetFrameSlotPointer(0))->value();
2519
    }
2520
    case StackFrame::STUB:
2521
      return -1;  // Minus receiver.
2522
    default:
2523
      UNREACHABLE();
2524
      return 0;
2525
  }
2526
}
2527

    
2528

    
2529
Object* FrameDescription::GetParameter(int index) {
2530
  ASSERT(index >= 0);
2531
  ASSERT(index < ComputeParametersCount());
2532
  // The slot indexes for incoming arguments are negative.
2533
  unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount());
2534
  return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
2535
}
2536

    
2537

    
2538
unsigned FrameDescription::GetExpressionCount() {
2539
  ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
2540
  unsigned size = GetFrameSize() - ComputeFixedSize();
2541
  return size / kPointerSize;
2542
}
2543

    
2544

    
2545
Object* FrameDescription::GetExpression(int index) {
2546
  ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
2547
  unsigned offset = GetOffsetFromSlotIndex(index);
2548
  return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
2549
}
2550

    
2551

    
2552
void TranslationBuffer::Add(int32_t value, Zone* zone) {
  // Encode the sign bit in the least significant bit.
  bool is_negative = (value < 0);
  uint32_t bits = ((is_negative ? -value : value) << 1) |
      static_cast<int32_t>(is_negative);
  // Encode the individual bytes using the least significant bit of
  // each byte to indicate whether or not more bytes follow.
  do {
    uint32_t next = bits >> 7;
    contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone);
    bits = next;
  } while (bits != 0);
}
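// Illustrative examples of the encoding above:
//   Add(-3):  bits = (3 << 1) | 1 = 7  -> one byte, 0x0E.
//   Add(200): bits = 200 << 1 = 400    -> two bytes, 0x21 0x06 (low 7 bits
//             first; each byte's least significant bit flags whether more
//             bytes follow).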
2565

    
2566

    
2567
int32_t TranslationIterator::Next() {
  // Run through the bytes until we reach one with a least significant
  // bit of zero (marks the end).
  uint32_t bits = 0;
  for (int i = 0; true; i += 7) {
    ASSERT(HasNext());
    uint8_t next = buffer_->get(index_++);
    bits |= (next >> 1) << i;
    if ((next & 1) == 0) break;
  }
  // The bits encode the sign in the least significant bit.
  bool is_negative = (bits & 1) == 1;
  int32_t result = bits >> 1;
  return is_negative ? -result : result;
}
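// Decoding the examples listed after TranslationBuffer::Add() above:
//   0x0E      -> bits = 7   -> sign bit set   -> returns -3.
//   0x21 0x06 -> bits = 400 -> sign bit clear -> returns 200.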
2582

    
2583

    
2584
Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
2585
  int length = contents_.length();
2586
  Handle<ByteArray> result = factory->NewByteArray(length, TENURED);
2587
  OS::MemCopy(
2588
      result->GetDataStartAddress(), contents_.ToVector().start(), length);
2589
  return result;
2590
}
2591

    
2592

    
2593
void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
  buffer_->Add(CONSTRUCT_STUB_FRAME, zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}


void Translation::BeginGetterStubFrame(int literal_id) {
  buffer_->Add(GETTER_STUB_FRAME, zone());
  buffer_->Add(literal_id, zone());
}


void Translation::BeginSetterStubFrame(int literal_id) {
  buffer_->Add(SETTER_STUB_FRAME, zone());
  buffer_->Add(literal_id, zone());
}


void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
  buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}


void Translation::BeginJSFrame(BailoutId node_id,
                               int literal_id,
                               unsigned height) {
  buffer_->Add(JS_FRAME, zone());
  buffer_->Add(node_id.ToInt(), zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}


void Translation::BeginCompiledStubFrame() {
  buffer_->Add(COMPILED_STUB_FRAME, zone());
}


void Translation::BeginArgumentsObject(int args_length) {
  buffer_->Add(ARGUMENTS_OBJECT, zone());
  buffer_->Add(args_length, zone());
}


void Translation::BeginCapturedObject(int length) {
  buffer_->Add(CAPTURED_OBJECT, zone());
  buffer_->Add(length, zone());
}


void Translation::DuplicateObject(int object_index) {
  buffer_->Add(DUPLICATED_OBJECT, zone());
  buffer_->Add(object_index, zone());
}

void Translation::StoreRegister(Register reg) {
  buffer_->Add(REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


void Translation::StoreInt32Register(Register reg) {
  buffer_->Add(INT32_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


void Translation::StoreUint32Register(Register reg) {
  buffer_->Add(UINT32_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


void Translation::StoreDoubleRegister(DoubleRegister reg) {
  buffer_->Add(DOUBLE_REGISTER, zone());
  buffer_->Add(DoubleRegister::ToAllocationIndex(reg), zone());
}


void Translation::StoreStackSlot(int index) {
  buffer_->Add(STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


void Translation::StoreInt32StackSlot(int index) {
  buffer_->Add(INT32_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


void Translation::StoreUint32StackSlot(int index) {
  buffer_->Add(UINT32_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


void Translation::StoreDoubleStackSlot(int index) {
  buffer_->Add(DOUBLE_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


void Translation::StoreLiteral(int literal_id) {
  buffer_->Add(LITERAL, zone());
  buffer_->Add(literal_id, zone());
}


void Translation::StoreArgumentsObject(bool args_known,
                                       int args_index,
                                       int args_length) {
  buffer_->Add(ARGUMENTS_OBJECT, zone());
  buffer_->Add(args_known, zone());
  buffer_->Add(args_index, zone());
  buffer_->Add(args_length, zone());
}

int Translation::NumberOfOperandsFor(Opcode opcode) {
  switch (opcode) {
    case GETTER_STUB_FRAME:
    case SETTER_STUB_FRAME:
    case DUPLICATED_OBJECT:
    case ARGUMENTS_OBJECT:
    case CAPTURED_OBJECT:
    case REGISTER:
    case INT32_REGISTER:
    case UINT32_REGISTER:
    case DOUBLE_REGISTER:
    case STACK_SLOT:
    case INT32_STACK_SLOT:
    case UINT32_STACK_SLOT:
    case DOUBLE_STACK_SLOT:
    case LITERAL:
    case COMPILED_STUB_FRAME:
      return 1;
    case BEGIN:
    case ARGUMENTS_ADAPTOR_FRAME:
    case CONSTRUCT_STUB_FRAME:
      return 2;
    case JS_FRAME:
      return 3;
  }
  UNREACHABLE();
  return -1;
}

#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)

const char* Translation::StringFor(Opcode opcode) {
#define TRANSLATION_OPCODE_CASE(item)   case item: return #item;
  switch (opcode) {
    TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE)
  }
#undef TRANSLATION_OPCODE_CASE
  UNREACHABLE();
  return "";
}

#endif

// We can't intermix stack decoding and allocations because the
// deoptimization infrastructure is not GC safe.
// Thus we build a temporary structure in malloced space.
SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
                                            DeoptimizationInputData* data,
                                            JavaScriptFrame* frame) {
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());

  switch (opcode) {
    case Translation::BEGIN:
    case Translation::JS_FRAME:
    case Translation::ARGUMENTS_ADAPTOR_FRAME:
    case Translation::CONSTRUCT_STUB_FRAME:
    case Translation::GETTER_STUB_FRAME:
    case Translation::SETTER_STUB_FRAME:
      // Peeled off before getting here.
      break;

    case Translation::DUPLICATED_OBJECT:
    case Translation::ARGUMENTS_OBJECT:
    case Translation::CAPTURED_OBJECT:
      // This can only be emitted for local slots, not for argument slots.
      break;

    case Translation::REGISTER:
    case Translation::INT32_REGISTER:
    case Translation::UINT32_REGISTER:
    case Translation::DOUBLE_REGISTER:
      // We are at a safepoint which corresponds to a call.  All registers
      // are saved by the caller, so there are no live registers at this
      // point.  Thus these translation commands should not be used.
      break;

    case Translation::STACK_SLOT: {
      int slot_index = iterator->Next();
      Address slot_addr = SlotAddress(frame, slot_index);
      return SlotRef(slot_addr, SlotRef::TAGGED);
    }

    case Translation::INT32_STACK_SLOT: {
      int slot_index = iterator->Next();
      Address slot_addr = SlotAddress(frame, slot_index);
      return SlotRef(slot_addr, SlotRef::INT32);
    }

    case Translation::UINT32_STACK_SLOT: {
      int slot_index = iterator->Next();
      Address slot_addr = SlotAddress(frame, slot_index);
      return SlotRef(slot_addr, SlotRef::UINT32);
    }

    case Translation::DOUBLE_STACK_SLOT: {
      int slot_index = iterator->Next();
      Address slot_addr = SlotAddress(frame, slot_index);
      return SlotRef(slot_addr, SlotRef::DOUBLE);
    }

    case Translation::LITERAL: {
      int literal_index = iterator->Next();
      return SlotRef(data->GetIsolate(),
                     data->LiteralArray()->get(literal_index));
    }

    case Translation::COMPILED_STUB_FRAME:
      UNREACHABLE();
      break;
  }

  UNREACHABLE();
  return SlotRef();
}

void SlotRef::ComputeSlotsForArguments(Vector<SlotRef>* args_slots,
                                       TranslationIterator* it,
                                       DeoptimizationInputData* data,
                                       JavaScriptFrame* frame) {
  // Process the translation commands for the arguments.

  // Skip the translation command for the receiver.
  it->Skip(Translation::NumberOfOperandsFor(
      static_cast<Translation::Opcode>(it->Next())));

  // Compute slots for arguments.
  for (int i = 0; i < args_slots->length(); ++i) {
    (*args_slots)[i] = ComputeSlotForNextArgument(it, data, frame);
  }
}

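// Finds, within the optimized frame's translation, the frame describing
// inlined function number inlined_jsframe_index and returns one SlotRef per
// argument of that function: for an arguments adaptor frame the argument
// count is the recorded height minus one (the receiver), for a plain JS
// frame it is the caller-supplied formal_parameter_count.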
Vector<SlotRef> SlotRef::ComputeSlotMappingForArguments(
    JavaScriptFrame* frame,
    int inlined_jsframe_index,
    int formal_parameter_count) {
  DisallowHeapAllocation no_gc;
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data =
      static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  ASSERT(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();
  USE(jsframe_count);
  ASSERT(jsframe_count > inlined_jsframe_index);
  int jsframes_to_skip = inlined_jsframe_index;
  while (true) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) {
      if (jsframes_to_skip == 0) {
        ASSERT(Translation::NumberOfOperandsFor(opcode) == 2);

        it.Skip(1);  // literal id
        int height = it.Next();

        // We reached the arguments adaptor frame corresponding to the
        // inlined function in question.  Number of arguments is height - 1.
        Vector<SlotRef> args_slots =
            Vector<SlotRef>::New(height - 1);  // Minus receiver.
        ComputeSlotsForArguments(&args_slots, &it, data, frame);
        return args_slots;
      }
    } else if (opcode == Translation::JS_FRAME) {
      if (jsframes_to_skip == 0) {
        // Skip over operands to advance to the next opcode.
        it.Skip(Translation::NumberOfOperandsFor(opcode));

        // We reached the frame corresponding to the inlined function
        // in question.  Process the translation commands for the
        // arguments.  The number of arguments is equal to the formal
        // parameter count.
        Vector<SlotRef> args_slots =
            Vector<SlotRef>::New(formal_parameter_count);
        ComputeSlotsForArguments(&args_slots, &it, data, frame);
        return args_slots;
      }
      jsframes_to_skip--;
    }

    // Skip over operands to advance to the next opcode.
    it.Skip(Translation::NumberOfOperandsFor(opcode));
  }

  UNREACHABLE();
  return Vector<SlotRef>();
}

#ifdef ENABLE_DEBUGGER_SUPPORT

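// Captures one output frame in a form the debugger can inspect: the
// expression stack and the parameters are copied into freshly allocated
// arrays, so they stay valid (and visible to the GC via Iterate() below)
// independently of the deoptimizer's output frames.  When
// has_arguments_adaptor is set, the parameters are read from the arguments
// adaptor frame at frame_index - 1 instead.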
DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                                           int frame_index,
                                           bool has_arguments_adaptor,
                                           bool has_construct_stub) {
  FrameDescription* output_frame = deoptimizer->output_[frame_index];
  function_ = output_frame->GetFunction();
  has_construct_stub_ = has_construct_stub;
  expression_count_ = output_frame->GetExpressionCount();
  expression_stack_ = new Object*[expression_count_];
  // Get the source position using the unoptimized code.
  Address pc = reinterpret_cast<Address>(output_frame->GetPc());
  Code* code = Code::cast(deoptimizer->isolate()->FindCodeObject(pc));
  source_position_ = code->SourcePosition(pc);

  for (int i = 0; i < expression_count_; i++) {
    SetExpression(i, output_frame->GetExpression(i));
  }

  if (has_arguments_adaptor) {
    output_frame = deoptimizer->output_[frame_index - 1];
    ASSERT(output_frame->GetFrameType() == StackFrame::ARGUMENTS_ADAPTOR);
  }

  parameters_count_ = output_frame->ComputeParametersCount();
  parameters_ = new Object*[parameters_count_];
  for (int i = 0; i < parameters_count_; i++) {
    SetParameter(i, output_frame->GetParameter(i));
  }
}


DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
  delete[] expression_stack_;
  delete[] parameters_;
}

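// Makes the copied function, parameters and expression stack visible to the
// GC, since they live in plain C++ arrays rather than in handles or on the
// JavaScript stack.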
void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
  v->VisitPointer(BitCast<Object**>(&function_));
  v->VisitPointers(parameters_, parameters_ + parameters_count_);
  v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
}

#endif  // ENABLE_DEBUGGER_SUPPORT

} }  // namespace v8::internal