deps/v8/src/x64/full-codegen-x64.cc @ f230a1cf
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_X64

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, near_jump);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, near_jump);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      ASSERT(is_int8(delta_to_patch_site));
      __ testl(rax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
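
// Annotation on the patching protocol above (the patching itself lives in
// PatchInlinedSmiCode in ic-x64.cc, not in this file).  TEST always clears
// CF, so immediately after
//   testb reg, Immediate(kSmiTagMask)   ; kSmiTagMask == 1
//   jnc   target                        ; short jump, opcode 0x73
// the jnc is unconditionally taken: freshly compiled code always falls
// into the slow (IC) path.  Once type feedback warrants it, the runtime
// rewrites the short-jump opcode in place, 0x73 (jnc) -> 0x75 (jnz) or
// 0x72 (jc) -> 0x74 (jz), after which the branch keys off ZF, which the
// testb sets exactly when the low (smi tag) bit is clear.  The
// "testl rax, Immediate(delta)" emitted by EmitPatchInfo is never used for
// its result; its 8-bit immediate encodes the distance back to patch_site_
// so the patcher can locate the jump from the IC call site.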


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (i.e. ourselves)
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). rcx is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ testq(rcx, rcx);
    __ j(zero, &ok, Label::kNear);
    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ movq(args.GetReceiverOperand(), kScratchRegister);
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < locals_count; i++) {
        __ push(rdx);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in rdi.
    __ push(rdi);
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both rax and rsi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in rsi.
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);

    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movq(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ movq(Operand(rsi, context_offset), rax);
        // Update the write barrier.  This clobbers rax and rbx.
        __ RecordWriteContextSlot(
            rsi, context_offset, rax, rbx, kDontSaveFPRegs);
      }
    }
  }
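
  // Worked example of the parameter_offset arithmetic above (annotation):
  // per the function-entry comment, the caller pushed the receiver and
  // parameters left to right, so the last parameter sits closest to the
  // return address.  With num_parameters == 3, parameter 0 is found at
  // rbp + kCallerSPOffset + 2 * kPointerSize and parameter 2 at
  // rbp + kCallerSPOffset + 0, matching (num_parameters - 1 - i).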

  // Possibly allocate an arguments object.
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(rdi);
    } else {
      __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // The receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(rdx,
           Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(rdx);
    __ Push(Smi::FromInt(num_parameters));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, rax, rbx, rdx);
  }
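
  // Annotation on the three stub variants: NEW_STRICT builds the
  // strict-mode arguments object, which never aliases the parameter
  // slots; NEW_NON_STRICT_FAST builds the mapped ("aliased") arguments
  // object of classic mode; NEW_NON_STRICT_SLOW is the conservative
  // variant for functions with duplicate parameter names, e.g.
  //   function f(a, a) { return arguments[0]; }
  // where the fast parameter-slot aliasing cannot be used.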

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(rax, 0);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
                    Smi::FromInt(-delta));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing; if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ movq(kScratchRegister,
          reinterpret_cast<uint64_t>(Smi::FromInt(reset_value)),
          RelocInfo::NONE64);
  __ movq(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
}
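
// Annotation on the counter protocol used by the two helpers above: the
// profiling counter is a Cell holding a smi that starts out at
// FLAG_interrupt_budget.  Back edges and returns subtract a weight from
// it; when it goes negative the generated code calls the InterruptCheck
// builtin so the runtime can profile or optimize, and the counter is then
// reset.  Resetting to Smi::kMaxValue after a self-optimization attempt
// means the budget is effectively never exhausted again, implementing the
// "one-off" policy described in the comment.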


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  int weight = 1;
  if (FLAG_weighted_back_edges) {
    ASSERT(back_edge_target->is_bound());
    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
    weight = Min(kMaxBackEdgeWeight,
                 Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
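
// Annotation with a concrete weighting example: with
//   weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier))
// a loop whose body compiled to 3 * kCodeSizeMultiplier bytes drains the
// interrupt budget three times faster per iteration than a minimal loop,
// so larger loop bodies reach the InterruptCheck builtin (and with it the
// on-stack-replacement bookkeeping done via RecordBackEdge) after
// proportionally fewer iterations.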


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(rax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kCodeSizeMultiplier));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ j(positive, &ok, Label::kNear);
      __ push(rax);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        __ call(isolate()->builtins()->InterruptCheck(),
                RelocInfo::CODE_TARGET);
      }
      __ pop(rax);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ movq(rsp, rbp);
    __ pop(rbp);
    int no_frame_start = masm_->pc_offset();

    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, rcx);

#ifdef ENABLE_DEBUGGER_SUPPORT
    // Add padding that will be overwritten by a debugger breakpoint.  We
    // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
    // (3 + 1 + 3).
    const int kPadding = Assembler::kJSReturnSequenceLength - 7;
    for (int i = 0; i < kPadding; ++i) {
      masm_->int3();
    }
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}
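
// Annotation on the epilogue bytes counted above:
//   movq rsp, rbp   ; 3 bytes (REX.W prefix + opcode + modrm)
//   pop  rbp        ; 1 byte
//   ret  imm16      ; 3 bytes, popping the argument bytes
// The int3 padding rounds this up to Assembler::kJSReturnSequenceLength so
// the debugger can later overwrite the whole sequence with its break-point
// call without clobbering whatever instruction happens to follow the ret.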


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  __ push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Smi::cast(*lit));
  } else {
    __ Move(result_register(), lit);
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Smi::cast(*lit));
  } else {
    __ Push(lit);
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ movq(Operand(rsp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id());
  __ testq(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
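
// Annotation: the three shapes Split can emit, e.g. for cc == equal:
//   falls through to false:   je if_true
//   falls through to true:    jne if_false            (condition negated)
//   falls through to neither: je if_true; jmp if_false
// Picking the variant that falls through into the next emitted block
// saves an unconditional jump in the common case.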


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += kFPOnStackSize + kPCOnStackSize +
              (info_->scope()->num_parameters() - 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(rbp, offset);
}
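
// Annotation with a worked example (this assumes the parameter index
// convention implied by the formula above, where parameter i of n carries
// index i): the resulting slot address is
//   rbp + kFPOnStackSize + kPCOnStackSize + (n - 1 - i) * kPointerSize,
// i.e. parameters live above the saved frame pointer and return address,
// the last one closest to them, while locals resolve downwards from
// JavaScriptFrameConstants::kLocal0Offset below rbp.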


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ movq(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movq(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ Check(not_equal, kDeclarationInWithContext);
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movq(StackOperand(variable), kScratchRegister);
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(rsi);
      __ Push(variable->name());
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ Push(Smi::FromInt(attr));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ PushRoot(Heap::kTheHoleValueRootIndex);
      } else {
        __ Push(Smi::FromInt(0));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movq(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ movq(ContextOperand(rsi, variable->index()), result_register());
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(rsi,
                                offset,
                                result_register(),
                                rcx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(rsi);
      __ Push(variable->name());
      __ Push(Smi::FromInt(NONE));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope()));
  __ movq(rax, ContextOperand(rax, variable->interface()->Index()));
  __ movq(rax, ContextOperand(rax, Context::EXTENSION_INDEX));

  // Assign it.
  __ movq(ContextOperand(rsi, variable->index()), rax);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(rsi,
                            Context::SlotOffset(variable->index()),
                            rax,
                            rcx,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(rsi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ movq(rdx, Operand(rsp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ movq(rcx, rdx);
      __ or_(rcx, rax);
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);

      __ cmpq(rdx, rax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ testq(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
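
// Annotation on the inlined smi fast case above: or-ing the switch value
// and the case label into rcx lets a single testb check both smi tags at
// once, since the result has its low bit clear only if both values are
// smis.  In that case a raw cmpq decides '===' without the CompareIC:
// two smis are strictly equal exactly when their bit patterns match.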


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  Register null_value = rdi;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmpq(rax, null_value);
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ push(rax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(rax);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
  __ bind(&use_cache);

  Label no_descriptors;

  __ EnumLength(rdx, rax);
  __ Cmp(rdx, Smi::FromInt(0));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(rax, rcx);
  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(rax);  // Map.
  __ push(rcx);  // Enumeration cache.
  __ push(rdx);  // Number of valid entries for the map in the enum cache.
  __ Push(Smi::FromInt(0));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ addq(rsp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register rax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Cell> cell = isolate()->factory()->NewCell(
      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                     isolate()));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ Move(rbx, cell);
  __ Move(FieldOperand(rbx, Cell::kValueOffset),
          Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker));

  __ Move(rbx, Smi::FromInt(1));  // Smi indicates slow check
  __ movq(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
  __ j(above, &non_proxy);
  __ Move(rbx, Smi::FromInt(0));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(rbx);  // Smi
  __ push(rax);  // Array
  __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
  __ push(rax);  // Fixed array length (as smi).
  __ Push(Smi::FromInt(0));  // Initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ movq(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpq(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register rbx.
  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rbx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register rdx.
  __ movq(rdx, Operand(rsp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ movq(rcx, Operand(rsp, 4 * kPointerSize));
  __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ Cmp(rdx, Smi::FromInt(0));
  __ j(equal, &update_each, Label::kNear);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(rcx);  // Enumerable.
  __ push(rbx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ Cmp(rax, Smi::FromInt(0));
  __ j(equal, loop_statement.continue_label());
  __ movq(rbx, rax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register rbx.
  __ bind(&update_each);
  __ movq(result_register(), rbx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ addq(rsp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
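
// Annotation: the loop above keeps five slots live on the expression
// stack (byte offsets assume x64, kPointerSize == 8):
//   [rsp +  0]  current index (smi)
//   [rsp +  8]  length of the key array (smi)
//   [rsp + 16]  enum cache or fixed array of keys
//   [rsp + 24]  expected map, or Smi(0) in the proxy/slow case
//   [rsp + 32]  the enumerable object itself
// which is why the break path drops exactly 5 * kPointerSize bytes.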


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, loop_statement.break_label());
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ push(rax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
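
// Annotation: the AST pieces consumed above encode the usual desugaring of
//   for (each of iterable) body
// into, roughly,
//   var iterator = iterable[@@iterator]();
//   while (true) {
//     var result = iterator.next();
//     if (result.done) break;
//     each = result.value;
//     body
//   }
// with assign_iterator, next_result, result_done and assign_each naming
// the sub-expressions the parser pre-built for this visitor.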


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode(), info->is_generator());
    __ Move(rbx, info);
    __ CallStub(&stub);
  } else {
    __ push(rsi);
    __ Push(info);
    __ Push(pretenure
            ? isolate()->factory()->true_value()
            : isolate()->factory()->false_value());
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = rsi;
  Register temp = rdx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ movq(temp, context);
    }
    // Load map for comparison into register, outside loop.
    __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
    __ bind(&next);
    // Terminate at native context.
    __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ movq(rax, GlobalObjectOperand());
  __ Move(rcx, var->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  CallIC(ic, mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = rsi;
  Register temp = rbx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an rsi-based operand (the write barrier cannot be allowed to
  // destroy the rsi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ movq(rax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET ||
        local->mode() == CONST ||
        local->mode() == CONST_HARMONY) {
      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, done);
      if (local->mode() == CONST) {
        __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST_HARMONY
        __ Push(var->name());
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}
1431

    
1432

    
1433
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1434
  // Record position before possible IC call.
1435
  SetSourcePosition(proxy->position());
1436
  Variable* var = proxy->var();
1437

    
1438
  // Three cases: global variables, lookup variables, and all other types of
1439
  // variables.
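  // Illustrative mapping (not part of the original source):
  //   a variable in global code                  -> Variable::UNALLOCATED
  //   a parameter or stack-allocated local       -> PARAMETER / LOCAL
  //   a local captured by a closure              -> CONTEXT
  //   a name possibly introduced by with or eval -> LOOKUP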
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in rcx and the global
      // object on the stack.
      __ Move(rcx, var->name());
      __ movq(rax, GlobalObjectOperand());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(rax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(rax, var);
          __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ Push(var->name());
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST);
            __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
          }
          __ bind(&done);
          context()->Plug(rax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup slot");
      __ push(rsi);  // Context.
      __ Push(var->name());
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(rax);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // rdi = JS function.
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ movq(rbx, FieldOperand(rcx, literal_offset));
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function.
  // Result will be in rax.
  __ push(rcx);
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->pattern());
  __ Push(expr->flags());
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ movq(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(rbx);
  __ Push(Smi::FromInt(size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ movq(rdx, FieldOperand(rbx, i));
    __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
    __ movq(FieldOperand(rax, i), rdx);
    __ movq(FieldOperand(rax, i + kPointerSize), rcx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
    __ movq(FieldOperand(rax, size - kPointerSize), rdx);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ PushRoot(Heap::kNullValueRootIndex);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
      expr->depth() > 1 || Serializer::enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
    __ Push(Smi::FromInt(expr->literal_index()));
    __ Push(constant_properties);
    __ Push(Smi::FromInt(flags));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ movq(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
    __ Move(rbx, Smi::FromInt(expr->literal_index()));
    __ Move(rcx, constant_properties);
    __ Move(rdx, Smi::FromInt(flags));
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in rax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
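  // Illustrative example (not part of the original source): in
  //   var o = { x: f(), x: g() };
  // both f() and g() run for their side effects, but only the later value
  // is stored under 'x', so no store is emitted for the first occurrence.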
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(rax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ Move(rcx, key->value());
            __ movq(rdx, Operand(rsp, 0));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ Push(Smi::FromInt(NONE));    // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
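  // Illustrative example (not part of the original source):
  //   var o = { get x() { return 1; }, set x(v) { this._x = v; } };
  // Both accessors are collected under the key 'x' in accessor_table and
  // then installed with a single runtime call.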
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(rsp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Push(Smi::FromInt(NONE));
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(rsp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  Heap* heap = isolate()->heap();
  if (has_constant_fast_elements &&
      constant_elements_values->map() == heap->fixed_cow_array_map()) {
    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
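    // Illustrative example (not part of the original source): a literal
    // such as [1, 2, 3] is backed by a copy-on-write FixedArray, so every
    // evaluation can share the boilerplate's elements until some store
    // forces a copy.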
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
    __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ movq(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
    __ Move(rbx, Smi::FromInt(expr->literal_index()));
    __ Move(rcx, constant_elements);
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        DONT_TRACK_ALLOCATION_SITE,
        length);
    __ CallStub(&stub);
  } else if (expr->depth() > 1) {
    __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
    __ Push(Smi::FromInt(expr->literal_index()));
    __ Push(constant_elements);
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (Serializer::enabled() ||
      length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
    __ Push(Smi::FromInt(expr->literal_index()));
    __ Push(constant_elements);
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
    AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
        ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;

    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
    if (has_constant_fast_elements) {
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
      allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
    }

    __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ movq(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
    __ Move(rbx, Smi::FromInt(expr->literal_index()));
    __ Move(rcx, constant_elements);
    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(rax);  // array literal
      __ Push(Smi::FromInt(expr->literal_index()));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literals with ElementsKind of FAST_*_ELEMENTS
      // cannot transition, so there is no need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ movq(rbx, Operand(rsp, kPointerSize));  // Copy of array literal.
      __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
      // Store the subexpression value in the array's elements.
      __ movq(FieldOperand(rbx, offset), result_register());
      // Update the write barrier for the array store.
      __ RecordWriteField(rbx, offset, result_register(), rcx,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          INLINE_SMI_CHECK);
    } else {
      // Store the subexpression value in the array's elements.
      __ Move(rcx, Smi::FromInt(i));
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ addq(rsp, Immediate(kPointerSize));  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        __ movq(rdx, Operand(rsp, 0));
        __ push(rax);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
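  // Illustrative example (not part of the original source): for 'o.x += 1'
  // the property load, the addition, and the store each get their own
  // bailout point, so deoptimized execution can resume at the matching
  // step without redoing earlier side effects.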
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(rax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }
    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(rax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this.  It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
              Smi::FromInt(continuation.pos()));
      __ movq(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
      __ movq(rcx, rsi);
      __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
                          kDontSaveFPRegs);
      __ lea(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
      __ cmpq(rsp, rbx);
      __ j(equal, &post_runtime);
      __ push(rax);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ movq(context_register(),
              Operand(rbp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);

      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::FINAL: {
      VisitForAccumulatorValue(expr->generator_object());
      __ Move(FieldOperand(result_register(),
                           JSGeneratorObject::kContinuationOffset),
              Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g
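      // Illustrative sketch (not part of the original source) of the
      // control flow implemented below for 'yield* iterable', roughly:
      //   var received = undefined, f = 'next', arg = received;
      //   while (true) {
      //     var result = iter[f](arg);           // l_call
      //     if (result.done) break;              // l_loop
      //     try {
      //       arg = yield result;                // l_try: re-yield result
      //       f = 'next';
      //     } catch (e) {
      //       f = 'throw'; arg = e;              // l_catch: forward throws
      //     }
      //   }
      //   // The value of the whole yield* expression is result.value.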

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      // Initial send value is undefined.
      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      __ jmp(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ LoadRoot(rcx, Heap::kthrow_stringRootIndex);    // "throw"
      __ push(rcx);
      __ push(Operand(rsp, 2 * kPointerSize));           // iter
      __ push(rax);                                      // exception
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(rax);                                       // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(rax);                                      // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ movq(rax, Operand(rsp, generator_object_depth));
      __ push(rax);                                      // g
      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
              Smi::FromInt(l_continuation.pos()));
      __ movq(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
      __ movq(rcx, rsi);
      __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
                          kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ movq(context_register(),
              Operand(rbp, StandardFrameConstants::kContextOffset));
      __ pop(rax);                                       // result
      EmitReturnSequence();
      __ bind(&l_resume);                                // received in rax
      __ PopTryHandler();

      // receiver = iter; f = 'next'; arg = received;
      __ bind(&l_next);
      __ LoadRoot(rcx, Heap::knext_stringRootIndex);     // "next"
      __ push(rcx);
      __ push(Operand(rsp, 2 * kPointerSize));           // iter
      __ push(rax);                                      // received

      // result = receiver[f](arg);
      __ bind(&l_call);
      Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1);
      CallIC(ic);
      __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The key is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ push(rax);                                      // save result
      __ LoadRoot(rcx, Heap::kdone_stringRootIndex);     // "done"
      Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(done_ic);                                   // result.done in rax
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ testq(result_register(), result_register());
      __ j(zero, &l_try);

      // result.value
      __ pop(rax);                                       // result
      __ LoadRoot(rcx, Heap::kvalue_stringRootIndex);    // "value"
      Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(value_ic);                                  // result.value in rax
      context()->DropAndPlug(2, rax);                    // drop iter and g
      break;
    }
  }
}


void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in rax, and is ultimately read by the resumed generator, as
  // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it.  rbx
  // will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(rbx);

  // Check generator state.
  Label wrong_state, done;
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
  __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
                Smi::FromInt(0));
  __ j(less_equal, &wrong_state);

  // Load suspended function and context.
  __ movq(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
  __ movq(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));

  // Push receiver.
  __ push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rdx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ subq(rdx, Immediate(1));
  __ j(carry, &push_frame);
  __ push(rcx);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ bind(&push_frame);
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS Function.

  // Load the operand stack size.
  __ movq(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
  __ movq(rdx, FieldOperand(rdx, FixedArray::kLengthOffset));
  __ SmiToInteger32(rdx, rdx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmpq(rdx, Immediate(0));
    __ j(not_zero, &slow_resume);
    __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
    __ SmiToInteger64(rcx,
        FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
    __ addq(rdx, rcx);
    __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
            Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ jmp(rdx);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ subq(rdx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(rcx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ push(rbx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Abort(kGeneratorFailedToResume);

  // Throw error if we attempt to operate on a running generator.
  __ bind(&wrong_state);
  __ push(rbx);
  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->generator_result_map());

  __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ movq(context_register(),
          Operand(rbp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ Move(rbx, map);
  __ pop(rcx);
  __ Move(rdx, isolate()->factory()->ToBoolean(done));
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rbx);
  __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
          isolate()->factory()->empty_fixed_array());
  __ Move(FieldOperand(rax, JSObject::kElementsOffset),
          isolate()->factory()->empty_fixed_array());
  __ movq(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset),
          rcx);
  __ movq(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset),
          rdx);

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset,
                      rcx, rdx, kDontSaveFPRegs);
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ Move(rcx, key->value());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack (popped into rdx). Right operand is in rax but moved into
  // rcx to make the shifts easier.
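  // Illustrative note (not part of the original source): on x64 a smi has
  // its low tag bit clear, so OR-ing both operands into rax lets a single
  // tag-bit test cover them: bit 0 of rax | rdx is clear only if both
  // operands are smis.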
  Label done, stub_call, smi_case;
  __ pop(rdx);
  __ movq(rcx, rax);
  __ or_(rax, rdx);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ movq(rax, rcx);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  __ bind(&smi_case);
  switch (op) {
    case Token::SAR:
      __ SmiShiftArithmeticRight(rax, rdx, rcx);
      break;
    case Token::SHL:
      __ SmiShiftLeft(rax, rdx, rcx);
      break;
    case Token::SHR:
      __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
      break;
    case Token::ADD:
      __ SmiAdd(rax, rdx, rcx, &stub_call);
      break;
    case Token::SUB:
      __ SmiSub(rax, rdx, rcx, &stub_call);
      break;
    case Token::MUL:
      __ SmiMul(rax, rdx, rcx, &stub_call);
      break;
    case Token::BIT_OR:
      __ SmiOr(rax, rdx, rcx);
      break;
    case Token::BIT_AND:
      __ SmiAnd(rax, rdx, rcx);
      break;
    case Token::BIT_XOR:
      __ SmiXor(rax, rdx, rcx);
      break;
    default:
      UNREACHABLE();
      break;
  }

  __ bind(&done);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ pop(rdx);
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(rax);
}


void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten by the parser to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(rax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ movq(rdx, rax);
      __ pop(rax);  // Restore value.
      __ Move(rcx, prop->key()->AsLiteral()->value());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(rax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ movq(rcx, rax);
      __ pop(rdx);
      __ pop(rax);  // Restore value.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(rax);
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ Move(rcx, var->name());
    __ movq(rdx, GlobalObjectOperand());
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
  } else if (op == Token::INIT_CONST) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsStackLocal()) {
      Label skip;
      __ movq(rdx, StackOperand(var));
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, &skip);
      __ movq(StackOperand(var), rax);
      __ bind(&skip);
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      // Like var declarations, const declarations are hoisted to function
      // scope.  However, unlike var initializers, const initializers are
      // able to drill a hole to that function context, even from inside a
      // 'with' context.  We thus bypass the normal static scope lookup for
      // var->IsContextSlot().
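      // Illustrative example (not part of the original source):
      //   with (obj) { const c = 1; }
      // The initializer runs inside the 'with' context but must write the
      // hole-initialized const slot of the function context, so the runtime
      // walks the context chain instead of using a static slot access.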
      __ push(rax);
      __ push(rsi);
      __ Push(var->name());
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      __ push(rax);  // Value.
      __ push(rsi);  // Context.
      __ Push(var->name());
      __ Push(Smi::FromInt(language_mode()));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, rcx);
      __ movq(rdx, location);
      __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, &assign, Label::kNear);
      __ Push(var->name());
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      __ bind(&assign);
      __ movq(location, rax);
      if (var->IsContextSlot()) {
        __ movq(rdx, rax);
        __ RecordWriteContextSlot(
            rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
      }
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    // Assignment to var or initializing assignment to let/const
    // in harmony mode.
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      MemOperand location = VarOperand(var, rcx);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ movq(rdx, location);
        __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
        __ Check(equal, kLetBindingReInitialization);
      }
      // Perform the assignment.
      __ movq(location, rax);
      if (var->IsContextSlot()) {
        __ movq(rdx, rax);
        __ RecordWriteContextSlot(
            rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
      }
    } else {
      ASSERT(var->IsLookupSlot());
      __ push(rax);  // Value.
      __ push(rsi);  // Context.
      __ Push(var->name());
      __ Push(Smi::FromInt(language_mode()));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    }
  }
  // Non-initializing assignments to consts are ignored.
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ Move(rcx, prop->key()->AsLiteral()->value());
  __ pop(rdx);
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  __ pop(rcx);
  __ pop(rdx);
  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(rax);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(rax);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(rdx);
    EmitKeyedPropertyLoad(expr);
    context()->Plug(rax);
  }
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, rmode, ast_id);
}


void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    __ Move(rcx, name);
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  context()->Plug(rax);
}


void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  // Swap the name of the function and the receiver on the stack to follow
  // the calling convention for call ICs.
  __ pop(rcx);
  __ push(rax);
  __ push(rcx);

  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize));  // Key.
  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, rax);  // Drop the key still on the stack.
}


void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());

  // Record call targets in unoptimized code.
  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
  __ Move(rbx, cell);

  CallFunctionStub stub(arg_count, flags);
  __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  // Discard the function left on TOS.
  context()->DropAndPlug(1, rax);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
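  // Illustrative note (not part of the original source): together with the
  // function copy pushed by the caller, the five arguments to
  // %ResolvePossiblyDirectEval are the callee, a copy of the first argument,
  // the enclosing function's receiver, the language mode, and the start
  // position of the surrounding scope.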
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(rsp, arg_count * kPointerSize));
  } else {
    __ PushRoot(Heap::kUndefinedValueRootIndex);
  }

  // Push the receiver of the enclosing function and do runtime call.
  StackArgumentsAccessor args(rbp, info_->scope()->num_parameters());
  __ push(args.GetReceiverOperand());

  // Push the language mode.
  __ Push(Smi::FromInt(language_mode()));

  // Push the start position of the scope the call resides in.
  __ Push(Smi::FromInt(scope()->start_position()));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}


void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the call.
    // Then we call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ PushRoot(Heap::kUndefinedValueRootIndex);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and resolve
      // eval.
      __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in rax (function) and
      // rdx (receiver). Touch up the stack with the right values.
      __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
      __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
    __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, rax);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    // Call to a global variable.  Push global object as receiver for the
    // call IC lookup.
    __ push(GlobalObjectOperand());
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    // Call to a lookup slot (dynamically introduced variable).
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }
    __ bind(&slow);
    // Call the runtime to find the function to call (returned in rax) and
    // the object holding it (returned in rdx).
    __ push(context_register());
    __ Push(proxy->name());
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ push(rax);  // Function.
    __ push(rdx);  // Receiver.

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(rax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing the hole to the call function stub.
      __ PushRoot(Heap::kTheHoleValueRootIndex);
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found by
    // LoadContextSlot. That object could be the hole if the receiver is
    // implicitly the global object.
    EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->value(),
                     RelocInfo::CODE_TARGET);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // Load global receiver object.
    __ movq(rbx, GlobalObjectOperand());
    __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
    // Emit function call.
    EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.
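  // Illustrative example (not part of the original source): in 'new C(f())'
  // the expression C is evaluated and pushed before f() runs, so a side
  // effect in f that rebinds C cannot change which constructor is invoked.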
2768

    
2769
  // Push constructor on the stack.  If it's not a function it's used as
2770
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2771
  // ignored.
2772
  VisitForStackValue(expr->expression());
2773

    
2774
  // Push the arguments ("left-to-right") on the stack.
2775
  ZoneList<Expression*>* args = expr->arguments();
2776
  int arg_count = args->length();
2777
  for (int i = 0; i < arg_count; i++) {
2778
    VisitForStackValue(args->at(i));
2779
  }
2780

    
2781
  // Call the construct call builtin that handles allocation and
2782
  // constructor invocation.
2783
  SetSourcePosition(expr->position());
2784

    
2785
  // Load function and argument count into rdi and rax.
2786
  __ Set(rax, arg_count);
2787
  __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
2788

    
2789
  // Record call targets in unoptimized code, but not in the snapshot.
2790
  Handle<Object> uninitialized =
2791
      TypeFeedbackCells::UninitializedSentinel(isolate());
2792
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2793
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
2794
  __ Move(rbx, cell);
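  // The cell starts out holding the uninitialized sentinel; with
  // RECORD_CALL_TARGET the construct stub records the actual target in it,
  // giving the optimizing compiler a monomorphic constructor to work with.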

  CallConstructStub stub(RECORD_CALL_TARGET);
  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ JumpIfSmi(rax, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
  Split(non_negative_smi, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, if_true);
  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, if_false);
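  // Finally, check that the instance type is within the non-callable object
  // range.  Callables fall outside it and therefore report false here,
  // matching typeof, which classifies them as "function" rather than
  // "object".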
  __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ cmpq(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  __ j(below, if_false);
  __ cmpq(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(rax);

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ testb(FieldOperand(rbx, Map::kBitField2Offset),
           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ j(not_zero, &skip_lookup);

  // Check for fast case object. Generate false result for slow case object.
  __ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
  __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
  __ j(equal, if_false);

  // Look for valueOf string in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(rcx, rbx);
  __ cmpq(rcx, Immediate(0));
  __ j(equal, &done);

  __ LoadInstanceDescriptors(rbx, r8);
  // rbx: descriptor array.
  // rcx: valid entries in the descriptor array.
  // Calculate the end of the descriptor array.
  __ imul(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
  SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
  __ lea(rcx,
         Operand(
             r8, index.reg, index.scale, DescriptorArray::kFirstOffset));
  // Calculate location of the first key name.
  __ addq(r8, Immediate(DescriptorArray::kFirstOffset));
  // Loop through all the keys in the descriptor array. If one of these is the
  // internalized string "valueOf" the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(rdx, FieldOperand(r8, 0));
  __ Cmp(rdx, isolate()->factory()->value_of_string());
  __ j(equal, if_false);
  __ addq(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmpq(r8, rcx);
  __ j(not_equal, &loop);

  __ bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ or_(FieldOperand(rbx, Map::kBitField2Offset),
         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
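  // The bit is a purely positive cache: it is only set once a scan has
  // proved the map safe, so later checks on the same map take the
  // skip_lookup fast path above without walking the descriptors again.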

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  __ testq(rcx, Immediate(kSmiTagMask));
  __ j(zero, if_false);
  __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
  __ cmpq(rcx,
          ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(rax, if_false);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}



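// Returns true if the current (possibly adaptor-wrapped) calling frame was
// set up by a [[Construct]] invocation, i.e. its marker slot holds
// Smi::FromInt(StackFrame::CONSTRUCT).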
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &check_frame_marker);
  __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
         Smi::FromInt(StackFrame::CONSTRUCT));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(rbx);
  __ cmpq(rax, rbx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in rdx and the formal
  // parameter count in rax.
  VisitForAccumulatorValue(args->at(0));
  __ movq(rdx, rax);
  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));

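  // If the actual argument count differed from the declared parameter
  // count, an arguments adaptor frame sits between this frame and its
  // caller; the actual count recorded there takes precedence.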
  // Check if the calling frame is an arguments adaptor frame.
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &exit, Label::kNear);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  __ AssertSmi(rax);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(rax, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
  // Map is now in rax.
  __ j(below, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ j(equal, &function);

  __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ j(equal, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &non_function_constructor);

  // rax now contains the constructor function. Grab the
  // instance class name from there.
  __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ Move(rax, isolate()->factory()->function_class_string());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ Move(rax, isolate()->factory()->Object_string());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(rax, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(rax);
}


void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string.  Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }
  // Finally, we're expected to leave a value on the top of the stack.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ movq(rbx, rax);

  __ bind(&heapnumber_allocated);

  // Return a random uint32 number in rax.
  // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
  __ PrepareCallCFunction(1);
  __ movq(arg_reg_1,
          ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
  __ movq(arg_reg_1,
          FieldOperand(arg_reg_1, GlobalObject::kNativeContextOffset));
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

  // Convert 32 random bits in rax to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
3281
  __ movd(xmm1, rcx);
3282
  __ movd(xmm0, rax);
3283
  __ cvtss2sd(xmm1, xmm1);
3284
  __ xorps(xmm0, xmm1);
3285
  __ subsd(xmm0, xmm1);
3286
  __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
3287

    
3288
  __ movq(rax, rbx);
3289
  context()->Plug(rax);
3290
}
3291

    
3292

    
3293
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3294
  // Load the arguments on the stack and call the stub.
3295
  SubStringStub stub;
3296
  ZoneList<Expression*>* args = expr->arguments();
3297
  ASSERT(args->length() == 3);
3298
  VisitForStackValue(args->at(0));
3299
  VisitForStackValue(args->at(1));
3300
  VisitForStackValue(args->at(2));
3301
  __ CallStub(&stub);
3302
  context()->Plug(rax);
3303
}
3304

    
3305

    
3306
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3307
  // Load the arguments on the stack and call the stub.
3308
  RegExpExecStub stub;
3309
  ZoneList<Expression*>* args = expr->arguments();
3310
  ASSERT(args->length() == 4);
3311
  VisitForStackValue(args->at(0));
3312
  VisitForStackValue(args->at(1));
3313
  VisitForStackValue(args->at(2));
3314
  VisitForStackValue(args->at(3));
3315
  __ CallStub(&stub);
3316
  context()->Plug(rax);
3317
}
3318

    
3319

    
3320
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3321
  ZoneList<Expression*>* args = expr->arguments();
3322
  ASSERT(args->length() == 1);
3323

    
3324
  VisitForAccumulatorValue(args->at(0));  // Load the object.
3325

    
3326
  Label done;
3327
  // If the object is a smi return the object.
3328
  __ JumpIfSmi(rax, &done);
3329
  // If the object is not a value type, return the object.
3330
  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
3331
  __ j(not_equal, &done);
3332
  __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));
3333

    
3334
  __ bind(&done);
3335
  context()->Plug(rax);
3336
}
3337

    
3338

    
3339
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3340
  ZoneList<Expression*>* args = expr->arguments();
3341
  ASSERT(args->length() == 2);
3342
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3343
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3344

    
3345
  VisitForAccumulatorValue(args->at(0));  // Load the object.
3346

    
3347
  Label runtime, done, not_date_object;
3348
  Register object = rax;
3349
  Register result = rax;
3350
  Register scratch = rcx;
3351

    
3352
  __ JumpIfSmi(object, &not_date_object);
3353
  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3354
  __ j(not_equal, &not_date_object);
3355

    
3356
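  // Field 0 is the time value itself and is always valid.  Other cached
  // fields are only usable while the stamp stored in the object matches the
  // isolate's current date cache stamp; otherwise fall through to the C++
  // helper, which recomputes the field and refreshes the cache.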
  if (index->value() == 0) {
    __ movq(result, FieldOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      Operand stamp_operand = __ ExternalOperand(stamp);
      __ movq(scratch, stamp_operand);
      __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ movq(result, FieldOperand(object, JSDate::kValueOffset +
                                           kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2);
    __ movq(arg_reg_1, object);
    __ movq(arg_reg_2, index, RelocInfo::NONE64);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
                                                  Register index,
                                                  Register value,
                                                  uint32_t encoding_mask) {
  __ Check(masm()->CheckSmi(index), kNonSmiIndex);
  __ Check(masm()->CheckSmi(value), kNonSmiValue);

  __ SmiCompare(index, FieldOperand(string, String::kLengthOffset));
  __ Check(less, kIndexIsTooLarge);

  __ SmiCompare(index, Smi::FromInt(0));
  __ Check(greater_equal, kIndexIsNegative);

  __ push(value);
  __ movq(value, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbq(value, FieldOperand(value, Map::kInstanceTypeOffset));

  __ andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  __ cmpq(value, Immediate(encoding_mask));
  __ Check(equal, kUnexpectedStringType);
  __ pop(value);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = rax;
  Register index = rbx;
  Register value = rcx;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  __ pop(value);
  __ pop(index);
  VisitForAccumulatorValue(args->at(0));  // string

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  __ SmiToInteger32(value, value);
  __ SmiToInteger32(index, index);
  __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
          value);
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = rax;
  Register index = rbx;
  Register value = rcx;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  __ pop(value);
  __ pop(index);
  VisitForAccumulatorValue(args->at(0));  // string

  if (FLAG_debug_code) {
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
  }

  __ SmiToInteger32(value, value);
  __ SmiToInteger32(index, index);
  __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize),
          value);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(rbx);  // rax = value. rbx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(rbx, &done);

  // If the object is not a value type, return the value.
  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
  __ j(not_equal, &done);

  // Store the value.
  __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ movq(rdx, rax);
  __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);

  // Load the argument into rax and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(rax, rbx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(rbx);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register result = rdx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register scratch = rdx;
  Register result = rax;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Smi::FromInt(0));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringAddStub stub(STRING_ADD_CHECK_BOTH);
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(rax);
}


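// The transcendental stubs below share a per-isolate cache indexed by the
// argument's bit pattern, so repeated calls with an identical input can be
// answered without recomputing the function.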
void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &runtime);

  // InvokeFunction requires the function in rdi. Move it in there.
  __ movq(rdi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(rdi, count, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(rax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(rax);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    context()->Plug(rax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = rax;
  Register cache = rbx;
  Register tmp = rcx;
  __ movq(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
  __ movq(cache,
          FieldOperand(cache, GlobalObject::kNativeContextOffset));
  __ movq(cache,
          ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ movq(cache,
          FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

3789
  // tmp now holds finger offset as a smi.
3790
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3791
  __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3792
  SmiIndex index =
3793
      __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3794
  __ cmpq(key, FieldOperand(cache,
3795
                            index.reg,
3796
                            index.scale,
3797
                            FixedArray::kHeaderSize));
3798
  __ j(not_equal, &not_found, Label::kNear);
3799
  __ movq(rax, FieldOperand(cache,
3800
                            index.reg,
3801
                            index.scale,
3802
                            FixedArray::kHeaderSize + kPointerSize));
3803
  __ jmp(&done, Label::kNear);
3804

    
3805
  __ bind(&not_found);
3806
  // Call runtime to perform the lookup.
3807
  __ push(cache);
3808
  __ push(key);
3809
  __ CallRuntime(Runtime::kGetFromCache, 2);
3810

    
3811
  __ bind(&done);
3812
  context()->Plug(rax);
3813
}
3814

    
3815

    
3816
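// Two JSRegExp objects are considered equivalent when they share both their
// map and their compiled data array, which implies the same pattern and
// flags.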
void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = rax;
  Register left = rbx;
  Register tmp = rcx;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  __ cmpq(left, right);
  __ j(equal, &ok, Label::kNear);
  // Fail if either is a non-HeapObject.
  Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
  __ j(either_smi, &fail, Label::kNear);
  __ j(zero, &fail, Label::kNear);
  __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
  __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
          Immediate(JS_REGEXP_TYPE));
  __ j(not_equal, &fail, Label::kNear);
  __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
  __ j(not_equal, &fail, Label::kNear);
  __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  __ j(equal, &ok, Label::kNear);
  __ bind(&fail);
  __ Move(rax, isolate()->factory()->false_value());
  __ jmp(&done, Label::kNear);
  __ bind(&ok);
  __ Move(rax, isolate()->factory()->true_value());
  __ bind(&done);

  context()->Plug(rax);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

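  // A string's hash field can cache a small array index; the mask bits are
  // all zero exactly when such a cached index is present.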
  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ j(zero, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(rax);

  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  ASSERT(String::kHashShift >= kSmiTagSize);
  __ IndexFromHash(rax, rax);

  context()->Plug(rax);
}


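// Inlined fast path for joining an array of sequential ASCII strings.
// Three specialized copy loops are emitted, for an empty, a one-character,
// and a longer separator; unexpected input bails out by returning
// undefined, so the caller can fall back to the generic join.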
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, return_result, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to rax (= array)
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = rax;
  Register elements = no_reg;  // Will be rax.

  Register index = rdx;

  Register string_length = rcx;

  Register string = rsi;

  Register scratch = rbx;

  Register array_length = rdi;
  Register result_pos = no_reg;  // Will be rdi.

  Operand separator_operand =    Operand(rsp, 2 * kPointerSize);
  Operand result_operand =       Operand(rsp, 1 * kPointerSize);
  Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
  // Separator operand is already pushed. Make room for the two
  // other stack fields, and clear the direction flag in anticipation
  // of calling CopyBytes.
  __ subq(rsp, Immediate(2 * kPointerSize));
  __ cld();
  // Check that the array is a JSArray
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, &bailout);

  // Array has fast elements, so its length must be a smi.
  // If the array has length zero, return the empty string.
  __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiCompare(array_length, Smi::FromInt(0));
  __ j(not_zero, &non_trivial_array);
  __ LoadRoot(rax, Heap::kempty_stringRootIndex);
  __ jmp(&return_result);

  // Save the array length on the stack.
  __ bind(&non_trivial_array);
  __ SmiToInteger32(array_length, array_length);
  __ movl(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;


  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Set(index, 0);
  __ Set(string_length, 0);
  // Loop condition: while (index < array_length).
  // Live loop registers: index(int32), array_length(int32), string(String*),
  //                      scratch, string_length(int32), elements(FixedArray*).
  if (generate_debug_code_) {
    __ cmpq(index, array_length);
    __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ bind(&loop);
  __ movq(string, FieldOperand(elements,
                               index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ andb(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
  __ j(not_equal, &bailout);
  __ AddSmiField(string_length,
                 FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ j(overflow, &bailout);
  __ incl(index);
  __ cmpl(index, array_length);
  __ j(less, &loop);

  // Live registers:
  // string_length: Sum of string lengths.
  // elements: FixedArray of strings.
  // index: Array length.
  // array_length: Array length.

  // If array_length is 1, return elements[0], a string.
  __ cmpl(array_length, Immediate(1));
  __ j(not_equal, &not_size_one_array);
  __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
  __ jmp(&return_result);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths.
  // elements: FixedArray of strings.
  // index: Array length.

  // Check that the separator is a sequential ASCII string.
  __ movq(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ andb(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
  __ j(not_equal, &bailout);

  // Live registers:
  // string_length: Sum of string lengths.
  // elements: FixedArray of strings.
  // index: Array length.
  // string: Separator string.

  // Add (separator length times (array_length - 1)) to string_length.
  __ SmiToInteger32(scratch,
                    FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ decl(index);
  __ imull(scratch, index);
  __ j(overflow, &bailout);
  __ addl(string_length, scratch);
  __ j(overflow, &bailout);

  // Live registers and stack values:
  //   string_length: Total length of result string.
  //   elements: FixedArray of strings.
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ movq(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));

  __ movq(string, separator_operand);
  __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
                Smi::FromInt(1));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);


  // Empty separator case:
  __ Set(index, 0);
  __ movl(scratch, array_length_operand);
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < array_length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.
  //   scratch: array length.

  // Get string = array[index].
  __ movq(string, FieldOperand(elements, index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incl(index);
  __ bind(&loop_1_condition);
  __ cmpl(index, scratch);
  __ j(less, &loop_1);  // Loop while (index < array_length).
  __ jmp(&done);

  // Generic bailout code used from several places.
  __ bind(&bailout);
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ jmp(&return_result);


  // One-character separator case
  __ bind(&one_char_separator);
  // Get the separator ASCII character value.
  // Register "string" holds the separator.
  __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ Set(index, 0);
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   elements: The FixedArray of strings we are joining.
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   scratch: Separator character.

  // Copy the separator character to the result.
  __ movb(Operand(result_pos, 0), scratch);
  __ incq(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ movq(string, FieldOperand(elements, index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incl(index);
  __ cmpl(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  // Make elements point to end of elements array, and index
  // count from -array_length to zero, so we don't need to maintain
  // a loop limit.
  __ movl(index, array_length_operand);
  __ lea(elements, FieldOperand(elements, index, times_pointer_size,
                                FixedArray::kHeaderSize));
  __ neg(index);

  // Replace separator string with pointer to its first character, and
  // make scratch be its length.
  __ movq(string, separator_operand);
  __ SmiToInteger32(scratch,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ movq(separator_operand, string);

  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   scratch: Separator length.
  //   separator_operand (rsp[0x10]): Address of first char of separator.

  // Copy the separator to the result.
  __ movq(string, separator_operand);
  __ movl(string_length, scratch);
  __ CopyBytes(result_pos, string, string_length, 2);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ movq(string, Operand(elements, index, times_pointer_size, 0));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incq(index);
  __ j(not_equal, &loop_3);  // Loop while (index < 0).

  __ bind(&done);
  __ movq(rax, result_operand);

  __ bind(&return_result);
  // Drop temp values from the stack, and restore context register.
  __ addq(rsp, Immediate(3 * kPointerSize));
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  context()->Plug(rax);
}


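// Runtime calls whose name starts with '_' request one of the inlined
// intrinsics implemented above; everything else is dispatched either to a
// JS builtin via a call IC or directly to the C++ runtime.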
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4179
  Handle<String> name = expr->name();
4180
  if (name->length() > 0 && name->Get(0) == '_') {
4181
    Comment cmnt(masm_, "[ InlineRuntimeCall");
4182
    EmitInlineRuntimeCall(expr);
4183
    return;
4184
  }
4185

    
4186
  Comment cmnt(masm_, "[ CallRuntime");
4187
  ZoneList<Expression*>* args = expr->arguments();
4188

    
4189
  if (expr->is_jsruntime()) {
4190
    // Prepare for calling JS runtime function.
4191
    __ movq(rax, GlobalObjectOperand());
4192
    __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
4193
  }
4194

    
4195
  // Push the arguments ("left-to-right").
4196
  int arg_count = args->length();
4197
  for (int i = 0; i < arg_count; i++) {
4198
    VisitForStackValue(args->at(i));
4199
  }
4200

    
4201
  if (expr->is_jsruntime()) {
4202
    // Call the JS runtime function using a call IC.
4203
    __ Move(rcx, expr->name());
4204
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
4205
    Handle<Code> ic =
4206
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
4207
    CallIC(ic, mode, expr->CallRuntimeFeedbackId());
4208
    // Restore context register.
4209
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4210
  } else {
4211
    __ CallRuntime(expr->function(), arg_count);
4212
  }
4213
  context()->Plug(rax);
4214
}
4215

    
4216

    
4217
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ Push(Smi::FromInt(strict_mode_flag));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(rax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ push(GlobalObjectOperand());
          __ Push(var->name());
          __ Push(Smi::FromInt(kNonStrictMode));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(rax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(rax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

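    // NOT depends on the expression context: in an effect context only
    // the operand is visited; in a test context the branch labels are
    // swapped; in a value context, e.g. "var b = !x", a boolean is
    // materialized, with bailout ids prepared for both outcomes.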
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
        } else {
          __ PushRoot(Heap::kTrueValueRootIndex);
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
        } else {
          __ PushRoot(Heap::kFalseValueRootIndex);
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(rax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


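// Count operations: x++, ++x, x--, --x. For a postfix form in a value
// context, e.g. "var old = a.b++", the old value must survive the store,
// so a stack slot is reserved up front and filled below the receiver
// (and key) once the old value is known.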
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand-sides are rewritten to have a 'throw
  // ReferenceError' as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(Smi::FromInt(0));
    }
    if (assign_type == NAMED_PROPERTY) {
      VisitForAccumulatorValue(prop->obj());
      __ push(rax);  // Copy of receiver, needed for later store.
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ movq(rdx, Operand(rsp, 0));  // Leave receiver on stack.
      __ push(rax);  // Copy of key, needed for later store.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value,
  // because evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ JumpIfSmi(rax, &no_conversion, Label::kNear);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(rax);
          break;
        case NAMED_PROPERTY:
          __ movq(Operand(rsp, kPointerSize), rax);
          break;
        case KEYED_PROPERTY:
          __ movq(Operand(rsp, 2 * kPointerSize), rax);
          break;
      }
    }
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);

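  // Fast path: add/subtract 1 as a smi. The smi check emitted via
  // patch_site is patchable later by the IC machinery; on overflow the
  // operation is undone and control falls through to the generic
  // BinaryOpStub call below.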
  if (ShouldInlineSmiCase(expr->op())) {
    if (expr->op() == Token::INC) {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    } else {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
    }
    __ j(overflow, &stub_call, Label::kNear);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1));
    } else {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ movq(rdx, rax);
  __ Move(rax, Smi::FromInt(1));
  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()),
         RelocInfo::CODE_TARGET,
         expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(rax);
        }
        // For all contexts except kEffect: We have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(rax);
      }
      break;
    case NAMED_PROPERTY: {
      __ Move(rcx, prop->key()->AsLiteral()->value());
      __ pop(rdx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(rcx);
      __ pop(rdx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
  }
}


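// Loads the operand of a typeof expression. Unlike a normal load,
// "typeof x" must not throw when x is undeclared, so unresolved globals
// and dynamic (eval-introduced) lookups use load paths that cannot
// raise a reference error.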
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ Move(rcx, proxy->name());
    __ movq(rax, GlobalObjectOperand());
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(rax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(rsi);
    __ Push(proxy->name());
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(rax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


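// Fast path for comparisons of the form typeof expr == "literal".
// Rather than materializing the typeof string, the value in rax is
// checked directly against the shape the literal implies; an
// unrecognized literal, e.g. typeof x == "banana", is always false.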
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(rax, if_true);
    __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(rax, if_false);
    // Check for undetectable objects => false.
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    __ j(above_equal, if_false);
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(equal, if_true);
    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(equal, if_true);
    __ JumpIfSmi(rax, if_false);
    // Check for undetectable objects => true.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(rax, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
    __ j(equal, if_true);
    __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(rax, if_false);
    if (!FLAG_harmony_typeof) {
      __ CompareRoot(rax, Heap::kNullValueRootIndex);
      __ j(equal, if_true);
    }
    __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
    __ j(below, if_false);
    __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


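// Compiles a comparison. Literal comparisons (typeof and null/undefined
// checks) were handled above via TryLiteralCompare; IN and INSTANCEOF go
// through a builtin and a stub respectively, and the remaining operators
// use the compare IC, with an inlined smi fast case when profitable.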
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testq(rax, rax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ pop(rdx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ movq(rcx, rdx);
        __ or_(rcx, rax);
        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
        __ cmpq(rdx, rax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testq(rax, rax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


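// Fast path for comparing against null or undefined literals. Strict
// equality reduces to a direct root comparison; loose equality (where
// null and undefined compare equal to each other) goes through the
// CompareNilIC.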
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(rax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
    __ testq(rax, rax);
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(rax);
}


Register FullCodeGenerator::result_register() {
  return rax;
}


Register FullCodeGenerator::context_register() {
  return rsi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT(IsAligned(frame_offset, kPointerSize));
  __ movq(Operand(rbp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ movq(dst, ContextOperand(rsi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ Push(Smi::FromInt(0));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ push(ContextOperand(rsi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.


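// Saves everything a finally block might clobber: the return address
// (cooked into a smi-encoded offset from the code object, so a moving GC
// cannot invalidate it), the result register, and the isolate's
// pending-message state. ExitFinallyBlock pops these in reverse order.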
void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(rdx));
  ASSERT(!result_register().is(rcx));
  // Cook return address on top of stack (smi encoded Code* delta).
  __ PopReturnAddressTo(rdx);
  __ Move(rcx, masm_->CodeObject());
  __ subq(rdx, rcx);
  __ Integer32ToSmi(rdx, rdx);
  __ push(rdx);

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Load(rdx, pending_message_obj);
  __ push(rdx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Load(rdx, has_pending_message);
  __ Integer32ToSmi(rdx, rdx);
  __ push(rdx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Load(rdx, pending_message_script);
  __ push(rdx);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(rdx));
  ASSERT(!result_register().is(rcx));
  // Restore pending message from stack.
  __ pop(rdx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ Store(pending_message_script, rdx);

  __ pop(rdx);
  __ SmiToInteger32(rdx, rdx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ Store(has_pending_message, rdx);

  __ pop(rdx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Store(pending_message_obj, rdx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address.
  __ pop(rdx);
  __ SmiToInteger32(rdx, rdx);
  __ Move(rcx, masm_->CodeObject());
  __ addq(rdx, rcx);
  __ jmp(rdx);
}


#undef __

#define __ ACCESS_MASM(masm())


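// Unwinds one try-finally scope for a non-local transfer (break,
// continue or return crossing the finally). The handler block caches
// the context, so it is restored directly rather than by walking the
// context chain.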
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ movq(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __


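// Byte patterns used when patching back-edge checks: 0x79 is the opcode
// of a short jns, 0xe8 that of a near call, and 0x66 0x90 is the two-byte
// nop that overwrites the jns when a back edge is armed for on-stack
// replacement.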
static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x1d;
static const byte kCallInstruction = 0xe8;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(call_target_address,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


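// Recovers the patch state by inspecting the instruction stream: an
// intact jns means the interrupt check is live; otherwise the state is
// disambiguated by which builtin the call currently targets.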
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
    ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address));
    return INTERRUPT;
  }

  ASSERT_EQ(kNopByteOne, *jns_instr_address);
  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64