// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_IA32

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      ASSERT(is_int8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
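
// In outline, the patch-site trick above works as follows: 'test reg, imm'
// always clears the carry flag, so the jc emitted by EmitJumpIfSmi is never
// taken and the jnc emitted by EmitJumpIfNotSmi is always taken until the
// site is patched. When type feedback warrants it, jc is rewritten to jz
// and jnc to jnz (see EmitJump); the zero flag of the same
// 'test reg, kSmiTagMask' is set exactly when the smi tag bits are clear,
// so the patched jumps become real smi checks. EmitPatchInfo records the
// distance back to the jump in the immediate of a 'test eax' instruction
// (asserted to fit in 8 bits), which presumably lets the IC patching code
// locate the site; a bare nop signals that there is no inlined code.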


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o ecx: CallKind
//   o edi: the JS function object being called (i.e. ourselves)
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). ecx is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ test(ecx, ecx);
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ JumpIfSmi(ecx, &ok);
    __ CmpObjectType(ecx, JS_GLOBAL_PROXY_TYPE, ecx);
    __ j(not_equal, &ok, Label::kNear);
    __ mov(Operand(esp, receiver_offset),
           Immediate(isolate()->factory()->undefined_value()));
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      for (int i = 0; i < locals_count; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in edi.
    __ push(edi);
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both eax and esi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in esi.
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        __ RecordWriteContextSlot(esi,
                                  context_offset,
                                  eax,
                                  ebx,
                                  kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ push(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(eax, Immediate(Smi::FromInt(0)));
}
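
// Shared helpers for the profiling counter used below for back-edge and
// return-site bookkeeping: the counter cell is decremented by a weight,
// and when the result is no longer positive the generated code calls the
// InterruptCheck builtin and then restores the budget via
// EmitProfilingCounterReset().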


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  int weight = 1;
  if (FLAG_weighted_back_edges) {
    ASSERT(back_edge_target->is_bound());
    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
    weight = Min(kMaxBackEdgeWeight,
                 Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label.
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kCodeSizeMultiplier));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ j(positive, &ok, Label::kNear);
      __ push(eax);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        __ call(isolate()->builtins()->InterruptCheck(),
                RelocInfo::CODE_TARGET);
      }
      __ pop(eax);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    int no_frame_start = masm_->pc_offset();
    __ pop(ebp);

    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  __ push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeSet(result_register(), Immediate(lit));
  } else {
    __ Set(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
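
// Split emits the minimal number of jumps when one of the targets is the
// fall-through position: e.g. Split(equal, &t, &f, &f) emits just
// 'j(equal, &t)' and falls through to f, while Split(equal, &t, &f, &t)
// negates the condition and emits 'j(not_equal, &f)'. Both a conditional
// and an unconditional jump are needed only when neither label is the
// fall-through.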


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
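
// Taken together, the helpers above implement variable access: VarOperand
// materializes a variable's home location (loading the owning context into
// the scratch register for context slots), GetVar copies the value into a
// register, and SetVar stores a register into the location, emitting the
// record-write barrier (which clobbers both scratch registers) when the
// target is a heap-allocated context slot.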


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      __ push(Immediate(Smi::FromInt(NONE)));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(eax, scope_->ContextChainLength(scope_->GlobalScope()));
  __ mov(eax, ContextOperand(eax, variable->interface()->Index()));
  __ mov(eax, ContextOperand(eax, Context::EXTENSION_INDEX));

  // Assign it.
  __ mov(ContextOperand(esi, variable->index()), eax);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(esi,
                            Context::SlotOffset(variable->index()),
                            eax,
                            ecx,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
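      // ecx = edx | eax: the combined smi tag bits are zero only if both
      // the switch value and the label are smis, so a single patchable
      // jump checks both operands at once.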

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}

    
1023

    
1024
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1025
  Comment cmnt(masm_, "[ ForInStatement");
1026
  SetStatementPosition(stmt);
1027

    
1028
  Label loop, exit;
1029
  ForIn loop_statement(this, stmt);
1030
  increment_loop_depth();
1031

    
1032
  // Get the object to enumerate over. If the object is null or undefined, skip
1033
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
1034
  VisitForAccumulatorValue(stmt->enumerable());
1035
  __ cmp(eax, isolate()->factory()->undefined_value());
1036
  __ j(equal, &exit);
1037
  __ cmp(eax, isolate()->factory()->null_value());
1038
  __ j(equal, &exit);
1039

    
1040
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1041

    
1042
  // Convert the object to a JS object.
1043
  Label convert, done_convert;
1044
  __ JumpIfSmi(eax, &convert, Label::kNear);
1045
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1046
  __ j(above_equal, &done_convert, Label::kNear);
1047
  __ bind(&convert);
1048
  __ push(eax);
1049
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1050
  __ bind(&done_convert);
1051
  __ push(eax);
1052

    
1053
  // Check for proxies.
1054
  Label call_runtime, use_cache, fixed_array;
1055
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1056
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
1057
  __ j(below_equal, &call_runtime);
1058

    
1059
  // Check cache validity in generated code. This is a fast case for
1060
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1061
  // guarantee cache validity, call the runtime system to check cache
1062
  // validity or get the property names in a fixed array.
1063
  __ CheckEnumCache(&call_runtime);
1064

    
1065
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
1066
  __ jmp(&use_cache, Label::kNear);
1067

    
1068
  // Get the set of properties to enumerate.
1069
  __ bind(&call_runtime);
1070
  __ push(eax);
1071
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1072
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
1073
         isolate()->factory()->meta_map());
1074
  __ j(not_equal, &fixed_array);
1075

    
1076

    
1077
  // We got a map in register eax. Get the enumeration cache from it.
1078
  Label no_descriptors;
1079
  __ bind(&use_cache);
1080

    
1081
  __ EnumLength(edx, eax);
1082
  __ cmp(edx, Immediate(Smi::FromInt(0)));
1083
  __ j(equal, &no_descriptors);
1084

    
1085
  __ LoadInstanceDescriptors(eax, ecx);
1086
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
1087
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1088

    
1089
  // Set up the four remaining stack slots.
1090
  __ push(eax);  // Map.
1091
  __ push(ecx);  // Enumeration cache.
1092
  __ push(edx);  // Number of valid entries for the map in the enum cache.
1093
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
1094
  __ jmp(&loop);
1095

    
1096
  __ bind(&no_descriptors);
1097
  __ add(esp, Immediate(kPointerSize));
1098
  __ jmp(&exit);
1099

    
1100
  // We got a fixed array in register eax. Iterate through that.
1101
  Label non_proxy;
1102
  __ bind(&fixed_array);
1103

    
1104
  Handle<Cell> cell = isolate()->factory()->NewCell(
1105
      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
1106
                     isolate()));
1107
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
1108
  __ LoadHeapObject(ebx, cell);
1109
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
1110
         Immediate(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
1111

    
1112
  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
1113
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
1114
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1115
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
1116
  __ j(above, &non_proxy);
1117
  __ mov(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
1118
  __ bind(&non_proxy);
1119
  __ push(ebx);  // Smi
1120
  __ push(eax);  // Array
1121
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
1122
  __ push(eax);  // Fixed array length (as smi).
1123
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
1124

    
1125
  // Generate code for doing the condition check.
1126
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1127
  __ bind(&loop);
1128
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
1129
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
1130
  __ j(above_equal, loop_statement.break_label());
1131

    
1132
  // Get the current entry of the array into register ebx.
1133
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
1134
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
1135

    
1136
  // Get the expected map from the stack or a smi in the
1137
  // permanent slow case into register edx.
1138
  __ mov(edx, Operand(esp, 3 * kPointerSize));
1139

    
1140
  // Check if the expected map still matches that of the enumerable.
1141
  // If not, we may have to filter the key.
1142
  Label update_each;
1143
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
1144
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
1145
  __ j(equal, &update_each, Label::kNear);
1146

    
1147
  // For proxies, no filtering is done.
1148
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1149
  ASSERT(Smi::FromInt(0) == 0);
1150
  __ test(edx, edx);
1151
  __ j(zero, &update_each);
1152

    
1153
  // Convert the entry to a string or null if it isn't a property
1154
  // anymore. If the property has been removed while iterating, we
1155
  // just skip it.
1156
  __ push(ecx);  // Enumerable.
1157
  __ push(ebx);  // Current entry.
1158
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1159
  __ test(eax, eax);
1160
  __ j(equal, loop_statement.continue_label());
1161
  __ mov(ebx, eax);
1162

    
1163
  // Update the 'each' property or variable from the possibly filtered
1164
  // entry in register ebx.
1165
  __ bind(&update_each);
1166
  __ mov(result_register(), ebx);
1167
  // Perform the assignment as if via '='.
1168
  { EffectContext context(this);
1169
    EmitAssignment(stmt->each());
1170
  }
1171

    
1172
  // Generate code for the body of the loop.
1173
  Visit(stmt->body());
1174

    
1175
  // Generate code for going to the next element by incrementing the
1176
  // index (smi) stored on top of the stack.
1177
  __ bind(loop_statement.continue_label());
1178
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
1179

    
1180
  EmitBackEdgeBookkeeping(stmt, &loop);
1181
  __ jmp(&loop);
1182

    
1183
  // Remove the pointers stored on the stack.
1184
  __ bind(loop_statement.break_label());
1185
  __ add(esp, Immediate(5 * kPointerSize));
1186

    
1187
  // Exit and decrement the loop depth.
1188
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1189
  __ bind(&exit);
1190
  decrement_loop_depth();
1191
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ CompareRoot(eax, Heap::kUndefinedValueRootIndex);
  __ j(equal, loop_statement.break_label());
  __ CompareRoot(eax, Heap::kNullValueRootIndex);
  __ j(equal, loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode(), info->is_generator());
    __ mov(ebx, Immediate(info));
    __ CallStub(&stub);
  } else {
    __ push(esi);
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                      ? isolate()->factory()->true_value()
                      : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(edx, GlobalObjectOperand());
  __ mov(ecx, var->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  CallIC(ic, mode);
}
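
// A context's extension slot, when non-NULL, holds bindings introduced
// dynamically (e.g. by a non-strict eval in that scope). The walk above
// only needs to test contexts whose scope calls_non_strict_eval(); if
// every tested extension slot is NULL, no dynamically introduced binding
// can shadow the global, so the load IC fast path is safe. Otherwise we
// jump to 'slow', which the callers handle with a full runtime lookup.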
1344

    
1345

    
1346
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1347
                                                                Label* slow) {
1348
  ASSERT(var->IsContextSlot());
1349
  Register context = esi;
1350
  Register temp = ebx;
1351

    
1352
  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1353
    if (s->num_heap_slots() > 0) {
1354
      if (s->calls_non_strict_eval()) {
1355
        // Check that extension is NULL.
1356
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1357
               Immediate(0));
1358
        __ j(not_equal, slow);
1359
      }
1360
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1361
      // Walk the rest of the chain without clobbering esi.
1362
      context = temp;
1363
    }
1364
  }
1365
  // Check that last extension is NULL.
1366
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1367
  __ j(not_equal, slow);
1368

    
1369
  // This function is used only for loads, not stores, so it's safe to
1370
  // return an esi-based operand (the write barrier cannot be allowed to
1371
  // destroy the esi register).
1372
  return ContextOperand(context, var->index());
1373
}
1374

    
1375

    
1376
void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1377
                                                  TypeofState typeof_state,
1378
                                                  Label* slow,
1379
                                                  Label* done) {
1380
  // Generate fast-case code for variables that might be shadowed by
1381
  // eval-introduced variables.  Eval is used a lot without
1382
  // introducing variables.  In those cases, we do not want to
1383
  // perform a runtime call for all variables in the scope
1384
  // containing the eval.
1385
  if (var->mode() == DYNAMIC_GLOBAL) {
1386
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1387
    __ jmp(done);
1388
  } else if (var->mode() == DYNAMIC_LOCAL) {
1389
    Variable* local = var->local_if_not_shadowed();
1390
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1391
    if (local->mode() == LET ||
        local->mode() == CONST ||
        local->mode() == CONST_HARMONY) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST_HARMONY
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in ecx and the global
      // object in edx.
      __ mov(edx, GlobalObjectOperand());
      __ mov(ecx, var->name());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have a valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
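      // The runtime returns the value in eax (a receiver is also produced
      // in edx, but variable loads ignore it).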
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);
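
  // The literals array caches the materialized JSRegExp ("boilerplate")
  // for this literal site; undefined means it has not been created yet.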
  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
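  // Either component of an accessor pair may be missing; push null in its
  // place so the runtime always receives a complete (getter, setter) pair.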
  if (expression == NULL) {
    __ push(Immediate(isolate()->factory()->null_value()));
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
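  // Clone via the runtime when the fast-clone stub cannot handle the
  // literal: it may store doubles, it is nested, we are serializing, it
  // lacks fast elements, or it has too many properties.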
  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
      expr->depth() > 1 || Serializer::enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->value()));
            __ mov(edx, Operand(esp, 0));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  Heap* heap = isolate()->heap();
  if (has_constant_fast_elements &&
      constant_elements_values->map() == heap->fixed_cow_array_map()) {
    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        DONT_TRACK_ALLOCATION_SITE,
        length);
    __ CallStub(&stub);
  } else if (expr->depth() > 1) {
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (Serializer::enabled() ||
      length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
    AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
        ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;

    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
    if (has_constant_fast_elements) {
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
      allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
    }

    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(eax);  // array literal.
      __ push(Immediate(Smi::FromInt(expr->literal_index())));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literals with ElementsKind of FAST_*_ELEMENTS cannot
      // transition, so we don't need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ mov(ebx, Operand(esp, kPointerSize));  // Copy of array literal.
      __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
      // Store the subexpression value in the array's elements.
      __ mov(FieldOperand(ebx, offset), result_register());
      // Update the write barrier for the array store.
      __ RecordWriteField(ebx, offset, result_register(), ecx,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          INLINE_SMI_CHECK);
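      // INLINE_SMI_CHECK: the barrier first tests the stored value inline
      // and skips the remembered-set work when it is a smi.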
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ecx, Immediate(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ add(esp, Immediate(kPointerSize));  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in edx.
        VisitForStackValue(property->obj());
        __ mov(edx, Operand(esp, 0));
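        // (edx feeds the load IC for the old value; the stack copy feeds
        // the store IC after the binary operation.)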
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(edx, Operand(esp, kPointerSize));  // Object.
        __ mov(ecx, Operand(esp, 0));             // Key.
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this.  It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);
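      // &continuation marks the code offset recorded in the generator
      // object below; resuming the generator jumps here, which immediately
      // branches on to &resume.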

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
      __ cmp(esp, ebx);
      __ j(equal, &post_runtime);
      __ push(eax);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::FINAL: {
      VisitForAccumulatorValue(expr->generator_object());
      __ mov(FieldOperand(result_register(),
                          JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::DELEGATING: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g
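      //
      // The delegating yield is compiled, roughly, as this loop:
      //   let received = undefined;
      //   while (true) {
      //     let result;
      //     try { result = iter.next(received); }      // l_next, l_call
      //     catch (e) { result = iter['throw'](e); }   // l_catch
      //     if (result.done) break;                    // l_loop
      //     received = yield result;                   // l_try .. l_resume
      //   }
      //   result.value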

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      // Initial send value is undefined.
      __ mov(eax, isolate()->factory()->undefined_value());
      __ jmp(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ mov(ecx, isolate()->factory()->throw_string());  // "throw"
      __ push(ecx);                                      // "throw"
      __ push(Operand(esp, 2 * kPointerSize));           // iter
      __ push(eax);                                      // exception
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(eax);                                       // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(eax);                                      // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ mov(eax, Operand(esp, generator_object_depth));
      __ push(eax);                                      // g
      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(l_continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ pop(eax);                                       // result
      EmitReturnSequence();
      __ bind(&l_resume);                                // received in eax
      __ PopTryHandler();

      // receiver = iter; f = iter.next; arg = received;
      __ bind(&l_next);
      __ mov(ecx, isolate()->factory()->next_string());  // "next"
      __ push(ecx);
      __ push(Operand(esp, 2 * kPointerSize));           // iter
      __ push(eax);                                      // received

      // result = receiver[f](arg);
      __ bind(&l_call);
      Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1);
      CallIC(ic);
      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The key is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ push(eax);                                      // save result
      __ mov(edx, eax);                                  // result
      __ mov(ecx, isolate()->factory()->done_string());  // "done"
      Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(done_ic);                                   // result.done in eax
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ test(eax, eax);
      __ j(zero, &l_try);

      // result.value
      __ pop(edx);                                       // result
      __ mov(ecx, isolate()->factory()->value_string());  // "value"
      Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(value_ic);                                  // result.value in eax
      context()->DropAndPlug(2, eax);                    // drop iter and g
      break;
    }
  }
}


void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in eax, and is ultimately read by the resumed generator, as
  // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it.  ebx
  // will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(ebx);

  // Check generator state.
  Label wrong_state, done;
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
  __ cmp(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
         Immediate(Smi::FromInt(0)));
  __ j(less_equal, &wrong_state);

  // Load suspended function and context.
  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));

  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function.
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(ecx, isolate()->factory()->the_hole_value());
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ sub(edx, Immediate(Smi::FromInt(1)));
  __ j(carry, &push_frame);
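  // edx holds the formal parameter count as a smi; subtracting smi 1 sets
  // the carry flag exactly when the count underflows past zero, i.e. when
  // all argument holes have been pushed.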
  __ push(ecx);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ bind(&push_frame);
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  // Load the operand stack size.
  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
    __ j(not_zero, &slow_resume);
    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
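    // The resume address is the function's code entry plus the byte offset
    // that was recorded in the continuation field at suspension.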
    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ jmp(edx);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(edx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(ecx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ push(ebx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Abort(kGeneratorFailedToResume);

  // Throw error if we attempt to operate on a running generator.
  __ bind(&wrong_state);
  __ push(ebx);
  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->generator_result_map());
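  // The map describes a JSObject with two in-object properties, so an
  // instance occupies five words: map, properties, elements, value, done
  // (checked by the ASSERT_EQ below).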

  __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ mov(context_register(),
         Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ mov(ebx, map);
  __ pop(ecx);
  __ mov(edx, isolate()->factory()->ToBoolean(done));
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
  __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);

  // Only the value field needs a write barrier, as the other values are in the
  // root set.
  __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
                      ecx, edx, kDontSaveFPRegs);
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  ASSERT(!key->value()->IsSmi());
  __ mov(ecx, Immediate(key->value()));
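  // The load IC takes the receiver in edx (set up by our callers) and the
  // property name in ecx.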
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  __ pop(edx);
  __ mov(ecx, eax);
  __ or_(eax, edx);
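  // Smis are tagged with a zero low bit, so OR-ing the operands leaves the
  // low bit set iff at least one of them is a heap object; a single test
  // therefore checks both operands at once.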
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ mov(eax, ecx);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary
      __ SmiTag(eax);
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
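      // A smi payload is a signed 31-bit value, i.e. in [-2^30, 2^30).
      // Subtracting 0xc0000000 (-2^30) leaves the sign flag clear exactly
      // for values in that range, so j(positive) accepts them.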
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      __ test(eax, Immediate(0xc0000000));
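      // The logical shift yields an unsigned value; it fits in a smi only
      // if it is below 2^30, i.e. the top two bits are clear.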
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
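      // A zero product may stand for -0 (e.g. -1 * 0), which cannot be
      // represented as a smi; take the stub path when either original
      // operand was negative.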
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ pop(edx);
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}


void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten by the parser to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(eax);  // Preserve value.
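      // The value to store arrived in eax; keep it safe while the receiver
      // is evaluated, then restore it for the store IC (value in eax,
      // receiver in edx, name in ecx).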
      VisitForAccumulatorValue(prop->obj());
      __ mov(edx, eax);
      __ pop(eax);  // Restore value.
      __ mov(ecx, prop->key()->AsLiteral()->value());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(ecx, eax);
      __ pop(edx);  // Receiver.
      __ pop(eax);  // Restore value.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(ecx, var->name());
    __ mov(edx, GlobalObjectOperand());
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsStackLocal()) {
      Label skip;
      __ mov(edx, StackOperand(var));
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip);
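      // Store only while the slot still holds the hole: once a classic-mode
      // const has been initialized, later initializers must not overwrite it.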
      __ mov(StackOperand(var), eax);
      __ bind(&skip);
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      // Like var declarations, const declarations are hoisted to function
      // scope.  However, unlike var initializers, const initializers are
      // able to drill a hole to that function context, even from inside a
      // 'with' context.  We thus bypass the normal static scope lookup for
      // var->IsContextSlot().
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // A non-initializing assignment to a let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ push(Immediate(Smi::FromInt(language_mode())));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, ecx);
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &assign, Label::kNear);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      __ bind(&assign);
      __ mov(location, eax);
      if (var->IsContextSlot()) {
        __ mov(edx, eax);
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
      }
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    // Assignment to var or initializing assignment to let/const
    // in harmony mode.
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      MemOperand location = VarOperand(var, ecx);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      // Perform the assignment.
      __ mov(location, eax);
      if (var->IsContextSlot()) {
        __ mov(edx, eax);
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
      }
    } else {
      ASSERT(var->IsLookupSlot());
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ push(Immediate(Smi::FromInt(language_mode())));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    }
  }
  // Non-initializing assignments to consts are ignored.
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver

  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(ecx, prop->key()->AsLiteral()->value());
  __ pop(edx);  // Receiver.
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  __ pop(ecx);  // Key.
  __ pop(edx);  // Receiver.
  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    __ mov(edx, result_register());
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(eax);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(edx);                     // Object.
    __ mov(ecx, result_register());  // Key.
    EmitKeyedPropertyLoad(expr);
    context()->Plug(eax);
  }
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, rmode, ast_id);
}




void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    __ Set(ecx, Immediate(name));
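    // The call IC expects the callee name in ecx; the receiver was pushed
    // by the caller and sits below the arguments on the stack.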
  }
  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}


void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  // Swap the name of the function and the receiver on the stack to follow
  // the calling convention for call ICs.
  __ pop(ecx);
  __ push(eax);
  __ push(ecx);

  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize));  // Key.
  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);  // Drop the key still on the stack.
}


void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());

  // Record call targets in unoptimized code.
  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
  __ mov(ebx, cell);
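  // The cell starts out holding the uninitialized sentinel; the call stub
  // records the callee in it, giving the optimizing compiler per-call-site
  // type feedback.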

  CallFunctionStub stub(arg_count, flags);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub, expr->CallFeedbackId());

  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the receiver of the enclosing function.
  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the call resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));
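  // Together with the copy of the function pushed by the caller, this makes
  // the five arguments kResolvePossiblyDirectEval expects.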

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}


void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the call.
    // Then we call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      // Reserved receiver slot.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }
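      // Stack now: function, receiver slot, arg0 .. argN-1 (top). After the
      // runtime call below, the resolved function and receiver are written
      // back into the two deepest of these slots.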
2690

    
2691
      // Push a copy of the function (found below the arguments) and
2692
      // resolve eval.
2693
      __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2694
      EmitResolvePossiblyDirectEval(arg_count);
2695

    
2696
      // The runtime call returns a pair of values in eax (function) and
2697
      // edx (receiver). Touch up the stack with the right values.
2698
      __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2699
      __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2700
    }
2701
    // Record source position for debugger.
2702
    SetSourcePosition(expr->position());
2703
    CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2704
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2705
    __ CallStub(&stub);
2706
    RecordJSReturnSite(expr);
2707
    // Restore context register.
2708
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2709
    context()->DropAndPlug(1, eax);
2710

    
2711
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2712
    // Push global object as receiver for the call IC.
2713
    __ push(GlobalObjectOperand());
2714
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2715

    
2716
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2717
    // Call to a lookup slot (dynamically introduced variable).
2718
    Label slow, done;
2719
    { PreservePositionScope scope(masm()->positions_recorder());
2720
      // Generate code for loading from variables potentially shadowed by
2721
      // eval-introduced variables.
2722
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2723
    }
2724
    __ bind(&slow);
2725
    // Call the runtime to find the function to call (returned in eax) and
2726
    // the object holding it (returned in edx).
2727
    __ push(context_register());
2728
    __ push(Immediate(proxy->name()));
2729
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
2730
    __ push(eax);  // Function.
2731
    __ push(edx);  // Receiver.
2732

    
2733
    // If fast case code has been generated, emit code to push the function
2734
    // and receiver and have the slow path jump around this code.
2735
    if (done.is_linked()) {
2736
      Label call;
2737
      __ jmp(&call, Label::kNear);
2738
      __ bind(&done);
2739
      // Push function.
2740
      __ push(eax);
2741
      // The receiver is implicitly the global receiver. Indicate this by
2742
      // passing the hole to the call function stub.
2743
      __ push(Immediate(isolate()->factory()->the_hole_value()));
2744
      __ bind(&call);
2745
    }
2746

    
2747
    // The receiver is either the global receiver or an object found by
2748
    // LoadContextSlot. That object could be the hole if the receiver is
2749
    // implicitly the global object.
2750
    EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2751

    
2752
  } else if (property != NULL) {
2753
    { PreservePositionScope scope(masm()->positions_recorder());
2754
      VisitForStackValue(property->obj());
2755
    }
2756
    if (property->key()->IsPropertyName()) {
2757
      EmitCallWithIC(expr,
2758
                     property->key()->AsLiteral()->value(),
2759
                     RelocInfo::CODE_TARGET);
2760
    } else {
2761
      EmitKeyedCallWithIC(expr, property->key());
2762
    }
2763

    
2764
  } else {
2765
    // Call to an arbitrary expression not handled specially above.
2766
    { PreservePositionScope scope(masm()->positions_recorder());
2767
      VisitForStackValue(callee);
2768
    }
2769
    // Load global receiver object.
2770
    __ mov(ebx, GlobalObjectOperand());
2771
    __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
2772
    // Emit function call.
2773
    EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2774
  }
2775

    
2776
#ifdef DEBUG
2777
  // RecordJSReturnSite should have been called.
2778
  ASSERT(expr->return_is_recorded_);
2779
#endif
2780
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into edi and eax.
  __ Set(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
  __ mov(ebx, cell);

  CallConstructStub stub(RECORD_CALL_TARGET);
  __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
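  // Smis carry a zero low bit on ia32 (kSmiTag == 0, kSmiTagSize == 1), so
  // testing the low bit of the value is sufficient to decide the predicate.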
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
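  // One test covers both conditions: the low bit is the smi tag and
  // 0x80000000 is the sign bit, so the value is a non-negative smi exactly
  // when both bits are clear.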
  __ test(eax, Immediate(kSmiTagMask | 0x80000000));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, if_true);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, if_false);
  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ j(below, if_false);
  __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ebx, Immediate(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(eax);

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ j(not_zero, &skip_lookup);

  // Check for fast case object. Return false for slow case objects.
  __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, isolate()->factory()->hash_table_map());
  __ j(equal, if_false);

  // Look for valueOf string in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(ecx, ebx);
  __ cmp(ecx, 0);
  __ j(equal, &done);

  __ LoadInstanceDescriptors(ebx, ebx);
  // ebx: descriptor array.
  // ecx: valid entries in the descriptor array.
  // Calculate the end of the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
  __ lea(ecx, Operand(ebx, ecx, times_2, DescriptorArray::kFirstOffset));
  // Calculate location of the first key name.
  __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
  // Loop through all the keys in the descriptor array. If one of these is the
  // internalized string "valueOf" the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ mov(edx, FieldOperand(ebx, 0));
  __ cmp(edx, isolate()->factory()->value_of_string());
  __ j(equal, if_false);
  __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(ebx, ecx);
  __ j(not_equal, &loop);

  __ bind(&done);

  // Reload map as register ebx was used as temporary above.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ or_(FieldOperand(ebx, Map::kBitField2Offset),
         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is false.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ JumpIfSmi(ecx, if_false);
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edx,
         FieldOperand(edx, GlobalObject::kNativeContextOffset));
  __ cmp(ecx,
         ContextOperand(edx,
                        Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}



void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(ebx);
  __ cmp(eax, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in edx and the formal
  // parameter count in eax.
  VisitForAccumulatorValue(args->at(0));
  __ mov(edx, eax);
  __ Set(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ Set(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  __ AssertSmi(eax);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(eax, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
  // Map is now in eax.
  __ j(below, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ j(equal, &function);

  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ j(equal, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &non_function_constructor);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->function_class_string());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string.  Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }
  // Finally, we're expected to leave a value on the top of the stack.
  __ mov(eax, isolate()->factory()->undefined_value());
  context()->Plug(eax);
}


void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(edi, eax);

  __ bind(&heapnumber_allocated);

  __ PrepareCallCFunction(1, ebx);
  __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
  __ mov(eax, FieldOperand(eax, GlobalObject::kNativeContextOffset));
  __ mov(Operand(esp, 0), eax);
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

  // Convert 32 random bits in eax to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
  // This is implemented on both SSE2 and FPU.
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatureScope fscope(masm(), SSE2);
    __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
    __ movd(xmm1, ebx);
    __ movd(xmm0, eax);
    __ cvtss2sd(xmm1, xmm1);
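    // The xor acts as an or here: the random bits in xmm0 and the exponent
    // pattern in xmm1 have no set bits in common.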
    __ xorps(xmm0, xmm1);
    __ subsd(xmm0, xmm1);
    __ movsd(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
  } else {
    // 0x4130000000000000 is 1.0 x 2^20 as a double.
    __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
           Immediate(0x41300000));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ fsubp(1);
    __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
  }
  __ mov(eax, edi);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi, return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = eax;
  Register result = eax;
  Register scratch = ecx;

  __ JumpIfSmi(object, &not_date_object);
  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
  __ j(not_equal, &not_date_object);

  if (index->value() == 0) {
    __ mov(result, FieldOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
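    // Fields below kFirstUncachedField are cached on the JSDate object and
    // stay valid while the global date cache stamp matches the object's
    // stamp; on a mismatch we fall through to the C call below.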
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch, Operand::StaticVariable(stamp));
      __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ mov(result, FieldOperand(object, JSDate::kValueOffset +
                                          kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch);
    __ mov(Operand(esp, 0), object);
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(result);
}


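// Emits checks that (string, index, value) form a valid write into a
// sequential string: index and value must be smis, the index must be in
// range, and the string must have the expected representation and encoding.
// Its callers below only invoke this under FLAG_debug_code.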
void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
                                                  Register index,
                                                  Register value,
                                                  uint32_t encoding_mask) {
  __ test(index, Immediate(kSmiTagMask));
  __ Check(zero, kNonSmiIndex);
  __ test(value, Immediate(kSmiTagMask));
  __ Check(zero, kNonSmiValue);

  __ cmp(index, FieldOperand(string, String::kLengthOffset));
  __ Check(less, kIndexIsTooLarge);

  __ cmp(index, Immediate(Smi::FromInt(0)));
  __ Check(greater_equal, kIndexIsNegative);

  __ push(value);
  __ mov(value, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  __ and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  __ cmp(value, Immediate(encoding_mask));
  __ Check(equal, kUnexpectedStringType);
  __ pop(value);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  __ pop(value);
  __ pop(index);
  VisitForAccumulatorValue(args->at(0));  // string


  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  __ SmiUntag(value);
  __ SmiUntag(index);
  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  __ pop(value);
  __ pop(index);
  VisitForAccumulatorValue(args->at(0));  // string

  if (FLAG_debug_code) {
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
  }

  __ SmiUntag(value);
  // No need to untag a smi for two-byte addressing.
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  if (CpuFeatures::IsSupported(SSE2)) {
    MathPowStub stub(MathPowStub::ON_STACK);
    __ CallStub(&stub);
  } else {
    __ CallRuntime(Runtime::kMath_pow, 2);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(ebx);  // eax = value. ebx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(ebx, &done, Label::kNear);

  // If the object is not a value type, return the value.
  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
  __ j(not_equal, &done, Label::kNear);

  // Store the value.
  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);

  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(edx, eax);
  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);

  // Load the argument into eax and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Set(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Set(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Set(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Set(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringAddStub stub(STRING_ADD_CHECK_BOTH);
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; ++i) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &runtime);

  // InvokeFunction requires the function in edi. Move it in there.
  __ mov(edi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(edi, count, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
  __ mov(cache,
         FieldOperand(cache, GlobalObject::kNativeContextOffset));
  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache,
         FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

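  // The cache stores (key, value) pairs in consecutive elements; the finger
  // records the offset of the most recently hit key, so a match at the
  // finger lets us load the value from the very next slot.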
  Label done, not_found;
  // tmp now holds finger offset as a smi.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);

  __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = eax;
  Register left = ebx;
  Register tmp = ecx;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  __ cmp(left, right);
  __ j(equal, &ok);
  // Fail if either is a non-HeapObject.
  __ mov(tmp, left);
  __ and_(tmp, right);
  __ JumpIfSmi(tmp, &fail);
  __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
  __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
  __ j(not_equal, &fail);
  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
  __ j(not_equal, &fail);
  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  __ j(equal, &ok);
  __ bind(&fail);
  __ mov(eax, Immediate(isolate()->factory()->false_value()));
  __ jmp(&done);
  __ bind(&ok);
  __ mov(eax, Immediate(isolate()->factory()->true_value()));
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

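  // When the kContainsCachedArrayIndexMask bits of the hash field are all
  // clear, the field holds a cached array index rather than a plain hash,
  // so a zero test answers the predicate.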
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}


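// Inline fast path for joining an array of flat ASCII strings with a string
// separator. On any input it cannot handle, it bails out by storing
// undefined in the result slot, signalling the caller to use a generic join
// instead.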
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array).
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = eax;
  Register elements = no_reg;  // Will be eax.

  Register index = edx;

  Register string_length = ecx;

  Register string = esi;

  Register scratch = ebx;

  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  __ sub(esp, Immediate(2 * kPointerSize));
  __ cld();
  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, isolate()->factory()->empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;


  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Set(index, Immediate(0));
  __ Set(string_length, Immediate(0));
  // Loop condition: while (index < length).
  // Live loop registers: index, array_length, string,
  //                      scratch, string_length, elements.
  if (generate_debug_code_) {
    __ cmp(index, array_length);
    __ Assert(less, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ bind(&loop);
  __ mov(string, FieldOperand(elements,
                              index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  __ add(string_length,
         FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ j(overflow, &bailout);
  __ add(index, Immediate(1));
  __ cmp(index, array_length);
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths, as a smi.
  // elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ mov(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);

  // Add (separator length times array_length) - separator length
  // to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
  __ sub(string_length, scratch);  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, scratch);
  __ j(overflow, &bailout);

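  // Untag string_length: the accumulated sum is a smi (value << 1).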
  __ shr(string_length, 1);
  // Live registers and stack values:
  //   string_length
  //   elements
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ mov(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));


  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);


  // Empty separator case.
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);



  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);


  __ bind(&bailout);
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  __ add(esp, Immediate(3 * kPointerSize));

  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
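  // Names that start with '_' denote inline intrinsics, dispatched to the
  // Emit* helpers above instead of performing an actual runtime call.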
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ mov(eax, GlobalObjectOperand());
    __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function via a call IC.
    __ Set(ecx, Immediate(expr->name()));
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->CallRuntimeFeedbackId());
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(eax);
}
4228

    
4229

    
4230
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4231
  switch (expr->op()) {
4232
    case Token::DELETE: {
4233
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4234
      Property* property = expr->expression()->AsProperty();
4235
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
4236

    
4237
      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ push(Immediate(Smi::FromInt(strict_mode_flag)));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ push(GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(kNonStrictMode)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in edx.
      VisitForAccumulatorValue(prop->obj());
      __ push(eax);
      __ mov(edx, eax);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ mov(edx, Operand(esp, kPointerSize));  // Object.
      __ mov(ecx, Operand(esp, 0));             // Key.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ JumpIfSmi(eax, &no_conversion, Label::kNear);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);

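  // Smis are tagged with a zero low bit, so adding the tagged constant
  // Smi::FromInt(1) increments the untagged value directly; the overflow
  // check below catches results that leave smi range.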
  if (ShouldInlineSmiCase(expr->op())) {
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(overflow, &stub_call, Label::kNear);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(eax, &done, Label::kNear);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()),
         RelocInfo::CODE_TARGET,
         expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(ecx, prop->key()->AsLiteral()->value());
      __ pop(edx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(ecx);
      __ pop(edx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ mov(edx, GlobalObjectOperand());
    __ mov(ecx, Immediate(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


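// Emits the fast path for comparisons such as "typeof x == 'number'" where
// one side is a string literal: each recognized type name gets a direct
// tag/map/instance-type check instead of a generic typeof plus compare.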
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ cmp(eax, isolate()->factory()->null_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
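    // (Undetectable host objects such as document.all report "undefined"
    // from typeof.)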
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(ecx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(eax, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
    __ j(equal, if_true);
    __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(eax, if_false);
    if (!FLAG_harmony_typeof) {
      __ cmp(eax, isolate()->factory()->null_value());
      __ j(equal, if_true);
    }
    __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
    __ j(below, if_false);
    __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ pop(edx);

4691
      JumpPatchSite patch_site(masm_);
4692
      if (inline_smi_code) {
4693
        Label slow_case;
4694
        __ mov(ecx, edx);
4695
        __ or_(ecx, eax);
4696
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
4697
        __ cmp(edx, eax);
4698
        Split(cc, if_true, if_false, NULL);
4699
        __ bind(&slow_case);
4700
      }
4701

    
4702
      // Record position and call the compare IC.
4703
      SetSourcePosition(expr->position());
4704
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4705
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
4706
      patch_site.EmitPatchInfo();
4707

    
4708
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4709
      __ test(eax, eax);
4710
      Split(cc, if_true, if_false, fall_through);
4711
    }
4712
  }
4713

    
4714
  // Convert the result of the comparison into one expected for this
4715
  // expression's context.
4716
  context()->Plug(if_true, if_false);
4717
}


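// Fast path for comparing a value against the null or undefined literal,
// e.g. "x === null": strict equality is a direct pointer compare against
// the canonical oddball, while loose equality goes through CompareNilIC.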
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
    __ test(eax, eax);
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ push(Immediate(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta)
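  // Leaving the raw return address on the stack as data would look like an
  // unsafe untagged value to the GC; as a code-relative smi delta it is a
  // valid tagged value that can be turned back into an address on exit.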
  ASSERT(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  __ push(edx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(edx, Operand::StaticVariable(has_pending_message));
  __ SmiTag(edx);
  __ push(edx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_script));
  __ push(edx);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(edx));
  // Restore pending message from stack.
  __ pop(edx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(Operand::StaticVariable(pending_message_script), edx);

  __ pop(edx);
  __ SmiUntag(edx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(Operand::StaticVariable(has_pending_message), edx);

  __ pop(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}


#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}

#undef __


static const byte kJnsInstruction = 0x79;  // Opcode of a short 'jns'.
static const byte kJnsOffset = 0x11;       // Jump distance to the 'ok' label.
static const byte kCallInstruction = 0xe8;  // Opcode of 'call rel32'.
static const byte kNopByteOne = 0x66;      // First byte of a two-byte nop.
static const byte kNopByteTwo = 0x90;      // Second byte of a two-byte nop.


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(call_target_address,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


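// Decodes the current state of a patch site purely from the bytes at 'pc'
// and the call's target address; the asserts cross-check each byte pattern
// against the builtin it should be paired with.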
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
    ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address));
    return INTERRUPT;
  }

  ASSERT_EQ(kNopByteOne, *jns_instr_address);
  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32