deps/v8/src/mips/full-codegen-mips.cc @ f230a1cf


// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.
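//
// Editor's note (illustrative addition, not part of the original source):
// the typical shuffle therefore looks like
//
//   __ CallStub(&some_stub);   // the stub leaves its result in v0
//   __ mov(a0, v0);            // move it into the first-argument register
//   __ CallRuntime(Runtime::kSomeFunction, 1);
//
// where "some_stub" and "kSomeFunction" are placeholder names only.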

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "mips/code-stubs-mips.h"
#include "mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ Branch(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ Branch(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
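
// Editor's note (illustrative addition, not part of the original source):
// EmitPatchInfo() above packs the distance back to the patch site into the
// register field and the 16-bit immediate of a single andi marker. Assuming
// kImm16Mask == 0xffff, a delta of 5 instructions is emitted as
//   andi zero_reg, <register with code 5 / 0xffff == 0>, 5 % 0xffff == 5
// which executes as a NOP, while the back-edge patching code can recover the
// delta as reg_code * 0xffff + immediate == 5.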


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). t1 is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ Branch(&ok, eq, t1, Operand(zero_reg));
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ sw(a2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < locals_count; i++) {
        __ push(at);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    __ push(a1);
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both v0 and cp.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in cp.
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                                 (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(t0, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(t0));
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  ASSERT(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  int weight = 1;
  if (FLAG_weighted_back_edges) {
    ASSERT(back_edge_target->is_bound());
    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
    weight = Min(kMaxBackEdgeWeight,
                 Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kCodeSizeMultiplier));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ Branch(&ok, ge, a3, Operand(zero_reg));
      __ push(v0);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
        __ push(a2);
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        __ Call(isolate()->builtins()->InterruptCheck(),
                RelocInfo::CODE_TARGET);
      }
      __ pop(v0);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting it, as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id());
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand&  rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}
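
// Editor's note (illustrative addition, not part of the original source): a
// call such as
//   Split(eq, a0, Operand(zero_reg), if_true, if_false, if_false);
// emits only the branch to if_true, because if_false is the fall-through
// label; when neither label is the fall-through, both branches are emitted.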


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
        a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
        a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        ASSERT(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ lw(a1, ContextOperand(a1, variable->interface()->Index()));
  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sw(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
  __ mov(a0, v0);
  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(v0);  // Map.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Cell> cell = isolate()->factory()->NewCell(
      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                     isolate()));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ LoadHeapObject(a1, cell);
  __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
  __ sw(a2, FieldMemOperand(a1, Cell::kValueOffset));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT_EQ(Smi::FromInt(0), 0);
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(a1);  // Enumerable.
  __ push(a3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for the going to the next element by incrementing
1234
  // the index (smi) stored on top of the stack.
1235
  __ bind(loop_statement.continue_label());
1236
  __ pop(a0);
1237
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1238
  __ push(a0);
1239

    
1240
  EmitBackEdgeBookkeeping(stmt, &loop);
1241
  __ Branch(&loop);
1242

    
1243
  // Remove the pointers stored on the stack.
1244
  __ bind(loop_statement.break_label());
1245
  __ Drop(5);
1246

    
1247
  // Exit and decrement the loop depth.
1248
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1249
  __ bind(&exit);
1250
  decrement_loop_depth();
1251
}
1252

    
1253

    
1254
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1255
  Comment cmnt(masm_, "[ ForOfStatement");
1256
  SetStatementPosition(stmt);
1257

    
1258
  Iteration loop_statement(this, stmt);
1259
  increment_loop_depth();
1260

    
1261
  // var iterator = iterable[@@iterator]()
1262
  VisitForAccumulatorValue(stmt->assign_iterator());
1263
  __ mov(a0, v0);
1264

    
1265
  // As with for-in, skip the loop if the iterator is null or undefined.
1266
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1267
  __ Branch(loop_statement.break_label(), eq, a0, Operand(at));
1268
  __ LoadRoot(at, Heap::kNullValueRootIndex);
1269
  __ Branch(loop_statement.break_label(), eq, a0, Operand(at));
1270

    
1271
  // Convert the iterator to a JS object.
1272
  Label convert, done_convert;
1273
  __ JumpIfSmi(a0, &convert);
1274
  __ GetObjectType(a0, a1, a1);
1275
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1276
  __ bind(&convert);
1277
  __ push(a0);
1278
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1279
  __ mov(a0, v0);
1280
  __ bind(&done_convert);
1281
  __ push(a0);
1282

    
1283
  // Loop entry.
1284
  __ bind(loop_statement.continue_label());
1285

    
1286
  // result = iterator.next()
1287
  VisitForEffect(stmt->next_result());
1288

    
1289
  // if (result.done) break;
1290
  Label result_not_done;
1291
  VisitForControl(stmt->result_done(),
1292
                  loop_statement.break_label(),
1293
                  &result_not_done,
1294
                  &result_not_done);
1295
  __ bind(&result_not_done);
1296

    
1297
  // each = result.value
1298
  VisitForEffect(stmt->assign_each());
1299

    
1300
  // Generate code for the body of the loop.
1301
  Visit(stmt->body());
1302

    
1303
  // Check stack before looping.
1304
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1305
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1306
  __ jmp(loop_statement.continue_label());
1307

    
1308
  // Exit and decrement the loop depth.
1309
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1310
  __ bind(loop_statement.break_label());
1311
  decrement_loop_depth();
1312
}
1313

    
1314

    
1315
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1316
                                       bool pretenure) {
1317
  // Use the fast case closure allocation code that allocates in new
1318
  // space for nested functions that don't need literals cloning. If
1319
  // we're running with the --always-opt or the --prepare-always-opt
1320
  // flag, we need to use the runtime function so that the new function
1321
  // we are creating here gets a chance to have its code optimized and
1322
  // doesn't just get a copy of the existing unoptimized code.
1323
  if (!FLAG_always_opt &&
1324
      !FLAG_prepare_always_opt &&
1325
      !pretenure &&
1326
      scope()->is_function_scope() &&
1327
      info->num_literals() == 0) {
1328
    FastNewClosureStub stub(info->language_mode(), info->is_generator());
1329
    __ li(a2, Operand(info));
1330
    __ CallStub(&stub);
1331
  } else {
1332
    __ li(a0, Operand(info));
1333
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1334
                              : Heap::kFalseValueRootIndex);
1335
    __ Push(cp, a0, a1);
1336
    __ CallRuntime(Runtime::kNewClosure, 3);
1337
  }
1338
  context()->Plug(v0);
1339
}
1340

    
1341

    
1342
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1343
  Comment cmnt(masm_, "[ VariableProxy");
1344
  EmitVariableLoad(expr);
1345
}
1346

    
1347

    
1348
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1349
                                                      TypeofState typeof_state,
1350
                                                      Label* slow) {
1351
  Register current = cp;
1352
  Register next = a1;
1353
  Register temp = a2;
1354

    
1355
  Scope* s = scope();
1356
  while (s != NULL) {
1357
    if (s->num_heap_slots() > 0) {
1358
      if (s->calls_non_strict_eval()) {
1359
        // Check that extension is NULL.
1360
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1361
        __ Branch(slow, ne, temp, Operand(zero_reg));
1362
      }
1363
      // Load next context in chain.
1364
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1365
      // Walk the rest of the chain without clobbering cp.
1366
      current = next;
1367
    }
1368
    // If no outer scope calls eval, we do not need to check more
1369
    // context extensions.
1370
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1371
    s = s->outer_scope();
1372
  }
1373

    
1374
  if (s->is_eval_scope()) {
1375
    Label loop, fast;
1376
    if (!current.is(next)) {
1377
      __ Move(next, current);
1378
    }
1379
    __ bind(&loop);
1380
    // Terminate at native context.
1381
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1382
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
1383
    __ Branch(&fast, eq, temp, Operand(t0));
1384
    // Check that extension is NULL.
1385
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1386
    __ Branch(slow, ne, temp, Operand(zero_reg));
1387
    // Load next context in chain.
1388
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1389
    __ Branch(&loop);
1390
    __ bind(&fast);
1391
  }
1392

    
1393
  __ lw(a0, GlobalObjectOperand());
1394
  __ li(a2, Operand(var->name()));
1395
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1396
      ? RelocInfo::CODE_TARGET
1397
      : RelocInfo::CODE_TARGET_CONTEXT;
1398
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1399
  CallIC(ic, mode);
1400
}
1401

    
1402

    
1403
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1404
                                                                Label* slow) {
1405
  ASSERT(var->IsContextSlot());
1406
  Register context = cp;
1407
  Register next = a3;
1408
  Register temp = t0;
1409

    
1410
  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1411
    if (s->num_heap_slots() > 0) {
1412
      if (s->calls_non_strict_eval()) {
1413
        // Check that extension is NULL.
1414
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1415
        __ Branch(slow, ne, temp, Operand(zero_reg));
1416
      }
1417
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1418
      // Walk the rest of the chain without clobbering cp.
1419
      context = next;
1420
    }
1421
  }
1422
  // Check that last extension is NULL.
1423
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1424
  __ Branch(slow, ne, temp, Operand(zero_reg));
1425

    
1426
  // This function is used only for loads, not stores, so it's safe to
1427
  // return a cp-based operand (the write barrier cannot be allowed to
1428
  // destroy the cp register).
1429
  return ContextOperand(context, var->index());
1430
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1434
                                                  TypeofState typeof_state,
1435
                                                  Label* slow,
1436
                                                  Label* done) {
1437
  // Generate fast-case code for variables that might be shadowed by
1438
  // eval-introduced variables.  Eval is used a lot without
1439
  // introducing variables.  In those cases, we do not want to
1440
  // perform a runtime call for all variables in the scope
1441
  // containing the eval.
1442
  if (var->mode() == DYNAMIC_GLOBAL) {
1443
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1444
    __ Branch(done);
1445
  } else if (var->mode() == DYNAMIC_LOCAL) {
1446
    Variable* local = var->local_if_not_shadowed();
1447
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1448
    if (local->mode() == LET ||
1449
        local->mode() == CONST ||
1450
        local->mode() == CONST_HARMONY) {
1451
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1452
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
1453
      if (local->mode() == CONST) {
1454
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1455
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
1456
      } else {  // LET || CONST_HARMONY
1457
        __ Branch(done, ne, at, Operand(zero_reg));
1458
        __ li(a0, Operand(var->name()));
1459
        __ push(a0);
1460
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
1461
      }
1462
    }
1463
    __ Branch(done);
1464
  }
1465
}
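
// For illustration: DYNAMIC_GLOBAL variables are expected to resolve to the
// global object unless an eval in an enclosing scope introduced a shadowing
// binding, while DYNAMIC_LOCAL variables are expected to resolve to a known
// context slot under the same proviso; both fast paths branch to |slow| when
// an extension object is found on the context chain.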


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1469
  // Record position before possible IC call.
1470
  SetSourcePosition(proxy->position());
1471
  Variable* var = proxy->var();
1472

    
1473
  // Three cases: global variables, lookup variables, and all other types of
1474
  // variables.
1475
  switch (var->location()) {
1476
    case Variable::UNALLOCATED: {
1477
      Comment cmnt(masm_, "Global variable");
1478
      // Use inline caching. Variable name is passed in a2 and the global
1479
      // object (receiver) in a0.
1480
      __ lw(a0, GlobalObjectOperand());
1481
      __ li(a2, Operand(var->name()));
1482
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1483
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1484
      context()->Plug(v0);
1485
      break;
1486
    }
1487

    
1488
    case Variable::PARAMETER:
1489
    case Variable::LOCAL:
1490
    case Variable::CONTEXT: {
1491
      Comment cmnt(masm_, var->IsContextSlot()
1492
                              ? "Context variable"
1493
                              : "Stack variable");
1494
      if (var->binding_needs_init()) {
1495
        // var->scope() may be NULL when the proxy is located in eval code and
1496
        // refers to a potential outside binding. Currently those bindings are
1497
        // always looked up dynamically, i.e. in that case
1498
        //     var->location() == LOOKUP.
1499
        // always holds.
1500
        ASSERT(var->scope() != NULL);
1501

    
1502
        // Check if the binding really needs an initialization check. The check
1503
        // can be skipped in the following situation: we have a LET or CONST
1504
        // binding in harmony mode, both the Variable and the VariableProxy have
1505
        // the same declaration scope (i.e. they are both in global code, in the
1506
        // same function or in the same eval code) and the VariableProxy is in
1507
        // the source physically located after the initializer of the variable.
1508
        //
1509
        // We cannot skip any initialization checks for CONST in non-harmony
1510
        // mode because const variables may be declared but never initialized:
1511
        //   if (false) { const x; }; var y = x;
1512
        //
1513
        // The condition on the declaration scopes is a conservative check for
1514
        // nested functions that access a binding and are called before the
1515
        // binding is initialized:
1516
        //   function() { f(); let x = 1; function f() { x = 2; } }
1517
        //
1518
        bool skip_init_check;
1519
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1520
          skip_init_check = false;
1521
        } else {
1522
          // Check that we always have valid source position.
1523
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1524
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
1525
          skip_init_check = var->mode() != CONST &&
1526
              var->initializer_position() < proxy->position();
1527
        }
1528

    
1529
        if (!skip_init_check) {
1530
          // Let and const need a read barrier.
1531
          GetVar(v0, var);
1532
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1533
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
1534
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1535
            // Throw a reference error when using an uninitialized let/const
1536
            // binding in harmony mode.
1537
            Label done;
1538
            __ Branch(&done, ne, at, Operand(zero_reg));
1539
            __ li(a0, Operand(var->name()));
1540
            __ push(a0);
1541
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
1542
            __ bind(&done);
1543
          } else {
1544
            // Uninitialized const bindings outside of harmony mode are unholed.
1545
            ASSERT(var->mode() == CONST);
1546
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1547
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
1548
          }
1549
          context()->Plug(v0);
1550
          break;
1551
        }
1552
      }
1553
      context()->Plug(var);
1554
      break;
1555
    }
1556

    
1557
    case Variable::LOOKUP: {
1558
      Label done, slow;
1559
      // Generate code for loading from variables potentially shadowed
1560
      // by eval-introduced variables.
1561
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1562
      __ bind(&slow);
1563
      Comment cmnt(masm_, "Lookup variable");
1564
      __ li(a1, Operand(var->name()));
1565
      __ Push(cp, a1);  // Context and name.
1566
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
1567
      __ bind(&done);
1568
      context()->Plug(v0);
1569
    }
1570
  }
1571
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1575
  Comment cmnt(masm_, "[ RegExpLiteral");
1576
  Label materialized;
1577
  // Registers will be used as follows:
1578
  // t1 = materialized value (RegExp literal)
1579
  // t0 = JS function, literals array
1580
  // a3 = literal index
1581
  // a2 = RegExp pattern
1582
  // a1 = RegExp flags
1583
  // a0 = RegExp literal clone
1584
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1585
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1586
  int literal_offset =
1587
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1588
  __ lw(t1, FieldMemOperand(t0, literal_offset));
1589
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1590
  __ Branch(&materialized, ne, t1, Operand(at));
1591

    
1592
  // Create regexp literal using runtime function.
1593
  // Result will be in v0.
1594
  __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1595
  __ li(a2, Operand(expr->pattern()));
1596
  __ li(a1, Operand(expr->flags()));
1597
  __ Push(t0, a3, a2, a1);
1598
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1599
  __ mov(t1, v0);
1600

    
1601
  __ bind(&materialized);
1602
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1603
  Label allocated, runtime_allocate;
1604
  __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1605
  __ jmp(&allocated);
1606

    
1607
  __ bind(&runtime_allocate);
1608
  __ push(t1);
1609
  __ li(a0, Operand(Smi::FromInt(size)));
1610
  __ push(a0);
1611
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1612
  __ pop(t1);
1613

    
1614
  __ bind(&allocated);
1615

    
1616
  // After this, registers are used as follows:
1617
  // v0: Newly allocated regexp.
1618
  // t1: Materialized regexp.
1619
  // a2: temp.
1620
  __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
1621
  context()->Plug(v0);
1622
}
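
// For illustration, a literal such as /ab+c/g must evaluate to a fresh
// JSRegExp object on every execution, so the code above materializes the
// literal once into the function's literals array and then clones that
// boilerplate object each time, calling the runtime if new-space allocation
// fails.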


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ push(a1);
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1636
  Comment cmnt(masm_, "[ ObjectLiteral");
1637
  Handle<FixedArray> constant_properties = expr->constant_properties();
1638
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1639
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1640
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1641
  __ li(a1, Operand(constant_properties));
1642
  int flags = expr->fast_elements()
1643
      ? ObjectLiteral::kFastElements
1644
      : ObjectLiteral::kNoFlags;
1645
  flags |= expr->has_function()
1646
      ? ObjectLiteral::kHasFunction
1647
      : ObjectLiteral::kNoFlags;
1648
  __ li(a0, Operand(Smi::FromInt(flags)));
1649
  int properties_count = constant_properties->length() / 2;
1650
  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
1651
      expr->depth() > 1 || Serializer::enabled() ||
1652
      flags != ObjectLiteral::kFastElements ||
1653
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1654
    __ Push(a3, a2, a1, a0);
1655
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1656
  } else {
1657
    FastCloneShallowObjectStub stub(properties_count);
1658
    __ CallStub(&stub);
1659
  }
1660

    
1661
  // If result_saved is true the result is on top of the stack.  If
1662
  // result_saved is false the result is in v0.
1663
  bool result_saved = false;
1664

    
1665
  // Mark all computed expressions that are bound to a key that
1666
  // is shadowed by a later occurrence of the same key. For the
1667
  // marked expressions, no store code is emitted.
1668
  expr->CalculateEmitStore(zone());
1669

    
1670
  AccessorTable accessor_table(zone());
1671
  for (int i = 0; i < expr->properties()->length(); i++) {
1672
    ObjectLiteral::Property* property = expr->properties()->at(i);
1673
    if (property->IsCompileTimeValue()) continue;
1674

    
1675
    Literal* key = property->key();
1676
    Expression* value = property->value();
1677
    if (!result_saved) {
1678
      __ push(v0);  // Save result on stack.
1679
      result_saved = true;
1680
    }
1681
    switch (property->kind()) {
1682
      case ObjectLiteral::Property::CONSTANT:
1683
        UNREACHABLE();
1684
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1685
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1686
        // Fall through.
1687
      case ObjectLiteral::Property::COMPUTED:
1688
        if (key->value()->IsInternalizedString()) {
1689
          if (property->emit_store()) {
1690
            VisitForAccumulatorValue(value);
1691
            __ mov(a0, result_register());
1692
            __ li(a2, Operand(key->value()));
1693
            __ lw(a1, MemOperand(sp));
1694
            Handle<Code> ic = is_classic_mode()
1695
                ? isolate()->builtins()->StoreIC_Initialize()
1696
                : isolate()->builtins()->StoreIC_Initialize_Strict();
1697
            CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
1698
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
1699
          } else {
1700
            VisitForEffect(value);
1701
          }
1702
          break;
1703
        }
1704
        // Duplicate receiver on stack.
1705
        __ lw(a0, MemOperand(sp));
1706
        __ push(a0);
1707
        VisitForStackValue(key);
1708
        VisitForStackValue(value);
1709
        if (property->emit_store()) {
1710
          __ li(a0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes.
1711
          __ push(a0);
1712
          __ CallRuntime(Runtime::kSetProperty, 4);
1713
        } else {
1714
          __ Drop(3);
1715
        }
1716
        break;
1717
      case ObjectLiteral::Property::PROTOTYPE:
1718
        // Duplicate receiver on stack.
1719
        __ lw(a0, MemOperand(sp));
1720
        __ push(a0);
1721
        VisitForStackValue(value);
1722
        if (property->emit_store()) {
1723
          __ CallRuntime(Runtime::kSetPrototype, 2);
1724
        } else {
1725
          __ Drop(2);
1726
        }
1727
        break;
1728
      case ObjectLiteral::Property::GETTER:
1729
        accessor_table.lookup(key)->second->getter = value;
1730
        break;
1731
      case ObjectLiteral::Property::SETTER:
1732
        accessor_table.lookup(key)->second->setter = value;
1733
        break;
1734
    }
1735
  }
1736

    
1737
  // Emit code to define accessors, using only a single call to the runtime for
1738
  // each pair of corresponding getters and setters.
1739
  for (AccessorTable::Iterator it = accessor_table.begin();
1740
       it != accessor_table.end();
1741
       ++it) {
1742
    __ lw(a0, MemOperand(sp));  // Duplicate receiver.
1743
    __ push(a0);
1744
    VisitForStackValue(it->first);
1745
    EmitAccessor(it->second->getter);
1746
    EmitAccessor(it->second->setter);
1747
    __ li(a0, Operand(Smi::FromInt(NONE)));
1748
    __ push(a0);
1749
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1750
  }
1751

    
1752
  if (expr->has_function()) {
1753
    ASSERT(result_saved);
1754
    __ lw(a0, MemOperand(sp));
1755
    __ push(a0);
1756
    __ CallRuntime(Runtime::kToFastProperties, 1);
1757
  }
1758

    
1759
  if (result_saved) {
1760
    context()->PlugTOS();
1761
  } else {
1762
    context()->Plug(v0);
1763
  }
1764
}
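
// For illustration, accessor pairs such as
//   { get x() { return 1; }, set x(v) { } }
// are gathered in |accessor_table| above so that the getter and setter for a
// key are installed with a single call to %DefineOrRedefineAccessorProperty.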


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1768
  Comment cmnt(masm_, "[ ArrayLiteral");
1769

    
1770
  ZoneList<Expression*>* subexprs = expr->values();
1771
  int length = subexprs->length();
1772

    
1773
  Handle<FixedArray> constant_elements = expr->constant_elements();
1774
  ASSERT_EQ(2, constant_elements->length());
1775
  ElementsKind constant_elements_kind =
1776
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1777
  bool has_fast_elements =
1778
      IsFastObjectElementsKind(constant_elements_kind);
1779
  Handle<FixedArrayBase> constant_elements_values(
1780
      FixedArrayBase::cast(constant_elements->get(1)));
1781

    
1782
  __ mov(a0, result_register());
1783
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1784
  __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1785
  __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1786
  __ li(a1, Operand(constant_elements));
1787
  if (has_fast_elements && constant_elements_values->map() ==
1788
      isolate()->heap()->fixed_cow_array_map()) {
1789
    FastCloneShallowArrayStub stub(
1790
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1791
        DONT_TRACK_ALLOCATION_SITE,
1792
        length);
1793
    __ CallStub(&stub);
1794
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1795
        1, a1, a2);
1796
  } else if (expr->depth() > 1) {
1797
    __ Push(a3, a2, a1);
1798
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1799
  } else if (Serializer::enabled() ||
1800
      length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1801
    __ Push(a3, a2, a1);
1802
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1803
  } else {
1804
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1805
           FLAG_smi_only_arrays);
1806
    FastCloneShallowArrayStub::Mode mode =
1807
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1808
    AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
1809
        ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
1810

    
1811
    if (has_fast_elements) {
1812
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
1813
      allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1814
    }
1815

    
1816
    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1817
    __ CallStub(&stub);
1818
  }
1819

    
1820
  bool result_saved = false;  // Is the result saved to the stack?
1821

    
1822
  // Emit code to evaluate all the non-constant subexpressions and to store
1823
  // them into the newly cloned array.
1824
  for (int i = 0; i < length; i++) {
1825
    Expression* subexpr = subexprs->at(i);
1826
    // If the subexpression is a literal or a simple materialized literal it
1827
    // is already set in the cloned array.
1828
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1829

    
1830
    if (!result_saved) {
1831
      __ push(v0);  // array literal
1832
      __ Push(Smi::FromInt(expr->literal_index()));
1833
      result_saved = true;
1834
    }
1835

    
1836
    VisitForAccumulatorValue(subexpr);
1837

    
1838
    if (IsFastObjectElementsKind(constant_elements_kind)) {
1839
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1840
      __ lw(t2, MemOperand(sp, kPointerSize));  // Copy of array literal.
1841
      __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
1842
      __ sw(result_register(), FieldMemOperand(a1, offset));
1843
      // Update the write barrier for the array store.
1844
      __ RecordWriteField(a1, offset, result_register(), a2,
1845
                          kRAHasBeenSaved, kDontSaveFPRegs,
1846
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1847
    } else {
1848
      __ li(a3, Operand(Smi::FromInt(i)));
1849
      __ mov(a0, result_register());
1850
      StoreArrayLiteralElementStub stub;
1851
      __ CallStub(&stub);
1852
    }
1853

    
1854
    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1855
  }
1856
  if (result_saved) {
1857
    __ Pop();  // literal index
1858
    context()->PlugTOS();
1859
  } else {
1860
    context()->Plug(v0);
1861
  }
1862
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1866
  Comment cmnt(masm_, "[ Assignment");
1867
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1868
  // on the left-hand side.
1869
  if (!expr->target()->IsValidLeftHandSide()) {
1870
    VisitForEffect(expr->target());
1871
    return;
1872
  }
1873

    
1874
  // Left-hand side can only be a property, a global or a (parameter or local)
1875
  // slot.
1876
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1877
  LhsKind assign_type = VARIABLE;
1878
  Property* property = expr->target()->AsProperty();
1879
  if (property != NULL) {
1880
    assign_type = (property->key()->IsPropertyName())
1881
        ? NAMED_PROPERTY
1882
        : KEYED_PROPERTY;
1883
  }
1884

    
1885
  // Evaluate LHS expression.
1886
  switch (assign_type) {
1887
    case VARIABLE:
1888
      // Nothing to do here.
1889
      break;
1890
    case NAMED_PROPERTY:
1891
      if (expr->is_compound()) {
1892
        // We need the receiver both on the stack and in the accumulator.
1893
        VisitForAccumulatorValue(property->obj());
1894
        __ push(result_register());
1895
      } else {
1896
        VisitForStackValue(property->obj());
1897
      }
1898
      break;
1899
    case KEYED_PROPERTY:
1900
      // We need the key and receiver on both the stack and in v0 and a1.
1901
      if (expr->is_compound()) {
1902
        VisitForStackValue(property->obj());
1903
        VisitForAccumulatorValue(property->key());
1904
        __ lw(a1, MemOperand(sp, 0));
1905
        __ push(v0);
1906
      } else {
1907
        VisitForStackValue(property->obj());
1908
        VisitForStackValue(property->key());
1909
      }
1910
      break;
1911
  }
1912

    
1913
  // For compound assignments we need another deoptimization point after the
1914
  // variable/property load.
1915
  if (expr->is_compound()) {
1916
    { AccumulatorValueContext context(this);
1917
      switch (assign_type) {
1918
        case VARIABLE:
1919
          EmitVariableLoad(expr->target()->AsVariableProxy());
1920
          PrepareForBailout(expr->target(), TOS_REG);
1921
          break;
1922
        case NAMED_PROPERTY:
1923
          EmitNamedPropertyLoad(property);
1924
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1925
          break;
1926
        case KEYED_PROPERTY:
1927
          EmitKeyedPropertyLoad(property);
1928
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1929
          break;
1930
      }
1931
    }
1932

    
1933
    Token::Value op = expr->binary_op();
1934
    __ push(v0);  // Left operand goes on the stack.
1935
    VisitForAccumulatorValue(expr->value());
1936

    
1937
    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1938
        ? OVERWRITE_RIGHT
1939
        : NO_OVERWRITE;
1940
    SetSourcePosition(expr->position() + 1);
1941
    AccumulatorValueContext context(this);
1942
    if (ShouldInlineSmiCase(op)) {
1943
      EmitInlineSmiBinaryOp(expr->binary_operation(),
1944
                            op,
1945
                            mode,
1946
                            expr->target(),
1947
                            expr->value());
1948
    } else {
1949
      EmitBinaryOp(expr->binary_operation(), op, mode);
1950
    }
1951

    
1952
    // Deoptimization point in case the binary operation may have side effects.
1953
    PrepareForBailout(expr->binary_operation(), TOS_REG);
1954
  } else {
1955
    VisitForAccumulatorValue(expr->value());
1956
  }
1957

    
1958
  // Record source position before possible IC call.
1959
  SetSourcePosition(expr->position());
1960

    
1961
  // Store the value.
1962
  switch (assign_type) {
1963
    case VARIABLE:
1964
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1965
                             expr->op());
1966
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1967
      context()->Plug(v0);
1968
      break;
1969
    case NAMED_PROPERTY:
1970
      EmitNamedPropertyAssignment(expr);
1971
      break;
1972
    case KEYED_PROPERTY:
1973
      EmitKeyedPropertyAssignment(expr);
1974
      break;
1975
  }
1976
}
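
// For illustration, a compound assignment such as  o[k] += v  keeps the
// receiver and key on the stack across the keyed load, combines the loaded
// value with v (inline smi code or BinaryOpStub), and records an extra
// bailout point because the binary operation itself may deoptimize.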


void FullCodeGenerator::VisitYield(Yield* expr) {
1980
  Comment cmnt(masm_, "[ Yield");
1981
  // Evaluate yielded value first; the initial iterator definition depends on
1982
  // this.  It stays on the stack while we update the iterator.
1983
  VisitForStackValue(expr->expression());
1984

    
1985
  switch (expr->yield_kind()) {
1986
    case Yield::SUSPEND:
1987
      // Pop value from top-of-stack slot; box result into result register.
1988
      EmitCreateIteratorResult(false);
1989
      __ push(result_register());
1990
      // Fall through.
1991
    case Yield::INITIAL: {
1992
      Label suspend, continuation, post_runtime, resume;
1993

    
1994
      __ jmp(&suspend);
1995

    
1996
      __ bind(&continuation);
1997
      __ jmp(&resume);
1998

    
1999
      __ bind(&suspend);
2000
      VisitForAccumulatorValue(expr->generator_object());
2001
      ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2002
      __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2003
      __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2004
      __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2005
      __ mov(a1, cp);
2006
      __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2007
                          kRAHasBeenSaved, kDontSaveFPRegs);
2008
      __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2009
      __ Branch(&post_runtime, eq, sp, Operand(a1));
2010
      __ push(v0);  // generator object
2011
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2012
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2013
      __ bind(&post_runtime);
2014
      __ pop(result_register());
2015
      EmitReturnSequence();
2016

    
2017
      __ bind(&resume);
2018
      context()->Plug(result_register());
2019
      break;
2020
    }
2021

    
2022
    case Yield::FINAL: {
2023
      VisitForAccumulatorValue(expr->generator_object());
2024
      __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2025
      __ sw(a1, FieldMemOperand(result_register(),
2026
                                JSGeneratorObject::kContinuationOffset));
2027
      // Pop value from top-of-stack slot, box result into result register.
2028
      EmitCreateIteratorResult(true);
2029
      EmitUnwindBeforeReturn();
2030
      EmitReturnSequence();
2031
      break;
2032
    }
2033

    
2034
    case Yield::DELEGATING: {
2035
      VisitForStackValue(expr->generator_object());
2036

    
2037
      // Initial stack layout is as follows:
2038
      // [sp + 1 * kPointerSize] iter
2039
      // [sp + 0 * kPointerSize] g
2040

    
2041
      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2042
      Label l_next, l_call, l_loop;
2043
      // Initial send value is undefined.
2044
      __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2045
      __ Branch(&l_next);
2046

    
2047
      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2048
      __ bind(&l_catch);
2049
      __ mov(a0, v0);
2050
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2051
      __ LoadRoot(a2, Heap::kthrow_stringRootIndex);     // "throw"
2052
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));       // iter
2053
      __ push(a3);                                       // iter
2054
      __ push(a0);                                       // exception
2055
      __ jmp(&l_call);
2056

    
2057
      // try { received = %yield result }
2058
      // Shuffle the received result above a try handler and yield it without
2059
      // re-boxing.
2060
      __ bind(&l_try);
2061
      __ pop(a0);                                        // result
2062
      __ PushTryHandler(StackHandler::CATCH, expr->index());
2063
      const int handler_size = StackHandlerConstants::kSize;
2064
      __ push(a0);                                       // result
2065
      __ jmp(&l_suspend);
2066
      __ bind(&l_continuation);
2067
      __ mov(a0, v0);
2068
      __ jmp(&l_resume);
2069
      __ bind(&l_suspend);
2070
      const int generator_object_depth = kPointerSize + handler_size;
2071
      __ lw(a0, MemOperand(sp, generator_object_depth));
2072
      __ push(a0);                                       // g
2073
      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2074
      __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2075
      __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2076
      __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2077
      __ mov(a1, cp);
2078
      __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2079
                          kRAHasBeenSaved, kDontSaveFPRegs);
2080
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2081
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2082
      __ pop(v0);                                        // result
2083
      EmitReturnSequence();
2084
      __ mov(a0, v0);
2085
      __ bind(&l_resume);                                // received in a0
2086
      __ PopTryHandler();
2087

    
2088
      // receiver = iter; f = 'next'; arg = received;
2089
      __ bind(&l_next);
2090
      __ LoadRoot(a2, Heap::knext_stringRootIndex);      // "next"
2091
      __ lw(a3, MemOperand(sp, 1 * kPointerSize));       // iter
2092
      __ push(a3);                                       // iter
2093
      __ push(a0);                                       // received
2094

    
2095
      // result = receiver[f](arg);
2096
      __ bind(&l_call);
2097
      Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1);
2098
      CallIC(ic);
2099
      __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2100

    
2101
      // if (!result.done) goto l_try;
2102
      __ bind(&l_loop);
2103
      __ mov(a0, v0);
2104
      __ push(a0);                                       // save result
2105
      __ LoadRoot(a2, Heap::kdone_stringRootIndex);      // "done"
2106
      Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize();
2107
      CallIC(done_ic);                                   // result.done in v0
2108
      __ mov(a0, v0);
2109
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2110
      CallIC(bool_ic);
2111
      __ Branch(&l_try, eq, v0, Operand(zero_reg));
2112

    
2113
      // result.value
2114
      __ pop(a0);                                        // result
2115
      __ LoadRoot(a2, Heap::kvalue_stringRootIndex);     // "value"
2116
      Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize();
2117
      CallIC(value_ic);                                  // result.value in v0
2118
      context()->DropAndPlug(2, v0);                     // drop iter and g
2119
      break;
2120
    }
2121
  }
2122
}
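
// For illustration, the delegating case above corresponds roughly to:
//
//   var received = undefined;
//   next: f = "next";
//   call: result = iter[f](received);                    // keyed call IC
//   loop: if (result.done) goto done;
//         try { received = %yield result; goto next; }   // no re-boxing
//         catch (e) { f = "throw"; received = e; goto call; }
//   done: // value of the whole expression is result.value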


void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2126
    Expression *value,
2127
    JSGeneratorObject::ResumeMode resume_mode) {
2128
  // The value stays in a0, and is ultimately read by the resumed generator, as
2129
  // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it.  a1
2130
  // will hold the generator object until the activation has been resumed.
2131
  VisitForStackValue(generator);
2132
  VisitForAccumulatorValue(value);
2133
  __ pop(a1);
2134

    
2135
  // Check generator state.
2136
  Label wrong_state, done;
2137
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2138
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
2139
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
2140
  __ Branch(&wrong_state, le, a3, Operand(zero_reg));
2141

    
2142
  // Load suspended function and context.
2143
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2144
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2145

    
2146
  // Load receiver and store as the first argument.
2147
  __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2148
  __ push(a2);
2149

    
2150
  // Push holes for the rest of the arguments to the generator function.
2151
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
2152
  __ lw(a3,
2153
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2154
  __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2155
  Label push_argument_holes, push_frame;
2156
  __ bind(&push_argument_holes);
2157
  __ Subu(a3, a3, Operand(Smi::FromInt(1)));
2158
  __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2159
  __ push(a2);
2160
  __ jmp(&push_argument_holes);
2161

    
2162
  // Enter a new JavaScript frame, and initialize its slots as they were when
2163
  // the generator was suspended.
2164
  Label resume_frame;
2165
  __ bind(&push_frame);
2166
  __ Call(&resume_frame);
2167
  __ jmp(&done);
2168
  __ bind(&resume_frame);
2169
  __ push(ra);  // Return address.
2170
  __ push(fp);  // Caller's frame pointer.
2171
  __ mov(fp, sp);
2172
  __ push(cp);  // Callee's context.
2173
  __ push(t0);  // Callee's JS Function.
2174

    
2175
  // Load the operand stack size.
2176
  __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2177
  __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2178
  __ SmiUntag(a3);
2179

    
2180
  // If we are sending a value and there is no operand stack, we can jump back
2181
  // in directly.
2182
  if (resume_mode == JSGeneratorObject::NEXT) {
2183
    Label slow_resume;
2184
    __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2185
    __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
2186
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2187
    __ SmiUntag(a2);
2188
    __ Addu(a3, a3, Operand(a2));
2189
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2190
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2191
    __ Jump(a3);
2192
    __ bind(&slow_resume);
2193
  }
2194

    
2195
  // Otherwise, we push holes for the operand stack and call the runtime to fix
2196
  // up the stack and the handlers.
2197
  Label push_operand_holes, call_resume;
2198
  __ bind(&push_operand_holes);
2199
  __ Subu(a3, a3, Operand(1));
2200
  __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2201
  __ push(a2);
2202
  __ Branch(&push_operand_holes);
2203
  __ bind(&call_resume);
2204
  __ push(a1);
2205
  __ push(result_register());
2206
  __ Push(Smi::FromInt(resume_mode));
2207
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2208
  // Not reached: the runtime call returns elsewhere.
2209
  __ stop("not-reached");
2210

    
2211
  // Throw error if we attempt to operate on a running generator.
2212
  __ bind(&wrong_state);
2213
  __ push(a1);
2214
  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2215

    
2216
  __ bind(&done);
2217
  context()->Plug(result_register());
2218
}
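
// For illustration, resuming rebuilds a JavaScript frame by hand (return
// address, caller fp, context, function), pushes the-hole for each formal
// parameter and, in the fast NEXT case with an empty operand stack, jumps
// directly to the continuation offset recorded in the generator object;
// otherwise %ResumeJSGeneratorObject reconstructs the operand stack and
// handlers.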


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2222
  Label gc_required;
2223
  Label allocated;
2224

    
2225
  Handle<Map> map(isolate()->native_context()->generator_result_map());
2226

    
2227
  __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
2228
  __ jmp(&allocated);
2229

    
2230
  __ bind(&gc_required);
2231
  __ Push(Smi::FromInt(map->instance_size()));
2232
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2233
  __ lw(context_register(),
2234
        MemOperand(fp, StandardFrameConstants::kContextOffset));
2235

    
2236
  __ bind(&allocated);
2237
  __ li(a1, Operand(map));
2238
  __ pop(a2);
2239
  __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2240
  __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
2241
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2242
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2243
  __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2244
  __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
2245
  __ sw(a2,
2246
        FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2247
  __ sw(a3,
2248
        FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2249

    
2250
  // Only the value field needs a write barrier, as the other values are in the
2251
  // root set.
2252
  __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2253
                      a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2254
}
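
// For illustration, the object built above is the iterator result record,
// roughly { value: <popped value>, done: <done> }, allocated with the native
// context's generator result map.  As noted in the code, only the value field
// needs a write barrier.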


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(a0, result_register());
  __ li(a2, Operand(key->value()));
  // Call load IC. It has arguments receiver and property name a0 and a2.
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  __ mov(a0, result_register());
  // Call keyed load IC. It has arguments key and receiver in a0 and a1.
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2278
                                              Token::Value op,
2279
                                              OverwriteMode mode,
2280
                                              Expression* left_expr,
2281
                                              Expression* right_expr) {
2282
  Label done, smi_case, stub_call;
2283

    
2284
  Register scratch1 = a2;
2285
  Register scratch2 = a3;
2286

    
2287
  // Get the arguments.
2288
  Register left = a1;
2289
  Register right = a0;
2290
  __ pop(left);
2291
  __ mov(a0, result_register());
2292

    
2293
  // Perform combined smi check on both operands.
2294
  __ Or(scratch1, left, Operand(right));
2295
  STATIC_ASSERT(kSmiTag == 0);
2296
  JumpPatchSite patch_site(masm_);
2297
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2298

    
2299
  __ bind(&stub_call);
2300
  BinaryOpStub stub(op, mode);
2301
  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
2302
         expr->BinaryOperationFeedbackId());
2303
  patch_site.EmitPatchInfo();
2304
  __ jmp(&done);
2305

    
2306
  __ bind(&smi_case);
2307
  // Smi case. This code works the same way as the smi-smi case in the type
2308
  // recording binary operation stub, see
2309
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
2310
  switch (op) {
2311
    case Token::SAR:
2312
      __ Branch(&stub_call);
2313
      __ GetLeastBitsFromSmi(scratch1, right, 5);
2314
      __ srav(right, left, scratch1);
2315
      __ And(v0, right, Operand(~kSmiTagMask));
2316
      break;
2317
    case Token::SHL: {
2318
      __ Branch(&stub_call);
2319
      __ SmiUntag(scratch1, left);
2320
      __ GetLeastBitsFromSmi(scratch2, right, 5);
2321
      __ sllv(scratch1, scratch1, scratch2);
2322
      __ Addu(scratch2, scratch1, Operand(0x40000000));
2323
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2324
      __ SmiTag(v0, scratch1);
2325
      break;
2326
    }
2327
    case Token::SHR: {
2328
      __ Branch(&stub_call);
2329
      __ SmiUntag(scratch1, left);
2330
      __ GetLeastBitsFromSmi(scratch2, right, 5);
2331
      __ srlv(scratch1, scratch1, scratch2);
2332
      __ And(scratch2, scratch1, 0xc0000000);
2333
      __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2334
      __ SmiTag(v0, scratch1);
2335
      break;
2336
    }
2337
    case Token::ADD:
2338
      __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2339
      __ BranchOnOverflow(&stub_call, scratch1);
2340
      break;
2341
    case Token::SUB:
2342
      __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2343
      __ BranchOnOverflow(&stub_call, scratch1);
2344
      break;
2345
    case Token::MUL: {
2346
      __ SmiUntag(scratch1, right);
2347
      __ Mult(left, scratch1);
2348
      __ mflo(scratch1);
2349
      __ mfhi(scratch2);
2350
      __ sra(scratch1, scratch1, 31);
2351
      __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
2352
      __ mflo(v0);
2353
      __ Branch(&done, ne, v0, Operand(zero_reg));
2354
      __ Addu(scratch2, right, left);
2355
      __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2356
      ASSERT(Smi::FromInt(0) == 0);
2357
      __ mov(v0, zero_reg);
2358
      break;
2359
    }
2360
    case Token::BIT_OR:
2361
      __ Or(v0, left, Operand(right));
2362
      break;
2363
    case Token::BIT_AND:
2364
      __ And(v0, left, Operand(right));
2365
      break;
2366
    case Token::BIT_XOR:
2367
      __ Xor(v0, left, Operand(right));
2368
      break;
2369
    default:
2370
      UNREACHABLE();
2371
  }
2372

    
2373
  __ bind(&done);
2374
  context()->Plug(v0);
2375
}
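
// For illustration of the SHL fast path above: the untagged shift result is
// biased by 0x40000000 before the sign check, so any value outside the
// 31-bit smi range makes the biased result negative and sends the operation
// to the BinaryOpStub instead of producing a corrupt smi.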


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ mov(a0, result_register());
  __ pop(a1);
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(v0);
}


void FullCodeGenerator::EmitAssignment(Expression* expr) {
2393
  // Invalid left-hand sides are rewritten by the parser to have a 'throw
2394
  // ReferenceError' on the left-hand side.
2395
  if (!expr->IsValidLeftHandSide()) {
2396
    VisitForEffect(expr);
2397
    return;
2398
  }
2399

    
2400
  // Left-hand side can only be a property, a global or a (parameter or local)
2401
  // slot.
2402
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2403
  LhsKind assign_type = VARIABLE;
2404
  Property* prop = expr->AsProperty();
2405
  if (prop != NULL) {
2406
    assign_type = (prop->key()->IsPropertyName())
2407
        ? NAMED_PROPERTY
2408
        : KEYED_PROPERTY;
2409
  }
2410

    
2411
  switch (assign_type) {
2412
    case VARIABLE: {
2413
      Variable* var = expr->AsVariableProxy()->var();
2414
      EffectContext context(this);
2415
      EmitVariableAssignment(var, Token::ASSIGN);
2416
      break;
2417
    }
2418
    case NAMED_PROPERTY: {
2419
      __ push(result_register());  // Preserve value.
2420
      VisitForAccumulatorValue(prop->obj());
2421
      __ mov(a1, result_register());
2422
      __ pop(a0);  // Restore value.
2423
      __ li(a2, Operand(prop->key()->AsLiteral()->value()));
2424
      Handle<Code> ic = is_classic_mode()
2425
          ? isolate()->builtins()->StoreIC_Initialize()
2426
          : isolate()->builtins()->StoreIC_Initialize_Strict();
2427
      CallIC(ic);
2428
      break;
2429
    }
2430
    case KEYED_PROPERTY: {
2431
      __ push(result_register());  // Preserve value.
2432
      VisitForStackValue(prop->obj());
2433
      VisitForAccumulatorValue(prop->key());
2434
      __ mov(a1, result_register());
2435
      __ pop(a2);
2436
      __ pop(a0);  // Restore value.
2437
      Handle<Code> ic = is_classic_mode()
2438
        ? isolate()->builtins()->KeyedStoreIC_Initialize()
2439
        : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2440
      CallIC(ic);
2441
      break;
2442
    }
2443
  }
2444
  context()->Plug(v0);
2445
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2449
                                               Token::Value op) {
2450
  if (var->IsUnallocated()) {
2451
    // Global var, const, or let.
2452
    __ mov(a0, result_register());
2453
    __ li(a2, Operand(var->name()));
2454
    __ lw(a1, GlobalObjectOperand());
2455
    Handle<Code> ic = is_classic_mode()
2456
        ? isolate()->builtins()->StoreIC_Initialize()
2457
        : isolate()->builtins()->StoreIC_Initialize_Strict();
2458
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2459

    
2460
  } else if (op == Token::INIT_CONST) {
2461
    // Const initializers need a write barrier.
2462
    ASSERT(!var->IsParameter());  // No const parameters.
2463
    if (var->IsStackLocal()) {
2464
      Label skip;
2465
      __ lw(a1, StackOperand(var));
2466
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2467
      __ Branch(&skip, ne, a1, Operand(t0));
2468
      __ sw(result_register(), StackOperand(var));
2469
      __ bind(&skip);
2470
    } else {
2471
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2472
      // Like var declarations, const declarations are hoisted to function
2473
      // scope.  However, unlike var initializers, const initializers are
2474
      // able to drill a hole to that function context, even from inside a
2475
      // 'with' context.  We thus bypass the normal static scope lookup for
2476
      // var->IsContextSlot().
2477
      __ push(v0);
2478
      __ li(a0, Operand(var->name()));
2479
      __ Push(cp, a0);  // Context and name.
2480
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2481
    }
2482

    
2483
  } else if (var->mode() == LET && op != Token::INIT_LET) {
2484
    // Non-initializing assignment to let variable needs a write barrier.
2485
    if (var->IsLookupSlot()) {
2486
      __ push(v0);  // Value.
2487
      __ li(a1, Operand(var->name()));
2488
      __ li(a0, Operand(Smi::FromInt(language_mode())));
2489
      __ Push(cp, a1, a0);  // Context, name, strict mode.
2490
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
2491
    } else {
2492
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2493
      Label assign;
2494
      MemOperand location = VarOperand(var, a1);
2495
      __ lw(a3, location);
2496
      __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2497
      __ Branch(&assign, ne, a3, Operand(t0));
2498
      __ li(a3, Operand(var->name()));
2499
      __ push(a3);
2500
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
2501
      // Perform the assignment.
2502
      __ bind(&assign);
2503
      __ sw(result_register(), location);
2504
      if (var->IsContextSlot()) {
2505
        // RecordWrite may destroy all its register arguments.
2506
        __ mov(a3, result_register());
2507
        int offset = Context::SlotOffset(var->index());
2508
        __ RecordWriteContextSlot(
2509
            a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2510
      }
2511
    }
2512

    
2513
  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2514
    // Assignment to var or initializing assignment to let/const
2515
    // in harmony mode.
2516
    if (var->IsStackAllocated() || var->IsContextSlot()) {
2517
      MemOperand location = VarOperand(var, a1);
2518
      if (generate_debug_code_ && op == Token::INIT_LET) {
2519
        // Check for an uninitialized let binding.
2520
        __ lw(a2, location);
2521
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2522
        __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
2523
      }
2524
      // Perform the assignment.
2525
      __ sw(v0, location);
2526
      if (var->IsContextSlot()) {
2527
        __ mov(a3, v0);
2528
        int offset = Context::SlotOffset(var->index());
2529
        __ RecordWriteContextSlot(
2530
            a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2531
      }
2532
    } else {
2533
      ASSERT(var->IsLookupSlot());
2534
      __ push(v0);  // Value.
2535
      __ li(a1, Operand(var->name()));
2536
      __ li(a0, Operand(Smi::FromInt(language_mode())));
2537
      __ Push(cp, a1, a0);  // Context, name, strict mode.
2538
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
2539
    }
2540
  }
2541
    // Non-initializing assignments to consts are ignored.
2542
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2546
  // Assignment to a property, using a named store IC.
2547
  Property* prop = expr->target()->AsProperty();
2548
  ASSERT(prop != NULL);
2549
  ASSERT(prop->key()->AsLiteral() != NULL);
2550

    
2551
  // Record source code position before IC call.
2552
  SetSourcePosition(expr->position());
2553
  __ mov(a0, result_register());  // Load the value.
2554
  __ li(a2, Operand(prop->key()->AsLiteral()->value()));
2555
  __ pop(a1);
2556

    
2557
  Handle<Code> ic = is_classic_mode()
2558
        ? isolate()->builtins()->StoreIC_Initialize()
2559
        : isolate()->builtins()->StoreIC_Initialize_Strict();
2560
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2561

    
2562
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2563
  context()->Plug(v0);
2564
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2568
  // Assignment to a property, using a keyed store IC.
2569

    
2570
  // Record source code position before IC call.
2571
  SetSourcePosition(expr->position());
2572
  // Call keyed store IC.
2573
  // The arguments are:
2574
  // - a0 is the value,
2575
  // - a1 is the key,
2576
  // - a2 is the receiver.
2577
  __ mov(a0, result_register());
2578
  __ pop(a1);  // Key.
2579
  __ pop(a2);
2580

    
2581
  Handle<Code> ic = is_classic_mode()
2582
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
2583
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2584
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2585

    
2586
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2587
  context()->Plug(v0);
2588
}


void FullCodeGenerator::VisitProperty(Property* expr) {
2592
  Comment cmnt(masm_, "[ Property");
2593
  Expression* key = expr->key();
2594

    
2595
  if (key->IsPropertyName()) {
2596
    VisitForAccumulatorValue(expr->obj());
2597
    EmitNamedPropertyLoad(expr);
2598
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2599
    context()->Plug(v0);
2600
  } else {
2601
    VisitForStackValue(expr->obj());
2602
    VisitForAccumulatorValue(expr->key());
2603
    __ pop(a1);
2604
    EmitKeyedPropertyLoad(expr);
2605
    context()->Plug(v0);
2606
  }
2607
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               TypeFeedbackId id) {
  ic_total_count_++;
  __ Call(code, rmode, id);
}


void FullCodeGenerator::EmitCallWithIC(Call* expr,
2619
                                       Handle<Object> name,
2620
                                       RelocInfo::Mode mode) {
2621
  // Code common for calls using the IC.
2622
  ZoneList<Expression*>* args = expr->arguments();
2623
  int arg_count = args->length();
2624
  { PreservePositionScope scope(masm()->positions_recorder());
2625
    for (int i = 0; i < arg_count; i++) {
2626
      VisitForStackValue(args->at(i));
2627
    }
2628
    __ li(a2, Operand(name));
2629
  }
2630
  // Record source position for debugger.
2631
  SetSourcePosition(expr->position());
2632
  // Call the IC initialization code.
2633
  Handle<Code> ic =
2634
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2635
  CallIC(ic, mode, expr->CallFeedbackId());
2636
  RecordJSReturnSite(expr);
2637
  // Restore context register.
2638
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2639
  context()->Plug(v0);
2640
}


void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2644
                                            Expression* key) {
2645
  // Load the key.
2646
  VisitForAccumulatorValue(key);
2647

    
2648
  // Swap the name of the function and the receiver on the stack to follow
2649
  // the calling convention for call ICs.
2650
  __ pop(a1);
2651
  __ push(v0);
2652
  __ push(a1);
2653

    
2654
  // Code common for calls using the IC.
2655
  ZoneList<Expression*>* args = expr->arguments();
2656
  int arg_count = args->length();
2657
  { PreservePositionScope scope(masm()->positions_recorder());
2658
    for (int i = 0; i < arg_count; i++) {
2659
      VisitForStackValue(args->at(i));
2660
    }
2661
  }
2662
  // Record source position for debugger.
2663
  SetSourcePosition(expr->position());
2664
  // Call the IC initialization code.
2665
  Handle<Code> ic =
2666
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2667
  __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
2668
  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
2669
  RecordJSReturnSite(expr);
2670
  // Restore context register.
2671
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2672
  context()->DropAndPlug(1, v0);  // Drop the key still on the stack.
2673
}


void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2677
  // Code common for calls using the call stub.
2678
  ZoneList<Expression*>* args = expr->arguments();
2679
  int arg_count = args->length();
2680
  { PreservePositionScope scope(masm()->positions_recorder());
2681
    for (int i = 0; i < arg_count; i++) {
2682
      VisitForStackValue(args->at(i));
2683
    }
2684
  }
2685
  // Record source position for debugger.
2686
  SetSourcePosition(expr->position());
2687

    
2688
  // Record call targets.
2689
  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2690
  Handle<Object> uninitialized =
2691
      TypeFeedbackCells::UninitializedSentinel(isolate());
2692
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2693
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2694
  __ li(a2, Operand(cell));
2695

    
2696
  CallFunctionStub stub(arg_count, flags);
2697
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2698
  __ CallStub(&stub, expr->CallFeedbackId());
2699
  RecordJSReturnSite(expr);
2700
  // Restore context register.
2701
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2702
  context()->DropAndPlug(1, v0);
2703
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2707
  // Push copy of the first argument or undefined if it doesn't exist.
2708
  if (arg_count > 0) {
2709
    __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2710
  } else {
2711
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2712
  }
2713
  __ push(a1);
2714

    
2715
  // Push the receiver of the enclosing function.
2716
  int receiver_offset = 2 + info_->scope()->num_parameters();
2717
  __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
2718
  __ push(a1);
2719
  // Push the language mode.
2720
  __ li(a1, Operand(Smi::FromInt(language_mode())));
2721
  __ push(a1);
2722

    
2723
  // Push the start position of the scope the call resides in.
2724
  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2725
  __ push(a1);
2726

    
2727
  // Do the runtime call.
2728
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2729
}
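
// For illustration, the runtime call above receives five values: the
// possibly-eval function pushed by the caller, a copy of its first argument
// (or undefined), the enclosing function's receiver, the language mode and
// the scope start position.  %ResolvePossiblyDirectEval uses them to decide
// whether this really is a direct call to eval and returns the function and
// receiver to use.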


void FullCodeGenerator::VisitCall(Call* expr) {
2733
#ifdef DEBUG
2734
  // We want to verify that RecordJSReturnSite gets called on all paths
2735
  // through this function.  Avoid early returns.
2736
  expr->return_is_recorded_ = false;
2737
#endif
2738

    
2739
  Comment cmnt(masm_, "[ Call");
2740
  Expression* callee = expr->expression();
2741
  VariableProxy* proxy = callee->AsVariableProxy();
2742
  Property* property = callee->AsProperty();
2743

    
2744
  if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
2745
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
2746
    // resolve the function we need to call and the receiver of the
2747
    // call.  Then we call the resolved function using the given
2748
    // arguments.
2749
    ZoneList<Expression*>* args = expr->arguments();
2750
    int arg_count = args->length();
2751

    
2752
    { PreservePositionScope pos_scope(masm()->positions_recorder());
2753
      VisitForStackValue(callee);
2754
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2755
      __ push(a2);  // Reserved receiver slot.
2756

    
2757
      // Push the arguments.
2758
      for (int i = 0; i < arg_count; i++) {
2759
        VisitForStackValue(args->at(i));
2760
      }
2761

    
2762
      // Push a copy of the function (found below the arguments) and
2763
      // resolve eval.
2764
      __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2765
      __ push(a1);
2766
      EmitResolvePossiblyDirectEval(arg_count);
2767

    
2768
      // The runtime call returns a pair of values in v0 (function) and
2769
      // v1 (receiver). Touch up the stack with the right values.
2770
      __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2771
      __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
2772
    }
2773
    // Record source position for debugger.
2774
    SetSourcePosition(expr->position());
2775
    CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2776
    __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2777
    __ CallStub(&stub);
2778
    RecordJSReturnSite(expr);
2779
    // Restore context register.
2780
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2781
    context()->DropAndPlug(1, v0);
2782
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2783
    // Push global object as receiver for the call IC.
2784
    __ lw(a0, GlobalObjectOperand());
2785
    __ push(a0);
2786
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2787
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2788
    // Call to a lookup slot (dynamically introduced variable).
2789
    Label slow, done;
2790

    
2791
    { PreservePositionScope scope(masm()->positions_recorder());
2792
      // Generate code for loading from variables potentially shadowed
2793
      // by eval-introduced variables.
2794
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2795
    }
2796

    
2797
    __ bind(&slow);
2798
    // Call the runtime to find the function to call (returned in v0)
2799
    // and the object holding it (returned in v1).
2800
    __ push(context_register());
2801
    __ li(a2, Operand(proxy->name()));
2802
    __ push(a2);
2803
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
2804
    __ Push(v0, v1);  // Function, receiver.
2805

    
2806
    // If fast case code has been generated, emit code to push the
2807
    // function and receiver and have the slow path jump around this
2808
    // code.
2809
    if (done.is_linked()) {
2810
      Label call;
2811
      __ Branch(&call);
2812
      __ bind(&done);
2813
      // Push function.
2814
      __ push(v0);
2815
      // The receiver is implicitly the global receiver. Indicate this
2816
      // by passing the hole to the call function stub.
2817
      __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
2818
      __ push(a1);
2819
      __ bind(&call);
2820
    }
2821

    
2822
    // The receiver is either the global receiver or an object found
2823
    // by LoadContextSlot. That object could be the hole if the
2824
    // receiver is implicitly the global object.
2825
    EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2826
  } else if (property != NULL) {
2827
    { PreservePositionScope scope(masm()->positions_recorder());
2828
      VisitForStackValue(property->obj());
2829
    }
2830
    if (property->key()->IsPropertyName()) {
2831
      EmitCallWithIC(expr,
2832
                     property->key()->AsLiteral()->value(),
2833
                     RelocInfo::CODE_TARGET);
2834
    } else {
2835
      EmitKeyedCallWithIC(expr, property->key());
2836
    }
2837
  } else {
2838
    // Call to an arbitrary expression not handled specially above.
2839
    { PreservePositionScope scope(masm()->positions_recorder());
2840
      VisitForStackValue(callee);
2841
    }
2842
    // Load global receiver object.
2843
    __ lw(a1, GlobalObjectOperand());
2844
    __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2845
    __ push(a1);
2846
    // Emit function call.
2847
    EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2848
  }
2849

    
2850
#ifdef DEBUG
2851
  // RecordJSReturnSite should have been called.
2852
  ASSERT(expr->return_is_recorded_);
2853
#endif
2854
}
2855

    
2856

    
2857
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2858
  Comment cmnt(masm_, "[ CallNew");
2859
  // According to ECMA-262, section 11.2.2, page 44, the function
2860
  // expression in new calls must be evaluated before the
2861
  // arguments.
2862

    
2863
  // Push constructor on the stack.  If it's not a function it's used as
2864
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2865
  // ignored.
2866
  VisitForStackValue(expr->expression());
2867

    
2868
  // Push the arguments ("left-to-right") on the stack.
2869
  ZoneList<Expression*>* args = expr->arguments();
2870
  int arg_count = args->length();
2871
  for (int i = 0; i < arg_count; i++) {
2872
    VisitForStackValue(args->at(i));
2873
  }
2874

    
2875
  // Call the construct call builtin that handles allocation and
2876
  // constructor invocation.
2877
  SetSourcePosition(expr->position());
2878

    
2879
  // Load function and argument count into a1 and a0.
2880
  __ li(a0, Operand(arg_count));
2881
  __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2882

    
2883
  // Record call targets in unoptimized code.
2884
  Handle<Object> uninitialized =
2885
      TypeFeedbackCells::UninitializedSentinel(isolate());
2886
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2887
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
2888
  __ li(a2, Operand(cell));
2889

    
2890
  CallConstructStub stub(RECORD_CALL_TARGET);
2891
  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2892
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2893
  context()->Plug(v0);
2894
}
2895

    
2896

    
2897
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2898
  ZoneList<Expression*>* args = expr->arguments();
2899
  ASSERT(args->length() == 1);
2900

    
2901
  VisitForAccumulatorValue(args->at(0));
2902

    
2903
  Label materialize_true, materialize_false;
2904
  Label* if_true = NULL;
2905
  Label* if_false = NULL;
2906
  Label* fall_through = NULL;
2907
  context()->PrepareTest(&materialize_true, &materialize_false,
2908
                         &if_true, &if_false, &fall_through);
2909

    
2910
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2911
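  // A smi is the value shifted left by one with a zero tag bit (kSmiTag == 0,
  // kSmiTagSize == 1), e.g. 5 is encoded as 0b1010, so masking with
  // kSmiTagMask yields zero exactly for smis.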
  __ And(t0, v0, Operand(kSmiTagMask));
2912
  Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
2913

    
2914
  context()->Plug(if_true, if_false);
2915
}
2916

    
2917

    
2918
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2919
  ZoneList<Expression*>* args = expr->arguments();
2920
  ASSERT(args->length() == 1);
2921

    
2922
  VisitForAccumulatorValue(args->at(0));
2923

    
2924
  Label materialize_true, materialize_false;
2925
  Label* if_true = NULL;
2926
  Label* if_false = NULL;
2927
  Label* fall_through = NULL;
2928
  context()->PrepareTest(&materialize_true, &materialize_false,
2929
                         &if_true, &if_false, &fall_through);
2930

    
2931
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2932
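  // Test the smi tag bit and the sign bit in one mask: both must be clear for
  // a non-negative smi.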
  __ And(at, v0, Operand(kSmiTagMask | 0x80000000));
2933
  Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
2934

    
2935
  context()->Plug(if_true, if_false);
2936
}
2937

    
2938

    
2939
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2940
  ZoneList<Expression*>* args = expr->arguments();
2941
  ASSERT(args->length() == 1);
2942

    
2943
  VisitForAccumulatorValue(args->at(0));
2944

    
2945
  Label materialize_true, materialize_false;
2946
  Label* if_true = NULL;
2947
  Label* if_false = NULL;
2948
  Label* fall_through = NULL;
2949
  context()->PrepareTest(&materialize_true, &materialize_false,
2950
                         &if_true, &if_false, &fall_through);
2951

    
2952
  __ JumpIfSmi(v0, if_false);
2953
  __ LoadRoot(at, Heap::kNullValueRootIndex);
2954
  __ Branch(if_true, eq, v0, Operand(at));
2955
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
2956
  // Undetectable objects behave like undefined when tested with typeof.
2957
  __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
2958
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2959
  __ Branch(if_false, ne, at, Operand(zero_reg));
2960
  __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
2961
  __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2962
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2963
  Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
2964
        if_true, if_false, fall_through);
2965

    
2966
  context()->Plug(if_true, if_false);
2967
}
2968

    
2969

    
2970
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2971
  ZoneList<Expression*>* args = expr->arguments();
2972
  ASSERT(args->length() == 1);
2973

    
2974
  VisitForAccumulatorValue(args->at(0));
2975

    
2976
  Label materialize_true, materialize_false;
2977
  Label* if_true = NULL;
2978
  Label* if_false = NULL;
2979
  Label* fall_through = NULL;
2980
  context()->PrepareTest(&materialize_true, &materialize_false,
2981
                         &if_true, &if_false, &fall_through);
2982

    
2983
  __ JumpIfSmi(v0, if_false);
2984
  __ GetObjectType(v0, a1, a1);
2985
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2986
  Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
2987
        if_true, if_false, fall_through);
2988

    
2989
  context()->Plug(if_true, if_false);
2990
}
2991

    
2992

    
2993
void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2994
  ZoneList<Expression*>* args = expr->arguments();
2995
  ASSERT(args->length() == 1);
2996

    
2997
  VisitForAccumulatorValue(args->at(0));
2998

    
2999
  Label materialize_true, materialize_false;
3000
  Label* if_true = NULL;
3001
  Label* if_false = NULL;
3002
  Label* fall_through = NULL;
3003
  context()->PrepareTest(&materialize_true, &materialize_false,
3004
                         &if_true, &if_false, &fall_through);
3005

    
3006
  __ JumpIfSmi(v0, if_false);
3007
  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3008
  __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3009
  __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3010
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3011
  Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3012

    
3013
  context()->Plug(if_true, if_false);
3014
}
3015

    
3016

    
3017
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3018
    CallRuntime* expr) {
3019
  ZoneList<Expression*>* args = expr->arguments();
3020
  ASSERT(args->length() == 1);
3021

    
3022
  VisitForAccumulatorValue(args->at(0));
3023

    
3024
  Label materialize_true, materialize_false, skip_lookup;
3025
  Label* if_true = NULL;
3026
  Label* if_false = NULL;
3027
  Label* fall_through = NULL;
3028
  context()->PrepareTest(&materialize_true, &materialize_false,
3029
                         &if_true, &if_false, &fall_through);
3030

    
3031
  __ AssertNotSmi(v0);
3032

    
3033
  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3034
  __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
3035
  __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3036
  __ Branch(&skip_lookup, ne, t0, Operand(zero_reg));
3037

    
3038
  // Check for fast case object. Generate false result for slow case object.
3039
  __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3040
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3041
  __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
3042
  __ Branch(if_false, eq, a2, Operand(t0));
3043

    
3044
  // Look for valueOf name in the descriptor array, and indicate false if
3045
  // found. Since we omit an enumeration index check, a valueOf added via a
3046
  // transition that shares this descriptor array yields a false positive here.
3047
  Label entry, loop, done;
3048

    
3049
  // Skip loop if no descriptors are valid.
3050
  __ NumberOfOwnDescriptors(a3, a1);
3051
  __ Branch(&done, eq, a3, Operand(zero_reg));
3052

    
3053
  __ LoadInstanceDescriptors(a1, t0);
3054
  // t0: descriptor array.
3055
  // a3: valid entries in the descriptor array.
3056
  STATIC_ASSERT(kSmiTag == 0);
3057
  STATIC_ASSERT(kSmiTagSize == 1);
3058
  STATIC_ASSERT(kPointerSize == 4);
3059
  __ li(at, Operand(DescriptorArray::kDescriptorSize));
3060
  __ Mul(a3, a3, at);
3061
  // Calculate location of the first key name.
3062
  __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3063
  // Calculate the end of the descriptor array.
3064
  __ mov(a2, t0);
3065
  __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
3066
  __ Addu(a2, a2, t1);
3067

    
3068
  // Loop through all the keys in the descriptor array. If one of these is the
3069
  // string "valueOf" the result is false.
3070
  // The use of t2 to store the valueOf string assumes that it is not otherwise
3071
  // used in the loop below.
3072
  __ li(t2, Operand(isolate()->factory()->value_of_string()));
3073
  __ jmp(&entry);
3074
  __ bind(&loop);
3075
  __ lw(a3, MemOperand(t0, 0));
3076
  __ Branch(if_false, eq, a3, Operand(t2));
3077
  __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3078
  __ bind(&entry);
3079
  __ Branch(&loop, ne, t0, Operand(a2));
3080

    
3081
  __ bind(&done);
3082

    
3083
  // Set the bit in the map to indicate that there is no local valueOf field.
3084
  __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3085
  __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3086
  __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3087

    
3088
  __ bind(&skip_lookup);
3089

    
3090
  // If a valueOf property is not found on the object, check that its
3091
  // prototype is the unmodified String prototype. If not, the result is false.
3092
  __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3093
  __ JumpIfSmi(a2, if_false);
3094
  __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3095
  __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3096
  __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3097
  __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3098
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3099
  Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3100

    
3101
  context()->Plug(if_true, if_false);
3102
}
3103

    
3104

    
3105
void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3106
  ZoneList<Expression*>* args = expr->arguments();
3107
  ASSERT(args->length() == 1);
3108

    
3109
  VisitForAccumulatorValue(args->at(0));
3110

    
3111
  Label materialize_true, materialize_false;
3112
  Label* if_true = NULL;
3113
  Label* if_false = NULL;
3114
  Label* fall_through = NULL;
3115
  context()->PrepareTest(&materialize_true, &materialize_false,
3116
                         &if_true, &if_false, &fall_through);
3117

    
3118
  __ JumpIfSmi(v0, if_false);
3119
  __ GetObjectType(v0, a1, a2);
3120
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3121
  __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3122
  __ Branch(if_false);
3123

    
3124
  context()->Plug(if_true, if_false);
3125
}
3126

    
3127

    
3128
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3129
  ZoneList<Expression*>* args = expr->arguments();
3130
  ASSERT(args->length() == 1);
3131

    
3132
  VisitForAccumulatorValue(args->at(0));
3133

    
3134
  Label materialize_true, materialize_false;
3135
  Label* if_true = NULL;
3136
  Label* if_false = NULL;
3137
  Label* fall_through = NULL;
3138
  context()->PrepareTest(&materialize_true, &materialize_false,
3139
                         &if_true, &if_false, &fall_through);
3140

    
3141
  __ JumpIfSmi(v0, if_false);
3142
  __ GetObjectType(v0, a1, a1);
3143
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3144
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
3145
        if_true, if_false, fall_through);
3146

    
3147
  context()->Plug(if_true, if_false);
3148
}
3149

    
3150

    
3151
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3152
  ZoneList<Expression*>* args = expr->arguments();
3153
  ASSERT(args->length() == 1);
3154

    
3155
  VisitForAccumulatorValue(args->at(0));
3156

    
3157
  Label materialize_true, materialize_false;
3158
  Label* if_true = NULL;
3159
  Label* if_false = NULL;
3160
  Label* fall_through = NULL;
3161
  context()->PrepareTest(&materialize_true, &materialize_false,
3162
                         &if_true, &if_false, &fall_through);
3163

    
3164
  __ JumpIfSmi(v0, if_false);
3165
  __ GetObjectType(v0, a1, a1);
3166
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3167
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3168

    
3169
  context()->Plug(if_true, if_false);
3170
}
3171

    
3172

    
3173
void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3174
  ASSERT(expr->arguments()->length() == 0);
3175

    
3176
  Label materialize_true, materialize_false;
3177
  Label* if_true = NULL;
3178
  Label* if_false = NULL;
3179
  Label* fall_through = NULL;
3180
  context()->PrepareTest(&materialize_true, &materialize_false,
3181
                         &if_true, &if_false, &fall_through);
3182

    
3183
  // Get the frame pointer for the calling frame.
3184
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3185

    
3186
  // Skip the arguments adaptor frame if it exists.
3187
  Label check_frame_marker;
3188
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3189
  __ Branch(&check_frame_marker, ne,
3190
            a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3191
  __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3192

    
3193
  // Check the marker in the calling frame.
3194
  __ bind(&check_frame_marker);
3195
  __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3196
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3197
  Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3198
        if_true, if_false, fall_through);
3199

    
3200
  context()->Plug(if_true, if_false);
3201
}
3202

    
3203

    
3204
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3205
  ZoneList<Expression*>* args = expr->arguments();
3206
  ASSERT(args->length() == 2);
3207

    
3208
  // Load the two objects into registers and perform the comparison.
3209
  VisitForStackValue(args->at(0));
3210
  VisitForAccumulatorValue(args->at(1));
3211

    
3212
  Label materialize_true, materialize_false;
3213
  Label* if_true = NULL;
3214
  Label* if_false = NULL;
3215
  Label* fall_through = NULL;
3216
  context()->PrepareTest(&materialize_true, &materialize_false,
3217
                         &if_true, &if_false, &fall_through);
3218

    
3219
  __ pop(a1);
3220
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3221
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3222

    
3223
  context()->Plug(if_true, if_false);
3224
}
3225

    
3226

    
3227
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3228
  ZoneList<Expression*>* args = expr->arguments();
3229
  ASSERT(args->length() == 1);
3230

    
3231
  // ArgumentsAccessStub expects the key in a1 and the formal
3232
  // parameter count in a0.
3233
  VisitForAccumulatorValue(args->at(0));
3234
  __ mov(a1, v0);
3235
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3236
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3237
  __ CallStub(&stub);
3238
  context()->Plug(v0);
3239
}
3240

    
3241

    
3242
void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3243
  ASSERT(expr->arguments()->length() == 0);
3244
  Label exit;
3245
  // Get the number of formal parameters.
3246
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3247

    
3248
  // Check if the calling frame is an arguments adaptor frame.
3249
  __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3250
  __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3251
  __ Branch(&exit, ne, a3,
3252
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3253

    
3254
  // Arguments adaptor case: Read the arguments length from the
3255
  // adaptor frame.
3256
  __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3257

    
3258
  __ bind(&exit);
3259
  context()->Plug(v0);
3260
}
3261

    
3262

    
3263
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3264
  ZoneList<Expression*>* args = expr->arguments();
3265
  ASSERT(args->length() == 1);
3266
  Label done, null, function, non_function_constructor;
3267

    
3268
  VisitForAccumulatorValue(args->at(0));
3269

    
3270
  // If the object is a smi, we return null.
3271
  __ JumpIfSmi(v0, &null);
3272

    
3273
  // Check that the object is a JS object but take special care of JS
3274
  // functions to make sure they have 'Function' as their class.
3275
  // Assume that there are only two callable types, and one of them is at
3276
  // either end of the type range for JS object types. Saves extra comparisons.
3277
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3278
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
3279
  __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3280

    
3281
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3282
                FIRST_SPEC_OBJECT_TYPE + 1);
3283
  __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3284

    
3285
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3286
                LAST_SPEC_OBJECT_TYPE - 1);
3287
  __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3288
  // Assume that there is no larger type.
3289
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3290

    
3291
  // Check if the constructor in the map is a JS function.
3292
  __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
3293
  __ GetObjectType(v0, a1, a1);
3294
  __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
3295

    
3296
  // v0 now contains the constructor function. Grab the
3297
  // instance class name from there.
3298
  __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3299
  __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3300
  __ Branch(&done);
3301

    
3302
  // Functions have class 'Function'.
3303
  __ bind(&function);
3304
  __ LoadRoot(v0, Heap::kfunction_class_stringRootIndex);
3305
  __ jmp(&done);
3306

    
3307
  // Objects with a non-function constructor have class 'Object'.
3308
  __ bind(&non_function_constructor);
3309
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3310
  __ jmp(&done);
3311

    
3312
  // Non-JS objects have class null.
3313
  __ bind(&null);
3314
  __ LoadRoot(v0, Heap::kNullValueRootIndex);
3315

    
3316
  // All done.
3317
  __ bind(&done);
3318

    
3319
  context()->Plug(v0);
3320
}
3321

    
3322

    
3323
void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3324
  // Conditionally generate a log call.
3325
  // Args:
3326
  //   0 (literal string): The type of logging (corresponds to the flags).
3327
  //     This is used to determine whether or not to generate the log call.
3328
  //   1 (string): Format string.  Access the string at argument index 2
3329
  //     with '%2s' (see Logger::LogRuntime for all the formats).
3330
  //   2 (array): Arguments to the format string.
3331
  ZoneList<Expression*>* args = expr->arguments();
3332
  ASSERT_EQ(args->length(), 3);
3333
  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3334
    VisitForStackValue(args->at(1));
3335
    VisitForStackValue(args->at(2));
3336
    __ CallRuntime(Runtime::kLog, 2);
3337
  }
3338

    
3339
  // Finally, we're expected to leave a value on the top of the stack.
3340
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3341
  context()->Plug(v0);
3342
}
3343

    
3344

    
3345
void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
3346
  ASSERT(expr->arguments()->length() == 0);
3347
  Label slow_allocate_heapnumber;
3348
  Label heapnumber_allocated;
3349

    
3350
  // Save the new heap number in callee-saved register s0, since
3351
  // we call out to external C code below.
3352
  __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3353
  __ AllocateHeapNumber(s0, a1, a2, t6, &slow_allocate_heapnumber);
3354
  __ jmp(&heapnumber_allocated);
3355

    
3356
  __ bind(&slow_allocate_heapnumber);
3357

    
3358
  // Allocate a heap number.
3359
  __ CallRuntime(Runtime::kNumberAlloc, 0);
3360
  __ mov(s0, v0);   // Save result in s0, so it is preserved across the C call.
3361

    
3362
  __ bind(&heapnumber_allocated);
3363

    
3364
  // Convert 32 random bits in v0 to 0.(32 random bits) in a double
3365
  // by computing:
3366
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - ( 1.0 x 2^20 ).
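  // With high word 0x41300000 and the 32 random bits r in the low mantissa
  // word, the double equals 2^20 + r * 2^-32; subtracting 1.0 x 2^20 leaves
  // r * 2^-32, a value in the range [0, 1).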
3367
  __ PrepareCallCFunction(1, a0);
3368
  __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3369
  __ lw(a0, FieldMemOperand(a0, GlobalObject::kNativeContextOffset));
3370
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
3371

    
3372
  // 0x41300000 is the top half of 1.0 x 2^20 as a double.
3373
  __ li(a1, Operand(0x41300000));
3374
  // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU.
3375
  __ Move(f12, v0, a1);
3376
  // Move 0x4130000000000000 to FPU.
3377
  __ Move(f14, zero_reg, a1);
3378
  // Subtract and store the result in the heap number.
3379
  __ sub_d(f0, f12, f14);
3380
  __ sdc1(f0, FieldMemOperand(s0, HeapNumber::kValueOffset));
3381
  __ mov(v0, s0);
3382

    
3383
  context()->Plug(v0);
3384
}
3385

    
3386

    
3387
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3388
  // Load the arguments on the stack and call the stub.
3389
  SubStringStub stub;
3390
  ZoneList<Expression*>* args = expr->arguments();
3391
  ASSERT(args->length() == 3);
3392
  VisitForStackValue(args->at(0));
3393
  VisitForStackValue(args->at(1));
3394
  VisitForStackValue(args->at(2));
3395
  __ CallStub(&stub);
3396
  context()->Plug(v0);
3397
}
3398

    
3399

    
3400
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3401
  // Load the arguments on the stack and call the stub.
3402
  RegExpExecStub stub;
3403
  ZoneList<Expression*>* args = expr->arguments();
3404
  ASSERT(args->length() == 4);
3405
  VisitForStackValue(args->at(0));
3406
  VisitForStackValue(args->at(1));
3407
  VisitForStackValue(args->at(2));
3408
  VisitForStackValue(args->at(3));
3409
  __ CallStub(&stub);
3410
  context()->Plug(v0);
3411
}
3412

    
3413

    
3414
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3415
  ZoneList<Expression*>* args = expr->arguments();
3416
  ASSERT(args->length() == 1);
3417

    
3418
  VisitForAccumulatorValue(args->at(0));  // Load the object.
3419

    
3420
  Label done;
3421
  // If the object is a smi, return the object.
3422
  __ JumpIfSmi(v0, &done);
3423
  // If the object is not a value type, return the object.
3424
  __ GetObjectType(v0, a1, a1);
3425
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3426

    
3427
  __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3428

    
3429
  __ bind(&done);
3430
  context()->Plug(v0);
3431
}
3432

    
3433

    
3434
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3435
  ZoneList<Expression*>* args = expr->arguments();
3436
  ASSERT(args->length() == 2);
3437
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3438
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3439

    
3440
  VisitForAccumulatorValue(args->at(0));  // Load the object.
3441

    
3442
  Label runtime, done, not_date_object;
3443
  Register object = v0;
3444
  Register result = v0;
3445
  Register scratch0 = t5;
3446
  Register scratch1 = a1;
3447

    
3448
  __ JumpIfSmi(object, &not_date_object);
3449
  __ GetObjectType(object, scratch1, scratch1);
3450
  __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3451

    
3452
  if (index->value() == 0) {
3453
    __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
3454
    __ jmp(&done);
3455
  } else {
3456
    if (index->value() < JSDate::kFirstUncachedField) {
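      // For cached fields, the field can be read directly from the object as
      // long as the object's cache stamp matches the isolate's current date
      // cache stamp; otherwise fall through to the C++ helper below.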
3457
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3458
      __ li(scratch1, Operand(stamp));
3459
      __ lw(scratch1, MemOperand(scratch1));
3460
      __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3461
      __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3462
      __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
3463
                                            kPointerSize * index->value()));
3464
      __ jmp(&done);
3465
    }
3466
    __ bind(&runtime);
3467
    __ PrepareCallCFunction(2, scratch1);
3468
    __ li(a1, Operand(index));
3469
    __ Move(a0, object);
3470
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3471
    __ jmp(&done);
3472
  }
3473

    
3474
  __ bind(&not_date_object);
3475
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3476
  __ bind(&done);
3477
  context()->Plug(v0);
3478
}
3479

    
3480

    
3481
void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
3482
                                                  Register index,
3483
                                                  Register value,
3484
                                                  uint32_t encoding_mask) {
3485
  __ And(at, index, Operand(kSmiTagMask));
3486
  __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3487
  __ And(at, value, Operand(kSmiTagMask));
3488
  __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3489

    
3490
  __ lw(at, FieldMemOperand(string, String::kLengthOffset));
3491
  __ Check(lt, kIndexIsTooLarge, index, Operand(at));
3492

    
3493
  __ Check(ge, kIndexIsNegative, index, Operand(zero_reg));
3494

    
3495
  __ lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
3496
  __ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset));
3497

    
3498
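  // The callers pass kSeqStringTag combined with either the one-byte or the
  // two-byte encoding tag, so after masking the instance type must equal
  // encoding_mask exactly for a sequential string of the expected encoding.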
  __ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask));
3499
  __ Subu(at, at, Operand(encoding_mask));
3500
  __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
3501
}
3502

    
3503

    
3504
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3505
  ZoneList<Expression*>* args = expr->arguments();
3506
  ASSERT_EQ(3, args->length());
3507

    
3508
  Register string = v0;
3509
  Register index = a1;
3510
  Register value = a2;
3511

    
3512
  VisitForStackValue(args->at(1));  // index
3513
  VisitForStackValue(args->at(2));  // value
3514
  __ pop(value);
3515
  __ pop(index);
3516
  VisitForAccumulatorValue(args->at(0));  // string
3517

    
3518
  if (FLAG_debug_code) {
3519
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3520
    EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3521
  }
3522

    
3523
  __ SmiUntag(value, value);
3524
  __ Addu(at,
3525
          string,
3526
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3527
  __ SmiUntag(index);
3528
  __ Addu(at, at, index);
3529
  __ sb(value, MemOperand(at));
3530
  context()->Plug(string);
3531
}
3532

    
3533

    
3534
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3535
  ZoneList<Expression*>* args = expr->arguments();
3536
  ASSERT_EQ(3, args->length());
3537

    
3538
  Register string = v0;
3539
  Register index = a1;
3540
  Register value = a2;
3541

    
3542
  VisitForStackValue(args->at(1));  // index
3543
  VisitForStackValue(args->at(2));  // value
3544
  __ pop(value);
3545
  __ pop(index);
3546
  VisitForAccumulatorValue(args->at(0));  // string
3547

    
3548
  if (FLAG_debug_code) {
3549
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3550
    EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3551
  }
3552

    
3553
  __ SmiUntag(value, value);
3554
  __ Addu(at,
3555
          string,
3556
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3557
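  // The index is still a smi; with a one-bit zero tag its representation is
  // already twice the character index, which is exactly the byte offset of a
  // two-byte character (see the STATIC_ASSERT below).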
  __ Addu(at, at, index);
3558
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3559
  __ sh(value, MemOperand(at));
3560
  context()->Plug(string);
3561
}
3562

    
3563

    
3564
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3565
  // Load the arguments on the stack and call the runtime function.
3566
  ZoneList<Expression*>* args = expr->arguments();
3567
  ASSERT(args->length() == 2);
3568
  VisitForStackValue(args->at(0));
3569
  VisitForStackValue(args->at(1));
3570
  MathPowStub stub(MathPowStub::ON_STACK);
3571
  __ CallStub(&stub);
3572
  context()->Plug(v0);
3573
}
3574

    
3575

    
3576
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3577
  ZoneList<Expression*>* args = expr->arguments();
3578
  ASSERT(args->length() == 2);
3579

    
3580
  VisitForStackValue(args->at(0));  // Load the object.
3581
  VisitForAccumulatorValue(args->at(1));  // Load the value.
3582
  __ pop(a1);  // v0 = value. a1 = object.
3583

    
3584
  Label done;
3585
  // If the object is a smi, return the value.
3586
  __ JumpIfSmi(a1, &done);
3587

    
3588
  // If the object is not a value type, return the value.
3589
  __ GetObjectType(a1, a2, a2);
3590
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3591

    
3592
  // Store the value.
3593
  __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3594
  // Update the write barrier.  Save the value as it will be
3595
  // overwritten by the write barrier code and is needed afterward.
3596
  __ mov(a2, v0);
3597
  __ RecordWriteField(
3598
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3599

    
3600
  __ bind(&done);
3601
  context()->Plug(v0);
3602
}
3603

    
3604

    
3605
void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3606
  ZoneList<Expression*>* args = expr->arguments();
3607
  ASSERT_EQ(args->length(), 1);
3608

    
3609
  // Load the argument into a0 and call the stub.
3610
  VisitForAccumulatorValue(args->at(0));
3611
  __ mov(a0, result_register());
3612

    
3613
  NumberToStringStub stub;
3614
  __ CallStub(&stub);
3615
  context()->Plug(v0);
3616
}
3617

    
3618

    
3619
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3620
  ZoneList<Expression*>* args = expr->arguments();
3621
  ASSERT(args->length() == 1);
3622

    
3623
  VisitForAccumulatorValue(args->at(0));
3624

    
3625
  Label done;
3626
  StringCharFromCodeGenerator generator(v0, a1);
3627
  generator.GenerateFast(masm_);
3628
  __ jmp(&done);
3629

    
3630
  NopRuntimeCallHelper call_helper;
3631
  generator.GenerateSlow(masm_, call_helper);
3632

    
3633
  __ bind(&done);
3634
  context()->Plug(a1);
3635
}
3636

    
3637

    
3638
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3639
  ZoneList<Expression*>* args = expr->arguments();
3640
  ASSERT(args->length() == 2);
3641

    
3642
  VisitForStackValue(args->at(0));
3643
  VisitForAccumulatorValue(args->at(1));
3644
  __ mov(a0, result_register());
3645

    
3646
  Register object = a1;
3647
  Register index = a0;
3648
  Register result = v0;
3649

    
3650
  __ pop(object);
3651

    
3652
  Label need_conversion;
3653
  Label index_out_of_range;
3654
  Label done;
3655
  StringCharCodeAtGenerator generator(object,
3656
                                      index,
3657
                                      result,
3658
                                      &need_conversion,
3659
                                      &need_conversion,
3660
                                      &index_out_of_range,
3661
                                      STRING_INDEX_IS_NUMBER);
3662
  generator.GenerateFast(masm_);
3663
  __ jmp(&done);
3664

    
3665
  __ bind(&index_out_of_range);
3666
  // When the index is out of range, the spec requires us to return
3667
  // NaN.
3668
  __ LoadRoot(result, Heap::kNanValueRootIndex);
3669
  __ jmp(&done);
3670

    
3671
  __ bind(&need_conversion);
3672
  // Load the undefined value into the result register, which will
3673
  // trigger conversion.
3674
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3675
  __ jmp(&done);
3676

    
3677
  NopRuntimeCallHelper call_helper;
3678
  generator.GenerateSlow(masm_, call_helper);
3679

    
3680
  __ bind(&done);
3681
  context()->Plug(result);
3682
}
3683

    
3684

    
3685
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3686
  ZoneList<Expression*>* args = expr->arguments();
3687
  ASSERT(args->length() == 2);
3688

    
3689
  VisitForStackValue(args->at(0));
3690
  VisitForAccumulatorValue(args->at(1));
3691
  __ mov(a0, result_register());
3692

    
3693
  Register object = a1;
3694
  Register index = a0;
3695
  Register scratch = a3;
3696
  Register result = v0;
3697

    
3698
  __ pop(object);
3699

    
3700
  Label need_conversion;
3701
  Label index_out_of_range;
3702
  Label done;
3703
  StringCharAtGenerator generator(object,
3704
                                  index,
3705
                                  scratch,
3706
                                  result,
3707
                                  &need_conversion,
3708
                                  &need_conversion,
3709
                                  &index_out_of_range,
3710
                                  STRING_INDEX_IS_NUMBER);
3711
  generator.GenerateFast(masm_);
3712
  __ jmp(&done);
3713

    
3714
  __ bind(&index_out_of_range);
3715
  // When the index is out of range, the spec requires us to return
3716
  // the empty string.
3717
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
3718
  __ jmp(&done);
3719

    
3720
  __ bind(&need_conversion);
3721
  // Move smi zero into the result register, which will trigger
3722
  // conversion.
3723
  __ li(result, Operand(Smi::FromInt(0)));
3724
  __ jmp(&done);
3725

    
3726
  NopRuntimeCallHelper call_helper;
3727
  generator.GenerateSlow(masm_, call_helper);
3728

    
3729
  __ bind(&done);
3730
  context()->Plug(result);
3731
}
3732

    
3733

    
3734
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3735
  ZoneList<Expression*>* args = expr->arguments();
3736
  ASSERT_EQ(2, args->length());
3737
  VisitForStackValue(args->at(0));
3738
  VisitForStackValue(args->at(1));
3739

    
3740
  StringAddStub stub(STRING_ADD_CHECK_BOTH);
3741
  __ CallStub(&stub);
3742
  context()->Plug(v0);
3743
}
3744

    
3745

    
3746
void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3747
  ZoneList<Expression*>* args = expr->arguments();
3748
  ASSERT_EQ(2, args->length());
3749

    
3750
  VisitForStackValue(args->at(0));
3751
  VisitForStackValue(args->at(1));
3752

    
3753
  StringCompareStub stub;
3754
  __ CallStub(&stub);
3755
  context()->Plug(v0);
3756
}
3757

    
3758

    
3759
void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3760
  // Load the argument on the stack and call the stub.
3761
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3762
                               TranscendentalCacheStub::TAGGED);
3763
  ZoneList<Expression*>* args = expr->arguments();
3764
  ASSERT(args->length() == 1);
3765
  VisitForStackValue(args->at(0));
3766
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
3767
  __ CallStub(&stub);
3768
  context()->Plug(v0);
3769
}
3770

    
3771

    
3772
void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3773
  // Load the argument on the stack and call the stub.
3774
  TranscendentalCacheStub stub(TranscendentalCache::COS,
3775
                               TranscendentalCacheStub::TAGGED);
3776
  ZoneList<Expression*>* args = expr->arguments();
3777
  ASSERT(args->length() == 1);
3778
  VisitForStackValue(args->at(0));
3779
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
3780
  __ CallStub(&stub);
3781
  context()->Plug(v0);
3782
}
3783

    
3784

    
3785
void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3786
  // Load the argument on the stack and call the stub.
3787
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
3788
                               TranscendentalCacheStub::TAGGED);
3789
  ZoneList<Expression*>* args = expr->arguments();
3790
  ASSERT(args->length() == 1);
3791
  VisitForStackValue(args->at(0));
3792
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
3793
  __ CallStub(&stub);
3794
  context()->Plug(v0);
3795
}
3796

    
3797

    
3798
void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3799
  // Load the argument on the stack and call the stub.
3800
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
3801
                               TranscendentalCacheStub::TAGGED);
3802
  ZoneList<Expression*>* args = expr->arguments();
3803
  ASSERT(args->length() == 1);
3804
  VisitForStackValue(args->at(0));
3805
  __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
3806
  __ CallStub(&stub);
3807
  context()->Plug(v0);
3808
}
3809

    
3810

    
3811
void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3812
  // Load the argument on the stack and call the runtime function.
3813
  ZoneList<Expression*>* args = expr->arguments();
3814
  ASSERT(args->length() == 1);
3815
  VisitForStackValue(args->at(0));
3816
  __ CallRuntime(Runtime::kMath_sqrt, 1);
3817
  context()->Plug(v0);
3818
}
3819

    
3820

    
3821
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3822
  ZoneList<Expression*>* args = expr->arguments();
3823
  ASSERT(args->length() >= 2);
3824

    
3825
  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
3826
  for (int i = 0; i < arg_count + 1; i++) {
3827
    VisitForStackValue(args->at(i));
3828
  }
3829
  VisitForAccumulatorValue(args->last());  // Function.
3830

    
3831
  Label runtime, done;
3832
  // Check for non-function argument (including proxy).
3833
  __ JumpIfSmi(v0, &runtime);
3834
  __ GetObjectType(v0, a1, a1);
3835
  __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
3836

    
3837
  // InvokeFunction requires the function in a1. Move it in there.
3838
  __ mov(a1, result_register());
3839
  ParameterCount count(arg_count);
3840
  __ InvokeFunction(a1, count, CALL_FUNCTION,
3841
                    NullCallWrapper(), CALL_AS_METHOD);
3842
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3843
  __ jmp(&done);
3844

    
3845
  __ bind(&runtime);
3846
  __ push(v0);
3847
  __ CallRuntime(Runtime::kCall, args->length());
3848
  __ bind(&done);
3849

    
3850
  context()->Plug(v0);
3851
}
3852

    
3853

    
3854
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3855
  RegExpConstructResultStub stub;
3856
  ZoneList<Expression*>* args = expr->arguments();
3857
  ASSERT(args->length() == 3);
3858
  VisitForStackValue(args->at(0));
3859
  VisitForStackValue(args->at(1));
3860
  VisitForStackValue(args->at(2));
3861
  __ CallStub(&stub);
3862
  context()->Plug(v0);
3863
}
3864

    
3865

    
3866
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3867
  ZoneList<Expression*>* args = expr->arguments();
3868
  ASSERT_EQ(2, args->length());
3869

    
3870
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3871
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3872

    
3873
  Handle<FixedArray> jsfunction_result_caches(
3874
      isolate()->native_context()->jsfunction_result_caches());
3875
  if (jsfunction_result_caches->length() <= cache_id) {
3876
    __ Abort(kAttemptToUseUndefinedCache);
3877
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3878
    context()->Plug(v0);
3879
    return;
3880
  }
3881

    
3882
  VisitForAccumulatorValue(args->at(1));
3883

    
3884
  Register key = v0;
3885
  Register cache = a1;
3886
  __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3887
  __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3888
  __ lw(cache,
3889
         ContextOperand(
3890
             cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3891
  __ lw(cache,
3892
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3893

    
3894

    
3895
  Label done, not_found;
3896
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3897
  __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3898
  // a2 now holds finger offset as a smi.
3899
  __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3900
  // a3 now points to the start of fixed array elements.
3901
  __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
3902
  __ addu(a3, a3, at);
3903
  // a3 now points to key of indexed element of cache.
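  // Entries are laid out as consecutive (key, value) pairs, so on a hit the
  // cached value is the word immediately after the key.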
3904
  __ lw(a2, MemOperand(a3));
3905
  __ Branch(&not_found, ne, key, Operand(a2));
3906

    
3907
  __ lw(v0, MemOperand(a3, kPointerSize));
3908
  __ Branch(&done);
3909

    
3910
  __ bind(&not_found);
3911
  // Call runtime to perform the lookup.
3912
  __ Push(cache, key);
3913
  __ CallRuntime(Runtime::kGetFromCache, 2);
3914

    
3915
  __ bind(&done);
3916
  context()->Plug(v0);
3917
}
3918

    
3919

    
3920
void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3921
  ZoneList<Expression*>* args = expr->arguments();
3922
  ASSERT_EQ(2, args->length());
3923

    
3924
  Register right = v0;
3925
  Register left = a1;
3926
  Register tmp = a2;
3927
  Register tmp2 = a3;
3928

    
3929
  VisitForStackValue(args->at(0));
3930
  VisitForAccumulatorValue(args->at(1));  // Result (right) in v0.
3931
  __ pop(left);
3932

    
3933
  Label done, fail, ok;
3934
  __ Branch(&ok, eq, left, Operand(right));
3935
  // Fail if either is a non-HeapObject.
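  // Heap object pointers carry a set low tag bit while smis carry a clear
  // one, so the AND of the two values looks like a smi iff at least one of
  // them is a smi.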
3936
  __ And(tmp, left, Operand(right));
3937
  __ JumpIfSmi(tmp, &fail);
3938
  __ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3939
  __ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3940
  __ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE));
3941
  __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3942
  __ Branch(&fail, ne, tmp, Operand(tmp2));
3943
  __ lw(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
3944
  __ lw(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
3945
  __ Branch(&ok, eq, tmp, Operand(tmp2));
3946
  __ bind(&fail);
3947
  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3948
  __ jmp(&done);
3949
  __ bind(&ok);
3950
  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3951
  __ bind(&done);
3952

    
3953
  context()->Plug(v0);
3954
}
3955

    
3956

    
3957
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3958
  ZoneList<Expression*>* args = expr->arguments();
3959
  VisitForAccumulatorValue(args->at(0));
3960

    
3961
  Label materialize_true, materialize_false;
3962
  Label* if_true = NULL;
3963
  Label* if_false = NULL;
3964
  Label* fall_through = NULL;
3965
  context()->PrepareTest(&materialize_true, &materialize_false,
3966
                         &if_true, &if_false, &fall_through);
3967

    
3968
  __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3969
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
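  // The bits covered by kContainsCachedArrayIndexMask are all clear when the
  // hash field caches an array index, so a zero result means the index is
  // present.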
3970

    
3971
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3972
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3973

    
3974
  context()->Plug(if_true, if_false);
3975
}
3976

    
3977

    
3978
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3979
  ZoneList<Expression*>* args = expr->arguments();
3980
  ASSERT(args->length() == 1);
3981
  VisitForAccumulatorValue(args->at(0));
3982

    
3983
  __ AssertString(v0);
3984

    
3985
  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3986
  __ IndexFromHash(v0, v0);
3987

    
3988
  context()->Plug(v0);
3989
}
3990

    
3991

    
3992
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3993
  Label bailout, done, one_char_separator, long_separator,
3994
      non_trivial_array, not_size_one_array, loop,
3995
      empty_separator_loop, one_char_separator_loop,
3996
      one_char_separator_loop_entry, long_separator_loop;
3997
  ZoneList<Expression*>* args = expr->arguments();
3998
  ASSERT(args->length() == 2);
3999
  VisitForStackValue(args->at(1));
4000
  VisitForAccumulatorValue(args->at(0));
4001

    
4002
  // All aliases of the same register have disjoint lifetimes.
4003
  Register array = v0;
4004
  Register elements = no_reg;  // Will be v0.
4005
  Register result = no_reg;  // Will be v0.
4006
  Register separator = a1;
4007
  Register array_length = a2;
4008
  Register result_pos = no_reg;  // Will be a2.
4009
  Register string_length = a3;
4010
  Register string = t0;
4011
  Register element = t1;
4012
  Register elements_end = t2;
4013
  Register scratch1 = t3;
4014
  Register scratch2 = t5;
4015
  Register scratch3 = t4;
4016

    
4017
  // Separator operand is on the stack.
4018
  __ pop(separator);
4019

    
4020
  // Check that the array is a JSArray.
4021
  __ JumpIfSmi(array, &bailout);
4022
  __ GetObjectType(array, scratch1, scratch2);
4023
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
4024

    
4025
  // Check that the array has fast elements.
4026
  __ CheckFastElements(scratch1, scratch2, &bailout);
4027

    
4028
  // If the array has length zero, return the empty string.
4029
  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4030
  __ SmiUntag(array_length);
4031
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
4032
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
4033
  __ Branch(&done);
4034

    
4035
  __ bind(&non_trivial_array);
4036

    
4037
  // Get the FixedArray containing array's elements.
4038
  elements = array;
4039
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4040
  array = no_reg;  // End of array's live range.
4041

    
4042
  // Check that all array elements are sequential ASCII strings, and
4043
  // accumulate the sum of their lengths, as a smi-encoded value.
4044
  __ mov(string_length, zero_reg);
4045
  __ Addu(element,
4046
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4047
  __ sll(elements_end, array_length, kPointerSizeLog2);
4048
  __ Addu(elements_end, element, elements_end);
4049
  // Loop condition: while (element < elements_end).
4050
  // Live values in registers:
4051
  //   elements: Fixed array of strings.
4052
  //   array_length: Length of the fixed array of strings (not smi)
4053
  //   separator: Separator string
4054
  //   string_length: Accumulated sum of string lengths (smi).
4055
  //   element: Current array element.
4056
  //   elements_end: Array end.
4057
  if (generate_debug_code_) {
4058
    __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
4059
        array_length, Operand(zero_reg));
4060
  }
4061
  __ bind(&loop);
4062
  __ lw(string, MemOperand(element));
4063
  __ Addu(element, element, kPointerSize);
4064
  __ JumpIfSmi(string, &bailout);
4065
  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4066
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4067
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
4068
  __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4069
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4070
  __ BranchOnOverflow(&bailout, scratch3);
4071
  __ Branch(&loop, lt, element, Operand(elements_end));
4072

    
4073
  // If array_length is 1, return elements[0], a string.
4074
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4075
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4076
  __ Branch(&done);
4077

    
4078
  __ bind(&not_size_one_array);
4079

    
4080
  // Live values in registers:
4081
  //   separator: Separator string
4082
  //   array_length: Length of the array.
4083
  //   string_length: Sum of string lengths (smi).
4084
  //   elements: FixedArray of strings.
4085

    
4086
  // Check that the separator is a flat ASCII string.
4087
  __ JumpIfSmi(separator, &bailout);
4088
  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4089
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4090
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
4091

    
4092
  // Add (separator length times array_length) - separator length to the
4093
  // string_length to get the length of the result string. array_length is not
4094
  // smi but the other values are, so the result is a smi.
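  // That is: result_length = sum(element lengths)
  //                          + separator_length * (array_length - 1).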
4095
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4096
  __ Subu(string_length, string_length, Operand(scratch1));
4097
  __ Mult(array_length, scratch1);
4098
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4099
  // zero.
4100
  __ mfhi(scratch2);
4101
  __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
4102
  __ mflo(scratch2);
4103
  __ And(scratch3, scratch2, Operand(0x80000000));
4104
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
4105
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4106
  __ BranchOnOverflow(&bailout, scratch3);
4107
  __ SmiUntag(string_length);
4108

    
4109
  // Get first element in the array to free up the elements register to be used
4110
  // for the result.
4111
  __ Addu(element,
4112
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4113
  result = elements;  // End of live range for elements.
4114
  elements = no_reg;
4115
  // Live values in registers:
4116
  //   element: First array element
4117
  //   separator: Separator string
4118
  //   string_length: Length of result string (not smi)
4119
  //   array_length: Length of the array.
4120
  __ AllocateAsciiString(result,
4121
                         string_length,
4122
                         scratch1,
4123
                         scratch2,
4124
                         elements_end,
4125
                         &bailout);
4126
  // Prepare for looping. Set elements_end to the end of the array. Set
4127
  // result_pos to the position in the result at which to write the first
4128
  // character.
4129
  __ sll(elements_end, array_length, kPointerSizeLog2);
4130
  __ Addu(elements_end, element, elements_end);
4131
  result_pos = array_length;  // End of live range for array_length.
4132
  array_length = no_reg;
4133
  __ Addu(result_pos,
4134
          result,
4135
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4136

    
4137
  // Check the length of the separator.
4138
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4139
  __ li(at, Operand(Smi::FromInt(1)));
4140
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4141
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator ASCII char (in lower byte).

  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Addu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character). Entry is at the
  // label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  ASSERT(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }
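  // Illustrative examples only: a call written as %_IsSmi(x) takes the
  // inlined path above, while a call without the underscore prefix, e.g.
  // %NumberToString(x), falls through and is dispatched below, either to a
  // JS builtin or to the C++ runtime.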

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ lw(a0, GlobalObjectOperand());
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset));
    __ push(a0);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function.
    __ li(a2, Operand(expr->name()));
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->CallRuntimeFeedbackId());
    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
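
      // Illustrative mapping of JS forms to the paths below:
      //   delete obj.foo / delete obj[key]  -> property path (DELETE builtin)
      //   delete someGlobal                 -> unallocated variable, DELETE
      //                                        builtin in non-strict mode
      //   delete localVar                   -> evaluates directly to false
      //   delete f()                        -> evaluated for effect, true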

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
        __ push(a1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ lw(a2, GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
          __ Push(a2, a1, a0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ li(a2, Operand(var->name()));
          __ push(a2);
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(v0);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ lw(a1, MemOperand(sp, 0));
      __ push(v0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ JumpIfSmi(v0, &no_conversion);
  }
  __ mov(a0, v0);
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }
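
  // For a postfix expression in a value context, the old value now sits in
  // the slot reserved earlier.  E.g. for obj[key]++ the stack is, from the
  // top: key, receiver, old value; for obj.foo++ it is: receiver, old value.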
  __ mov(a0, result_register());

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    __ li(a1, Operand(Smi::FromInt(count_value)));
    __ AdduAndCheckForOverflow(v0, a0, a1, t0);
    __ BranchOnOverflow(&stub_call, t0);  // Do stub on overflow.
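
    // E.g. for x++ with x == 5, a0 holds Smi(5) and a1 Smi(1); because smis
    // are tagged with a zero low bit, the plain addition above yields Smi(6)
    // directly.  Smi overflow (e.g. Smi::kMaxValue + 1) branches to the
    // BinaryOpStub call below instead.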

    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(v0, &done);
    __ bind(&stub_call);
  }
  __ mov(a1, a0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()),
         RelocInfo::CODE_TARGET,
         expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(a0, result_register());  // Value.
      __ li(a2, Operand(prop->key()->AsLiteral()->value()));  // Name.
      __ pop(a1);  // Receiver.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(a0, result_register());  // Value.
      __ pop(a1);  // Key.
      __ pop(a2);  // Receiver.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
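
  // Three cases below: an unallocated (global) variable, e.g.
  // typeof someUndeclaredGlobal, must not throw and so uses a regular load;
  // a lookup slot (potentially shadowed by eval) goes through the
  // no-reference-error runtime call; everything else is simply visited in a
  // duplicate context.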
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ lw(a0, GlobalObjectOperand());
    __ li(a2, Operand(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(v0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ li(a0, Operand(proxy->name()));
    __ Push(cp, a0);
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(v0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}

void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
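
  // Each branch below handles one literal string, e.g.
  //   typeof x == "number"   -> smi or heap-number map check
  //   typeof x == "function" -> JS_FUNCTION_TYPE or JS_FUNCTION_PROXY_TYPE
  // An unknown literal (e.g. typeof x == "bogus") simply jumps to if_false.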

  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => false.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(v0, if_false);
    if (!FLAG_harmony_typeof) {
      __ LoadRoot(at, Heap::kNullValueRootIndex);
      __ Branch(if_true, eq, v0, Operand(at));
    }
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
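
  // 'key in obj' and 'x instanceof F' are dispatched to a builtin and a stub
  // respectively; every other comparison operator falls into the default
  // case, which tries an inlined smi comparison before calling the
  // CompareIC.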
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }
      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ mov(a0, result_register());
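
  // For a strict comparison, e.g. x === null or x === undefined, we can
  // compare directly against the corresponding root value.  A loose
  // comparison (x == null) is handled by the generic CompareNilIC below.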
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(a1));
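
  // The code below saves, in push order: the result register, the cooked
  // (smi-encoded, Code*-relative) return address, the pending message
  // object, the has-pending-message flag (as a smi), and the pending
  // message script.  ExitFinallyBlock pops them in the reverse order.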
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(0 == kSmiTag);
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.

  // Store cooked return address while executing finally block.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ lw(a1, MemOperand(at));
  __ SmiTag(a1);
  __ push(a1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ lw(a1, MemOperand(at));
  __ push(a1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  __ SmiUntag(a1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ sw(a1, MemOperand(at));

  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));

  // Restore cooked return address from stack.
  __ pop(a1);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address and return.
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ sra(a1, a1, 1);  // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}


#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  CodePatcher patcher(branch_address, 1);
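
  // At a back edge the patched code sequence ends at pc and consists of six
  // instructions: the slt/addiu at branch_address (pc - 6 instructions), a
  // beq, the lui/ori pair holding the call target (starting at pc - 4
  // instructions), a jalr and a delay-slot nop.  Patching toggles the first
  // instruction and rewrites the lui/ori target.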

  switch (target_state) {
    case INTERRUPT:
      // slt at, a3, zero_reg (in case of count based interrupts)
      // beq at, zero_reg, ok
      // lui t9, <interrupt stub address> upper
      // ori t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // addiu at, zero_reg, 1
      // beq at, zero_reg, ok  ;; Not changed
      // lui t9, <on-stack replacement address> upper
      // ori t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
  Address pc_immediate_load_address = pc - 4 * kInstrSize;
  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  Address pc_immediate_load_address = pc - 4 * kInstrSize;

  ASSERT(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    ASSERT(reinterpret_cast<uint32_t>(
        Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  ASSERT(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  if (reinterpret_cast<uint32_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
          reinterpret_cast<uint32_t>(
              isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT(reinterpret_cast<uint32_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS