deps/v8/src/arm/full-codegen-arm.cc @ f230a1cf

// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "arm/code-stubs-arm.h"
#include "arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
// immediate value is used) is the delta from the pc to the first instruction of
// the patchable code.
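//
// As a rough worked example of the encoding above (illustrative numbers only):
// with kOff12Mask equal to 0xfff, a delta of 4100 instructions is recorded by
// EmitPatchInfo() as "cmp r1, #5", since 1 * 0xfff + 5 == 4100; the register
// code and the raw immediate together encode the delta.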
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
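
// Typical usage in this file (see VisitSwitchStatement below): EmitJumpIfNotSmi()
// guards the inlined smi comparison, the generic CompareIC is called for the
// slow case, and EmitPatchInfo() is emitted right after the IC call so that the
// inlined sequence can later be located and patched.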


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). r5 is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    __ cmp(r5, Operand::Zero());
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ str(r2, MemOperand(sp, receiver_offset), ne);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < locals_count; i++) {
        __ push(ip);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    __ push(r1);
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both r0 and cp.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  int weight = 1;
  if (FLAG_weighted_back_edges) {
    ASSERT(back_edge_target->is_bound());
    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
    weight = Min(kMaxBackEdgeWeight,
                 Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
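
// For the weighted back edges above, the decrement applied to the profiling
// counter is proportional to the size of the loop body: for example (numbers
// illustrative only), a back edge spanning 3 * kCodeSizeMultiplier bytes of
// code decrements the counter by 3 (capped at kMaxBackEdgeWeight), so a large
// loop body exhausts the interrupt budget in fewer iterations than a tight one.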


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kCodeSizeMultiplier));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ b(pl, &ok);
      __ push(r0);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
        __ push(r2);
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        __ Call(isolate()->builtins()->InterruptCheck(),
                RelocInfo::CODE_TARGET);
      }
      __ pop(r0);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
      masm_->add(sp, sp, Operand(sp_delta));
      masm_->Jump(lr);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ push(ip);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ push(ip);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
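
// Note that Split() honours the fall-through label: for example, calling
// Split(eq, if_true, if_false, fall_through) with if_false == fall_through
// emits only "b eq, if_true" and lets execution fall through to the false
// case, so callers avoid an unconditional branch to adjacent code.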


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
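
// As a rough illustration of the offsets above (assuming parameter variables
// are indexed 0..n-1 from left to right): with two parameters, parameter 0 is
// read from fp + 3 * kPointerSize and parameter 1 from fp + 2 * kPointerSize,
// just above the saved fp and return address, while stack locals start at
// JavaScriptFrameConstants::kLocal0Offset below the frame pointer.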


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
  __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));

  // Assign it.
  __ str(r1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            r1,
                            r3,
                            kLRHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Cell> cell = isolate()->factory()->NewCell(
      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                     isolate()));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ Move(r1, cell);
  __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
  __ str(r2, FieldMemOperand(r1, Cell::kValueOffset));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.
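
  // At this point, in both the enum-cache and the fixed-array case, the loop
  // state occupies five stack slots (matching the __ Drop(5) at the exit
  // below): from the top of the stack, the current index (smi), the array
  // length (smi), the FixedArray of keys, the expected map (or a smi marker in
  // the slow/proxy case), and the enumerable object itself.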

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(r3, Operand(r0), SetCC);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
  __ b(eq, loop_statement.break_label());
  __ CompareRoot(r0, Heap::kNullValueRootIndex);
  __ b(eq, loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode(), info->is_generator());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  __ ldr(r0, GlobalObjectOperand());
  __ mov(r2, Operand(var->name()));
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET ||
        local->mode() == CONST ||
        local->mode() == CONST_HARMONY) {
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST) {
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST_HARMONY
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in r2 and the global
      // object (receiver) in r0.
      __ ldr(r0, GlobalObjectOperand());
      __ mov(r2, Operand(var->name()));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(r0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
1510
        //
1511
        bool skip_init_check;
1512
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1513
          skip_init_check = false;
1514
        } else {
1515
          // Check that we always have valid source position.
1516
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1517
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
1518
          skip_init_check = var->mode() != CONST &&
1519
              var->initializer_position() < proxy->position();
1520
        }
1521

    
1522
        if (!skip_init_check) {
1523
          // Let and const need a read barrier.
1524
          GetVar(r0, var);
1525
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1526
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1527
            // Throw a reference error when using an uninitialized let/const
1528
            // binding in harmony mode.
1529
            Label done;
1530
            __ b(ne, &done);
1531
            __ mov(r0, Operand(var->name()));
1532
            __ push(r0);
1533
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
1534
            __ bind(&done);
1535
          } else {
1536
            // Uninitalized const bindings outside of harmony mode are unholed.
1537
            ASSERT(var->mode() == CONST);
1538
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1539
          }
1540
          context()->Plug(r0);
1541
          break;
1542
        }
1543
      }
1544
      context()->Plug(var);
1545
      break;
1546
    }
1547

    
1548
    case Variable::LOOKUP: {
1549
      Label done, slow;
1550
      // Generate code for loading from variables potentially shadowed
1551
      // by eval-introduced variables.
1552
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1553
      __ bind(&slow);
1554
      Comment cmnt(masm_, "Lookup variable");
1555
      __ mov(r1, Operand(var->name()));
1556
      __ Push(cp, r1);  // Context and name.
1557
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
1558
      __ bind(&done);
1559
      context()->Plug(r0);
1560
    }
1561
  }
1562
}
1563

    
1564

    
1565
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1566
  Comment cmnt(masm_, "[ RegExpLiteral");
1567
  Label materialized;
1568
  // Registers will be used as follows:
1569
  // r5 = materialized value (RegExp literal)
1570
  // r4 = JS function, literals array
1571
  // r3 = literal index
1572
  // r2 = RegExp pattern
1573
  // r1 = RegExp flags
1574
  // r0 = RegExp literal clone
1575
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1576
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1577
  int literal_offset =
1578
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1579
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
1580
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1581
  __ cmp(r5, ip);
1582
  __ b(ne, &materialized);
1583

    
1584
  // Create regexp literal using runtime function.
1585
  // Result will be in r0.
1586
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1587
  __ mov(r2, Operand(expr->pattern()));
1588
  __ mov(r1, Operand(expr->flags()));
1589
  __ Push(r4, r3, r2, r1);
1590
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1591
  __ mov(r5, r0);
1592

    
1593
  __ bind(&materialized);
1594
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1595
  Label allocated, runtime_allocate;
1596
  __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1597
  __ jmp(&allocated);
1598

    
1599
  __ bind(&runtime_allocate);
1600
  __ push(r5);
1601
  __ mov(r0, Operand(Smi::FromInt(size)));
1602
  __ push(r0);
1603
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1604
  __ pop(r5);
1605

    
1606
  __ bind(&allocated);
1607
  // After this, registers are used as follows:
1608
  // r0: Newly allocated regexp.
1609
  // r5: Materialized regexp.
1610
  // r2: temp.
1611
  __ CopyFields(r0, r5, d0, size / kPointerSize);
1612
  context()->Plug(r0);
1613
}
1614

    
1615

    
1616
void FullCodeGenerator::EmitAccessor(Expression* expression) {
1617
  if (expression == NULL) {
1618
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
1619
    __ push(r1);
1620
  } else {
1621
    VisitForStackValue(expression);
1622
  }
1623
}
1624

    
1625

    
1626
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1627
  Comment cmnt(masm_, "[ ObjectLiteral");
1628
  Handle<FixedArray> constant_properties = expr->constant_properties();
1629
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1630
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1631
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1632
  __ mov(r1, Operand(constant_properties));
1633
  int flags = expr->fast_elements()
1634
      ? ObjectLiteral::kFastElements
1635
      : ObjectLiteral::kNoFlags;
1636
  flags |= expr->has_function()
1637
      ? ObjectLiteral::kHasFunction
1638
      : ObjectLiteral::kNoFlags;
1639
  __ mov(r0, Operand(Smi::FromInt(flags)));
1640
  int properties_count = constant_properties->length() / 2;
1641
  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
1642
      expr->depth() > 1 || Serializer::enabled() ||
1643
      flags != ObjectLiteral::kFastElements ||
1644
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1645
    __ Push(r3, r2, r1, r0);
1646
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1647
  } else {
1648
    FastCloneShallowObjectStub stub(properties_count);
1649
    __ CallStub(&stub);
1650
  }
1651

    
1652
  // If result_saved is true the result is on top of the stack.  If
1653
  // result_saved is false the result is in r0.
1654
  bool result_saved = false;
1655

    
1656
  // Mark all computed expressions that are bound to a key that
1657
  // is shadowed by a later occurrence of the same key. For the
1658
  // marked expressions, no store code is emitted.
1659
  expr->CalculateEmitStore(zone());
1660

    
1661
  AccessorTable accessor_table(zone());
1662
  for (int i = 0; i < expr->properties()->length(); i++) {
1663
    ObjectLiteral::Property* property = expr->properties()->at(i);
1664
    if (property->IsCompileTimeValue()) continue;
1665

    
1666
    Literal* key = property->key();
1667
    Expression* value = property->value();
1668
    if (!result_saved) {
1669
      __ push(r0);  // Save result on stack
1670
      result_saved = true;
1671
    }
1672
    switch (property->kind()) {
1673
      case ObjectLiteral::Property::CONSTANT:
1674
        UNREACHABLE();
1675
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1676
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1677
        // Fall through.
1678
      case ObjectLiteral::Property::COMPUTED:
1679
        if (key->value()->IsInternalizedString()) {
1680
          if (property->emit_store()) {
1681
            VisitForAccumulatorValue(value);
1682
            __ mov(r2, Operand(key->value()));
1683
            __ ldr(r1, MemOperand(sp));
1684
            Handle<Code> ic = is_classic_mode()
1685
                ? isolate()->builtins()->StoreIC_Initialize()
1686
                : isolate()->builtins()->StoreIC_Initialize_Strict();
1687
            CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
1688
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
1689
          } else {
1690
            VisitForEffect(value);
1691
          }
1692
          break;
1693
        }
1694
        // Duplicate receiver on stack.
1695
        __ ldr(r0, MemOperand(sp));
1696
        __ push(r0);
1697
        VisitForStackValue(key);
1698
        VisitForStackValue(value);
1699
        if (property->emit_store()) {
1700
          __ mov(r0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
1701
          __ push(r0);
1702
          __ CallRuntime(Runtime::kSetProperty, 4);
1703
        } else {
1704
          __ Drop(3);
1705
        }
1706
        break;
1707
      case ObjectLiteral::Property::PROTOTYPE:
1708
        // Duplicate receiver on stack.
1709
        __ ldr(r0, MemOperand(sp));
1710
        __ push(r0);
1711
        VisitForStackValue(value);
1712
        if (property->emit_store()) {
1713
          __ CallRuntime(Runtime::kSetPrototype, 2);
1714
        } else {
1715
          __ Drop(2);
1716
        }
1717
        break;
1718

    
1719
      case ObjectLiteral::Property::GETTER:
1720
        accessor_table.lookup(key)->second->getter = value;
1721
        break;
1722
      case ObjectLiteral::Property::SETTER:
1723
        accessor_table.lookup(key)->second->setter = value;
1724
        break;
1725
    }
1726
  }
1727

    
1728
  // Emit code to define accessors, using only a single call to the runtime for
1729
  // each pair of corresponding getters and setters.
1730
  for (AccessorTable::Iterator it = accessor_table.begin();
1731
       it != accessor_table.end();
1732
       ++it) {
1733
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
1734
    __ push(r0);
1735
    VisitForStackValue(it->first);
1736
    EmitAccessor(it->second->getter);
1737
    EmitAccessor(it->second->setter);
1738
    __ mov(r0, Operand(Smi::FromInt(NONE)));
1739
    __ push(r0);
1740
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1741
  }
1742

    
1743
  if (expr->has_function()) {
1744
    ASSERT(result_saved);
1745
    __ ldr(r0, MemOperand(sp));
1746
    __ push(r0);
1747
    __ CallRuntime(Runtime::kToFastProperties, 1);
1748
  }
1749

    
1750
  if (result_saved) {
1751
    context()->PlugTOS();
1752
  } else {
1753
    context()->Plug(r0);
1754
  }
1755
}
1756

    
1757

    
1758
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1759
  Comment cmnt(masm_, "[ ArrayLiteral");
1760

    
1761
  ZoneList<Expression*>* subexprs = expr->values();
1762
  int length = subexprs->length();
1763
  Handle<FixedArray> constant_elements = expr->constant_elements();
1764
  ASSERT_EQ(2, constant_elements->length());
1765
  ElementsKind constant_elements_kind =
1766
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1767
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1768
  Handle<FixedArrayBase> constant_elements_values(
1769
      FixedArrayBase::cast(constant_elements->get(1)));
1770

    
1771
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1772
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1773
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1774
  __ mov(r1, Operand(constant_elements));
1775
  if (has_fast_elements && constant_elements_values->map() ==
1776
      isolate()->heap()->fixed_cow_array_map()) {
1777
    FastCloneShallowArrayStub stub(
1778
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1779
        DONT_TRACK_ALLOCATION_SITE,
1780
        length);
1781
    __ CallStub(&stub);
1782
    __ IncrementCounter(
1783
        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
1784
  } else if (expr->depth() > 1) {
1785
    __ Push(r3, r2, r1);
1786
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1787
  } else if (Serializer::enabled() ||
1788
      length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1789
    __ Push(r3, r2, r1);
1790
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1791
  } else {
1792
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1793
           FLAG_smi_only_arrays);
1794
    FastCloneShallowArrayStub::Mode mode =
1795
        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1796
    AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
1797
        ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
1798

    
1799
    if (has_fast_elements) {
1800
      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
1801
      allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1802
    }
1803

    
1804
    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1805
    __ CallStub(&stub);
1806
  }
1807

    
1808
  bool result_saved = false;  // Is the result saved to the stack?
1809

    
1810
  // Emit code to evaluate all the non-constant subexpressions and to store
1811
  // them into the newly cloned array.
1812
  for (int i = 0; i < length; i++) {
1813
    Expression* subexpr = subexprs->at(i);
1814
    // If the subexpression is a literal or a simple materialized literal it
1815
    // is already set in the cloned array.
1816
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1817

    
1818
    if (!result_saved) {
1819
      __ push(r0);
1820
      __ Push(Smi::FromInt(expr->literal_index()));
1821
      result_saved = true;
1822
    }
1823
    VisitForAccumulatorValue(subexpr);
1824

    
1825
    if (IsFastObjectElementsKind(constant_elements_kind)) {
1826
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1827
      __ ldr(r6, MemOperand(sp, kPointerSize));  // Copy of array literal.
1828
      __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1829
      __ str(result_register(), FieldMemOperand(r1, offset));
1830
      // Update the write barrier for the array store.
1831
      __ RecordWriteField(r1, offset, result_register(), r2,
1832
                          kLRHasBeenSaved, kDontSaveFPRegs,
1833
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1834
    } else {
1835
      __ mov(r3, Operand(Smi::FromInt(i)));
1836
      StoreArrayLiteralElementStub stub;
1837
      __ CallStub(&stub);
1838
    }
1839

    
1840
    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1841
  }
1842

    
1843
  if (result_saved) {
1844
    __ pop();  // literal index
1845
    context()->PlugTOS();
1846
  } else {
1847
    context()->Plug(r0);
1848
  }
1849
}
1850

    
1851

    
1852
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1853
  Comment cmnt(masm_, "[ Assignment");
1854
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1855
  // on the left-hand side.
1856
  if (!expr->target()->IsValidLeftHandSide()) {
1857
    VisitForEffect(expr->target());
1858
    return;
1859
  }
1860

    
1861
  // Left-hand side can only be a property, a global or a (parameter or local)
1862
  // slot.
1863
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1864
  LhsKind assign_type = VARIABLE;
1865
  Property* property = expr->target()->AsProperty();
1866
  if (property != NULL) {
1867
    assign_type = (property->key()->IsPropertyName())
1868
        ? NAMED_PROPERTY
1869
        : KEYED_PROPERTY;
1870
  }
1871

    
1872
  // Evaluate LHS expression.
1873
  switch (assign_type) {
1874
    case VARIABLE:
1875
      // Nothing to do here.
1876
      break;
1877
    case NAMED_PROPERTY:
1878
      if (expr->is_compound()) {
1879
        // We need the receiver both on the stack and in the accumulator.
1880
        VisitForAccumulatorValue(property->obj());
1881
        __ push(result_register());
1882
      } else {
1883
        VisitForStackValue(property->obj());
1884
      }
1885
      break;
1886
    case KEYED_PROPERTY:
1887
      if (expr->is_compound()) {
1888
        VisitForStackValue(property->obj());
1889
        VisitForAccumulatorValue(property->key());
1890
        __ ldr(r1, MemOperand(sp, 0));
1891
        __ push(r0);
1892
      } else {
1893
        VisitForStackValue(property->obj());
1894
        VisitForStackValue(property->key());
1895
      }
1896
      break;
1897
  }
1898

    
1899
  // For compound assignments we need another deoptimization point after the
1900
  // variable/property load.
1901
  if (expr->is_compound()) {
1902
    { AccumulatorValueContext context(this);
1903
      switch (assign_type) {
1904
        case VARIABLE:
1905
          EmitVariableLoad(expr->target()->AsVariableProxy());
1906
          PrepareForBailout(expr->target(), TOS_REG);
1907
          break;
1908
        case NAMED_PROPERTY:
1909
          EmitNamedPropertyLoad(property);
1910
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1911
          break;
1912
        case KEYED_PROPERTY:
1913
          EmitKeyedPropertyLoad(property);
1914
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
1915
          break;
1916
      }
1917
    }
1918

    
1919
    Token::Value op = expr->binary_op();
1920
    __ push(r0);  // Left operand goes on the stack.
1921
    VisitForAccumulatorValue(expr->value());
1922

    
1923
    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1924
        ? OVERWRITE_RIGHT
1925
        : NO_OVERWRITE;
1926
    SetSourcePosition(expr->position() + 1);
1927
    AccumulatorValueContext context(this);
1928
    if (ShouldInlineSmiCase(op)) {
1929
      EmitInlineSmiBinaryOp(expr->binary_operation(),
1930
                            op,
1931
                            mode,
1932
                            expr->target(),
1933
                            expr->value());
1934
    } else {
1935
      EmitBinaryOp(expr->binary_operation(), op, mode);
1936
    }
1937

    
1938
    // Deoptimization point in case the binary operation may have side effects.
1939
    PrepareForBailout(expr->binary_operation(), TOS_REG);
1940
  } else {
1941
    VisitForAccumulatorValue(expr->value());
1942
  }
1943

    
1944
  // Record source position before possible IC call.
1945
  SetSourcePosition(expr->position());
1946

    
1947
  // Store the value.
1948
  switch (assign_type) {
1949
    case VARIABLE:
1950
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1951
                             expr->op());
1952
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1953
      context()->Plug(r0);
1954
      break;
1955
    case NAMED_PROPERTY:
1956
      EmitNamedPropertyAssignment(expr);
1957
      break;
1958
    case KEYED_PROPERTY:
1959
      EmitKeyedPropertyAssignment(expr);
1960
      break;
1961
  }
1962
}
1963

    
1964

    
1965
void FullCodeGenerator::VisitYield(Yield* expr) {
1966
  Comment cmnt(masm_, "[ Yield");
1967
  // Evaluate yielded value first; the initial iterator definition depends on
1968
  // this.  It stays on the stack while we update the iterator.
1969
  VisitForStackValue(expr->expression());
1970

    
1971
  switch (expr->yield_kind()) {
1972
    case Yield::SUSPEND:
1973
      // Pop value from top-of-stack slot; box result into result register.
1974
      EmitCreateIteratorResult(false);
1975
      __ push(result_register());
1976
      // Fall through.
1977
    case Yield::INITIAL: {
1978
      Label suspend, continuation, post_runtime, resume;
1979

    
1980
      __ jmp(&suspend);
1981

    
1982
      __ bind(&continuation);
1983
      __ jmp(&resume);
1984

    
1985
      __ bind(&suspend);
1986
      VisitForAccumulatorValue(expr->generator_object());
1987
      ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1988
      __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
1989
      __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
1990
      __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
1991
      __ mov(r1, cp);
1992
      __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
1993
                          kLRHasBeenSaved, kDontSaveFPRegs);
1994
      __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1995
      __ cmp(sp, r1);
1996
      __ b(eq, &post_runtime);
1997
      __ push(r0);  // generator object
1998
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1999
      __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2000
      __ bind(&post_runtime);
2001
      __ pop(result_register());
2002
      EmitReturnSequence();
2003

    
2004
      __ bind(&resume);
2005
      context()->Plug(result_register());
2006
      break;
2007
    }
2008

    
2009
    case Yield::FINAL: {
2010
      VisitForAccumulatorValue(expr->generator_object());
2011
      __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2012
      __ str(r1, FieldMemOperand(result_register(),
2013
                                 JSGeneratorObject::kContinuationOffset));
2014
      // Pop value from top-of-stack slot, box result into result register.
2015
      EmitCreateIteratorResult(true);
2016
      EmitUnwindBeforeReturn();
2017
      EmitReturnSequence();
2018
      break;
2019
    }
2020

    
2021
    case Yield::DELEGATING: {
2022
      VisitForStackValue(expr->generator_object());
2023

    
2024
      // Initial stack layout is as follows:
2025
      // [sp + 1 * kPointerSize] iter
2026
      // [sp + 0 * kPointerSize] g
2027

    
2028
      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2029
      Label l_next, l_call, l_loop;
2030
      // Initial send value is undefined.
2031
      __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2032
      __ b(&l_next);
2033

    
2034
      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2035
      __ bind(&l_catch);
2036
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2037
      __ LoadRoot(r2, Heap::kthrow_stringRootIndex);     // "throw"
2038
      __ ldr(r3, MemOperand(sp, 1 * kPointerSize));      // iter
2039
      __ push(r3);                                       // iter
2040
      __ push(r0);                                       // exception
2041
      __ jmp(&l_call);
2042

    
2043
      // try { received = %yield result }
2044
      // Shuffle the received result above a try handler and yield it without
2045
      // re-boxing.
2046
      __ bind(&l_try);
2047
      __ pop(r0);                                        // result
2048
      __ PushTryHandler(StackHandler::CATCH, expr->index());
2049
      const int handler_size = StackHandlerConstants::kSize;
2050
      __ push(r0);                                       // result
2051
      __ jmp(&l_suspend);
2052
      __ bind(&l_continuation);
2053
      __ jmp(&l_resume);
2054
      __ bind(&l_suspend);
2055
      const int generator_object_depth = kPointerSize + handler_size;
2056
      __ ldr(r0, MemOperand(sp, generator_object_depth));
2057
      __ push(r0);                                       // g
2058
      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2059
      __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2060
      __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2061
      __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2062
      __ mov(r1, cp);
2063
      __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2064
                          kLRHasBeenSaved, kDontSaveFPRegs);
2065
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2066
      __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2067
      __ pop(r0);                                        // result
2068
      EmitReturnSequence();
2069
      __ bind(&l_resume);                                // received in r0
2070
      __ PopTryHandler();
2071

    
2072
      // receiver = iter; f = 'next'; arg = received;
2073
      __ bind(&l_next);
2074
      __ LoadRoot(r2, Heap::knext_stringRootIndex);      // "next"
2075
      __ ldr(r3, MemOperand(sp, 1 * kPointerSize));      // iter
2076
      __ push(r3);                                       // iter
2077
      __ push(r0);                                       // received
2078

    
2079
      // result = receiver[f](arg);
2080
      __ bind(&l_call);
2081
      Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1);
2082
      CallIC(ic);
2083
      __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2084

    
2085
      // if (!result.done) goto l_try;
2086
      __ bind(&l_loop);
2087
      __ push(r0);                                       // save result
2088
      __ LoadRoot(r2, Heap::kdone_stringRootIndex);      // "done"
2089
      Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize();
2090
      CallIC(done_ic);                                   // result.done in r0
2091
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2092
      CallIC(bool_ic);
2093
      __ cmp(r0, Operand(0));
2094
      __ b(eq, &l_try);
2095

    
2096
      // result.value
2097
      __ pop(r0);                                        // result
2098
      __ LoadRoot(r2, Heap::kvalue_stringRootIndex);     // "value"
2099
      Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize();
2100
      CallIC(value_ic);                                  // result.value in r0
2101
      context()->DropAndPlug(2, r0);                     // drop iter and g
2102
      break;
2103
    }
2104
  }
2105
}
2106

    
2107

    
2108
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2109
    Expression *value,
2110
    JSGeneratorObject::ResumeMode resume_mode) {
2111
  // The value stays in r0, and is ultimately read by the resumed generator, as
2112
  // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it.  r1
2113
  // will hold the generator object until the activation has been resumed.
2114
  VisitForStackValue(generator);
2115
  VisitForAccumulatorValue(value);
2116
  __ pop(r1);
2117

    
2118
  // Check generator state.
2119
  Label wrong_state, done;
2120
  __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2121
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
2122
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
2123
  __ cmp(r3, Operand(Smi::FromInt(0)));
2124
  __ b(le, &wrong_state);
2125

    
2126
  // Load suspended function and context.
2127
  __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2128
  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2129

    
2130
  // Load receiver and store as the first argument.
2131
  __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2132
  __ push(r2);
2133

    
2134
  // Push holes for the rest of the arguments to the generator function.
2135
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2136
  __ ldr(r3,
2137
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2138
  __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2139
  Label push_argument_holes, push_frame;
2140
  __ bind(&push_argument_holes);
2141
  __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2142
  __ b(mi, &push_frame);
2143
  __ push(r2);
2144
  __ jmp(&push_argument_holes);
2145

    
2146
  // Enter a new JavaScript frame, and initialize its slots as they were when
2147
  // the generator was suspended.
2148
  Label resume_frame;
2149
  __ bind(&push_frame);
2150
  __ bl(&resume_frame);
2151
  __ jmp(&done);
2152
  __ bind(&resume_frame);
2153
  __ push(lr);  // Return address.
2154
  __ push(fp);  // Caller's frame pointer.
2155
  __ mov(fp, sp);
2156
  __ push(cp);  // Callee's context.
2157
  __ push(r4);  // Callee's JS Function.
2158

    
2159
  // Load the operand stack size.
2160
  __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2161
  __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2162
  __ SmiUntag(r3);
2163

    
2164
  // If we are sending a value and there is no operand stack, we can jump back
2165
  // in directly.
2166
  if (resume_mode == JSGeneratorObject::NEXT) {
2167
    Label slow_resume;
2168
    __ cmp(r3, Operand(0));
2169
    __ b(ne, &slow_resume);
2170
    __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2171
    __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2172
    __ SmiUntag(r2);
2173
    __ add(r3, r3, r2);
2174
    __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2175
    __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2176
    __ Jump(r3);
2177
    __ bind(&slow_resume);
2178
  }
2179

    
2180
  // Otherwise, we push holes for the operand stack and call the runtime to fix
2181
  // up the stack and the handlers.
2182
  Label push_operand_holes, call_resume;
2183
  __ bind(&push_operand_holes);
2184
  __ sub(r3, r3, Operand(1), SetCC);
2185
  __ b(mi, &call_resume);
2186
  __ push(r2);
2187
  __ b(&push_operand_holes);
2188
  __ bind(&call_resume);
2189
  __ push(r1);
2190
  __ push(result_register());
2191
  __ Push(Smi::FromInt(resume_mode));
2192
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2193
  // Not reached: the runtime call returns elsewhere.
2194
  __ stop("not-reached");
2195

    
2196
  // Throw error if we attempt to operate on a running generator.
2197
  __ bind(&wrong_state);
2198
  __ push(r1);
2199
  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2200

    
2201
  __ bind(&done);
2202
  context()->Plug(result_register());
2203
}
2204

    
2205

    
2206
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2207
  Label gc_required;
2208
  Label allocated;
2209

    
2210
  Handle<Map> map(isolate()->native_context()->generator_result_map());
2211

    
2212
  __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
2213
  __ jmp(&allocated);
2214

    
2215
  __ bind(&gc_required);
2216
  __ Push(Smi::FromInt(map->instance_size()));
2217
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2218
  __ ldr(context_register(),
2219
         MemOperand(fp, StandardFrameConstants::kContextOffset));
2220

    
2221
  __ bind(&allocated);
2222
  __ mov(r1, Operand(map));
2223
  __ pop(r2);
2224
  __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2225
  __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2226
  ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2227
  __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2228
  __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2229
  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2230
  __ str(r2,
2231
         FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2232
  __ str(r3,
2233
         FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2234

    
2235
  // Only the value field needs a write barrier, as the other values are in the
2236
  // root set.
2237
  __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2238
                      r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2239
}
2240

    
2241

    
2242
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2243
  SetSourcePosition(prop->position());
2244
  Literal* key = prop->key()->AsLiteral();
2245
  __ mov(r2, Operand(key->value()));
2246
  // Call load IC. It has arguments receiver and property name r0 and r2.
2247
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2248
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
2249
}
2250

    
2251

    
2252
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2253
  SetSourcePosition(prop->position());
2254
  // Call keyed load IC. It has arguments key and receiver in r0 and r1.
2255
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2256
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
2257
}
2258

    
2259

    
2260
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2261
                                              Token::Value op,
2262
                                              OverwriteMode mode,
2263
                                              Expression* left_expr,
2264
                                              Expression* right_expr) {
2265
  Label done, smi_case, stub_call;
2266

    
2267
  Register scratch1 = r2;
2268
  Register scratch2 = r3;
2269

    
2270
  // Get the arguments.
2271
  Register left = r1;
2272
  Register right = r0;
2273
  __ pop(left);
2274

    
2275
  // Perform combined smi check on both operands.
2276
  __ orr(scratch1, left, Operand(right));
2277
  STATIC_ASSERT(kSmiTag == 0);
2278
  JumpPatchSite patch_site(masm_);
2279
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2280

    
2281
  __ bind(&stub_call);
2282
  BinaryOpStub stub(op, mode);
2283
  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
2284
         expr->BinaryOperationFeedbackId());
2285
  patch_site.EmitPatchInfo();
2286
  __ jmp(&done);
2287

    
2288
  __ bind(&smi_case);
2289
  // Smi case. This code works the same way as the smi-smi case in the type
2290
  // recording binary operation stub, see
2291
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
2292
  switch (op) {
2293
    case Token::SAR:
2294
      __ GetLeastBitsFromSmi(scratch1, right, 5);
2295
      __ mov(right, Operand(left, ASR, scratch1));
2296
      __ bic(right, right, Operand(kSmiTagMask));
2297
      break;
2298
    case Token::SHL: {
2299
      __ SmiUntag(scratch1, left);
2300
      __ GetLeastBitsFromSmi(scratch2, right, 5);
2301
      __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2302
      __ TrySmiTag(right, scratch1, &stub_call);
2303
      break;
2304
    }
2305
    case Token::SHR: {
2306
      __ SmiUntag(scratch1, left);
2307
      __ GetLeastBitsFromSmi(scratch2, right, 5);
2308
      __ mov(scratch1, Operand(scratch1, LSR, scratch2));
2309
      __ tst(scratch1, Operand(0xc0000000));
2310
      __ b(ne, &stub_call);
2311
      __ SmiTag(right, scratch1);
2312
      break;
2313
    }
2314
    case Token::ADD:
2315
      __ add(scratch1, left, Operand(right), SetCC);
2316
      __ b(vs, &stub_call);
2317
      __ mov(right, scratch1);
2318
      break;
2319
    case Token::SUB:
2320
      __ sub(scratch1, left, Operand(right), SetCC);
2321
      __ b(vs, &stub_call);
2322
      __ mov(right, scratch1);
2323
      break;
2324
    case Token::MUL: {
2325
      __ SmiUntag(ip, right);
2326
      __ smull(scratch1, scratch2, left, ip);
2327
      __ mov(ip, Operand(scratch1, ASR, 31));
2328
      __ cmp(ip, Operand(scratch2));
2329
      __ b(ne, &stub_call);
2330
      __ cmp(scratch1, Operand::Zero());
2331
      __ mov(right, Operand(scratch1), LeaveCC, ne);
2332
      __ b(ne, &done);
2333
      __ add(scratch2, right, Operand(left), SetCC);
2334
      __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
2335
      __ b(mi, &stub_call);
2336
      break;
2337
    }
2338
    case Token::BIT_OR:
2339
      __ orr(right, left, Operand(right));
2340
      break;
2341
    case Token::BIT_AND:
2342
      __ and_(right, left, Operand(right));
2343
      break;
2344
    case Token::BIT_XOR:
2345
      __ eor(right, left, Operand(right));
2346
      break;
2347
    default:
2348
      UNREACHABLE();
2349
  }
2350

    
2351
  __ bind(&done);
2352
  context()->Plug(r0);
2353
}
2354

    
2355

    
2356
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2357
                                     Token::Value op,
2358
                                     OverwriteMode mode) {
2359
  __ pop(r1);
2360
  BinaryOpStub stub(op, mode);
2361
  JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
2362
  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
2363
         expr->BinaryOperationFeedbackId());
2364
  patch_site.EmitPatchInfo();
2365
  context()->Plug(r0);
2366
}
2367

    
2368

    
2369
void FullCodeGenerator::EmitAssignment(Expression* expr) {
2370
  // Invalid left-hand sides are rewritten by the parser to have a 'throw
2371
  // ReferenceError' on the left-hand side.
2372
  if (!expr->IsValidLeftHandSide()) {
2373
    VisitForEffect(expr);
2374
    return;
2375
  }
2376

    
2377
  // Left-hand side can only be a property, a global or a (parameter or local)
2378
  // slot.
2379
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2380
  LhsKind assign_type = VARIABLE;
2381
  Property* prop = expr->AsProperty();
2382
  if (prop != NULL) {
2383
    assign_type = (prop->key()->IsPropertyName())
2384
        ? NAMED_PROPERTY
2385
        : KEYED_PROPERTY;
2386
  }
2387

    
2388
  switch (assign_type) {
2389
    case VARIABLE: {
2390
      Variable* var = expr->AsVariableProxy()->var();
2391
      EffectContext context(this);
2392
      EmitVariableAssignment(var, Token::ASSIGN);
2393
      break;
2394
    }
2395
    case NAMED_PROPERTY: {
2396
      __ push(r0);  // Preserve value.
2397
      VisitForAccumulatorValue(prop->obj());
2398
      __ mov(r1, r0);
2399
      __ pop(r0);  // Restore value.
2400
      __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2401
      Handle<Code> ic = is_classic_mode()
2402
          ? isolate()->builtins()->StoreIC_Initialize()
2403
          : isolate()->builtins()->StoreIC_Initialize_Strict();
2404
      CallIC(ic);
2405
      break;
2406
    }
2407
    case KEYED_PROPERTY: {
2408
      __ push(r0);  // Preserve value.
2409
      VisitForStackValue(prop->obj());
2410
      VisitForAccumulatorValue(prop->key());
2411
      __ mov(r1, r0);
2412
      __ pop(r2);
2413
      __ pop(r0);  // Restore value.
2414
      Handle<Code> ic = is_classic_mode()
2415
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
2416
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2417
      CallIC(ic);
2418
      break;
2419
    }
2420
  }
2421
  context()->Plug(r0);
2422
}
2423

    
2424

    
2425
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2426
                                               Token::Value op) {
2427
  if (var->IsUnallocated()) {
2428
    // Global var, const, or let.
2429
    __ mov(r2, Operand(var->name()));
2430
    __ ldr(r1, GlobalObjectOperand());
2431
    Handle<Code> ic = is_classic_mode()
2432
        ? isolate()->builtins()->StoreIC_Initialize()
2433
        : isolate()->builtins()->StoreIC_Initialize_Strict();
2434
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2435

    
2436
  } else if (op == Token::INIT_CONST) {
2437
    // Const initializers need a write barrier.
2438
    ASSERT(!var->IsParameter());  // No const parameters.
2439
    if (var->IsStackLocal()) {
2440
      Label skip;
2441
      __ ldr(r1, StackOperand(var));
2442
      __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
2443
      __ b(ne, &skip);
2444
      __ str(result_register(), StackOperand(var));
2445
      __ bind(&skip);
2446
    } else {
2447
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2448
      // Like var declarations, const declarations are hoisted to function
2449
      // scope.  However, unlike var initializers, const initializers are
2450
      // able to drill a hole to that function context, even from inside a
2451
      // 'with' context.  We thus bypass the normal static scope lookup for
2452
      // var->IsContextSlot().
2453
      __ push(r0);
2454
      __ mov(r0, Operand(var->name()));
2455
      __ Push(cp, r0);  // Context and name.
2456
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2457
    }
2458

    
2459
  } else if (var->mode() == LET && op != Token::INIT_LET) {
2460
    // Non-initializing assignment to let variable needs a write barrier.
2461
    if (var->IsLookupSlot()) {
2462
      __ push(r0);  // Value.
2463
      __ mov(r1, Operand(var->name()));
2464
      __ mov(r0, Operand(Smi::FromInt(language_mode())));
2465
      __ Push(cp, r1, r0);  // Context, name, strict mode.
2466
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
2467
    } else {
2468
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2469
      Label assign;
2470
      MemOperand location = VarOperand(var, r1);
2471
      __ ldr(r3, location);
2472
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2473
      __ b(ne, &assign);
2474
      __ mov(r3, Operand(var->name()));
2475
      __ push(r3);
2476
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
2477
      // Perform the assignment.
2478
      __ bind(&assign);
2479
      __ str(result_register(), location);
2480
      if (var->IsContextSlot()) {
2481
        // RecordWrite may destroy all its register arguments.
2482
        __ mov(r3, result_register());
2483
        int offset = Context::SlotOffset(var->index());
2484
        __ RecordWriteContextSlot(
2485
            r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2486
      }
2487
    }
2488

    
2489
  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2490
    // Assignment to var or initializing assignment to let/const
2491
    // in harmony mode.
2492
    if (var->IsStackAllocated() || var->IsContextSlot()) {
2493
      MemOperand location = VarOperand(var, r1);
2494
      if (generate_debug_code_ && op == Token::INIT_LET) {
2495
        // Check for an uninitialized let binding.
2496
        __ ldr(r2, location);
2497
        __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2498
        __ Check(eq, kLetBindingReInitialization);
2499
      }
2500
      // Perform the assignment.
2501
      __ str(r0, location);
2502
      if (var->IsContextSlot()) {
2503
        __ mov(r3, r0);
2504
        int offset = Context::SlotOffset(var->index());
2505
        __ RecordWriteContextSlot(
2506
            r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2507
      }
2508
    } else {
2509
      ASSERT(var->IsLookupSlot());
2510
      __ push(r0);  // Value.
2511
      __ mov(r1, Operand(var->name()));
2512
      __ mov(r0, Operand(Smi::FromInt(language_mode())));
2513
      __ Push(cp, r1, r0);  // Context, name, strict mode.
2514
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
2515
    }
2516
  }
2517
  // Non-initializing assignments to consts are ignored.
2518
}
2519

    
2520

    
2521
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2522
  // Assignment to a property, using a named store IC.
2523
  Property* prop = expr->target()->AsProperty();
2524
  ASSERT(prop != NULL);
2525
  ASSERT(prop->key()->AsLiteral() != NULL);
2526

    
2527
  // Record source code position before IC call.
2528
  SetSourcePosition(expr->position());
2529
  __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2530
  __ pop(r1);
2531

    
2532
  Handle<Code> ic = is_classic_mode()
2533
      ? isolate()->builtins()->StoreIC_Initialize()
2534
      : isolate()->builtins()->StoreIC_Initialize_Strict();
2535
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2536

    
2537
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2538
  context()->Plug(r0);
2539
}
2540

    
2541

    
2542
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2543
  // Assignment to a property, using a keyed store IC.
2544

    
2545
  // Record source code position before IC call.
2546
  SetSourcePosition(expr->position());
2547
  __ pop(r1);  // Key.
2548
  __ pop(r2);
2549

    
2550
  Handle<Code> ic = is_classic_mode()
2551
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
2552
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2553
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2554

    
2555
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2556
  context()->Plug(r0);
2557
}
2558

    
2559

    
2560
void FullCodeGenerator::VisitProperty(Property* expr) {
2561
  Comment cmnt(masm_, "[ Property");
2562
  Expression* key = expr->key();
2563

    
2564
  if (key->IsPropertyName()) {
2565
    VisitForAccumulatorValue(expr->obj());
2566
    EmitNamedPropertyLoad(expr);
2567
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2568
    context()->Plug(r0);
2569
  } else {
2570
    VisitForStackValue(expr->obj());
2571
    VisitForAccumulatorValue(expr->key());
2572
    __ pop(r1);
2573
    EmitKeyedPropertyLoad(expr);
2574
    context()->Plug(r0);
2575
  }
2576
}
2577

    
2578

    
2579
void FullCodeGenerator::CallIC(Handle<Code> code,
2580
                               RelocInfo::Mode rmode,
2581
                               TypeFeedbackId ast_id) {
2582
  ic_total_count_++;
2583
  // All calls must have a predictable size in full-codegen code to ensure that
2584
  // the debugger can patch them correctly.
2585
  __ Call(code, rmode, ast_id, al, NEVER_INLINE_TARGET_ADDRESS);
2586
}
2587

    
2588
void FullCodeGenerator::EmitCallWithIC(Call* expr,
2589
                                       Handle<Object> name,
2590
                                       RelocInfo::Mode mode) {
2591
  // Code common for calls using the IC.
2592
  ZoneList<Expression*>* args = expr->arguments();
2593
  int arg_count = args->length();
2594
  { PreservePositionScope scope(masm()->positions_recorder());
2595
    for (int i = 0; i < arg_count; i++) {
2596
      VisitForStackValue(args->at(i));
2597
    }
2598
    __ mov(r2, Operand(name));
2599
  }
2600
  // Record source position for debugger.
2601
  SetSourcePosition(expr->position());
2602
  // Call the IC initialization code.
2603
  Handle<Code> ic =
2604
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2605
  CallIC(ic, mode, expr->CallFeedbackId());
2606
  RecordJSReturnSite(expr);
2607
  // Restore context register.
2608
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2609
  context()->Plug(r0);
2610
}
2611

    
2612

    
2613
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2614
                                            Expression* key) {
2615
  // Load the key.
2616
  VisitForAccumulatorValue(key);
2617

    
2618
  // Swap the name of the function and the receiver on the stack to follow
2619
  // the calling convention for call ICs.
2620
  __ pop(r1);
2621
  __ push(r0);
2622
  __ push(r1);
2623

    
2624
  // Code common for calls using the IC.
2625
  ZoneList<Expression*>* args = expr->arguments();
2626
  int arg_count = args->length();
2627
  { PreservePositionScope scope(masm()->positions_recorder());
2628
    for (int i = 0; i < arg_count; i++) {
2629
      VisitForStackValue(args->at(i));
2630
    }
2631
  }
2632
  // Record source position for debugger.
2633
  SetSourcePosition(expr->position());
2634
  // Call the IC initialization code.
2635
  Handle<Code> ic =
2636
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2637
  __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
2638
  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
2639
  RecordJSReturnSite(expr);
2640
  // Restore context register.
2641
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2642
  context()->DropAndPlug(1, r0);  // Drop the key still on the stack.
2643
}
2644

    
2645

    
2646
void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2647
  // Code common for calls using the call stub.
2648
  ZoneList<Expression*>* args = expr->arguments();
2649
  int arg_count = args->length();
2650
  { PreservePositionScope scope(masm()->positions_recorder());
2651
    for (int i = 0; i < arg_count; i++) {
2652
      VisitForStackValue(args->at(i));
2653
    }
2654
  }
2655
  // Record source position for debugger.
2656
  SetSourcePosition(expr->position());
2657

    
2658
  // Record call targets in unoptimized code.
2659
  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2660
  Handle<Object> uninitialized =
2661
      TypeFeedbackCells::UninitializedSentinel(isolate());
2662
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2663
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2664
  __ mov(r2, Operand(cell));
2665

    
2666
  CallFunctionStub stub(arg_count, flags);
2667
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2668
  __ CallStub(&stub, expr->CallFeedbackId());
2669
  RecordJSReturnSite(expr);
2670
  // Restore context register.
2671
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2672
  context()->DropAndPlug(1, r0);
2673
}
2674

    
2675

    
2676
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2677
  // Push copy of the first argument or undefined if it doesn't exist.
2678
  if (arg_count > 0) {
2679
    __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2680
  } else {
2681
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2682
  }
2683
  __ push(r1);
2684

    
2685
  // Push the receiver of the enclosing function.
2686
  int receiver_offset = 2 + info_->scope()->num_parameters();
2687
  __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
2688
  __ push(r1);
2689
  // Push the language mode.
2690
  __ mov(r1, Operand(Smi::FromInt(language_mode())));
2691
  __ push(r1);
2692

    
2693
  // Push the start position of the scope the calls resides in.
2694
  __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2695
  __ push(r1);
2696

    
2697
  // Do the runtime call.
2698
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2699
}
2700

    
2701

    
2702
void FullCodeGenerator::VisitCall(Call* expr) {
2703
#ifdef DEBUG
2704
  // We want to verify that RecordJSReturnSite gets called on all paths
2705
  // through this function.  Avoid early returns.
2706
  expr->return_is_recorded_ = false;
2707
#endif
2708

    
2709
  Comment cmnt(masm_, "[ Call");
2710
  Expression* callee = expr->expression();
2711
  VariableProxy* proxy = callee->AsVariableProxy();
2712
  Property* property = callee->AsProperty();
2713

    
2714
  if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
2715
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
2716
    // resolve the function we need to call and the receiver of the
2717
    // call.  Then we call the resolved function using the given
2718
    // arguments.
2719
    ZoneList<Expression*>* args = expr->arguments();
2720
    int arg_count = args->length();
2721

    
2722
    { PreservePositionScope pos_scope(masm()->positions_recorder());
2723
      VisitForStackValue(callee);
2724
      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2725
      __ push(r2);  // Reserved receiver slot.
2726

    
2727
      // Push the arguments.
2728
      for (int i = 0; i < arg_count; i++) {
2729
        VisitForStackValue(args->at(i));
2730
      }
2731

    
2732
      // Push a copy of the function (found below the arguments) and
2733
      // resolve eval.
2734
      __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2735
      __ push(r1);
2736
      EmitResolvePossiblyDirectEval(arg_count);
2737

    
2738
      // The runtime call returns a pair of values in r0 (function) and
2739
      // r1 (receiver). Touch up the stack with the right values.
2740
      __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2741
      __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2742
    }

    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
    __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, r0);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    // Push global object as receiver for the call IC.
    __ ldr(r0, GlobalObjectOperand());
    __ push(r0);
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    // Call to a lookup slot (dynamically introduced variable).
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in r0)
    // and the object holding it (returned in r1).
    __ push(context_register());
    __ mov(r2, Operand(proxy->name()));
    __ push(r2);
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ Push(r0, r1);  // Function, receiver.

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ b(&call);
      __ bind(&done);
      // Push function.
      __ push(r0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the hole to the call function stub.
      __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
      __ push(r1);
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot. That object could be the hole if the
    // receiver is implicitly the global object.
    EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->value(),
                     RelocInfo::CODE_TARGET);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // Load global receiver object.
    __ ldr(r1, GlobalObjectOperand());
    __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
    __ push(r1);
    // Emit function call.
    EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into r1 and r0.
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
  __ mov(r2, Operand(cell));
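  // r2 now holds the type feedback cell that the construct stub below uses
  // to record the call target.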

  CallConstructStub stub(RECORD_CALL_TARGET);
  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(r0);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ NonNegativeSmiTst(r0);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, if_true);
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
  __ b(ne, if_false);
  __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  __ b(lt, if_false);
  __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(r0);

  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ b(ne, &skip_lookup);

  // Check for fast case object. Generate false result for slow case object.
  __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r2, ip);
  __ b(eq, if_false);

  // Look for valueOf name in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(r3, r1);
  __ cmp(r3, Operand::Zero());
  __ b(eq, &done);

  __ LoadInstanceDescriptors(r1, r4);
  // r4: descriptor array.
  // r3: valid entries in the descriptor array.
  __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
  __ mul(r3, r3, ip);
  // Calculate location of the first key name.
  __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
  // Calculate the end of the descriptor array.
  __ mov(r2, r4);
  __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));

  // Loop through all the keys in the descriptor array. If one of these is the
  // string "valueOf" the result is false.
  // The use of ip to store the valueOf string assumes that it is not otherwise
  // used in the loop below.
  __ mov(ip, Operand(isolate()->factory()->value_of_string()));
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r3, MemOperand(r4, 0));
  __ cmp(r3, ip);
  __ b(eq, if_false);
  __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(r4, Operand(r2));
  __ b(ne, &loop);

  __ bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
  __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is false.
  __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
  __ JumpIfSmi(r2, if_false);
  __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
  __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ cmp(r2, r3);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}



void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &check_frame_marker);
  __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
  __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(r1);
  __ cmp(r0, r1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in r1 and the formal
  // parameter count in r0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(r0, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
  // Map is now in r0.
  __ b(lt, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ b(eq, &function);

  __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ b(eq, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &non_function_constructor);

  // r0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ b(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(r0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(r0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string.  Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }

  // Finally, we're expected to leave a value on the top of the stack.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);
  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(r4, Operand(r0));

  __ bind(&heapnumber_allocated);

  // Convert 32 random bits in r0 to 0.(32 random bits) in a double
  // by computing:
  // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
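  // In IEEE-754 terms: (1 + r/2^52) * 2^20 - 2^20 = r * 2^-32, i.e. the
  // 32 random bits r interpreted as a fraction in [0, 1).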
  __ PrepareCallCFunction(1, r0);
  __ ldr(r0,
         ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

  // 0x41300000 is the top half of 1.0 x 2^20 as a double.
  // Create this constant using mov/orr to avoid PC relative load.
  __ mov(r1, Operand(0x41000000));
  __ orr(r1, r1, Operand(0x300000));
  // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
  __ vmov(d7, r0, r1);
  // Move 0x4130000000000000 to VFP.
  __ mov(r0, Operand::Zero());
  __ vmov(d8, r0, r1);
  // Subtract and store the result in the heap number.
  __ vsub(d7, d7, d8);
  __ sub(r0, r4, Operand(kHeapObjectTag));
  __ vstr(d7, r0, HeapNumber::kValueOffset);
  __ mov(r0, r4);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi, return the object.
  __ JumpIfSmi(r0, &done);
  // If the object is not a value type, return the object.
  __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
  __ b(ne, &done);
  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = r0;
  Register result = r0;
  Register scratch0 = r9;
  Register scratch1 = r1;

  __ JumpIfSmi(object, &not_date_object);
  __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
  __ b(ne, &not_date_object);

  if (index->value() == 0) {
    __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
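      // Cached date fields are only valid while the object's cache stamp
      // matches the isolate's date cache stamp; otherwise fall through to
      // the runtime call below.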
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch1, Operand(stamp));
      __ ldr(scratch1, MemOperand(scratch1));
      __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ cmp(scratch1, scratch0);
      __ b(ne, &runtime);
      __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch1);
    __ mov(r1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
                                                  Register index,
                                                  Register value,
                                                  uint32_t encoding_mask) {
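  // encoding_mask is the expected combination of string representation and
  // encoding bits (e.g. kSeqStringTag | kOneByteStringTag); it is checked
  // against the string's instance type at the end of this function.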
  __ SmiTst(index);
  __ Check(eq, kNonSmiIndex);
  __ SmiTst(value);
  __ Check(eq, kNonSmiValue);

  __ ldr(ip, FieldMemOperand(string, String::kLengthOffset));
  __ cmp(index, ip);
  __ Check(lt, kIndexIsTooLarge);

  __ cmp(index, Operand(Smi::FromInt(0)));
  __ Check(ge, kIndexIsNegative);

  __ ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));

  __ and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
  __ cmp(ip, Operand(encoding_mask));
  __ Check(eq, kUnexpectedStringType);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  __ pop(value);
  __ pop(index);
  VisitForAccumulatorValue(args->at(0));  // string

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
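  // The index is still a smi; shifting it right by kSmiTagSize untags it,
  // which for a one-byte string is exactly the byte offset of the character.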
  __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(3, args->length());

  Register string = r0;
  Register index = r1;
  Register value = r2;

  VisitForStackValue(args->at(1));  // index
  VisitForStackValue(args->at(2));  // value
  __ pop(value);
  __ pop(index);
  VisitForAccumulatorValue(args->at(0));  // string

  if (FLAG_debug_code) {
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
  }

  __ SmiUntag(value, value);
  __ add(ip,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ strh(value, MemOperand(ip, index));
  context()->Plug(string);
}



void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  MathPowStub stub(MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(r1);  // r0 = value. r1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(r1, &done);

  // If the object is not a value type, return the value.
  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
  __ b(ne, &done);

  // Store the value.
  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(r2, r0);
  __ RecordWriteField(
      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);
  // Load the argument into r0 and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(r0, r1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringAddStub stub(STRING_ADD_CHECK_BOTH);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &runtime);

  // InvokeFunction requires the function in r1. Move it in there.
  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(r0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    context()->Plug(r0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = r0;
  Register cache = r1;
  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));


  Label done, not_found;
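  // The cache stores (key, value) pairs after its fixed header; the finger
  // is the smi offset of the key of the most recently found pair.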
  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // r2 now holds finger offset as a smi.
  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // r3 now points to the start of fixed array elements.
  __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
  // Note side effect of PreIndex: r3 now points to the key of the pair.
  __ cmp(key, r2);
  __ b(ne, &not_found);

  __ ldr(r0, MemOperand(r3, kPointerSize));
  __ b(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = r0;
  Register left = r1;
  Register tmp = r2;
  Register tmp2 = r3;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  __ cmp(left, Operand(right));
  __ b(eq, &ok);
  // Fail if either is a non-HeapObject.
  __ and_(tmp, left, Operand(right));
  __ JumpIfSmi(tmp, &fail);
  __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
  __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
  __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
  __ b(ne, &fail);
  __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
  __ cmp(tmp, Operand(tmp2));
  __ b(ne, &fail);
  __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
  __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
  __ cmp(tmp, tmp2);
  __ b(eq, &ok);
  __ bind(&fail);
  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
  __ jmp(&done);
  __ bind(&ok);
  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
  __ bind(&done);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(r0);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator, non_trivial_array,
      not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = r0;
  Register elements = no_reg;  // Will be r0.
  Register result = no_reg;  // Will be r0.
  Register separator = r1;
  Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
  Register string_length = r3;
  Register string = r4;
  Register element = r5;
  Register elements_end = r6;
  Register scratch = r9;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, array_length, &bailout);

  // If the array has length zero, return the empty string.
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length, SetCC);
  __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
  __ b(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, Operand::Zero());
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi).
  //   separator: Separator string.
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ cmp(array_length, Operand::Zero());
    __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ cmp(element, elements_end);
  __ b(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, Operand(1));
  __ b(ne, &not_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string.
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ JumpIfSmi(separator, &bailout);
  __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is not
  // a smi but the other values are, so the result is a smi.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ sub(string_length, string_length, Operand(scratch));
  __ smull(scratch, ip, array_length, scratch);
  // Check for smi overflow. No overflow if the higher 33 bits of the 64-bit
  // result are zero.
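  // (The product is itself smi-tagged, so it must be non-negative and fit in
  // 32 bits; hence both of the checks below.)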
  __ cmp(ip, Operand::Zero());
  __ b(ne, &bailout);
  __ tst(scratch, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ SmiUntag(string_length);

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element.
  //   separator: Separator string.
  //   string_length: Length of result string (not smi).
  //   array_length: Length of the array.
  __ AllocateAsciiString(result,
                         string_length,
                         scratch,
                         string,  // used as scratch
                         elements_end,  // used as scratch
                         &bailout);
  // Prepare for looping. Set up elements_end to point to the end of the
  // array, and result_pos to the position in the result where the first
  // character will be written.
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ add(result_pos,
         result,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ cmp(scratch, Operand(Smi::FromInt(1)));
  __ b(eq, &one_char_separator);
  __ b(gt, &long_separator);

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator ASCII char (in lower byte).

  // Copy the separator character to the result.
  __ strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         separator,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&long_separator);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &long_separator_loop);  // End while (element < elements_end).
  ASSERT(result.is(r0));
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ ldr(r0, GlobalObjectOperand());
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
    __ push(r0);
  }
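  // For a JS runtime call the builtins object pushed above becomes the
  // receiver; the call IC further down looks the function up on it by name.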
4204

    
4205
  // Push the arguments ("left-to-right").
4206
  int arg_count = args->length();
4207
  for (int i = 0; i < arg_count; i++) {
4208
    VisitForStackValue(args->at(i));
4209
  }
4210

    
4211
  if (expr->is_jsruntime()) {
4212
    // Call the JS runtime function.
4213
    __ mov(r2, Operand(expr->name()));
4214
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
4215
    Handle<Code> ic =
4216
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
4217
    CallIC(ic, mode, expr->CallRuntimeFeedbackId());
4218
    // Restore context register.
4219
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4220
  } else {
4221
    // Call the C runtime function.
4222
    __ CallRuntime(expr->function(), arg_count);
4223
  }
4224
  context()->Plug(r0);
4225
}
4226

    
4227

    
4228
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ mov(r1, Operand(Smi::FromInt(strict_mode_flag)));
        __ push(r1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ ldr(r2, GlobalObjectOperand());
          __ mov(r1, Operand(var->name()));
          __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
          __ Push(r2, r1, r0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ mov(r2, Operand(var->name()));
          __ push(r2);
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      __ push(ip);
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(r0);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ ldr(r1, MemOperand(sp, 0));
      __ push(r0);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ JumpIfSmi(r0, &no_conversion);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
      }
    }
  }


  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vs, &stub_call);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(r0, &done);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
  }
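  // The generic BinaryOpStub called below takes its left operand in r1 and
  // its right operand in r0, so the count operation is performed as
  // value + count_value (count_value is -1 for Token::DEC).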
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  // Record position before stub call.
  SetSourcePosition(expr->position());

  BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()),
         RelocInfo::CODE_TARGET,
         expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
      __ pop(r1);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(r1);  // Key.
      __ pop(r2);  // Receiver.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());
  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ ldr(r0, GlobalObjectOperand());
    __ mov(r2, Operand(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(r0);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ mov(r0, Operand(proxy->name()));
    __ Push(cp, r0);
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(r0);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => false.
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    __ b(ge, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);

  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(r0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
    __ b(eq, if_true);
    __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
    Split(eq, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(r0, if_false);
    if (!FLAG_harmony_typeof) {
      __ CompareRoot(r0, Heap::kNullValueRootIndex);
      __ b(eq, if_true);
    }
    // Check for JS objects => true.
    __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, if_false);
    __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, if_false);
    // Check for undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r0, ip);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ tst(r0, r0);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);
      __ pop(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
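      // The compare IC leaves a value in r0 that is negative, zero, or
      // positive according to how the left operand compares to the right,
      // so testing it against zero with 'cond' reproduces the comparison.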
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
    __ cmp(r0, Operand(0));
    Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(r0);
}


Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ mov(ip, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta)
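  // Note: storing a smi-encoded offset from the code object start, rather
  // than the raw return address, keeps this stack slot valid if the GC
  // moves the code object while the finally block runs.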
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  __ SmiTag(r1);

  // Store the cooked return address while executing finally block.
  __ push(r1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  __ ldr(r1, MemOperand(ip));
  __ SmiTag(r1);
  __ push(r1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Restore pending message from stack.
  __ pop(r1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(ip, Operand(pending_message_script));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  __ SmiUntag(r1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(ip, Operand(has_pending_message));
  __ str(r1, MemOperand(ip));

  __ pop(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));

  // Restore the cooked return address from the stack.
  __ pop(r1);

  // Restore the result register, then uncook the return address and return.
  __ pop(result_register());
  __ SmiUntag(r1);
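  // Adding the code object start back to the untagged offset and writing the
  // result to pc performs the actual return jump.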
  __ add(pc, r1, Operand(masm_->CodeObject()));
}


#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ bl(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __


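// Encoding of the "bpl" instruction that full codegen emits before the
// back-edge interrupt check (see the INTERRUPT case in PatchAt below).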
static const int32_t kBranchBeforeInterrupt =  0x5a000004;


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 3 * kInstrSize;
  CodePatcher patcher(branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      //  <decrement profiling counter>
      //  2a 00 00 01       bpl ok
      //  e5 9f c? ??       ldr ip, [pc, <interrupt stub address>]
      //  e1 2f ff 3c       blx ip
      //  ok-label
      patcher.masm()->b(4 * kInstrSize, pl);  // Jump offset is 4 instructions.
      ASSERT_EQ(kBranchBeforeInterrupt, Memory::int32_at(branch_address));
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //  e1 a0 00 00       mov r0, r0 (NOP)
      //  e5 9f c? ??       ldr ip, [pc, <on-stack replacement address>]
      //  e1 2f ff 3c       blx ip
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  Address pc_immediate_load_address = pc - 2 * kInstrSize;
  // Replace the call address.
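  // The pc-relative ldr two instructions before the blx carries a 12-bit
  // offset to the constant pool entry holding the call target; its base
  // address (the ldr's own pc + 8) is exactly this function's 'pc' argument,
  // so the entry to patch lives at pc + offset.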
  uint32_t interrupt_address_offset =
      Memory::uint16_at(pc_immediate_load_address) & 0xfff;
  Address interrupt_address_pointer = pc + interrupt_address_offset;
  Memory::uint32_at(interrupt_address_pointer) =
      reinterpret_cast<uint32_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  ASSERT(Memory::int32_at(pc - kInstrSize) == kBlxIp);

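  // 'pc' points just past the back edge sequence: the blx is one instruction
  // back, the pc-relative ldr of the call target two back, and the bpl/nop
  // patched by PatchAt above three back.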
  Address branch_address = pc - 3 * kInstrSize;
  Address pc_immediate_load_address = pc - 2 * kInstrSize;
  uint32_t interrupt_address_offset =
      Memory::uint16_at(pc_immediate_load_address) & 0xfff;
  Address interrupt_address_pointer = pc + interrupt_address_offset;

  if (Memory::int32_at(branch_address) == kBranchBeforeInterrupt) {
    ASSERT(Memory::uint32_at(interrupt_address_pointer) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    ASSERT(Assembler::IsLdrPcImmediateOffset(
               Assembler::instr_at(pc_immediate_load_address)));
    return INTERRUPT;
  }

  ASSERT(Assembler::IsNop(Assembler::instr_at(branch_address)));
  ASSERT(Assembler::IsLdrPcImmediateOffset(
             Assembler::instr_at(pc_immediate_load_address)));

  if (Memory::uint32_at(interrupt_address_pointer) ==
      reinterpret_cast<uint32_t>(
          isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT(Memory::uint32_at(interrupt_address_pointer) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM