// main_repo / deps / v8 / src / codegen-arm.cc @ 40c0f755

// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "debug.h"
#include "parser.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"


namespace v8 { namespace internal {

#define __ masm_->
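// Convention note: '__' routes the assembler mnemonics below through the
// CodeGenerator's MacroAssembler, so '__ mov(r0, ...)' expands to
// 'masm_->mov(r0, ...)'.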

// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      typeof_state_(NOT_INSIDE_TYPEOF),
      true_target_(NULL),
      false_target_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           TypeofState typeof_state,
                           JumpTarget* true_target,
                           JumpTarget* false_target)
    : owner_(owner),
      typeof_state_(typeof_state),
      true_target_(true_target),
      false_target_(false_target),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}
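// The constructors above push the new state onto the owning CodeGenerator
// and the destructor pops it, so nested CodeGenState objects act as a
// scoped stack of (typeof-state, branch-target) contexts during AST
// traversal.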


// -------------------------------------------------------------------------
// CodeGenerator implementation

CodeGenerator::CodeGenerator(int buffer_size, Handle<Script> script,
                             bool is_eval)
    : is_eval_(is_eval),
      script_(script),
      deferred_(8),
      masm_(new MacroAssembler(NULL, buffer_size)),
      scope_(NULL),
      frame_(NULL),
      allocator_(NULL),
      cc_reg_(al),
      state_(NULL),
      function_return_is_shadowed_(false),
      in_spilled_code_(false) {
}


// Calling conventions:
// fp: caller's frame pointer
// sp: stack pointer
// r1: called JS function
// cp: callee's context
void CodeGenerator::GenCode(FunctionLiteral* fun) {
  ZoneList<Statement*>* body = fun->body();

  // Initialize state.
  ASSERT(scope_ == NULL);
  scope_ = fun->scope();
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame(this);
  cc_reg_ = al;
  set_in_spilled_code(false);
  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments
    // lr: return address
    // fp: caller's frame pointer
    // sp: stack pointer
    // r1: called JS function
    // cp: callee's context
    allocator_->Initialize();
    frame_->Enter();
    // tos: code slot
#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        fun->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ stop("stop-at");
    }
#endif

    // Allocate space for locals and initialize them.
    frame_->AllocateStackSlots(scope_->num_stack_slots());
    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.Initialize(this, JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    VirtualFrame::SpilledScope spilled_scope(this);
    if (scope_->num_heap_slots() > 0) {
      // Allocate local context.
      // Get outer context and create a new context based on it.
      __ ldr(r0, frame_->Function());
      frame_->EmitPush(r0);
      frame_->CallRuntime(Runtime::kNewContext, 1);  // r0 holds the result

      if (kDebug) {
        JumpTarget verified_true(this);
        __ cmp(r0, Operand(cp));
        verified_true.Branch(eq);
        __ stop("NewContext: r0 is expected to be the same as cp");
        verified_true.Bind();
      }
      // Update context local.
      __ str(cp, frame_->Context());
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");

      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
      for (int i = 0; i < scope_->num_parameters(); i++) {
        Variable* par = scope_->parameter(i);
        Slot* slot = par->slot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          ASSERT(!scope_->is_global_scope());  // no parameters in global scope
          __ ldr(r1, frame_->ParameterAt(i));
          // Loads r2 with context; used below in RecordWrite.
          __ str(r1, SlotOperand(slot, r2));
          // Load the offset into r3.
          int slot_offset =
              FixedArray::kHeaderSize + slot->index() * kPointerSize;
          __ mov(r3, Operand(slot_offset));
          __ RecordWrite(r2, r3, r1);
        }
      }
    }

    // Store the arguments object.  This must happen after context
    // initialization because the arguments object may be stored in the
    // context.
    if (scope_->arguments() != NULL) {
      ASSERT(scope_->arguments_shadow() != NULL);
      Comment cmnt(masm_, "[ allocate arguments object");
      { Reference shadow_ref(this, scope_->arguments_shadow());
        { Reference arguments_ref(this, scope_->arguments());
          ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
          __ ldr(r2, frame_->Function());
          // The receiver is below the arguments, the return address,
          // and the frame pointer on the stack.
          const int kReceiverDisplacement = 2 + scope_->num_parameters();
          __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
          __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters())));
          frame_->Adjust(3);
          __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit());
          frame_->CallStub(&stub, 3);
          frame_->EmitPush(r0);
          arguments_ref.SetValue(NOT_CONST_INIT);
        }
        shadow_ref.SetValue(NOT_CONST_INIT);
      }
      frame_->Drop();  // Value is no longer needed.
    }
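    // (Worked example for the displacement above: with two declared
    // parameters, kReceiverDisplacement is 2 + 2 = 4, so r1 points at
    // fp + 4 * kPointerSize, i.e. past the saved fp, the return address,
    // and the two arguments, which is where the receiver sits.)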

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope_->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope_->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope_->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope_->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatementsAndSpill(body);
    }
  }

  // Generate the return sequence if necessary.
  if (frame_ != NULL || function_return_.is_linked()) {
    // exit
    // r0: result
    // sp: stack pointer
    // fp: frame pointer
    // pp: parameter pointer
    // cp: callee's context
    __ mov(r0, Operand(Factory::undefined_value()));

    function_return_.Bind();
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns the parameter as it is.
      frame_->EmitPush(r0);
      frame_->CallRuntime(Runtime::kTraceExit, 1);
    }

    // Tear down the frame which will restore the caller's frame pointer and
    // the link register.
    frame_->Exit();

    __ add(sp, sp, Operand((scope_->num_parameters() + 1) * kPointerSize));
    __ mov(pc, lr);
  }

  // Code generation state must be reset.
  ASSERT(!has_cc());
  ASSERT(state_ == NULL);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (HasStackOverflow()) {
    ClearDeferred();
  } else {
    ProcessDeferred();
  }

  allocator_ = NULL;
  scope_ = NULL;
}


MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(cp));  // do not overwrite context register
      Register context = cp;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we
      // are always at a function context. However it is safe to
      // dereference because the function context of a function context
      // is itself. Before deleting this mov we should try to create a
      // counter-example first, though...)
      __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return MemOperand(r0, 0);
  }
}


MemOperand CodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Register tmp,
    Register tmp2,
    JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = cp;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
  }
  // Check that last extension is NULL.
  __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(tmp2, tmp2);
  slow->Branch(ne);
  __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp, slot->index());
}


void CodeGenerator::LoadConditionAndSpill(Expression* expression,
                                          TypeofState typeof_state,
                                          JumpTarget* true_target,
                                          JumpTarget* false_target,
                                          bool force_control) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  LoadCondition(expression, typeof_state, true_target, false_target,
                force_control);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


// Loads a value on TOS. If it is a boolean value, the result may have been
// (partially) translated into branches, or it may have set the condition
// code register. If force_cc is set, the value is forced to set the
// condition code register and no value is pushed. If the condition code
// register was set, has_cc() is true and cc_reg_ contains the condition to
// test for 'true'.
void CodeGenerator::LoadCondition(Expression* x,
                                  TypeofState typeof_state,
                                  JumpTarget* true_target,
                                  JumpTarget* false_target,
                                  bool force_cc) {
  ASSERT(!in_spilled_code());
  ASSERT(!has_cc());
  int original_height = frame_->height();

  { CodeGenState new_state(this, typeof_state, true_target, false_target);
    Visit(x);

    // If we hit a stack overflow, we may not have actually visited
    // the expression.  In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        has_valid_frame() &&
        !has_cc() &&
        frame_->height() == original_height) {
      true_target->Jump();
    }
  }
  if (force_cc && frame_ != NULL && !has_cc()) {
    // Convert the TOS value to a boolean in the condition code register.
    ToBoolean(true_target, false_target);
  }
  ASSERT(!force_cc || !has_valid_frame() || has_cc());
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::LoadAndSpill(Expression* expression,
                                 TypeofState typeof_state) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression, typeof_state);
  frame_->SpillAll();
  set_in_spilled_code(true);
}


void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());
  JumpTarget true_target(this);
  JumpTarget false_target(this);
  LoadCondition(x, typeof_state, &true_target, &false_target, false);

  if (has_cc()) {
    // Convert cc_reg_ into a boolean value.
    JumpTarget loaded(this);
    JumpTarget materialize_true(this);
    materialize_true.Branch(cc_reg_);
    __ mov(r0, Operand(Factory::false_value()));
    frame_->EmitPush(r0);
    loaded.Jump();
    materialize_true.Bind();
    __ mov(r0, Operand(Factory::true_value()));
    frame_->EmitPush(r0);
    loaded.Bind();
    cc_reg_ = al;
  }

  if (true_target.is_linked() || false_target.is_linked()) {
    // We have at least one condition value that has been "translated"
    // into a branch, thus it needs to be loaded explicitly.
    JumpTarget loaded(this);
    if (frame_ != NULL) {
      loaded.Jump();  // Don't lose the current TOS.
    }
    bool both = true_target.is_linked() && false_target.is_linked();
    // Load "true" if necessary.
    if (true_target.is_linked()) {
      true_target.Bind();
      __ mov(r0, Operand(Factory::true_value()));
      frame_->EmitPush(r0);
    }
    // If both "true" and "false" need to be loaded jump across the code for
    // "false".
    if (both) {
      loaded.Jump();
    }
    // Load "false" if necessary.
    if (false_target.is_linked()) {
      false_target.Bind();
      __ mov(r0, Operand(Factory::false_value()));
      frame_->EmitPush(r0);
    }
    // A value is loaded on all paths reaching this point.
    loaded.Bind();
  }
  ASSERT(has_valid_frame());
  ASSERT(!has_cc());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::LoadGlobal() {
  VirtualFrame::SpilledScope spilled_scope(this);
  __ ldr(r0, GlobalObject());
  frame_->EmitPush(r0);
}


void CodeGenerator::LoadGlobalReceiver(Register scratch) {
  VirtualFrame::SpilledScope spilled_scope(this);
  __ ldr(scratch, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(scratch,
         FieldMemOperand(scratch, GlobalObject::kGlobalReceiverOffset));
  frame_->EmitPush(scratch);
}


// TODO(1241834): Get rid of this function in favor of just using Load, now
// that we have the INSIDE_TYPEOF typeof state. => Need to handle global
// variables w/o reference errors elsewhere.
void CodeGenerator::LoadTypeofExpression(Expression* x) {
  VirtualFrame::SpilledScope spilled_scope(this);
  Variable* variable = x->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // NOTE: This is somewhat nasty. We force the compiler to load
    // the variable as if through '<global>.<variable>' to make sure we
    // do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    // TODO(1241834): Fetch the position from the variable instead of using
    // no position.
    Property property(&global, &key, RelocInfo::kNoPosition);
    LoadAndSpill(&property);
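    // (This preserves 'typeof' semantics: reading an undeclared global as
    // '<global>.<variable>' yields undefined for a missing property, so
    // 'typeof x' evaluates to "undefined" instead of throwing a reference
    // error.)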
  } else {
    LoadAndSpill(x, INSIDE_TYPEOF);
  }
}


Reference::Reference(CodeGenerator* cgen, Expression* expression)
    : cgen_(cgen), expression_(expression), type_(ILLEGAL) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  cgen_->UnloadReference(this);
}


void CodeGenerator::LoadReference(Reference* ref) {
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    LoadAndSpill(property->obj());
    // We use a named reference if the key is a literal symbol, unless it is
    // a string that can be legally parsed as an integer.  This is because
    // otherwise we will not get into the slow case code that handles [] on
    // String objects.
    Literal* literal = property->key()->AsLiteral();
    uint32_t dummy;
    if (literal != NULL &&
        literal->handle()->IsSymbol() &&
        !String::cast(*(literal->handle()))->AsArrayIndex(&dummy)) {
      ref->set_type(Reference::NAMED);
    } else {
      LoadAndSpill(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property.  Global variables are treated as named property references.
    if (var->is_global()) {
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->slot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    LoadAndSpill(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }
}


void CodeGenerator::UnloadReference(Reference* ref) {
  VirtualFrame::SpilledScope spilled_scope(this);
  // Pop a reference from the stack while preserving TOS.
  Comment cmnt(masm_, "[ UnloadReference");
  int size = ref->size();
  if (size > 0) {
    frame_->EmitPop(r0);
    frame_->Drop(size);
    frame_->EmitPush(r0);
  }
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Convert the given
// register to a boolean in the condition code register. The code
// may jump to 'false_target' in case the register converts to 'false'.
void CodeGenerator::ToBoolean(JumpTarget* true_target,
                              JumpTarget* false_target) {
  VirtualFrame::SpilledScope spilled_scope(this);
  // Note: The generated code snippet does not change stack variables.
  //       Only the condition code should be set.
  frame_->EmitPop(r0);

  // Fast case checks

  // Check if the value is 'false'.
  __ cmp(r0, Operand(Factory::false_value()));
  false_target->Branch(eq);

  // Check if the value is 'true'.
  __ cmp(r0, Operand(Factory::true_value()));
  true_target->Branch(eq);

  // Check if the value is 'undefined'.
  __ cmp(r0, Operand(Factory::undefined_value()));
  false_target->Branch(eq);

  // Check if the value is a smi.
  __ cmp(r0, Operand(Smi::FromInt(0)));
  false_target->Branch(eq);
  __ tst(r0, Operand(kSmiTagMask));
  true_target->Branch(eq);
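  // (Smi check explained: with kSmiTag == 0 the low tag bit of a smi is
  // clear, so 'tst r0, #kSmiTagMask' sets the Z flag ('eq') exactly for
  // smis. Zero was already dispatched to false_target above, so any
  // remaining smi is a non-zero integer and therefore true.)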

  // Slow case: call the runtime.
  frame_->EmitPush(r0);
  frame_->CallRuntime(Runtime::kToBool, 1);
  // Convert the result (r0) to a condition code.
  __ cmp(r0, Operand(Factory::false_value()));

  cc_reg_ = ne;
}


class GetPropertyStub : public CodeStub {
 public:
  GetPropertyStub() { }

 private:
  Major MajorKey() { return GetProperty; }
  int MinorKey() { return 0; }
  void Generate(MacroAssembler* masm);
};


class SetPropertyStub : public CodeStub {
 public:
  SetPropertyStub() { }

 private:
  Major MajorKey() { return SetProperty; }
  int MinorKey() { return 0; }
  void Generate(MacroAssembler* masm);
};


class GenericBinaryOpStub : public CodeStub {
 public:
  explicit GenericBinaryOpStub(Token::Value op) : op_(op) { }

 private:
  Token::Value op_;

  Major MajorKey() { return GenericBinaryOp; }
  int MinorKey() { return static_cast<int>(op_); }
  void Generate(MacroAssembler* masm);

  const char* GetName() {
    switch (op_) {
      case Token::ADD: return "GenericBinaryOpStub_ADD";
      case Token::SUB: return "GenericBinaryOpStub_SUB";
      case Token::MUL: return "GenericBinaryOpStub_MUL";
      case Token::DIV: return "GenericBinaryOpStub_DIV";
      case Token::BIT_OR: return "GenericBinaryOpStub_BIT_OR";
      case Token::BIT_AND: return "GenericBinaryOpStub_BIT_AND";
      case Token::BIT_XOR: return "GenericBinaryOpStub_BIT_XOR";
      case Token::SAR: return "GenericBinaryOpStub_SAR";
      case Token::SHL: return "GenericBinaryOpStub_SHL";
      case Token::SHR: return "GenericBinaryOpStub_SHR";
      default:         return "GenericBinaryOpStub";
    }
  }

#ifdef DEBUG
  void Print() { PrintF("GenericBinaryOpStub (%s)\n", Token::String(op_)); }
#endif
};
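// (MajorKey/MinorKey identify a particular stub instance; here the minor
// key encodes the Token::Value of the operator, so each binary operation
// can be compiled once and the resulting stub reused.)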


void CodeGenerator::GenericBinaryOperation(Token::Value op) {
  VirtualFrame::SpilledScope spilled_scope(this);
  // sp[0] : y
  // sp[1] : x
  // result : r0

  // Stub is entered with a call: 'return address' is in lr.
  switch (op) {
    case Token::ADD:  // fall through.
    case Token::SUB:  // fall through.
    case Token::MUL:
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      frame_->EmitPop(r0);  // r0 : y
      frame_->EmitPop(r1);  // r1 : x
      GenericBinaryOpStub stub(op);
      frame_->CallStub(&stub, 0);
      break;
    }

    case Token::DIV: {
      Result arg_count = allocator_->Allocate(r0);
      ASSERT(arg_count.is_valid());
      __ mov(arg_count.reg(), Operand(1));
      frame_->InvokeBuiltin(Builtins::DIV, CALL_JS, &arg_count, 2);
      break;
    }

    case Token::MOD: {
      Result arg_count = allocator_->Allocate(r0);
      ASSERT(arg_count.is_valid());
      __ mov(arg_count.reg(), Operand(1));
      frame_->InvokeBuiltin(Builtins::MOD, CALL_JS, &arg_count, 2);
      break;
    }

    case Token::COMMA:
      frame_->EmitPop(r0);
      // simply discard left value
      frame_->Drop();
      break;

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }
}


class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(CodeGenerator* generator,
                             Token::Value op,
                             int value,
                             bool reversed)
      : DeferredCode(generator),
        op_(op),
        value_(value),
        reversed_(reversed) {
    set_comment("[ DeferredInlinedSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  int value_;
  bool reversed_;
};


void DeferredInlineSmiOperation::Generate() {
  enter()->Bind();
  VirtualFrame::SpilledScope spilled_scope(generator());

  switch (op_) {
    case Token::ADD: {
      if (reversed_) {
        // revert optimistic add
        __ sub(r0, r0, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        // revert optimistic add
        __ sub(r1, r0, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    case Token::SUB: {
      if (reversed_) {
        // revert optimistic sub
        __ rsb(r0, r0, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ add(r1, r0, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      if (reversed_) {
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ mov(r1, Operand(r0));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      if (!reversed_) {
        __ mov(r1, Operand(r0));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      } else {
        UNREACHABLE();  // should have been handled in SmiOperation
      }
      break;
    }

    default:
      // other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }

  GenericBinaryOpStub igostub(op_);
  Result arg0 = generator()->allocator()->Allocate(r1);
  ASSERT(arg0.is_valid());
  Result arg1 = generator()->allocator()->Allocate(r0);
  ASSERT(arg1.is_valid());
  generator()->frame()->CallStub(&igostub, &arg0, &arg1);
  exit_.Jump();
}


void CodeGenerator::SmiOperation(Token::Value op,
                                 Handle<Object> value,
                                 bool reversed) {
  VirtualFrame::SpilledScope spilled_scope(this);
  // NOTE: This is an attempt to inline (a bit) more of the code for
  // some possible smi operations (like + and -) when (at least) one
  // of the operands is a literal smi. With this optimization, the
  // performance of the system is increased by ~15%, and the generated
  // code size is increased by ~1% (measured on a combination of
  // different benchmarks).

  // sp[0] : operand

  int int_value = Smi::cast(*value)->value();

  JumpTarget exit(this);
  frame_->EmitPop(r0);

  switch (op) {
    case Token::ADD: {
      DeferredCode* deferred =
        new DeferredInlineSmiOperation(this, op, int_value, reversed);

      __ add(r0, r0, Operand(value), SetCC);
      deferred->enter()->Branch(vs);
      __ tst(r0, Operand(kSmiTagMask));
      deferred->enter()->Branch(ne);
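      // (The add above is "optimistic": it is emitted before r0 is known
      // to be a smi. 'vs' catches signed overflow and 'ne' a non-smi
      // operand; in both cases the deferred code reverts the add and
      // falls back to GenericBinaryOpStub.)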
      deferred->BindExit();
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred =
        new DeferredInlineSmiOperation(this, op, int_value, reversed);

      if (!reversed) {
        __ sub(r0, r0, Operand(value), SetCC);
      } else {
        __ rsb(r0, r0, Operand(value), SetCC);
      }
      deferred->enter()->Branch(vs);
      __ tst(r0, Operand(kSmiTagMask));
      deferred->enter()->Branch(ne);
      deferred->BindExit();
      break;
    }

    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      DeferredCode* deferred =
        new DeferredInlineSmiOperation(this, op, int_value, reversed);
      __ tst(r0, Operand(kSmiTagMask));
      deferred->enter()->Branch(ne);
      switch (op) {
        case Token::BIT_OR:  __ orr(r0, r0, Operand(value)); break;
        case Token::BIT_XOR: __ eor(r0, r0, Operand(value)); break;
        case Token::BIT_AND: __ and_(r0, r0, Operand(value)); break;
        default: UNREACHABLE();
      }
      deferred->BindExit();
      break;
    }

    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      if (reversed) {
        __ mov(ip, Operand(value));
        frame_->EmitPush(ip);
        frame_->EmitPush(r0);
        GenericBinaryOperation(op);

      } else {
        int shift_value = int_value & 0x1f;  // least significant 5 bits
        DeferredCode* deferred =
          new DeferredInlineSmiOperation(this, op, shift_value, false);
        __ tst(r0, Operand(kSmiTagMask));
        deferred->enter()->Branch(ne);
        __ mov(r2, Operand(r0, ASR, kSmiTagSize));  // remove tags
        switch (op) {
          case Token::SHL: {
            __ mov(r2, Operand(r2, LSL, shift_value));
            // check that the *unsigned* result fits in a smi
            __ add(r3, r2, Operand(0x40000000), SetCC);
            deferred->enter()->Branch(mi);
            break;
          }
          case Token::SHR: {
            // LSR by immediate 0 means shifting 32 bits.
            if (shift_value != 0) {
              __ mov(r2, Operand(r2, LSR, shift_value));
            }
            // Check that the *unsigned* result fits in a smi. Neither of
            // the two high-order bits can be set:
            // - 0x80000000: the high bit would be lost when smi tagging.
            // - 0x40000000: this number would convert to negative when
            //   smi tagging.
            // These two cases can only happen with shifts by 0 or 1 when
            // handed a valid smi.
            __ and_(r3, r2, Operand(0xc0000000), SetCC);
            deferred->enter()->Branch(ne);
            break;
          }
          case Token::SAR: {
            if (shift_value != 0) {
              // ASR by immediate 0 means shifting 32 bits.
              __ mov(r2, Operand(r2, ASR, shift_value));
            }
            break;
          }
          default: UNREACHABLE();
        }
        __ mov(r0, Operand(r2, LSL, kSmiTagSize));
        deferred->BindExit();
      }
      break;
    }

    default:
      if (!reversed) {
        frame_->EmitPush(r0);
        __ mov(r0, Operand(value));
        frame_->EmitPush(r0);
      } else {
        __ mov(ip, Operand(value));
        frame_->EmitPush(ip);
        frame_->EmitPush(r0);
      }
      GenericBinaryOperation(op);
      break;
  }

  exit.Bind();
}


void CodeGenerator::Comparison(Condition cc, bool strict) {
  VirtualFrame::SpilledScope spilled_scope(this);
  // sp[0] : y
  // sp[1] : x
  // result : cc register

  // Strict only makes sense for equality comparisons.
  ASSERT(!strict || cc == eq);

  JumpTarget exit(this);
  JumpTarget smi(this);
  // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
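  // (Example: 'x > y' is computed as 'y < x'; the condition is reversed
  // and the operand pops below are swapped, which keeps the ECMA-262
  // left-to-right operand conversion order.)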
1005
  if (cc == gt || cc == le) {
1006
    cc = ReverseCondition(cc);
1007
    frame_->EmitPop(r1);
1008
    frame_->EmitPop(r0);
1009
  } else {
1010
    frame_->EmitPop(r0);
1011
    frame_->EmitPop(r1);
1012
  }
1013
  __ orr(r2, r0, Operand(r1));
1014
  __ tst(r2, Operand(kSmiTagMask));
1015
  smi.Branch(eq);
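  // (Two-instruction both-smis check: or-ing the operands preserves a set
  // tag bit from either value, so testing the tag bit of the result is
  // 'eq' only when r0 and r1 are both smis.)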

  // Perform non-smi comparison by runtime call.
  frame_->EmitPush(r1);

  // Figure out which native to call and set up the arguments.
  Builtins::JavaScript native;
  int arg_count = 1;
  if (cc == eq) {
    native = strict ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
    native = Builtins::COMPARE;
    int ncr;  // NaN compare result
    if (cc == lt || cc == le) {
      ncr = GREATER;
    } else {
      ASSERT(cc == gt || cc == ge);  // remaining cases
      ncr = LESS;
    }
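    // (ncr is the value COMPARE returns when either operand is NaN;
    // choosing GREATER for 'lt'/'le' and LESS for 'gt'/'ge' makes the
    // final condition test fail, so comparisons involving NaN come out
    // false as required.)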
    frame_->EmitPush(r0);
    arg_count++;
    __ mov(r0, Operand(Smi::FromInt(ncr)));
  }

  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  frame_->EmitPush(r0);
  Result arg_count_register = allocator_->Allocate(r0);
  ASSERT(arg_count_register.is_valid());
  __ mov(arg_count_register.reg(), Operand(arg_count));
  Result result = frame_->InvokeBuiltin(native,
                                        CALL_JS,
                                        &arg_count_register,
                                        arg_count + 1);
  __ cmp(result.reg(), Operand(0));
  result.Unuse();
  exit.Jump();

  // test smi equality by pointer comparison.
  smi.Bind();
  __ cmp(r1, Operand(r0));

  exit.Bind();
  cc_reg_ = cc;
}


class CallFunctionStub: public CodeStub {
 public:
  explicit CallFunctionStub(int argc) : argc_(argc) {}

  void Generate(MacroAssembler* masm);

 private:
  int argc_;

#if defined(DEBUG)
  void Print() { PrintF("CallFunctionStub (argc %d)\n", argc_); }
#endif  // defined(DEBUG)

  Major MajorKey() { return CallFunction; }
  int MinorKey() { return argc_; }
};


// Call the function on the stack with the given arguments.
void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
                                      int position) {
  VirtualFrame::SpilledScope spilled_scope(this);
  // Push the arguments ("left-to-right") on the stack.
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    LoadAndSpill(args->at(i));
  }

  // Record the position for debugging purposes.
  CodeForSourcePosition(position);

  // Use the shared code stub to call the function.
  CallFunctionStub call_function(arg_count);
  frame_->CallStub(&call_function, arg_count + 1);

  // Restore context and pop function from the stack.
  __ ldr(cp, frame_->Context());
  frame_->Drop();  // discard the TOS
}


void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(has_cc());
  Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_);
  target->Branch(cc);
  cc_reg_ = al;
}


void CodeGenerator::CheckStack() {
  VirtualFrame::SpilledScope spilled_scope(this);
  if (FLAG_check_stack) {
    Comment cmnt(masm_, "[ check stack");
    StackCheckStub stub;
    frame_->CallStub(&stub, 0);
  }
}


void CodeGenerator::VisitAndSpill(Statement* statement) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Visit(statement);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  VisitStatements(statements);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
    VisitAndSpill(statements->at(i));
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitBlock(Block* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ Block");
  CodeForStatementPosition(node);
  node->break_target()->Initialize(this);
  VisitStatementsAndSpill(node->statements());
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  VirtualFrame::SpilledScope spilled_scope(this);
  __ mov(r0, Operand(pairs));
  frame_->EmitPush(r0);
  frame_->EmitPush(cp);
  __ mov(r0, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
  frame_->EmitPush(r0);
  frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
  // The result is discarded.
}


void CodeGenerator::VisitDeclaration(Declaration* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ Declaration");
  CodeForStatementPosition(node);
  Variable* var = node->proxy()->var();
  ASSERT(var != NULL);  // must have been resolved
  Slot* slot = var->slot();

  // If it was not possible to allocate the variable at compile time,
  // we need to "declare" it at runtime to make sure it actually
  // exists in the local context.
  if (slot != NULL && slot->type() == Slot::LOOKUP) {
    // Variables with a "LOOKUP" slot were introduced as non-locals
    // during variable resolution and must have mode DYNAMIC.
    ASSERT(var->is_dynamic());
    // For now, just do a runtime call.
    frame_->EmitPush(cp);
    __ mov(r0, Operand(var->name()));
    frame_->EmitPush(r0);
    // Declaration nodes are always declared in only two modes.
    ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
    PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
    __ mov(r0, Operand(Smi::FromInt(attr)));
    frame_->EmitPush(r0);
    // Push initial value, if any.
    // Note: For variables we must not push an initial value (such as
    // 'undefined') because we may have a (legal) redeclaration and we
    // must not destroy the current value.
    if (node->mode() == Variable::CONST) {
      __ mov(r0, Operand(Factory::the_hole_value()));
      frame_->EmitPush(r0);
    } else if (node->fun() != NULL) {
      LoadAndSpill(node->fun());
    } else {
      __ mov(r0, Operand(0));  // no initial value!
      frame_->EmitPush(r0);
    }
    frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
    // Ignore the return value (declarations are statements).
    ASSERT(frame_->height() == original_height);
    return;
  }

  ASSERT(!var->is_global());

  // If we have a function or a constant, we need to initialize the variable.
  Expression* val = NULL;
  if (node->mode() == Variable::CONST) {
    val = new Literal(Factory::the_hole_value());
  } else {
    val = node->fun();  // NULL if we don't have a function
  }

  if (val != NULL) {
    {
      // Set initial value.
      Reference target(this, node->proxy());
      LoadAndSpill(val);
      target.SetValue(NOT_CONST_INIT);
      // The reference is removed from the stack (preserving TOS) when
      // it goes out of scope.
    }
    // Get rid of the assigned value (declarations are statements).
    frame_->Drop();
  }
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ ExpressionStatement");
  CodeForStatementPosition(node);
  Expression* expression = node->expression();
  expression->MarkAsStatement();
  LoadAndSpill(expression);
  frame_->Drop();
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "// EmptyStatement");
  CodeForStatementPosition(node);
  // nothing to do
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitIfStatement(IfStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ IfStatement");
  // Generate different code depending on which parts of the if statement
  // are present or not.
  bool has_then_stm = node->HasThenStatement();
  bool has_else_stm = node->HasElseStatement();

  CodeForStatementPosition(node);

  JumpTarget exit(this);
  if (has_then_stm && has_else_stm) {
    Comment cmnt(masm_, "[ IfThenElse");
    JumpTarget then(this);
    JumpTarget else_(this);
    // if (cond)
    LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF,
                          &then, &else_, true);
    if (frame_ != NULL) {
      Branch(false, &else_);
    }
    // then
    if (frame_ != NULL || then.is_linked()) {
      then.Bind();
      VisitAndSpill(node->then_statement());
    }
    if (frame_ != NULL) {
      exit.Jump();
    }
    // else
    if (else_.is_linked()) {
      else_.Bind();
      VisitAndSpill(node->else_statement());
    }

  } else if (has_then_stm) {
    Comment cmnt(masm_, "[ IfThen");
    ASSERT(!has_else_stm);
    JumpTarget then(this);
    // if (cond)
    LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF,
                          &then, &exit, true);
    if (frame_ != NULL) {
      Branch(false, &exit);
    }
    // then
    if (frame_ != NULL || then.is_linked()) {
      then.Bind();
      VisitAndSpill(node->then_statement());
    }

  } else if (has_else_stm) {
    Comment cmnt(masm_, "[ IfElse");
    ASSERT(!has_then_stm);
    JumpTarget else_(this);
    // if (!cond)
    LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF,
                          &exit, &else_, true);
    if (frame_ != NULL) {
      Branch(true, &exit);
    }
    // else
    if (frame_ != NULL || else_.is_linked()) {
      else_.Bind();
      VisitAndSpill(node->else_statement());
    }

  } else {
    Comment cmnt(masm_, "[ If");
    ASSERT(!has_then_stm && !has_else_stm);
    // if (cond)
    LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF,
                          &exit, &exit, false);
    if (frame_ != NULL) {
      if (has_cc()) {
        cc_reg_ = al;
      } else {
        frame_->Drop();
      }
    }
  }

  // end
  if (exit.is_linked()) {
    exit.Bind();
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ ContinueStatement");
  CodeForStatementPosition(node);
  node->target()->continue_target()->Jump();
}


void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ BreakStatement");
  CodeForStatementPosition(node);
  node->target()->break_target()->Jump();
}


void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ ReturnStatement");

  if (function_return_is_shadowed_) {
    CodeForStatementPosition(node);
    LoadAndSpill(node->expression());
    frame_->EmitPop(r0);
    function_return_.Jump();
  } else {
    // Load the returned value.
    CodeForStatementPosition(node);
    LoadAndSpill(node->expression());

    // Pop the result from the frame and prepare the frame for
    // returning thus making it easier to merge.
    frame_->EmitPop(r0);
    frame_->PrepareForReturn();

    function_return_.Jump();
  }
}


void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ WithEnterStatement");
  CodeForStatementPosition(node);
  LoadAndSpill(node->expression());
  if (node->is_catch_block()) {
    frame_->CallRuntime(Runtime::kPushCatchContext, 1);
  } else {
    frame_->CallRuntime(Runtime::kPushContext, 1);
  }
  if (kDebug) {
    JumpTarget verified_true(this);
    __ cmp(r0, Operand(cp));
    verified_true.Branch(eq);
    __ stop("PushContext: r0 is expected to be the same as cp");
    verified_true.Bind();
  }
  // Update context local.
  __ str(cp, frame_->Context());
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ WithExitStatement");
  CodeForStatementPosition(node);
  // Pop context.
  __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX));
  // Update context local.
  __ str(cp, frame_->Context());
  ASSERT(frame_->height() == original_height);
}


int CodeGenerator::FastCaseSwitchMaxOverheadFactor() {
  return kFastSwitchMaxOverheadFactor;
}

int CodeGenerator::FastCaseSwitchMinCaseCount() {
  return kFastSwitchMinCaseCount;
}


void CodeGenerator::GenerateFastCaseSwitchJumpTable(
    SwitchStatement* node,
    int min_index,
    int range,
    Label* default_label,
    Vector<Label*> case_targets,
    Vector<Label> case_labels) {
  VirtualFrame::SpilledScope spilled_scope(this);
  JumpTarget setup_default(this);
  JumpTarget is_smi(this);

  // A non-null default label pointer indicates a default case among
  // the case labels.  Otherwise we use the break target as a
  // "default" for failure to hit the jump table.
  JumpTarget* default_target =
      (default_label == NULL) ? node->break_target() : &setup_default;

  ASSERT(kSmiTag == 0 && kSmiTagSize <= 2);
  frame_->EmitPop(r0);

  // Test for a Smi value in a HeapNumber.
  __ tst(r0, Operand(kSmiTagMask));
  is_smi.Branch(eq);
  __ ldr(r1, MemOperand(r0, HeapObject::kMapOffset - kHeapObjectTag));
  __ ldrb(r1, MemOperand(r1, Map::kInstanceTypeOffset - kHeapObjectTag));
  __ cmp(r1, Operand(HEAP_NUMBER_TYPE));
  default_target->Branch(ne);
  frame_->EmitPush(r0);
  frame_->CallRuntime(Runtime::kNumberToSmi, 1);
  is_smi.Bind();

  if (min_index != 0) {
    // Small positive numbers can be immediate operands.
    if (min_index < 0) {
      // If min_index is Smi::kMinValue, -min_index is not a Smi.
      if (Smi::IsValid(-min_index)) {
        __ add(r0, r0, Operand(Smi::FromInt(-min_index)));
      } else {
        __ add(r0, r0, Operand(Smi::FromInt(-min_index - 1)));
        __ add(r0, r0, Operand(Smi::FromInt(1)));
      }
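      // (Example, assuming 31-bit smi payloads on ARM: if min_index is
      // Smi::kMinValue == -2^30, then -min_index == 2^30 is one past
      // Smi::kMaxValue, hence the bias is added in two smi-sized steps.)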
1508
    } else {
1509
      __ sub(r0, r0, Operand(Smi::FromInt(min_index)));
1510
    }
1511
  }
1512
  __ tst(r0, Operand(0x80000000 | kSmiTagMask));
1513
  default_target->Branch(ne);
1514
  __ cmp(r0, Operand(Smi::FromInt(range)));
1515
  default_target->Branch(ge);
1516
  VirtualFrame* start_frame = new VirtualFrame(frame_);
1517
  __ SmiJumpTable(r0, case_targets);
1518
1519
  GenerateFastCaseSwitchCases(node, case_labels, start_frame);
1520
1521
  // If there was a default case among the case labels, we need to
1522
  // emit code to jump to it from the default target used for failure
1523
  // to hit the jump table.
1524
  if (default_label != NULL) {
1525
    if (has_valid_frame()) {
1526
      node->break_target()->Jump();
1527
    }
1528
    setup_default.Bind();
1529
    frame_->MergeTo(start_frame);
1530
    __ b(default_label);
1531
    DeleteFrame();
1532
  }
1533
  if (node->break_target()->is_linked()) {
1534
    node->break_target()->Bind();
1535
  }
1536
1537
  delete start_frame;
1538
}
1539
1540
1541
void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
1542
#ifdef DEBUG
1543
  int original_height = frame_->height();
1544
#endif
1545
  VirtualFrame::SpilledScope spilled_scope(this);
1546
  Comment cmnt(masm_, "[ SwitchStatement");
1547
  CodeForStatementPosition(node);
1548
  node->break_target()->Initialize(this);
1549
1550
  LoadAndSpill(node->tag());
1551
  if (TryGenerateFastCaseSwitchStatement(node)) {
1552
    ASSERT(!has_valid_frame() || frame_->height() == original_height);
1553
    return;
1554
  }
1555
1556
  JumpTarget next_test(this);
1557
  JumpTarget fall_through(this);
1558
  JumpTarget default_entry(this);
1559
  JumpTarget default_exit(this, JumpTarget::BIDIRECTIONAL);
1560
  ZoneList<CaseClause*>* cases = node->cases();
1561
  int length = cases->length();
1562
  CaseClause* default_clause = NULL;
1563
1564
  for (int i = 0; i < length; i++) {
    CaseClause* clause = cases->at(i);
    if (clause->is_default()) {
      // Remember the default clause and compile it at the end.
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case clause");
    // Compile the test.
    next_test.Bind();
    next_test.Unuse();
    // Duplicate TOS.
    __ ldr(r0, frame_->Top());
    frame_->EmitPush(r0);
    LoadAndSpill(clause->label());
    Comparison(eq, true);
    Branch(false, &next_test);

    // Before entering the body from the test, remove the switch value from
    // the stack.
    frame_->Drop();

    // Label the body so that fall through is enabled.
    if (i > 0 && cases->at(i - 1)->is_default()) {
      default_exit.Bind();
    } else {
      fall_through.Bind();
      fall_through.Unuse();
    }
    VisitStatementsAndSpill(clause->statements());

    // If control flow can fall through from the body, jump to the next body
    // or the end of the statement.
    if (frame_ != NULL) {
      if (i < length - 1 && cases->at(i + 1)->is_default()) {
        default_entry.Jump();
      } else {
        fall_through.Jump();
      }
    }
  }

  // The final "test" removes the switch value.
  next_test.Bind();
  frame_->Drop();

  // If there is a default clause, compile it.
  if (default_clause != NULL) {
    Comment cmnt(masm_, "[ Default clause");
    default_entry.Bind();
    VisitStatementsAndSpill(default_clause->statements());
    // If control flow can fall out of the default and there is a case after
    // it, jump to that case's body.
    if (frame_ != NULL && default_exit.is_bound()) {
      default_exit.Jump();
    }
  }

  if (fall_through.is_linked()) {
    fall_through.Bind();
  }

  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitLoopStatement(LoopStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ LoopStatement");
  CodeForStatementPosition(node);
  node->break_target()->Initialize(this);

  // Simple condition analysis.  ALWAYS_TRUE and ALWAYS_FALSE represent a
  // known result for the test expression, with no side effects.
  enum { ALWAYS_TRUE, ALWAYS_FALSE, DONT_KNOW } info = DONT_KNOW;
  if (node->cond() == NULL) {
    ASSERT(node->type() == LoopStatement::FOR_LOOP);
    info = ALWAYS_TRUE;
  } else {
    Literal* lit = node->cond()->AsLiteral();
    if (lit != NULL) {
      if (lit->IsTrue()) {
        info = ALWAYS_TRUE;
      } else if (lit->IsFalse()) {
        info = ALWAYS_FALSE;
      }
    }
  }

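  // For example, 'while (true) ...' and 'for (;;) ...' are both
  // classified as ALWAYS_TRUE, so no test code is emitted for them and
  // the backward edge jumps straight to the top of the body.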
  switch (node->type()) {
    case LoopStatement::DO_LOOP: {
      JumpTarget body(this, JumpTarget::BIDIRECTIONAL);

      // Label the top of the loop for the backward CFG edge.  If the test
      // is always true we can use the continue target, and if the test is
      // always false there is no need.
      if (info == ALWAYS_TRUE) {
        node->continue_target()->Initialize(this, JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else if (info == ALWAYS_FALSE) {
        node->continue_target()->Initialize(this);
      } else {
        ASSERT(info == DONT_KNOW);
        node->continue_target()->Initialize(this);
        body.Bind();
      }

      CheckStack();  // TODO(1222600): ignore if body contains calls.
      VisitAndSpill(node->body());

      // Compile the test.
      if (info == ALWAYS_TRUE) {
        if (has_valid_frame()) {
          // If control can fall off the end of the body, jump back to the
          // top.
          node->continue_target()->Jump();
        }
      } else if (info == ALWAYS_FALSE) {
        // If we have a continue in the body, we only have to bind its jump
        // target.
        if (node->continue_target()->is_linked()) {
          node->continue_target()->Bind();
        }
      } else {
        ASSERT(info == DONT_KNOW);
        // We have to compile the test expression if it can be reached by
        // control flow falling out of the body or via continue.
        if (node->continue_target()->is_linked()) {
          node->continue_target()->Bind();
        }
        if (has_valid_frame()) {
          LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF,
                                &body, node->break_target(), true);
          if (has_valid_frame()) {
            // An invalid frame here indicates that control did not
            // fall out of the test expression.
            Branch(true, &body);
          }
        }
      }
      break;
    }

    case LoopStatement::WHILE_LOOP: {
      // If the test is never true and has no side effects there is no need
      // to compile the test or body.
      if (info == ALWAYS_FALSE) break;

      // Label the top of the loop with the continue target for the backward
      // CFG edge.
      node->continue_target()->Initialize(this, JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();

      if (info == DONT_KNOW) {
        JumpTarget body(this);
        LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF,
                              &body, node->break_target(), true);
        if (has_valid_frame()) {
          // A NULL frame indicates that control did not fall out of the
          // test expression.
          Branch(false, node->break_target());
        }
        if (has_valid_frame() || body.is_linked()) {
          body.Bind();
        }
      }

      if (has_valid_frame()) {
        CheckStack();  // TODO(1222600): ignore if body contains calls.
        VisitAndSpill(node->body());

        // If control flow can fall out of the body, jump back to the top.
        if (has_valid_frame()) {
          node->continue_target()->Jump();
        }
      }
      break;
    }

    case LoopStatement::FOR_LOOP: {
      JumpTarget loop(this, JumpTarget::BIDIRECTIONAL);

      if (node->init() != NULL) {
        VisitAndSpill(node->init());
      }

      // There is no need to compile the test or body.
      if (info == ALWAYS_FALSE) break;

      // If there is no update statement, label the top of the loop with the
      // continue target, otherwise with the loop target.
      if (node->next() == NULL) {
        node->continue_target()->Initialize(this, JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        node->continue_target()->Initialize(this);
        loop.Bind();
      }

      // If the test is always true, there is no need to compile it.
      if (info == DONT_KNOW) {
        JumpTarget body(this);
        LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF,
                              &body, node->break_target(), true);
        if (has_valid_frame()) {
          Branch(false, node->break_target());
        }
        if (has_valid_frame() || body.is_linked()) {
          body.Bind();
        }
      }

      if (has_valid_frame()) {
        CheckStack();  // TODO(1222600): ignore if body contains calls.
        VisitAndSpill(node->body());

        if (node->next() == NULL) {
          // If there is no update statement and control flow can fall out
          // of the loop, jump directly to the continue label.
          if (has_valid_frame()) {
            node->continue_target()->Jump();
          }
        } else {
          // If there is an update statement and control flow can reach it
          // via falling out of the body of the loop or continuing, we
          // compile the update statement.
          if (node->continue_target()->is_linked()) {
            node->continue_target()->Bind();
          }
          if (has_valid_frame()) {
            // Record the source position of the statement, since this
            // code, which comes after the code for the body, actually
            // belongs to the loop statement and not the body.
            CodeForStatementPosition(node);
            VisitAndSpill(node->next());
            loop.Jump();
          }
        }
      }
      break;
    }
  }

  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->continue_target()->Unuse();
  node->break_target()->Unuse();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitForInStatement(ForInStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ ForInStatement");
  CodeForStatementPosition(node);

  JumpTarget primitive(this);
  JumpTarget jsobject(this);
  JumpTarget fixed_array(this);
  JumpTarget entry(this, JumpTarget::BIDIRECTIONAL);
  JumpTarget end_del_check(this);
  JumpTarget exit(this);

  // Get the object to enumerate over (converted to JSObject).
  LoadAndSpill(node->enumerable());

  // Both SpiderMonkey and kjs ignore null and undefined in contrast
  // to the specification.  12.6.4 mandates a call to ToObject.
  frame_->EmitPop(r0);
  __ cmp(r0, Operand(Factory::undefined_value()));
  exit.Branch(eq);
  __ cmp(r0, Operand(Factory::null_value()));
  exit.Branch(eq);

  // Stack layout in body:
  // [iteration counter (Smi)]
  // [length of array]
  // [FixedArray]
  // [Map or 0]
  // [Object]

  // Check if enumerable is already a JSObject.
  __ tst(r0, Operand(kSmiTagMask));
  primitive.Branch(eq);
  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
  __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
  jsobject.Branch(hs);

  primitive.Bind();
  frame_->EmitPush(r0);
  Result arg_count = allocator_->Allocate(r0);
  ASSERT(arg_count.is_valid());
  __ mov(arg_count.reg(), Operand(0));
  frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS, &arg_count, 1);

  jsobject.Bind();
  // Get the set of properties (as a FixedArray or Map).
  frame_->EmitPush(r0);  // duplicate the object being enumerated
  frame_->EmitPush(r0);
  frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a Map, we can do a fast modification check.
  // Otherwise, we got a FixedArray, and we have to do a slow check.
  __ mov(r2, Operand(r0));
  __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ cmp(r1, Operand(Factory::meta_map()));
  fixed_array.Branch(ne);

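  // kGetPropertyNamesFast returns the receiver's map when its enum cache
  // is usable.  Since every map's map is the meta map, the comparison
  // above cleanly separates the two result kinds.  Keeping the map on
  // the stack lets each iteration detect cheaply whether the enumerable
  // was modified during the loop.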
  // Get enum cache.
  __ mov(r1, Operand(r0));
  __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
  __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
  __ ldr(r2,
         FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));

  frame_->EmitPush(r0);  // map
  frame_->EmitPush(r2);  // enum cache bridge cache
  __ ldr(r0, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  frame_->EmitPush(r0);
  __ mov(r0, Operand(Smi::FromInt(0)));
  frame_->EmitPush(r0);
  entry.Jump();

  fixed_array.Bind();
  __ mov(r1, Operand(Smi::FromInt(0)));
  frame_->EmitPush(r1);  // insert 0 in place of Map
  frame_->EmitPush(r0);

  // Push the length of the array and the initial index onto the stack.
  __ ldr(r0, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  frame_->EmitPush(r0);
  __ mov(r0, Operand(Smi::FromInt(0)));  // init index
  frame_->EmitPush(r0);

  // Condition.
  entry.Bind();
  // sp[0] : index
  // sp[1] : array/enum cache length
  // sp[2] : array or enum cache
  // sp[3] : 0 or map
  // sp[4] : enumerable
  // Grab the current frame's height for the break and continue
  // targets only after all the state is pushed on the frame.
  node->break_target()->Initialize(this);
  node->continue_target()->Initialize(this);

  __ ldr(r0, frame_->ElementAt(0));  // load the current count
  __ ldr(r1, frame_->ElementAt(1));  // load the length
  __ cmp(r0, Operand(r1));  // compare to the array length
  node->break_target()->Branch(hs);

  __ ldr(r0, frame_->ElementAt(0));

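  // The index in r0 is a smi, i.e. the untagged value shifted left by
  // kSmiTagSize.  Scaling it by kPointerSizeLog2 - kSmiTagSize below
  // therefore yields a byte offset directly: with kSmiTagSize == 1 and
  // 4-byte pointers, index 3 is the smi 6, and 6 << 1 == 12 == 3 * 4.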
  // Get the i'th entry of the array.
  __ ldr(r2, frame_->ElementAt(2));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Get Map or 0.
  __ ldr(r2, frame_->ElementAt(3));
  // Check if this (still) matches the map of the enumerable.
  // If not, we have to filter the key.
  __ ldr(r1, frame_->ElementAt(4));
  __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r1, Operand(r2));
  end_del_check.Branch(eq);

  // Convert the entry to a string (or null if it isn't a property anymore).
  __ ldr(r0, frame_->ElementAt(4));  // push enumerable
  frame_->EmitPush(r0);
  frame_->EmitPush(r3);  // push entry
  Result arg_count_register = allocator_->Allocate(r0);
  ASSERT(arg_count_register.is_valid());
  __ mov(arg_count_register.reg(), Operand(1));
  Result result = frame_->InvokeBuiltin(Builtins::FILTER_KEY,
                                        CALL_JS,
                                        &arg_count_register,
                                        2);
  __ mov(r3, Operand(result.reg()));
  result.Unuse();

  // If the property has been removed while iterating, we just skip it.
  __ cmp(r3, Operand(Factory::null_value()));
  node->continue_target()->Branch(eq);

  end_del_check.Bind();
  // Store the entry in the 'each' expression and take another spin in the
  // loop.  r3: i'th entry of the enum cache (or string thereof)
  frame_->EmitPush(r3);  // push entry
  { Reference each(this, node->each());
    if (!each.is_illegal()) {
      if (each.size() > 0) {
        __ ldr(r0, frame_->ElementAt(each.size()));
        frame_->EmitPush(r0);
      }
      // If the reference was to a slot we rely on the convenient property
      // that it doesn't matter whether a value (eg, r3 pushed above) is
      // right on top of or right underneath a zero-sized reference.
      each.SetValue(NOT_CONST_INIT);
      if (each.size() > 0) {
        // It's safe to pop the value lying on top of the reference before
        // unloading the reference itself (which preserves the top of stack,
        // ie, now the topmost value of the non-zero sized reference), since
        // we will discard the top of stack after unloading the reference
        // anyway.
        frame_->EmitPop(r0);
      }
    }
  }
  // Discard the i'th entry pushed above or else the remainder of the
  // reference, whichever is currently on top of the stack.
  frame_->Drop();

  // Body.
  CheckStack();  // TODO(1222600): ignore if body contains calls.
  VisitAndSpill(node->body());

  // Next.  Reestablish a spilled frame in case we are coming here via
  // a continue in the body.
  node->continue_target()->Bind();
  frame_->SpillAll();
  frame_->EmitPop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  frame_->EmitPush(r0);
  entry.Jump();

  // Cleanup.  No need to spill because VirtualFrame::Drop is safe for
  // any frame.
  node->break_target()->Bind();
  frame_->Drop(5);

  // Exit.
  exit.Bind();
  node->continue_target()->Unuse();
  node->break_target()->Unuse();
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitTryCatch(TryCatch* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ TryCatch");
  CodeForStatementPosition(node);

  JumpTarget try_block(this);
  JumpTarget exit(this);

  try_block.Call();
  // --- Catch block ---
  frame_->EmitPush(r0);

  // Store the caught exception in the catch variable.
  { Reference ref(this, node->catch_var());
    ASSERT(ref.is_slot());
    // Here we make use of the convenient property that it doesn't matter
    // whether a value is immediately on top of or underneath a zero-sized
    // reference.
    ref.SetValue(NOT_CONST_INIT);
  }

  // Remove the exception from the stack.
  frame_->Drop();

  VisitStatementsAndSpill(node->catch_block()->statements());
  if (frame_ != NULL) {
    exit.Jump();
  }


  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_CATCH_HANDLER);
  int handler_height = frame_->height();

  // Shadow the labels for all escapes from the try block, including
  // returns.  During shadowing, the original label is hidden as the
  // LabelShadow and operations on the original actually affect the
  // shadowing label.
  //
  // We should probably try to unify the escaping labels and the return
  // label.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

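  // While the try block is compiled, any jump to one of the shadowed
  // targets (e.g. a 'break' out of an enclosing loop, or a 'return')
  // lands in the corresponding shadow target instead, so that the
  // handler pushed above can be unlinked before control actually leaves
  // the try block.  The unlink code is emitted further down.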
  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original labels are unshadowed and the
  // LabelShadows represent the formerly shadowing labels.
  bool has_unlinks = false;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    has_unlinks = has_unlinks || shadows[i]->is_linked();
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // The next handler address is at kNextIndex in the stack.
  const int kNextIndex = StackHandlerConstants::kNextOffset / kPointerSize;
  // If we can fall off the end of the try block, unlink from try chain.
  if (has_valid_frame()) {
    __ ldr(r1, frame_->ElementAt(kNextIndex));
    __ mov(r3, Operand(handler_address));
    __ str(r1, MemOperand(r3));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize);
    if (has_unlinks) {
      exit.Jump();
    }
  }

  // Generate unlink code for the (formerly) shadowing labels that have been
  // jumped to.  Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // Unlink from try chain.
      shadows[i]->Bind();
      // Because we can be jumping here (to spilled code) from unspilled
      // code, we need to reestablish a spilled frame at this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that we
      // break from (eg, for...in) may have left stuff on the stack.
      __ mov(r3, Operand(handler_address));
      __ ldr(sp, MemOperand(r3));
      // The stack pointer was restored to just below the code slot
      // (the topmost slot) in the handler.
      frame_->Forget(frame_->height() - handler_height + 1);

      // kNextIndex is off by one because the code slot has already
      // been dropped.
      __ ldr(r1, frame_->ElementAt(kNextIndex - 1));
      __ str(r1, MemOperand(r3));
      // The code slot has already been dropped from the handler.
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
        frame_->PrepareForReturn();
      }
      shadows[i]->other_target()->Jump();
    }
    delete shadows[i];
  }

  exit.Bind();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitTryFinally(TryFinally* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ TryFinally");
  CodeForStatementPosition(node);

  // State: Used to keep track of reason for entering the finally
  // block. Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };

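  // The state is carried to the finally block in r2 as a smi: FALLING
  // for normal completion, THROWING for an exception, and JUMPING + i
  // for a jump to the i'th shadowed escape target (the shadowed function
  // return is index 0).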
  JumpTarget try_block(this);
  JumpTarget finally_block(this);

  try_block.Call();

  frame_->EmitPush(r0);  // save exception object on the stack
  // In case of thrown exceptions, this is where we continue.
  __ mov(r2, Operand(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the labels for all escapes from the try block, including
  // returns.  Shadowing hides the original label as the LabelShadow and
  // operations on the original actually affect the shadowing label.
  //
  // We should probably try to unify the escaping labels and the return
  // label.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original labels are unshadowed and the
  // LabelShadows represent the formerly shadowing labels.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // The next handler address is at kNextIndex in the stack.
  const int kNextIndex = StackHandlerConstants::kNextOffset / kPointerSize;
  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    __ ldr(r1, frame_->ElementAt(kNextIndex));
    __ mov(r3, Operand(handler_address));
    __ str(r1, MemOperand(r3));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in r2, then jump around the unlink blocks if any.
    __ mov(r0, Operand(Factory::undefined_value()));
    frame_->EmitPush(r0);
    __ mov(r2, Operand(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // in (a non-refcounted reference to) r0.  We must preserve it
      // until it is pushed.
      //
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      shadows[i]->Bind();
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that
      // we break from (eg, for...in) may have left stuff on the
      // stack.
      __ mov(r3, Operand(handler_address));
      __ ldr(sp, MemOperand(r3));
      // The stack pointer was restored to the address slot in the handler.
      ASSERT(StackHandlerConstants::kNextOffset == 1 * kPointerSize);
      frame_->Forget(frame_->height() - handler_height + 1);

      // Unlink this handler and drop it from the frame.  The next
      // handler address is now on top of the frame.
      frame_->EmitPop(r1);
      __ str(r1, MemOperand(r3));
      // The top (code) and the second (handler) slot have both been
      // dropped already.
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 2);

      if (i == kReturnShadowIndex) {
        // If this label shadowed the function return, materialize the
        // return value on the stack.
        frame_->EmitPush(r0);
      } else {
        // Fake TOS for targets that shadowed breaks and continues.
        __ mov(r0, Operand(Factory::undefined_value()));
        frame_->EmitPush(r0);
      }
      __ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
      if (--nof_unlinks > 0) {
        // If this is not the last unlink block, jump around the next.
        finally_block.Jump();
      }
    }
  }

  // --- Finally block ---
  finally_block.Bind();

  // Push the state on the stack.
  frame_->EmitPush(r2);

  // We keep two elements on the stack - the (possibly faked) result
  // and the state - while evaluating the finally block.
  //
  // Generate code for the statements in the finally block.
  VisitStatementsAndSpill(node->finally_block()->statements());

  if (has_valid_frame()) {
    // Restore state and return value or faked TOS.
    frame_->EmitPop(r2);
    frame_->EmitPop(r0);
  }

  // Generate code to jump to the right destination for all used
  // formerly shadowing targets.  Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (has_valid_frame() && shadows[i]->is_bound()) {
      JumpTarget* original = shadows[i]->other_target();
      __ cmp(r2, Operand(Smi::FromInt(JUMPING + i)));
      if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
        JumpTarget skip(this);
        skip.Branch(ne);
        frame_->PrepareForReturn();
        original->Jump();
        skip.Bind();
      } else {
        original->Branch(eq);
      }
    }
    delete shadows[i];
  }

  if (has_valid_frame()) {
    // Check if we need to rethrow the exception.
    JumpTarget exit(this);
    __ cmp(r2, Operand(Smi::FromInt(THROWING)));
    exit.Branch(ne);

    // Rethrow exception.
    frame_->EmitPush(r0);
    frame_->CallRuntime(Runtime::kReThrow, 1);

    // Done.
    exit.Bind();
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ DebuggerStatement");
  CodeForStatementPosition(node);
  frame_->CallRuntime(Runtime::kDebugBreak, 0);
  // Ignore the return value.
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(boilerplate->IsBoilerplate());

  // Push the boilerplate on the stack.
  __ mov(r0, Operand(boilerplate));
  frame_->EmitPush(r0);

  // Create a new closure.
  frame_->EmitPush(cp);
  frame_->CallRuntime(Runtime::kNewClosure, 2);
  frame_->EmitPush(r0);
}


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<JSFunction> boilerplate = BuildBoilerplate(node);
  // Check for stack-overflow exception.
  if (HasStackOverflow()) {
    ASSERT(frame_->height() == original_height);
    return;
  }
  InstantiateBoilerplate(boilerplate);
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitFunctionBoilerplateLiteral(
    FunctionBoilerplateLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ FunctionBoilerplateLiteral");
  InstantiateBoilerplate(node->boilerplate());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitConditional(Conditional* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ Conditional");
  JumpTarget then(this);
  JumpTarget else_(this);
  JumpTarget exit(this);
  LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF,
                        &then, &else_, true);
  Branch(false, &else_);
  then.Bind();
  LoadAndSpill(node->then_expression(), typeof_state());
  exit.Jump();
  else_.Bind();
  LoadAndSpill(node->else_expression(), typeof_state());
  exit.Bind();
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  VirtualFrame::SpilledScope spilled_scope(this);
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    JumpTarget slow(this);
    JumpTarget done(this);

    // Generate fast-case code for variables that might be shadowed by
    // eval-introduced variables.  Eval is used a lot without
    // introducing variables.  In those cases, we do not want to
    // perform a runtime call for all variables in the scope
    // containing the eval.
    if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
      LoadFromGlobalSlotCheckExtensions(slot, typeof_state, r1, r2, &slow);
      // If there was no control flow to slow, we can exit early.
      if (!slow.is_linked()) {
        frame_->EmitPush(r0);
        return;
      }

      done.Jump();

    } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
      Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
      // Only generate the fast case for locals that rewrite to slots.
      // This rules out argument loads.
      if (potential_slot != NULL) {
        __ ldr(r0,
               ContextSlotOperandCheckExtensions(potential_slot,
                                                 r1,
                                                 r2,
                                                 &slow));
        if (potential_slot->var()->mode() == Variable::CONST) {
          __ cmp(r0, Operand(Factory::the_hole_value()));
          __ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq);
        }
        // There is always control flow to slow from
        // ContextSlotOperandCheckExtensions so we have to jump around
        // it.
        done.Jump();
      }
    }

    slow.Bind();
    frame_->EmitPush(cp);
    __ mov(r0, Operand(slot->var()->name()));
    frame_->EmitPush(r0);

    if (typeof_state == INSIDE_TYPEOF) {
      frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    done.Bind();
    frame_->EmitPush(r0);

  } else {
    // Note: We would like to keep the assert below, but it fires because of
    // some nasty code in LoadTypeofExpression() which should be removed...
    // ASSERT(!slot->var()->is_dynamic());

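    // The hole check below avoids a branch by using ARM conditional
    // execution: cmp sets the condition codes and the mov executes only
    // if eq holds (LeaveCC keeps the flags intact), so an uninitialized
    // const is turned into 'undefined' in straight-line code.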
    // Special handling for locals allocated in registers.
    __ ldr(r0, SlotOperand(slot, r2));
    frame_->EmitPush(r0);
    if (slot->var()->mode() == Variable::CONST) {
      // Const slots may contain 'the hole' value (the constant hasn't been
      // initialized yet) which needs to be converted into the 'undefined'
      // value.
      Comment cmnt(masm_, "[ Unhole const");
      frame_->EmitPop(r0);
      __ cmp(r0, Operand(Factory::the_hole_value()));
      __ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq);
      frame_->EmitPush(r0);
    }
  }
}


void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                                      TypeofState typeof_state,
                                                      Register tmp,
                                                      Register tmp2,
                                                      JumpTarget* slow) {
  // Check that no extension objects have been created by calls to
  // eval from the current scope to the global scope.
  Register context = cp;
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      // Load next context in chain.
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

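  // If the walk above ended in an eval scope, the remaining length of
  // the context chain is unknown at compile time, so the extension
  // checks continue in a generated loop that follows the chain until it
  // reaches the global context.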
  if (s->is_eval_scope()) {
    Label next, fast;
    if (!context.is(tmp)) __ mov(tmp, Operand(context));
    __ bind(&next);
    // Terminate at global context.
    __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
    __ cmp(tmp2, Operand(Factory::global_context_map()));
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
    __ tst(tmp2, tmp2);
    slow->Branch(ne);
    // Load next context in chain.
    __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX));
    __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
    __ b(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  // Load the global object.
  LoadGlobal();
  // Setup the name register.
  Result name = allocator_->Allocate(r2);
  ASSERT(name.is_valid());  // We are in spilled code.
  __ mov(name.reg(), Operand(slot->var()->name()));
  // Call IC stub.
  if (typeof_state == INSIDE_TYPEOF) {
    frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, &name, 0);
  } else {
    frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET_CONTEXT, &name, 0);
  }

  // Drop the global object. The result is in r0.
  frame_->Drop();
}


void CodeGenerator::VisitSlot(Slot* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ Slot");
  LoadFromSlot(node, typeof_state());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ VariableProxy");

  Variable* var = node->var();
  Expression* expr = var->rewrite();
  if (expr != NULL) {
    Visit(expr);
  } else {
    ASSERT(var->is_global());
    Reference ref(this, node);
    ref.GetValueAndSpill(typeof_state());
  }
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitLiteral(Literal* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ Literal");
  __ mov(r0, Operand(node->handle()));
  frame_->EmitPush(r0);
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ RegExp Literal");

  // Retrieve the literal array and check the allocated entry.

  // Load the function of this activation.
  __ ldr(r1, frame_->Function());

  // Load the literals array of the function.
  __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ ldr(r2, FieldMemOperand(r1, literal_offset));

  JumpTarget done(this);
  __ cmp(r2, Operand(Factory::undefined_value()));
  done.Branch(ne);

  // If the entry is undefined we call the runtime system to compute
  // the literal.
  frame_->EmitPush(r1);  // literal array  (0)
  __ mov(r0, Operand(Smi::FromInt(node->literal_index())));
  frame_->EmitPush(r0);  // literal index  (1)
  __ mov(r0, Operand(node->pattern()));  // RegExp pattern (2)
  frame_->EmitPush(r0);
  __ mov(r0, Operand(node->flags()));  // RegExp flags   (3)
  frame_->EmitPush(r0);
  frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(r2, Operand(r0));

  done.Bind();
  // Push the literal.
  frame_->EmitPush(r2);
  ASSERT(frame_->height() == original_height + 1);
}


// This deferred code stub will be used for creating the boilerplate
// by calling Runtime_CreateObjectLiteralBoilerplate.
// Each created boilerplate is stored in the JSFunction and they are
// therefore context dependent.
class DeferredObjectLiteral: public DeferredCode {
 public:
  DeferredObjectLiteral(CodeGenerator* generator, ObjectLiteral* node)
      : DeferredCode(generator), node_(node) {
    set_comment("[ DeferredObjectLiteral");
  }

  virtual void Generate();

 private:
  ObjectLiteral* node_;
};


void DeferredObjectLiteral::Generate() {
  // Argument is passed in r1.
  enter()->Bind();
  VirtualFrame::SpilledScope spilled_scope(generator());

  // If the entry is undefined we call the runtime system to compute
  // the literal.

  VirtualFrame* frame = generator()->frame();
  // Literal array (0).
  frame->EmitPush(r1);
  // Literal index (1).
  __ mov(r0, Operand(Smi::FromInt(node_->literal_index())));
  frame->EmitPush(r0);
  // Constant properties (2).
  __ mov(r0, Operand(node_->constant_properties()));
  frame->EmitPush(r0);
  Result boilerplate =
      frame->CallRuntime(Runtime::kCreateObjectLiteralBoilerplate, 3);
  __ mov(r2, Operand(boilerplate.reg()));
  // Result is returned in r2.
  exit_.Jump();
}


void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ ObjectLiteral");

  DeferredObjectLiteral* deferred = new DeferredObjectLiteral(this, node);

  // Retrieve the literal array and check the allocated entry.

  // Load the function of this activation.
  __ ldr(r1, frame_->Function());

  // Load the literals array of the function.
  __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ ldr(r2, FieldMemOperand(r1, literal_offset));

  // Check whether we need to materialize the object literal boilerplate.
  // If so, jump to the deferred code.
  __ cmp(r2, Operand(Factory::undefined_value()));
  deferred->enter()->Branch(eq);
  deferred->BindExit();

  // Push the object literal boilerplate.
  frame_->EmitPush(r2);

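  // A literal with depth() == 1 contains no nested object or array
  // literals, so a shallow copy of the boilerplate is enough; deeper
  // literals use the recursive clone so that nested boilerplates are
  // copied as well.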
  // Clone the boilerplate object.
  Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate;
  if (node->depth() == 1) {
    clone_function_id = Runtime::kCloneShallowLiteralBoilerplate;
  }
  frame_->CallRuntime(clone_function_id, 1);
  frame_->EmitPush(r0);  // save the result
  // r0: cloned object literal

  for (int i = 0; i < node->properties()->length(); i++) {
    ObjectLiteral::Property* property = node->properties()->at(i);
    Literal* key = property->key();
    Expression* value = property->value();
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through
      case ObjectLiteral::Property::COMPUTED:  // fall through
      case ObjectLiteral::Property::PROTOTYPE: {
        frame_->EmitPush(r0);  // dup the result
        LoadAndSpill(key);
        LoadAndSpill(value);
        frame_->CallRuntime(Runtime::kSetProperty, 3);
        // restore r0
        __ ldr(r0, frame_->Top());
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        frame_->EmitPush(r0);
        LoadAndSpill(key);
        __ mov(r0, Operand(Smi::FromInt(1)));
        frame_->EmitPush(r0);
        LoadAndSpill(value);
        frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        __ ldr(r0, frame_->Top());
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        frame_->EmitPush(r0);
        LoadAndSpill(key);
        __ mov(r0, Operand(Smi::FromInt(0)));
        frame_->EmitPush(r0);
        LoadAndSpill(value);
        frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        __ ldr(r0, frame_->Top());
        break;
      }
    }
  }
  ASSERT(frame_->height() == original_height + 1);
}


// This deferred code stub will be used for creating the boilerplate
// by calling Runtime_CreateArrayLiteralBoilerplate.
// Each created boilerplate is stored in the JSFunction and they are
// therefore context dependent.
class DeferredArrayLiteral: public DeferredCode {
 public:
  DeferredArrayLiteral(CodeGenerator* generator, ArrayLiteral* node)
      : DeferredCode(generator), node_(node) {
    set_comment("[ DeferredArrayLiteral");
  }

  virtual void Generate();

 private:
  ArrayLiteral* node_;
};


void DeferredArrayLiteral::Generate() {
  // Argument is passed in r1.
  enter()->Bind();
  VirtualFrame::SpilledScope spilled_scope(generator());

  // If the entry is undefined we call the runtime system to compute
  // the literal.

  VirtualFrame* frame = generator()->frame();
  // Literal array (0).
  frame->EmitPush(r1);
  // Literal index (1).
  __ mov(r0, Operand(Smi::FromInt(node_->literal_index())));
  frame->EmitPush(r0);
  // Constant elements (2).
  __ mov(r0, Operand(node_->literals()));
  frame->EmitPush(r0);
  Result boilerplate =
      frame->CallRuntime(Runtime::kCreateArrayLiteralBoilerplate, 3);
  __ mov(r2, Operand(boilerplate.reg()));
  // Result is returned in r2.
  exit_.Jump();
}


void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ ArrayLiteral");

  DeferredArrayLiteral* deferred = new DeferredArrayLiteral(this, node);

  // Retrieve the literal array and check the allocated entry.

  // Load the function of this activation.
  __ ldr(r1, frame_->Function());

  // Load the literals array of the function.
  __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ ldr(r2, FieldMemOperand(r1, literal_offset));

  // Check whether we need to materialize the array literal boilerplate.
  // If so, jump to the deferred code.
  __ cmp(r2, Operand(Factory::undefined_value()));
  deferred->enter()->Branch(eq);
  deferred->BindExit();

  // Push the array literal boilerplate.
  frame_->EmitPush(r2);

  // Clone the boilerplate object.
  Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate;
  if (node->depth() == 1) {
    clone_function_id = Runtime::kCloneShallowLiteralBoilerplate;
  }
  frame_->CallRuntime(clone_function_id, 1);
  frame_->EmitPush(r0);  // save the result
  // r0: cloned array literal

  // Generate code to set the elements in the array that are not
  // literals.
  for (int i = 0; i < node->values()->length(); i++) {
    Expression* value = node->values()->at(i);

    // If value is a literal the property value is already set in the
    // boilerplate object.
    if (value->AsLiteral() != NULL) continue;
    // If value is a materialized literal the property value is already set
    // in the boilerplate object if it is simple.
    if (CompileTimeValue::IsCompileTimeValue(value)) continue;

    // The property must be set by generated code.
    LoadAndSpill(value);
    frame_->EmitPop(r0);

    // Fetch the array literal.
    __ ldr(r1, frame_->Top());
    // Get the elements array.
    __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));

    // Write to the indexed properties array.
    int offset = i * kPointerSize + Array::kHeaderSize;
    __ str(r0, FieldMemOperand(r1, offset));

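    // The store above may introduce a pointer from the elements array to
    // a new-space object; the write barrier below records the written
    // slot so the generational GC will see it.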
    // Update the write barrier for the array address.
    __ mov(r3, Operand(offset));
    __ RecordWrite(r1, r3, r2);
  }
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope(this);
  // Call runtime routine to allocate the catch extension object and
  // assign the exception value to the catch variable.
  Comment cmnt(masm_, "[ CatchExtensionObject");
  LoadAndSpill(node->key());
  LoadAndSpill(node->value());
  Result result =
      frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
  frame_->EmitPush(result.reg());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ Assignment");
  CodeForStatementPosition(node);

  { Reference target(this, node->target());
    if (target.is_illegal()) {
      // Fool the virtual frame into thinking that we left the assignment's
      // value on the frame.
      __ mov(r0, Operand(Smi::FromInt(0)));
      frame_->EmitPush(r0);
      ASSERT(frame_->height() == original_height + 1);
      return;
    }

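    // For a compound assignment such as 'x += y', the target's current
    // value is loaded first and combined with the right-hand side below.
    // When the right-hand side is a smi literal, SmiOperation emits a
    // specialized inline sequence instead of the generic binary
    // operation stub.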
    if (node->op() == Token::ASSIGN ||
        node->op() == Token::INIT_VAR ||
        node->op() == Token::INIT_CONST) {
      LoadAndSpill(node->value());

    } else {
      target.GetValueAndSpill(NOT_INSIDE_TYPEOF);
      Literal* literal = node->value()->AsLiteral();
      if (literal != NULL && literal->handle()->IsSmi()) {
        SmiOperation(node->binary_op(), literal->handle(), false);
        frame_->EmitPush(r0);

      } else {
        LoadAndSpill(node->value());
        GenericBinaryOperation(node->binary_op());
        frame_->EmitPush(r0);
      }
    }

    Variable* var = node->target()->AsVariableProxy()->AsVariable();
    if (var != NULL &&
        (var->mode() == Variable::CONST) &&
        node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
      // Assignment ignored - leave the value on the stack.

    } else {
      CodeForSourcePosition(node->position());
      if (node->op() == Token::INIT_CONST) {
        // Dynamic constant initializations must use the function context
        // and initialize the actual constant declared. Dynamic variable
        // initializations are simply assignments and use SetValue.
        target.SetValue(CONST_INIT);
      } else {
        target.SetValue(NOT_CONST_INIT);
      }
    }
  }
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitThrow(Throw* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ Throw");

  LoadAndSpill(node->exception());
  CodeForSourcePosition(node->position());
  frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->EmitPush(r0);
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitProperty(Property* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ Property");

  { Reference property(this, node);
    property.GetValueAndSpill(typeof_state());
  }
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitCall(Call* node) {
3002
#ifdef DEBUG
3003
  int original_height = frame_->height();
3004
#endif
3005
  VirtualFrame::SpilledScope spilled_scope(this);
3006
  Comment cmnt(masm_, "[ Call");
3007
3008
  ZoneList<Expression*>* args = node->arguments();
3009
3010
  CodeForStatementPosition(node);
3011
  // Standard function call.
3012
3013
  // Check if the function is a variable or a property.
3014
  Expression* function = node->expression();
3015
  Variable* var = function->AsVariableProxy()->AsVariable();
3016
  Property* property = function->AsProperty();
3017
3018
  // ------------------------------------------------------------------------
3019
  // Fast-case: Use inline caching.
3020
  // ---
3021
  // According to ECMA-262, section 11.2.3, page 44, the function to call
3022
  // must be resolved after the arguments have been evaluated. The IC code
3023
  // automatically handles this by loading the arguments before the function
3024
  // is resolved in cache misses (this also holds for megamorphic calls).
3025
  // ------------------------------------------------------------------------

  if (var != NULL && !var->is_this() && var->is_global()) {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is global
    // ----------------------------------

    // Push the name of the function and the receiver onto the stack.
    __ mov(r0, Operand(var->name()));
    frame_->EmitPush(r0);

    // Pass the global object as the receiver and let the IC stub
    // patch the stack to use the global proxy as 'this' in the
    // invoked function.
    LoadGlobal();

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      LoadAndSpill(args->at(i));
    }

    // Set up the receiver register and call the IC initialization code.
    Handle<Code> stub = ComputeCallInitialize(arg_count);
    CodeForSourcePosition(node->position());
    frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
                           arg_count + 1);
    __ ldr(cp, frame_->Context());
    // Remove the function from the stack.
    frame_->Drop();
    frame_->EmitPush(r0);

  } else if (var != NULL && var->slot() != NULL &&
             var->slot()->type() == Slot::LOOKUP) {
    // ----------------------------------
    // JavaScript example: 'with (obj) foo(1, 2, 3)'  // foo is in obj
    // ----------------------------------

    // Load the function.
    frame_->EmitPush(cp);
    __ mov(r0, Operand(var->name()));
    frame_->EmitPush(r0);
    frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    // r0: slot value; r1: receiver

    // Load the receiver.
    frame_->EmitPush(r0);  // function
    frame_->EmitPush(r1);  // receiver

    // Call the function.
    CallWithArguments(args, node->position());
    frame_->EmitPush(r0);

  } else if (property != NULL) {
    // Check if the key is a literal string.
    Literal* literal = property->key()->AsLiteral();

    if (literal != NULL && literal->handle()->IsSymbol()) {
      // ------------------------------------------------------------------
      // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
      // ------------------------------------------------------------------

      // Push the name of the function and the receiver onto the stack.
      __ mov(r0, Operand(literal->handle()));
      frame_->EmitPush(r0);
      LoadAndSpill(property->obj());

      // Load the arguments.
      int arg_count = args->length();
      for (int i = 0; i < arg_count; i++) {
        LoadAndSpill(args->at(i));
      }

      // Set the receiver register and call the IC initialization code.
      Handle<Code> stub = ComputeCallInitialize(arg_count);
      CodeForSourcePosition(node->position());
      frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
      __ ldr(cp, frame_->Context());

      // Remove the function from the stack.
      frame_->Drop();

      frame_->EmitPush(r0);  // Push the result after removing the function.

    } else {
      // -------------------------------------------
      // JavaScript example: 'array[index](1, 2, 3)'
      // -------------------------------------------

      // Load the function to call from the property through a reference.
      Reference ref(this, property);
      ref.GetValueAndSpill(NOT_INSIDE_TYPEOF);  // receiver

      // Pass receiver to called function.
      if (property->is_synthetic()) {
        LoadGlobalReceiver(r0);
      } else {
        __ ldr(r0, frame_->ElementAt(ref.size()));
        frame_->EmitPush(r0);
      }

      // Call the function.
      CallWithArguments(args, node->position());
      frame_->EmitPush(r0);
    }

  } else {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is not global
    // ----------------------------------

    // Load the function.
    LoadAndSpill(function);

    // Pass the global proxy as the receiver.
    LoadGlobalReceiver(r0);

    // Call the function.
    CallWithArguments(args, node->position());
    frame_->EmitPush(r0);
  }
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitCallEval(CallEval* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ CallEval");

  // In a call to eval, we first call %ResolvePossiblyDirectEval to resolve
  // the function we need to call and the receiver of the call.
  // Then we call the resolved function using the given arguments.
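  //
  // Sketch of the stack layout this code builds (pushed in order):
  //   function, receiver slot (undefined for now), arg 0 .. arg n-1,
  // followed by two copies for the resolver call: the function and the
  // first argument (the eval source string), or undefined if there are
  // no arguments.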

  ZoneList<Expression*>* args = node->arguments();
  Expression* function = node->expression();

  CodeForStatementPosition(node);

  // Prepare stack for call to resolved function.
  LoadAndSpill(function);
  __ mov(r2, Operand(Factory::undefined_value()));
  frame_->EmitPush(r2);  // Slot for receiver
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    LoadAndSpill(args->at(i));
  }

  // Prepare stack for call to ResolvePossiblyDirectEval.
  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize));
  frame_->EmitPush(r1);
  if (arg_count > 0) {
    __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
    frame_->EmitPush(r1);
  } else {
    frame_->EmitPush(r2);
  }

  // Resolve the call.
  frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 2);

  // Touch up stack with the right values for the function and the receiver.
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kHeaderSize));
  __ str(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kHeaderSize + kPointerSize));
  __ str(r1, MemOperand(sp, arg_count * kPointerSize));
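  // (The runtime call is assumed to return a fixed array whose
  // elements 0 and 1 are the resolved function and receiver; the two
  // loads above copy them into the slots reserved earlier.)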

  // Call the function.
  CodeForSourcePosition(node->position());

  CallFunctionStub call_function(arg_count);
  frame_->CallStub(&call_function, arg_count + 1);

  __ ldr(cp, frame_->Context());
  // Remove the function from the stack.
  frame_->Drop();
  frame_->EmitPush(r0);
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitCallNew(CallNew* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ CallNew");
  CodeForStatementPosition(node);

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments. This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.
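  //
  // Sketch (illustrative JavaScript): in 'new f(g())' the value of 'f'
  // is fetched before g() runs, so a g() that reassigns the global 'f'
  // does not change which constructor is invoked.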

  // Compute function to call and use the global object as the
  // receiver. There is no need to use the global proxy here because
  // it will always be replaced with a newly allocated object.
  LoadAndSpill(node->expression());
  LoadGlobal();

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    LoadAndSpill(args->at(i));
  }

  // r0: the number of arguments.
  Result num_args = allocator_->Allocate(r0);
  ASSERT(num_args.is_valid());
  __ mov(num_args.reg(), Operand(arg_count));

  // Load the function into r1 as per calling convention.
  Result function = allocator_->Allocate(r1);
  ASSERT(function.is_valid());
  __ ldr(function.reg(), frame_->ElementAt(arg_count + 1));

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
  Result result = frame_->CallCodeObject(ic,
                                         RelocInfo::CONSTRUCT_CALL,
                                         &num_args,
                                         &function,
                                         arg_count + 1);

  // Discard old TOS value and push r0 on the stack (same as Pop(), push(r0)).
  __ str(r0, frame_->Top());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(args->length() == 1);
  JumpTarget leave(this);
  LoadAndSpill(args->at(0));
  frame_->EmitPop(r0);  // r0 contains object.
  // if (object->IsSmi()) return the object.
  __ tst(r0, Operand(kSmiTagMask));
  leave.Branch(eq);
  // It is a heap object - get map.
  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
  // if (!object->IsJSValue()) return the object.
  __ cmp(r1, Operand(JS_VALUE_TYPE));
  leave.Branch(ne);
  // Load the value.
  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
  leave.Bind();
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(args->length() == 2);
  JumpTarget leave(this);
  LoadAndSpill(args->at(0));  // Load the object.
  LoadAndSpill(args->at(1));  // Load the value.
  frame_->EmitPop(r0);  // r0 contains value
  frame_->EmitPop(r1);  // r1 contains object
  // if (object->IsSmi()) return object.
  __ tst(r1, Operand(kSmiTagMask));
  leave.Branch(eq);
  // It is a heap object - get map.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  // if (!object->IsJSValue()) return object.
  __ cmp(r2, Operand(JS_VALUE_TYPE));
  leave.Branch(ne);
  // Store the value.
  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
  // Update the write barrier.
  __ mov(r2, Operand(JSValue::kValueOffset - kHeapObjectTag));
  __ RecordWrite(r1, r2, r3);
  // Leave.
  leave.Bind();
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(args->length() == 1);
  LoadAndSpill(args->at(0));
  frame_->EmitPop(r0);
  __ tst(r0, Operand(kSmiTagMask));
  cc_reg_ = eq;
}


void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(this);
  // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (ShouldGenerateLog(args->at(0))) {
    LoadAndSpill(args->at(1));
    LoadAndSpill(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }
#endif
  __ mov(r0, Operand(Factory::undefined_value()));
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(args->length() == 1);
  LoadAndSpill(args->at(0));
  frame_->EmitPop(r0);
  __ tst(r0, Operand(kSmiTagMask | 0x80000000));
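  // A non-negative smi has both the tag bit (bit 0) and the sign bit
  // (bit 31) clear, so a single tst against the combined mask decides;
  // e.g. smi(-1) == 0xfffffffe fails because bit 31 is set.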
  cc_reg_ = eq;
}


// This should generate code that performs a charCodeAt() call or returns
// undefined in order to trigger the slow case, Runtime_StringCharCodeAt.
// It is not yet implemented on ARM, so it always goes to the slow case.
void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(args->length() == 2);
  __ mov(r0, Operand(Factory::undefined_value()));
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(args->length() == 1);
  LoadAndSpill(args->at(0));
  JumpTarget answer(this);
  // We need the CC bits to come out as not_equal in the case where the
  // object is a smi.  This can't be done with the usual test opcode so
  // we use XOR to get the right CC bits.
  frame_->EmitPop(r0);
  __ and_(r1, r0, Operand(kSmiTagMask));
  __ eor(r1, r1, Operand(kSmiTagMask), SetCC);
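  // Worked example of the and/eor trick: for a smi, r0 & 1 == 0 and
  // 0 ^ 1 == 1, so the flags are not_equal and the branch below is
  // taken (the final eq condition then reads false).  For a heap
  // object, r0 & 1 == 1 and 1 ^ 1 == 0, so we fall through to the
  // map check.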
  answer.Branch(ne);
  // It is a heap object - get the map.
  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
  // Check if the object is a JS array or not.
  __ cmp(r1, Operand(JS_ARRAY_TYPE));
  answer.Bind();
  cc_reg_ = eq;
}


void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(args->length() == 0);

  // Seed the result with the formal parameters count, which will be used
  // in case no arguments adaptor frame is found below the current frame.
  __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters())));

  // Call the shared stub to get to the arguments.length.
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_LENGTH);
  frame_->CallStub(&stub, 0);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(args->length() == 1);

  // Satisfy contract with ArgumentsAccessStub:
  // Load the key into r1 and the formal parameters count into r0.
  LoadAndSpill(args->at(0));
  frame_->EmitPop(r1);
  __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters())));

  // Call the shared stub to get to arguments[key].
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  frame_->CallStub(&stub, 0);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(this);
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  LoadAndSpill(args->at(0));
  LoadAndSpill(args->at(1));
  frame_->EmitPop(r0);
  frame_->EmitPop(r1);
  __ cmp(r0, Operand(r1));
  cc_reg_ = eq;
}


void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  if (CheckForInlineRuntimeCall(node)) {
    ASSERT((has_cc() && frame_->height() == original_height) ||
           (!has_cc() && frame_->height() == original_height + 1));
    return;
  }

  ZoneList<Expression*>* args = node->arguments();
  Comment cmnt(masm_, "[ CallRuntime");
  Runtime::Function* function = node->function();

  if (function == NULL) {
    // Prepare stack for calling JS runtime function.
    __ mov(r0, Operand(node->name()));
    frame_->EmitPush(r0);
    // Push the builtins object found in the current global object.
    __ ldr(r1, GlobalObject());
    __ ldr(r0, FieldMemOperand(r1, GlobalObject::kBuiltinsOffset));
    frame_->EmitPush(r0);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    LoadAndSpill(args->at(i));
  }

  if (function == NULL) {
    // Call the JS runtime function.
    Handle<Code> stub = ComputeCallInitialize(arg_count);
    frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
    __ ldr(cp, frame_->Context());
    frame_->Drop();
    frame_->EmitPush(r0);
  } else {
    // Call the C runtime function.
    frame_->CallRuntime(function, arg_count);
    frame_->EmitPush(r0);
  }
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ UnaryOperation");

  Token::Value op = node->op();

  if (op == Token::NOT) {
    LoadConditionAndSpill(node->expression(),
                          NOT_INSIDE_TYPEOF,
                          false_target(),
                          true_target(),
                          true);
    cc_reg_ = NegateCondition(cc_reg_);

  } else if (op == Token::DELETE) {
    Property* property = node->expression()->AsProperty();
    Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
    if (property != NULL) {
      LoadAndSpill(property->obj());
      LoadAndSpill(property->key());
      Result arg_count = allocator_->Allocate(r0);
      ASSERT(arg_count.is_valid());
      __ mov(arg_count.reg(), Operand(1));  // not counting receiver
      frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, &arg_count, 2);

    } else if (variable != NULL) {
      Slot* slot = variable->slot();
      if (variable->is_global()) {
        LoadGlobal();
        __ mov(r0, Operand(variable->name()));
        frame_->EmitPush(r0);
        Result arg_count = allocator_->Allocate(r0);
        ASSERT(arg_count.is_valid());
        __ mov(arg_count.reg(), Operand(1));  // not counting receiver
        frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, &arg_count, 2);

      } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
        // Look up the context holding the named variable.
        frame_->EmitPush(cp);
        __ mov(r0, Operand(variable->name()));
        frame_->EmitPush(r0);
        frame_->CallRuntime(Runtime::kLookupContext, 2);
        // r0: context
        frame_->EmitPush(r0);
        __ mov(r0, Operand(variable->name()));
        frame_->EmitPush(r0);
        Result arg_count = allocator_->Allocate(r0);
        ASSERT(arg_count.is_valid());
        __ mov(arg_count.reg(), Operand(1));  // not counting receiver
        frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, &arg_count, 2);

      } else {
        // Default: Deleting a non-global variable that was not
        // dynamically introduced yields false.
        __ mov(r0, Operand(Factory::false_value()));
      }

    } else {
      // Default: Deleting any other expression yields true.
      LoadAndSpill(node->expression());  // may have side-effects
      frame_->Drop();
      __ mov(r0, Operand(Factory::true_value()));
    }
    frame_->EmitPush(r0);

  } else if (op == Token::TYPEOF) {
    // Special case for loading the typeof expression; see comment on
    // LoadTypeofExpression().
    LoadTypeofExpression(node->expression());
    frame_->CallRuntime(Runtime::kTypeof, 1);
    frame_->EmitPush(r0);  // r0 has result

  } else {
    LoadAndSpill(node->expression());
    frame_->EmitPop(r0);
    switch (op) {
      case Token::NOT:
      case Token::DELETE:
      case Token::TYPEOF:
        UNREACHABLE();  // handled above
        break;

      case Token::SUB: {
        UnarySubStub stub;
        frame_->CallStub(&stub, 0);
        break;
      }

      case Token::BIT_NOT: {
        // smi check
        JumpTarget smi_label(this);
        JumpTarget continue_label(this);
        __ tst(r0, Operand(kSmiTagMask));
        smi_label.Branch(eq);

        frame_->EmitPush(r0);
        Result arg_count = allocator_->Allocate(r0);
        ASSERT(arg_count.is_valid());
        __ mov(arg_count.reg(), Operand(0));  // not counting receiver
        frame_->InvokeBuiltin(Builtins::BIT_NOT, CALL_JS, &arg_count, 1);

        continue_label.Jump();
        smi_label.Bind();
        __ mvn(r0, Operand(r0));
        __ bic(r0, r0, Operand(kSmiTagMask));  // bit-clear inverted smi-tag
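        // Worked example (assuming 32-bit smis with a one-bit zero
        // tag): smi(5) == 0xa; mvn gives 0xfffffff5, whose tag bit is
        // set; bic clears it, leaving 0xfffffff4 == smi(-6) == smi(~5).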
        continue_label.Bind();
        break;
      }

      case Token::VOID:
        // since the stack top is cached in r0, popping and then
        // pushing a value can be done by just writing to r0.
        __ mov(r0, Operand(Factory::undefined_value()));
        break;

      case Token::ADD: {
        // Smi check.
        JumpTarget continue_label(this);
        __ tst(r0, Operand(kSmiTagMask));
        continue_label.Branch(eq);
        frame_->EmitPush(r0);
        Result arg_count = allocator_->Allocate(r0);
        ASSERT(arg_count.is_valid());
        __ mov(arg_count.reg(), Operand(0));  // not counting receiver
        frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, &arg_count, 1);
        continue_label.Bind();
        break;
      }
      default:
        UNREACHABLE();
    }
    frame_->EmitPush(r0);  // r0 has result
  }
  ASSERT((has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::VisitCountOperation(CountOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ CountOperation");

  bool is_postfix = node->is_postfix();
  bool is_increment = node->op() == Token::INC;

  Variable* var = node->expression()->AsVariableProxy()->AsVariable();
  bool is_const = (var != NULL && var->mode() == Variable::CONST);

  // Postfix: Make room for the result.
  if (is_postfix) {
    __ mov(r0, Operand(0));
    frame_->EmitPush(r0);
  }

  { Reference target(this, node->expression());
    if (target.is_illegal()) {
      // Spoof the virtual frame to have the expected height (one higher
      // than on entry).
      if (!is_postfix) {
        __ mov(r0, Operand(Smi::FromInt(0)));
        frame_->EmitPush(r0);
      }
      ASSERT(frame_->height() == original_height + 1);
      return;
    }
    target.GetValueAndSpill(NOT_INSIDE_TYPEOF);
    frame_->EmitPop(r0);

    JumpTarget slow(this);
    JumpTarget exit(this);

    // Load the value (1) into register r1.
    __ mov(r1, Operand(Smi::FromInt(1)));

    // Check for smi operand.
    __ tst(r0, Operand(kSmiTagMask));
    slow.Branch(ne);

    // Postfix: Store the old value as the result.
    if (is_postfix) {
      __ str(r0, frame_->ElementAt(target.size()));
    }

    // Perform optimistic increment/decrement.
    if (is_increment) {
      __ add(r0, r0, Operand(r1), SetCC);
    } else {
      __ sub(r0, r0, Operand(r1), SetCC);
    }
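    // With a zero one-bit tag, tagged smis add directly:
    // smi(x) + smi(1) == ((x + 1) << 1) == smi(x + 1).  E.g. the
    // largest smi, 2^30 - 1, is tagged as 0x7ffffffe; adding the
    // tagged 1 (== 2) wraps to 0x80000000 and sets the V flag, so the
    // overflow check below sends that case to the slow path.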

    // If the increment/decrement didn't overflow, we're done.
    exit.Branch(vc);

    // Revert optimistic increment/decrement.
    if (is_increment) {
      __ sub(r0, r0, Operand(r1));
    } else {
      __ add(r0, r0, Operand(r1));
    }

    // Slow case: Convert to number.
    slow.Bind();
    {
      // Convert the operand to a number.
      frame_->EmitPush(r0);
      Result arg_count = allocator_->Allocate(r0);
      ASSERT(arg_count.is_valid());
      __ mov(arg_count.reg(), Operand(0));
      frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, &arg_count, 1);
    }
    if (is_postfix) {
      // Postfix: store to result (on the stack).
      __ str(r0, frame_->ElementAt(target.size()));
    }

    // Compute the new value.
    __ mov(r1, Operand(Smi::FromInt(1)));
    frame_->EmitPush(r0);
    frame_->EmitPush(r1);
    if (is_increment) {
      frame_->CallRuntime(Runtime::kNumberAdd, 2);
    } else {
      frame_->CallRuntime(Runtime::kNumberSub, 2);
    }

    // Store the new value in the target if not const.
    exit.Bind();
    frame_->EmitPush(r0);
    if (!is_const) target.SetValue(NOT_CONST_INIT);
  }

  // Postfix: Discard the new value and use the old.
  if (is_postfix) frame_->EmitPop(r0);
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ BinaryOperation");
  Token::Value op = node->op();

  // According to ECMA-262 section 11.11, page 58, the binary logical
  // operators must yield the result of one of the two expressions
  // before any ToBoolean() conversions. This means that the value
  // produced by a && or || operator is not necessarily a boolean.
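  // E.g. ('' || 'fallback') yields the string 'fallback', and
  // (0 && f()) yields 0 without ever calling f.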

  // NOTE: If the left hand side produces a materialized value (not in
  // the CC register), we force the right hand side to do the
  // same. This is necessary because we may have to branch to the exit
  // after evaluating the left hand side (due to the shortcut
  // semantics), but the compiler must (statically) know if the result
  // of compiling the binary operation is materialized or not.

  if (op == Token::AND) {
    JumpTarget is_true(this);
    LoadConditionAndSpill(node->left(),
                          NOT_INSIDE_TYPEOF,
                          &is_true,
                          false_target(),
                          false);
    if (has_cc()) {
      Branch(false, false_target());

      // Evaluate right side expression.
      is_true.Bind();
      LoadConditionAndSpill(node->right(),
                            NOT_INSIDE_TYPEOF,
                            true_target(),
                            false_target(),
                            false);

    } else {
      JumpTarget pop_and_continue(this);
      JumpTarget exit(this);

      __ ldr(r0, frame_->Top());  // dup the stack top
      frame_->EmitPush(r0);
      // Avoid popping the result if it converts to 'false' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      ToBoolean(&pop_and_continue, &exit);
      Branch(false, &exit);

      // Pop the result of evaluating the first part.
      pop_and_continue.Bind();
      frame_->EmitPop(r0);

      // Evaluate right side expression.
      is_true.Bind();
      LoadAndSpill(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }

  } else if (op == Token::OR) {
    JumpTarget is_false(this);
    LoadConditionAndSpill(node->left(),
                          NOT_INSIDE_TYPEOF,
                          true_target(),
                          &is_false,
                          false);
    if (has_cc()) {
      Branch(true, true_target());

      // Evaluate right side expression.
      is_false.Bind();
      LoadConditionAndSpill(node->right(),
                            NOT_INSIDE_TYPEOF,
                            true_target(),
                            false_target(),
                            false);

    } else {
      JumpTarget pop_and_continue(this);
      JumpTarget exit(this);

      __ ldr(r0, frame_->Top());
      frame_->EmitPush(r0);
      // Avoid popping the result if it converts to 'true' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      ToBoolean(&exit, &pop_and_continue);
      Branch(true, &exit);

      // Pop the result of evaluating the first part.
      pop_and_continue.Bind();
      frame_->EmitPop(r0);

      // Evaluate right side expression.
      is_false.Bind();
      LoadAndSpill(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }

  } else {
    // Optimize for the case where (at least) one of the expressions
    // is a literal small integer.
    Literal* lliteral = node->left()->AsLiteral();
    Literal* rliteral = node->right()->AsLiteral();

    if (rliteral != NULL && rliteral->handle()->IsSmi()) {
      LoadAndSpill(node->left());
      SmiOperation(node->op(), rliteral->handle(), false);

    } else if (lliteral != NULL && lliteral->handle()->IsSmi()) {
      LoadAndSpill(node->right());
      SmiOperation(node->op(), lliteral->handle(), true);

    } else {
      LoadAndSpill(node->left());
      LoadAndSpill(node->right());
      GenericBinaryOperation(node->op());
    }
    frame_->EmitPush(r0);
  }
  ASSERT((has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::VisitThisFunction(ThisFunction* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  __ ldr(r0, frame_->Function());
  frame_->EmitPush(r0);
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(this);
  Comment cmnt(masm_, "[ CompareOperation");

  // Get the expressions from the node.
  Expression* left = node->left();
  Expression* right = node->right();
  Token::Value op = node->op();

  // To make null checks efficient, we check if either left or right is the
  // literal 'null'. If so, we optimize the code by inlining a null check
  // instead of calling the (very) general runtime routine for checking
  // equality.
  if (op == Token::EQ || op == Token::EQ_STRICT) {
    bool left_is_null =
        left->AsLiteral() != NULL && left->AsLiteral()->IsNull();
    bool right_is_null =
        right->AsLiteral() != NULL && right->AsLiteral()->IsNull();
    // The 'null' value can only be equal to 'null' or 'undefined'.
    if (left_is_null || right_is_null) {
      LoadAndSpill(left_is_null ? right : left);
      frame_->EmitPop(r0);
      __ cmp(r0, Operand(Factory::null_value()));

      // The 'null' value is only equal to 'undefined' if using non-strict
      // comparisons.
      if (op != Token::EQ_STRICT) {
        true_target()->Branch(eq);

        __ cmp(r0, Operand(Factory::undefined_value()));
        true_target()->Branch(eq);

        __ tst(r0, Operand(kSmiTagMask));
        false_target()->Branch(eq);

        // It can be an undetectable object.
        __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
        __ ldrb(r0, FieldMemOperand(r0, Map::kBitFieldOffset));
        __ and_(r0, r0, Operand(1 << Map::kIsUndetectable));
        __ cmp(r0, Operand(1 << Map::kIsUndetectable));
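        // (The kIsUndetectable map bit is assumed to mark host objects
        // such as document.all that must compare loosely equal to null
        // and undefined despite being genuine objects.)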
      }

      cc_reg_ = eq;
      ASSERT(has_cc() && frame_->height() == original_height);
      return;
    }
  }

  // To make typeof testing for natives implemented in JavaScript really
  // efficient, we generate special code for expressions of the form:
  // 'typeof <expression> == <string>'.
  UnaryOperation* operation = left->AsUnaryOperation();
  if ((op == Token::EQ || op == Token::EQ_STRICT) &&
      (operation != NULL && operation->op() == Token::TYPEOF) &&
      (right->AsLiteral() != NULL &&
       right->AsLiteral()->handle()->IsString())) {
    Handle<String> check(String::cast(*right->AsLiteral()->handle()));

    // Load the operand, move it to register r1.
    LoadTypeofExpression(operation->expression());
    frame_->EmitPop(r1);

    if (check->Equals(Heap::number_symbol())) {
      __ tst(r1, Operand(kSmiTagMask));
      true_target()->Branch(eq);
      __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
      __ cmp(r1, Operand(Factory::heap_number_map()));
      cc_reg_ = eq;

    } else if (check->Equals(Heap::string_symbol())) {
      __ tst(r1, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));

      // It can be an undetectable string object.
      __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset));
      __ and_(r2, r2, Operand(1 << Map::kIsUndetectable));
      __ cmp(r2, Operand(1 << Map::kIsUndetectable));
      false_target()->Branch(eq);

      __ ldrb(r2, FieldMemOperand(r1, Map::kInstanceTypeOffset));
      __ cmp(r2, Operand(FIRST_NONSTRING_TYPE));
      cc_reg_ = lt;

    } else if (check->Equals(Heap::boolean_symbol())) {
      __ cmp(r1, Operand(Factory::true_value()));
      true_target()->Branch(eq);
      __ cmp(r1, Operand(Factory::false_value()));
      cc_reg_ = eq;

    } else if (check->Equals(Heap::undefined_symbol())) {
      __ cmp(r1, Operand(Factory::undefined_value()));
      true_target()->Branch(eq);

      __ tst(r1, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      // It can be an undetectable object.
      __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
      __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset));
      __ and_(r2, r2, Operand(1 << Map::kIsUndetectable));
      __ cmp(r2, Operand(1 << Map::kIsUndetectable));

      cc_reg_ = eq;

    } else if (check->Equals(Heap::function_symbol())) {
      __ tst(r1, Operand(kSmiTagMask));
      false_target()->Branch(eq);
      __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
      __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
      __ cmp(r1, Operand(JS_FUNCTION_TYPE));
      cc_reg_ = eq;

    } else if (check->Equals(Heap::object_symbol())) {
      __ tst(r1, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
      __ cmp(r1, Operand(Factory::null_value()));
      true_target()->Branch(eq);

      // It can be an undetectable object.
      __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
      __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
      __ cmp(r1, Operand(1 << Map::kIsUndetectable));
      false_target()->Branch(eq);

      __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
      __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
      false_target()->Branch(lt);
      __ cmp(r2, Operand(LAST_JS_OBJECT_TYPE));
      cc_reg_ = le;

    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
      false_target()->Jump();
    }
    ASSERT(!has_valid_frame() ||
           (has_cc() && frame_->height() == original_height));
    return;
  }

  LoadAndSpill(left);
  LoadAndSpill(right);
  switch (op) {
    case Token::EQ:
      Comparison(eq, false);
      break;

    case Token::LT:
      Comparison(lt);
      break;

    case Token::GT:
      Comparison(gt);
      break;

    case Token::LTE:
      Comparison(le);
      break;

    case Token::GTE:
      Comparison(ge);
      break;

    case Token::EQ_STRICT:
      Comparison(eq, true);
      break;

    case Token::IN: {
      Result arg_count = allocator_->Allocate(r0);
      ASSERT(arg_count.is_valid());
      __ mov(arg_count.reg(), Operand(1));  // not counting receiver
      Result result = frame_->InvokeBuiltin(Builtins::IN,
                                            CALL_JS,
                                            &arg_count,
                                            2);
      frame_->EmitPush(result.reg());
      break;
    }

    case Token::INSTANCEOF: {
      Result arg_count = allocator_->Allocate(r0);
      ASSERT(arg_count.is_valid());
      __ mov(arg_count.reg(), Operand(1));  // not counting receiver
      Result result = frame_->InvokeBuiltin(Builtins::INSTANCE_OF,
                                            CALL_JS,
                                            &arg_count,
                                            2);
      __ tst(result.reg(), Operand(result.reg()));
      cc_reg_ = eq;
      break;
    }

    default:
      UNREACHABLE();
  }
  ASSERT((has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() { return true; }
#endif


#undef __
#define __ masm->

Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>(String::cast(*raw_name->handle()));
  }
}


void Reference::GetValueAndSpill(TypeofState typeof_state) {
  ASSERT(cgen_->in_spilled_code());
  cgen_->set_in_spilled_code(false);
  GetValue(typeof_state);
  cgen_->frame()->SpillAll();
  cgen_->set_in_spilled_code(true);
}


void Reference::GetValue(TypeofState typeof_state) {
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      ASSERT(slot != NULL);
      cgen_->LoadFromSlot(slot, typeof_state);
      break;
    }

    case NAMED: {
      // TODO(1241834): Make sure that it is safe to ignore the
      // distinction between expressions in a typeof and not in a typeof. If
      // there is a chance that reference errors can be thrown below, we
      // must distinguish between the two kinds of loads (typeof expression
      // loads must not throw a reference error).
      VirtualFrame* frame = cgen_->frame();
      Comment cmnt(masm, "[ Load from named Property");
      Handle<String> name(GetName());
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
      // Set up the name register.
      Result name_reg = cgen_->allocator()->Allocate(r2);
      ASSERT(name_reg.is_valid());
      __ mov(name_reg.reg(), Operand(name));
      ASSERT(var == NULL || var->is_global());
      RelocInfo::Mode rmode = (var == NULL)
                            ? RelocInfo::CODE_TARGET
                            : RelocInfo::CODE_TARGET_CONTEXT;
      Result answer = frame->CallCodeObject(ic, rmode, &name_reg, 0);
      frame->EmitPush(answer.reg());
      break;
    }

    case KEYED: {
      // TODO(1241834): Make sure that it is safe to ignore the
      // distinction between expressions in a typeof and not in a typeof.

      // TODO(181): Implement inlined version of array indexing once
      // loop nesting is properly tracked on ARM.
      VirtualFrame* frame = cgen_->frame();
      Comment cmnt(masm, "[ Load from keyed Property");
      ASSERT(property != NULL);
      Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      ASSERT(var == NULL || var->is_global());
      RelocInfo::Mode rmode = (var == NULL)
                            ? RelocInfo::CODE_TARGET
                            : RelocInfo::CODE_TARGET_CONTEXT;
      Result answer = frame->CallCodeObject(ic, rmode, 0);
      frame->EmitPush(answer.reg());
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::SetValue(InitState init_state) {
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  VirtualFrame* frame = cgen_->frame();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      ASSERT(slot != NULL);
      if (slot->type() == Slot::LOOKUP) {
        ASSERT(slot->var()->is_dynamic());

        // For now, just do a runtime call.
        frame->EmitPush(cp);
        __ mov(r0, Operand(slot->var()->name()));
        frame->EmitPush(r0);

        if (init_state == CONST_INIT) {
          // Same as the case for a normal store, but ignores attribute
          // (e.g. READ_ONLY) of context slot so that we can initialize
          // const properties (introduced via eval("const foo = (some
          // expr);")). Also, uses the current function context instead of
          // the top context.
          //
          // Note that we must declare the foo upon entry of eval(), via a
          // context slot declaration, but we cannot initialize it at the
          // same time, because the const declaration may be at the end of
          // the eval code (sigh...) and the const variable may have been
          // used before (where its value is 'undefined'). Thus, we can only
          // do the initialization when we actually encounter the expression
          // and when the expression operands are defined and valid, and
          // thus we need the split into 2 operations: declaration of the
          // context slot followed by initialization.
          frame->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
        } else {
          frame->CallRuntime(Runtime::kStoreContextSlot, 3);
        }
        // Storing a variable must keep the (new) value on the expression
        // stack. This is necessary for compiling assignment expressions.
        frame->EmitPush(r0);

      } else {
        ASSERT(!slot->var()->is_dynamic());

        JumpTarget exit(cgen_);
        if (init_state == CONST_INIT) {
          ASSERT(slot->var()->mode() == Variable::CONST);
          // Only the first const initialization must be executed (the slot
          // still contains 'the hole' value). When the assignment is
          // executed, the code is identical to a normal store (see below).
          Comment cmnt(masm, "[ Init const");
          __ ldr(r2, cgen_->SlotOperand(slot, r2));
          __ cmp(r2, Operand(Factory::the_hole_value()));
          exit.Branch(ne);
        }

        // We must execute the store.  Storing a variable must keep the
        // (new) value on the stack. This is necessary for compiling
        // assignment expressions.
        //
        // Note: We will reach here even with slot->var()->mode() ==
        // Variable::CONST because of const declarations which will
        // initialize consts to 'the hole' value and by doing so, end up
        // calling this code.  r2 may be loaded with context; used below in
        // RecordWrite.
        frame->EmitPop(r0);
        __ str(r0, cgen_->SlotOperand(slot, r2));
        frame->EmitPush(r0);
        if (slot->type() == Slot::CONTEXT) {
          // Skip write barrier if the written value is a smi.
          __ tst(r0, Operand(kSmiTagMask));
          exit.Branch(eq);
          // r2 is loaded with context when calling SlotOperand above.
          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
          __ mov(r3, Operand(offset));
          __ RecordWrite(r2, r3, r1);
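          // (Context slots are assumed to be stored like FixedArray
          // elements, so slot i lies at kHeaderSize + i * kPointerSize
          // from the context's start; RecordWrite then updates the
          // remembered set for that address.)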
        }
        // If we definitely did not jump over the assignment, we do not need
        // to bind the exit label.  Doing so can defeat peephole
        // optimization.
        if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
          exit.Bind();
        }
      }
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      // Call the appropriate IC code.
      Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
      Handle<String> name(GetName());

      Result value = cgen_->allocator()->Allocate(r0);
      ASSERT(value.is_valid());
      frame->EmitPop(value.reg());

      // Set up the name register.
      Result property_name = cgen_->allocator()->Allocate(r2);
      ASSERT(property_name.is_valid());
      __ mov(property_name.reg(), Operand(name));
      Result answer = frame->CallCodeObject(ic,
                                            RelocInfo::CODE_TARGET,
                                            &value,
                                            &property_name,
                                            0);
      frame->EmitPush(answer.reg());
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression_->AsProperty();
      ASSERT(property != NULL);
      cgen_->CodeForSourcePosition(property->position());

      // Call IC code.
      Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
      // TODO(1222589): Make the IC grab the values from the stack.
      Result value = cgen_->allocator()->Allocate(r0);
      ASSERT(value.is_valid());
      frame->EmitPop(value.reg());  // value
      Result result =
          frame->CallCodeObject(ic, RelocInfo::CODE_TARGET, &value, 0);
      frame->EmitPush(result.reg());
      break;
    }

    default:
      UNREACHABLE();
  }
}


void GetPropertyStub::Generate(MacroAssembler* masm) {
  // sp[0]: key
  // sp[1]: receiver
  Label slow, fast;
  // Get the key and receiver object from the stack.
  __ ldm(ia, sp, r0.bit() | r1.bit());
  // Check that the key is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(ne, &slow);
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));
  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &slow);

  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects works as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Get the elements array of the object.
  __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary).
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Factory::hash_table_map()));
  __ b(eq, &slow);
  // Check that the key (index) is within bounds.
  __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
  __ cmp(r0, Operand(r3));
  __ b(lo, &fast);

  // Slow case: Push extra copies of the arguments (2).
  __ bind(&slow);
  __ ldm(ia, sp, r0.bit() | r1.bit());
  __ stm(db_w, sp, r0.bit() | r1.bit());
  // Do tail-call to runtime routine.
  __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2);

  // Fast case: Do the load.
  __ bind(&fast);
  __ add(r3, r1, Operand(Array::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
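  // Address sketch: the key in r0 was untagged above, so this loads
  // from elements + Array::kHeaderSize - kHeapObjectTag
  // + key * kPointerSize, i.e. the key'th element of the backing
  // store.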
  __ cmp(r0, Operand(Factory::the_hole_value()));
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, &slow);

  __ StubReturn(1);
}


void SetPropertyStub::Generate(MacroAssembler* masm) {
  // r0 : value
  // sp[0] : key
  // sp[1] : receiver

  Label slow, fast, array, extra, exit;
  // Get the key and the object from the stack.
  __ ldm(ia, sp, r1.bit() | r3.bit());  // r1 = key, r3 = receiver
  // Check that the key is a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(ne, &slow);
  // Check that the object isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &slow);
  // Get the type of the object from its map.
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  // Check if the object is a JS array or not.
  __ cmp(r2, Operand(JS_ARRAY_TYPE));
  __ b(eq, &array);
  // Check that the object is some kind of JS object.
  __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);


  // Object case: Check key against length in the elements array.
  __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary).
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ cmp(r2, Operand(Factory::hash_table_map()));
  __ b(eq, &slow);
  // Untag the key (for checking against untagged length in the fixed array).
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));
  // Compute address to store into and check array bounds.
  __ add(r2, r3, Operand(Array::kHeaderSize - kHeapObjectTag));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
  __ ldr(ip, FieldMemOperand(r3, Array::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(lo, &fast);


  // Slow case: Push extra copies of the arguments (3).
  __ bind(&slow);
  __ ldm(ia, sp, r1.bit() | r3.bit());  // r0 == value, r1 == key, r3 == object
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
  // Do tail-call to runtime routine.
  __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);


  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  // r0 == value, r1 == key, r2 == elements, r3 == object
  __ bind(&extra);
  __ b(ne, &slow);  // do not leave holes in the array
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));  // untag
  __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(hs, &slow);
  __ mov(r1, Operand(r1, LSL, kSmiTagSize));  // restore tag
  __ add(r1, r1, Operand(1 << kSmiTagSize));  // and increment
  __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
  __ mov(r3, Operand(r2));
  // NOTE: Computing the address to store into must take into account
  // the fact that the key has been incremented.
  int displacement = Array::kHeaderSize - kHeapObjectTag -
      ((1 << kSmiTagSize) * 2);
  __ add(r2, r2, Operand(displacement));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
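  // Worked example (kSmiTagSize == 1, kPointerSizeLog2 == 2): r1 now
  // holds smi(key + 1) == (key + 1) << 1, and the shift above scales
  // it to (key + 1) * 4.  The (1 << kSmiTagSize) * 2 == 4 subtracted
  // in the displacement cancels the "+ 1", so r2 ends up at
  // elements + header + key * 4, as required.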
  __ b(&fast);


  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode; if it is, the
  // length is always a smi.
  // r0 == value, r3 == object
  __ bind(&array);
  __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
  __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ cmp(r1, Operand(Factory::hash_table_map()));
  __ b(eq, &slow);

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ ldr(r1, MemOperand(sp));
  // r0 == value, r1 == key, r2 == elements, r3 == object.
  __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(hs, &extra);
  __ mov(r3, Operand(r2));
  __ add(r2, r2, Operand(Array::kHeaderSize - kHeapObjectTag));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));


  // Fast case: Do the store.
  // r0 == value, r2 == address to store into, r3 == elements
  __ bind(&fast);
  __ str(r0, MemOperand(r2));
  // Skip write barrier if the written value is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &exit);
  // Update write barrier for the elements array address.
  __ sub(r1, r2, Operand(r3));
  __ RecordWrite(r3, r1, r2);
  __ bind(&exit);
  __ StubReturn(1);
}


void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
  // r1 : x
  // r0 : y
  // result : r0

  switch (op_) {
    case Token::ADD: {
      Label slow, exit;
      // fast path
      __ orr(r2, r1, Operand(r0));  // r2 = x | y;
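      // With a zero tag, one test covers both operands: (x | y) has a
      // clear low bit iff both x and y do.  E.g. smi(3) == 0b0110 and
      // smi(4) == 0b1000 give 0b1110, which passes the tag test below.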
      __ add(r0, r1, Operand(r0), SetCC);  // add y optimistically
      // go slow-path in case of overflow
      __ b(vs, &slow);
      // go slow-path in case of non-smi operands
      ASSERT(kSmiTag == 0);  // adjust code below
      __ tst(r2, Operand(kSmiTagMask));
      __ b(eq, &exit);
      // slow path
      __ bind(&slow);
      __ sub(r0, r0, Operand(r1));  // revert optimistic add
      __ push(r1);
      __ push(r0);
      __ mov(r0, Operand(1));  // set number of arguments
      __ InvokeBuiltin(Builtins::ADD, JUMP_JS);
      // done
      __ bind(&exit);
      break;
    }

    case Token::SUB: {
      Label slow, exit;
      // fast path
      __ orr(r2, r1, Operand(r0));  // r2 = x | y;
      __ sub(r3, r1, Operand(r0), SetCC);  // subtract y optimistically
      // go slow-path in case of overflow
      __ b(vs, &slow);
      // go slow-path in case of non-smi operands
      ASSERT(kSmiTag == 0);  // adjust code below
      __ tst(r2, Operand(kSmiTagMask));
      __ mov(r0, Operand(r3), LeaveCC, eq);  // conditionally set r0 to result
      __ b(eq, &exit);
      // slow path
      __ bind(&slow);
      __ push(r1);
      __ push(r0);
      __ mov(r0, Operand(1));  // set number of arguments
      __ InvokeBuiltin(Builtins::SUB, JUMP_JS);
      // done
      __ bind(&exit);
      break;
    }

    case Token::MUL: {
      Label slow, exit;
      // tag check
      __ orr(r2, r1, Operand(r0));  // r2 = x | y;
      ASSERT(kSmiTag == 0);  // adjust code below
      __ tst(r2, Operand(kSmiTagMask));
      __ b(ne, &slow);
      // remove tag from one operand (but keep sign), so that result is smi
      __ mov(ip, Operand(r0, ASR, kSmiTagSize));
      // do multiplication
      __ smull(r3, r2, r1, ip);  // r3 = lower 32 bits of ip*r1
      // go slow on overflows (overflow bit is not set)
      __ mov(ip, Operand(r3, ASR, 31));
      __ cmp(ip, Operand(r2));  // no overflow if higher 33 bits are identical
      __ b(ne, &slow);
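      // Sketch of the overflow test: smull leaves the full 64-bit
      // product in r2 (high word) and r3 (low word).  The product
      // fits in 32 bits only if r2 is the arithmetic sign extension
      // of r3, i.e. equals r3 >> 31; any other high word means the
      // product overflowed and must go to the slow path.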
4539
      // go slow on zero result to handle -0
4540
      __ tst(r3, Operand(r3));
4541
      __ mov(r0, Operand(r3), LeaveCC, ne);
4542
      __ b(ne, &exit);
4543
      // slow case
4544
      __ bind(&slow);
4545
      __ push(r1);
4546
      __ push(r0);
4547
      __ mov(r0, Operand(1));  // set number of arguments
4548
      __ InvokeBuiltin(Builtins::MUL, JUMP_JS);
4549
      // done
4550
      __ bind(&exit);
4551
      break;
4552
    }
4553
4554
    case Token::BIT_OR:
4555
    case Token::BIT_AND:
4556
    case Token::BIT_XOR: {
4557
      Label slow, exit;
4558
      // tag check
4559
      __ orr(r2, r1, Operand(r0));  // r2 = x | y;
4560
      ASSERT(kSmiTag == 0);  // adjust code below
4561
      __ tst(r2, Operand(kSmiTagMask));
4562
      __ b(ne, &slow);
4563
      switch (op_) {
4564
        case Token::BIT_OR:  __ orr(r0, r0, Operand(r1)); break;
4565
        case Token::BIT_AND: __ and_(r0, r0, Operand(r1)); break;
4566
        case Token::BIT_XOR: __ eor(r0, r0, Operand(r1)); break;
4567
        default: UNREACHABLE();
4568
      }
4569
      __ b(&exit);
4570
      __ bind(&slow);
4571
      __ push(r1);  // restore stack
4572
      __ push(r0);
4573
      __ mov(r0, Operand(1));  // 1 argument (not counting receiver).
4574
      switch (op_) {
4575
        case Token::BIT_OR:
4576
          __ InvokeBuiltin(Builtins::BIT_OR, JUMP_JS);
4577
          break;
4578
        case Token::BIT_AND:
4579
          __ InvokeBuiltin(Builtins::BIT_AND, JUMP_JS);
4580
          break;
4581
        case Token::BIT_XOR:
4582
          __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_JS);
4583
          break;
4584
        default:
4585
          UNREACHABLE();
4586
      }
4587
      __ bind(&exit);
4588
      break;
4589
    }
4590
4591
    case Token::SHL:
4592
    case Token::SHR:
4593
    case Token::SAR: {
4594
      Label slow, exit;
4595
      // tag check
4596
      __ orr(r2, r1, Operand(r0));  // r2 = x | y;
4597
      ASSERT(kSmiTag == 0);  // adjust code below
4598
      __ tst(r2, Operand(kSmiTagMask));
4599
      __ b(ne, &slow);
4600
      // remove tags from operands (but keep sign)
4601
      __ mov(r3, Operand(r1, ASR, kSmiTagSize));  // x
4602
      __ mov(r2, Operand(r0, ASR, kSmiTagSize));  // y
4603
      // use only the 5 least significant bits of the shift count
4604
      __ and_(r2, r2, Operand(0x1f));
4605
      // perform operation
4606
      switch (op_) {
4607
        case Token::SAR:
4608
          __ mov(r3, Operand(r3, ASR, r2));
4609
          // no checks of result necessary
4610
          break;
4611
4612
        case Token::SHR:
4613
          __ mov(r3, Operand(r3, LSR, r2));
4614
          // check that the *unsigned* result fits in a smi
4615
          // neither of the two high-order bits can be set:
4616
          // - 0x80000000: high bit would be lost when smi tagging
4617
          // - 0x40000000: this number would convert to negative when
4618
          // smi tagging these two cases can only happen with shifts
4619
          // by 0 or 1 when handed a valid smi
4620
          __ and_(r2, r3, Operand(0xc0000000), SetCC);
4621
          __ b(ne, &slow);
4622
          break;
4623
4624
        case Token::SHL:
4625
          __ mov(r3, Operand(r3, LSL, r2));
4626
          // check that the *signed* result fits in a smi
4627
          __ add(r2, r3, Operand(0x40000000), SetCC);
4628
          __ b(mi, &slow);
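          // (The add above maps the valid untagged smi range
          // [-0x40000000, 0x3fffffff] onto [0, 0x7fffffff], so mi is
          // taken exactly when the shifted value no longer fits.)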
          break;

        default: UNREACHABLE();
      }
      // tag result and store it in r0
      ASSERT(kSmiTag == 0);  // adjust code below
      __ mov(r0, Operand(r3, LSL, kSmiTagSize));
      __ b(&exit);
      // slow case
      __ bind(&slow);
      __ push(r1);  // restore stack
      __ push(r0);
      __ mov(r0, Operand(1));  // 1 argument (not counting receiver).
      switch (op_) {
        case Token::SAR: __ InvokeBuiltin(Builtins::SAR, JUMP_JS); break;
        case Token::SHR: __ InvokeBuiltin(Builtins::SHR, JUMP_JS); break;
        case Token::SHL: __ InvokeBuiltin(Builtins::SHL, JUMP_JS); break;
        default: UNREACHABLE();
      }
      __ bind(&exit);
      break;
    }

    default: UNREACHABLE();
  }
  __ Ret();
}


void StackCheckStub::Generate(MacroAssembler* masm) {
  Label within_limit;
  __ mov(ip, Operand(ExternalReference::address_of_stack_guard_limit()));
  __ ldr(ip, MemOperand(ip));
  __ cmp(sp, Operand(ip));
  __ b(hs, &within_limit);
  // Do tail-call to runtime routine.
  __ push(r0);
  __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1);
  __ bind(&within_limit);

  __ StubReturn(1);
}


void UnarySubStub::Generate(MacroAssembler* masm) {
  Label undo;
  Label slow;
  Label done;

  // Enter runtime system if the value is not a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(ne, &slow);

  // Enter runtime system if the value of the expression is zero
  // to make sure that we switch between 0 and -0.
  __ cmp(r0, Operand(0));
  __ b(eq, &slow);

  // The value of the expression is a smi that is not zero.  Try
  // optimistic subtraction '0 - value'.
  __ rsb(r1, r0, Operand(0), SetCC);
  __ b(vs, &slow);
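  // (The only smi whose negation overflows is -0x40000000, encoded as
  // 0x80000000; the rsb sets the V flag in that case, so the vs branch
  // above catches it.)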

  // If result is a smi we are done.
  __ tst(r1, Operand(kSmiTagMask));
  __ mov(r0, Operand(r1), LeaveCC, eq);  // conditionally set r0 to result
  __ b(eq, &done);

  // Enter runtime system.
  __ bind(&slow);
  __ push(r0);
  __ mov(r0, Operand(0));  // set number of arguments
  __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);

  __ bind(&done);
  __ StubReturn(1);
}


void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
  // r0 holds exception
  ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
  __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
  __ ldr(sp, MemOperand(r3));
  __ pop(r2);  // pop next in chain
  __ str(r2, MemOperand(r3));
  // restore parameter- and frame-pointer and pop state.
  __ ldm(ia_w, sp, r3.bit() | pp.bit() | fp.bit());
  // Before returning we restore the context from the frame pointer if not
  // NULL. The frame pointer is NULL in the exception handler of a JS entry
  // frame.
  __ cmp(fp, Operand(0));
  // Set cp to NULL if fp is NULL.
  __ mov(cp, Operand(0), LeaveCC, eq);
  // Restore cp otherwise.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
  if (kDebug && FLAG_debug_code) __ mov(lr, Operand(pc));
  __ pop(pc);
}


void CEntryStub::GenerateThrowOutOfMemory(MacroAssembler* masm) {
  // Fetch top stack handler.
  __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
  __ ldr(r3, MemOperand(r3));

  // Unwind the handlers until the ENTRY handler is found.
  Label loop, done;
  __ bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kAddressDisplacement +
      StackHandlerConstants::kStateOffset;
  __ ldr(r2, MemOperand(r3, kStateOffset));
  __ cmp(r2, Operand(StackHandler::ENTRY));
  __ b(eq, &done);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kAddressDisplacement +
      StackHandlerConstants::kNextOffset;
  __ ldr(r3, MemOperand(r3, kNextOffset));
  __ jmp(&loop);
  __ bind(&done);

  // Set the top handler address to next handler past the current ENTRY handler.
  __ ldr(r0, MemOperand(r3, kNextOffset));
  __ mov(r2, Operand(ExternalReference(Top::k_handler_address)));
  __ str(r0, MemOperand(r2));

  // Set external caught exception to false.
  __ mov(r0, Operand(false));
  ExternalReference external_caught(Top::k_external_caught_exception_address);
  __ mov(r2, Operand(external_caught));
  __ str(r0, MemOperand(r2));

  // Set pending exception and r0 to out of memory exception.
  Failure* out_of_memory = Failure::OutOfMemoryException();
  __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
  __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
  __ str(r0, MemOperand(r2));

  // Restore the stack to the address of the ENTRY handler.
  __ mov(sp, Operand(r3));

  // Stack layout at this point. See also PushTryHandler.
  // r3, sp ->   next handler
  //             state (ENTRY)
  //             pp
  //             fp
  //             lr

  // Discard ENTRY state (r2 is not used), and restore parameter-
  // and frame-pointer and pop state.
  __ ldm(ia_w, sp, r2.bit() | r3.bit() | pp.bit() | fp.bit());
  // Before returning we restore the context from the frame pointer if not
  // NULL. The frame pointer is NULL in the exception handler of a JS entry
  // frame.
  __ cmp(fp, Operand(0));
  // Set cp to NULL if fp is NULL.
  __ mov(cp, Operand(0), LeaveCC, eq);
  // Restore cp otherwise.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
  if (kDebug && FLAG_debug_code) __ mov(lr, Operand(pc));
  __ pop(pc);
}


void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_out_of_memory_exception,
                              StackFrame::Type frame_type,
                              bool do_gc,
                              bool always_allocate) {
  // r0: result parameter for PerformGC, if any
  // r4: number of arguments including receiver  (C callee-saved)
  // r5: pointer to builtin function  (C callee-saved)
  // r6: pointer to the first argument (C callee-saved)

  if (do_gc) {
    // Passing r0.
    __ Call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
  }

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth();
  if (always_allocate) {
    __ mov(r0, Operand(scope_depth));
    __ ldr(r1, MemOperand(r0));
    __ add(r1, r1, Operand(1));
    __ str(r1, MemOperand(r0));
  }

  // Call C built-in.
  // r0 = argc, r1 = argv
  __ mov(r0, Operand(r4));
  __ mov(r1, Operand(r6));

  // TODO(1242173): To let the GC traverse the return address of the exit
  // frames, we need to know where the return address is. Right now,
  // we push it on the stack to be able to find it again, but we never
  // restore from it in case of changes, which makes it impossible to
  // support moving the C entry code stub. This should be fixed, but currently
  // this is OK because the CEntryStub gets generated so early in the V8 boot
  // sequence that it will never move.
  __ add(lr, pc, Operand(4));  // compute return address: (pc + 8) + 4
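  // (Reading pc yields the address of the current instruction plus 8,
  // so lr ends up addressing the instruction just past the branch to the
  // builtin below, i.e. where the C function will return.)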
  __ push(lr);
#if !defined(__arm__)
  // Notify the simulator of the transition to C code.
  __ swi(assembler::arm::call_rt_r5);
#else /* !defined(__arm__) */
  __ Jump(r5);
#endif /* !defined(__arm__) */

  if (always_allocate) {
    // It's okay to clobber r2 and r3 here. Don't mess with r0 and r1
    // though (contain the result).
    __ mov(r2, Operand(scope_depth));
    __ ldr(r3, MemOperand(r2));
    __ sub(r3, r3, Operand(1));
    __ str(r3, MemOperand(r2));
  }

  // check for failure result
  Label failure_returned;
  ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
  // Lower 2 bits of r2 are 0 iff r0 has failure tag.
  __ add(r2, r0, Operand(1));
  __ tst(r2, Operand(kFailureTagMask));
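  // (The ASSERT above implies that kFailureTag has all bits of
  // kFailureTagMask set, so adding 1 clears exactly those bits for a
  // failure-tagged word; eq below therefore means r0 is a failure.)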
  __ b(eq, &failure_returned);

  // Exit C frame and return.
  // r0:r1: result
  // sp: stack pointer
  // fp: frame pointer
  // pp: caller's parameter pointer pp  (restored as C callee-saved)
  __ LeaveExitFrame(frame_type);

  // check if we should retry or throw exception
  Label retry;
  __ bind(&failure_returned);
  ASSERT(Failure::RETRY_AFTER_GC == 0);
  __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
  __ b(eq, &retry);

  Label continue_exception;
  // If the returned failure is EXCEPTION then promote Top::pending_exception().
  __ cmp(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
  __ b(ne, &continue_exception);

  // Retrieve the pending exception and clear the variable.
  __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
  __ ldr(r3, MemOperand(ip));
  __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
  __ ldr(r0, MemOperand(ip));
  __ str(r3, MemOperand(ip));

  __ bind(&continue_exception);
  // Special handling of out of memory exception.
  Failure* out_of_memory = Failure::OutOfMemoryException();
  __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
  __ b(eq, throw_out_of_memory_exception);

  // Handle normal exception.
  __ jmp(throw_normal_exception);

  __ bind(&retry);  // pass last failure (r0) as parameter (r0) when retrying
}


void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
  // Called from JavaScript; parameters are on stack as if calling JS function
  // r0: number of arguments including receiver
  // r1: pointer to builtin function
  // fp: frame pointer  (restored after C call)
  // sp: stack pointer  (restored as callee's pp after C call)
  // cp: current context  (C callee-saved)
  // pp: caller's parameter pointer pp  (C callee-saved)

  // NOTE: Invocations of builtins may return failure objects
  // instead of a proper result. The builtin entry handles
  // this by performing a garbage collection and retrying the
  // builtin once.

  StackFrame::Type frame_type = is_debug_break
      ? StackFrame::EXIT_DEBUG
      : StackFrame::EXIT;

  // Enter the exit frame that transitions from JavaScript to C++.
  __ EnterExitFrame(frame_type);

  // r4: number of arguments (C callee-saved)
  // r5: pointer to builtin function (C callee-saved)
  // r6: pointer to first argument (C callee-saved)

  Label throw_out_of_memory_exception;
  Label throw_normal_exception;

  // Call into the runtime system. Collect garbage before the call if
  // running with --gc-greedy set.
  if (FLAG_gc_greedy) {
    Failure* failure = Failure::RetryAfterGC(0);
    __ mov(r0, Operand(reinterpret_cast<intptr_t>(failure)));
  }
  GenerateCore(masm, &throw_normal_exception,
               &throw_out_of_memory_exception,
               frame_type,
               FLAG_gc_greedy,
               false);

  // Do space-specific GC and retry runtime call.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_out_of_memory_exception,
               frame_type,
               true,
               false);

  // Do full GC and retry runtime call one final time.
  Failure* failure = Failure::InternalError();
  __ mov(r0, Operand(reinterpret_cast<int32_t>(failure)));
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_out_of_memory_exception,
               frame_type,
               true,
               true);

  __ bind(&throw_out_of_memory_exception);
  GenerateThrowOutOfMemory(masm);
  // control flow for generated code will not return.

  __ bind(&throw_normal_exception);
  GenerateThrowTOS(masm);
}


void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // [sp+0]: argv

  Label invoke, exit;

  // Called from C, so do not pop argc and args on exit (preserve sp)
  // No need to save register-passed args
  // Save callee-saved registers (incl. cp, pp, and fp), sp, and lr
  __ stm(db_w, sp, kCalleeSaved | lr.bit());

  // Get address of argv, see stm above.
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  __ add(r4, sp, Operand((kNumCalleeSaved + 1) * kPointerSize));
  __ ldr(r4, MemOperand(r4));  // argv

  // Push a frame with special values setup to mark it as an entry frame.
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  __ mov(r8, Operand(-1));  // Push a bad frame pointer to fail if it is used.
  __ mov(r7, Operand(~ArgumentsAdaptorFrame::SENTINEL));
  __ mov(r6, Operand(Smi::FromInt(marker)));
  __ mov(r5, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  __ ldr(r5, MemOperand(r5));
  __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | r8.bit());
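  // (stm stores the lowest-numbered register at the lowest address, so
  // after this push r5, the saved c_entry_fp, sits on top of the stack
  // and the bad frame pointer in r8 sits deepest.)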

  // Setup frame pointer for the frame to be pushed.
  __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));

  // Call a faked try-block that does the invoke.
  __ bl(&invoke);

  // Caught exception: Store result (exception) in the pending
  // exception field in the JSEnv and return a failure sentinel.
  // Coming in here the fp will be invalid because the PushTryHandler below
  // sets it to 0 to signal the existence of the JSEntry frame.
  __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
  __ str(r0, MemOperand(ip));
  __ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
  __ b(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  // Must preserve r0-r4; r5-r7 are available.
  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
  // If an exception not caught by another handler occurs, this handler
  // returns control to the code after the bl(&invoke) above, which restores
  // all kCalleeSaved registers (including cp, pp and fp) to their saved
  // values before returning a failure to C.

  // Clear any pending exceptions.
  __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
  __ ldr(r5, MemOperand(ip));
  __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
  __ str(r5, MemOperand(ip));

  // Invoke the function by calling through the JS entry trampoline builtin.
  // Notice that we cannot store a reference to the trampoline code directly
  // in this stub, because runtime stubs are not traversed when doing GC.

  // Registers expected by Builtins::JSEntryTrampoline:
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  if (is_construct) {
    ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
    __ mov(ip, Operand(construct_entry));
  } else {
    ExternalReference entry(Builtins::JSEntryTrampoline);
    __ mov(ip, Operand(entry));
  }
  __ ldr(ip, MemOperand(ip));  // deref address

  // Branch and link to JSEntryTrampoline.
  __ mov(lr, Operand(pc));
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
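  // (mov lr, pc reads pc as the address of the mov plus 8, which is the
  // instruction right after the add above, so the pair acts as a
  // branch-and-link to the first instruction of the code object.)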

  // Unlink this frame from the handler chain. When reading the
  // address of the next handler, there is no need to use the address
  // displacement since the current stack pointer (sp) points directly
  // to the stack handler.
  __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset));
  __ mov(ip, Operand(ExternalReference(Top::k_handler_address)));
  __ str(r3, MemOperand(ip));
  // No need to restore registers
  __ add(sp, sp, Operand(StackHandlerConstants::kSize));

  __ bind(&exit);  // r0 holds result
  // Restore the top frame descriptors from the stack.
  __ pop(r3);
  __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  __ str(r3, MemOperand(ip));

  // Reset the stack to the callee saved registers.
  __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));

  // Restore callee-saved registers and return.
#ifdef DEBUG
  if (FLAG_debug_code) __ mov(lr, Operand(pc));
#endif
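  // (Including pc in the register list makes the ldm below restore the
  // callee-saved registers and perform the return in one instruction.)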
  __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
}


void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) {
  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL));
  __ b(eq, &adaptor);

  // Nothing to do: The formal number of parameters has already been
  // passed in register r0 by the calling function. Just return it.
  __ mov(pc, lr);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame and return it.
  __ bind(&adaptor);
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(pc, lr);
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The displacement is the offset of the last parameter (if any)
  // relative to the frame pointer.
  static const int kDisplacement =
      StandardFrameConstants::kCallerSPOffset - kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ tst(r1, Operand(kSmiTagMask));
  __ b(ne, &slow);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL));
  __ b(eq, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register r0. Use unsigned comparison to get negative
  // check for free.
  __ cmp(r1, r0);
  __ b(cs, &slow);

  // Read the argument from the stack and return it.
  __ sub(r3, r0, r1);
  __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
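  // (r0 and r1 are both smis, so r3 = r0 - r1 is itself a smi; shifting
  // it by kPointerSizeLog2 - kSmiTagSize turns the tagged difference
  // into a byte offset, which kDisplacement then adjusts to address the
  // requested argument.)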
  __ ldr(r0, MemOperand(r3, kDisplacement));
  __ mov(pc, lr);

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(r1, r0);
  __ b(cs, &slow);

  // Read the argument from the adaptor frame and return it.
  __ sub(r3, r0, r1);
  __ add(r3, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(r0, MemOperand(r3, kDisplacement));
  __ mov(pc, lr);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ push(r1);
  __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1);
}


void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL));
  __ b(ne, &runtime);

  // Patch the arguments.length and the parameters pointer.
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ str(r0, MemOperand(sp, 0 * kPointerSize));
  __ add(r3, r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
  __ str(r3, MemOperand(sp, 1 * kPointerSize));

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3);
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
  Label slow;
  // Get the function to call from the stack.
  // function, receiver [, arguments]
  __ ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize));

  // Check that the function is really a JavaScript function.
  // r1: pushed function (to be verified)
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &slow);
  // Get the map of the function object.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_FUNCTION_TYPE));
  __ b(ne, &slow);

  // Fast-case: Invoke the function now.
  // r1: pushed function
  ParameterCount actual(argc_);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);

  // Slow-case: Non-function called.
  __ bind(&slow);
  __ mov(r0, Operand(argc_));  // Setup the number of arguments.
  __ mov(r2, Operand(0));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
  __ Jump(Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}


#undef __

} }  // namespace v8::internal