// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
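
// The '__' shorthand expands to ACCESS_MASM(masm): each mnemonic below,
// e.g. __ ldr(...), is a MacroAssembler call that emits the corresponding
// ARM instruction into the builtin's code object.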


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.

  __ ldr(result,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(
                        Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.

  __ ldr(result,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());
  __ mov(r2, Operand(undefined_sentinel));
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
    __ cmp(function, Operand(r2));
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into r0 and get rid of the rest.
  Label no_arguments;
  __ cmp(r0, Operand::Zero());
  __ b(eq, &no_arguments);
  // First arg = sp[(argc - 1) * 4].
  __ sub(r0, r0, Operand(1));
  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = r2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(r0,        // Input.
                             argument,  // Result.
                             r3,        // Scratch.
                             r4,        // Scratch.
                             r5,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r2     : argument converted to string
  //  -- r1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              r0,  // Result.
              r3,  // Scratch.
              r4,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialize the String object.
  Register map = r3;
  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmp(r4, Operand::Zero());
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));

  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));

  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
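  // (Map, properties, elements and value account for one word each, and all
  // four have been written above, so no field is left uninitialized.)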

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);

  // Is it a String?
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ tst(r3, Operand(kIsNotStringMask));
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, r0);
  __ b(&argument_is_string);

  // Load the empty string into r2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(r1);
  // Push call kind information.
  __ push(r5);
  // Function is also the parameter to the runtime call.
  __ push(r1);

  __ CallRuntime(function_id, 1);
  // Restore call kind information.
  __ pop(r5);
  // Restore receiver.
  __ pop(r1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}
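
// Code objects live on the heap, so a Code pointer carries kHeapObjectTag.
// Adding (Code::kHeaderSize - kHeapObjectTag) to it yields the untagged
// address of the first instruction, which is the jump target used above.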


void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
  // Tail call to returned code.
  __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r0);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the two incoming parameters on the stack.
    __ SmiTag(r0);
    __ push(r0);  // Smi-tagged arguments count.
    __ push(r1);  // Constructor function.

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r2, Operand(debug_step_in_fp));
      __ ldr(r2, MemOperand(r2));
      __ tst(r2, r2);
      __ b(ne, &rt_call);
#endif

      // Load the initial map and verify that it is in fact a map.
      // r1: constructor function
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r2, &rt_call);
      __ CompareObjectType(r2, r3, r4, MAP_TYPE);
      __ b(ne, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r1: constructor function
      // r2: initial map
      __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
      __ b(eq, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
        MemOperand constructor_count =
            FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
        __ ldrb(r4, constructor_count);
        __ sub(r4, r4, Operand(1), SetCC);
        __ strb(r4, constructor_count);
        __ b(ne, &allocate);

        __ Push(r1, r2);

        __ push(r1);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(r2);
        __ pop(r1);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // r1: constructor function
      // r2: initial map
      __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
      __ Allocate(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r1: constructor function
      // r2: initial map
      // r3: object size
      // r4: JSObject (not tagged)
      __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(r5, r4);
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

      // Fill all the in-object properties with the appropriate filler.
      // r1: constructor function
      // r2: initial map
      // r3: object size (in words)
      // r4: JSObject (not tagged)
      // r5: First in-object property of JSObject (not tagged)
      ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
        __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
                kBitsPerByte);
        __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
        // r0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
          __ cmp(r0, ip);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(r5, r0, r6);
        // To allow for truncation.
        __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
      }
      __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      __ InitializeFieldsWithFiller(r5, r0, r6);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ add(r4, r4, Operand(kHeapObjectTag));

475
      // allocated object if not fall through to runtime call if it is.
476
      // r1: constructor function
477
      // r4: JSObject
478
      // r5: start of next object (not tagged)
479
      __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
480
      // The field instance sizes contains both pre-allocated property fields
481
      // and in-object properties.
482
      __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
483
      __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
484
              kBitsPerByte);
485
      __ add(r3, r3, Operand(r6));
486
      __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
487
              kBitsPerByte);
488
      __ sub(r3, r3, Operand(r6), SetCC);
489

    
490
      // Done if no extra properties are to be allocated.
491
      __ b(eq, &allocated);
492
      __ Assert(pl, kPropertyAllocationCountFailed);
493

    
494
      // Scale the number of elements by pointer size and add the header for
495
      // FixedArrays to the start of the next object calculation from above.
496
      // r1: constructor
497
      // r3: number of elements in properties array
498
      // r4: JSObject
499
      // r5: start of next object
500
      __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
501
      __ Allocate(
502
          r0,
503
          r5,
504
          r6,
505
          r2,
506
          &undo_allocation,
507
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
508

    
509
      // Initialize the FixedArray.
510
      // r1: constructor
511
      // r3: number of elements in properties array
512
      // r4: JSObject
513
      // r5: FixedArray (not tagged)
514
      __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
515
      __ mov(r2, r5);
516
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
517
      __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
518
      ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
519
      __ SmiTag(r0, r3);
520
      __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
521

    
522
      // Initialize the fields to undefined.
523
      // r1: constructor function
524
      // r2: First element of FixedArray (not tagged)
525
      // r3: number of elements in properties array
526
      // r4: JSObject
527
      // r5: FixedArray (not tagged)
528
      __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
529
      ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
530
      { Label loop, entry;
531
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
532
        __ b(&entry);
533
        __ bind(&loop);
534
        __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
535
        __ bind(&entry);
536
        __ cmp(r2, r6);
537
        __ b(lt, &loop);
538
      }
539

    
540
      // Store the initialized FixedArray into the properties field of
541
      // the JSObject
542
      // r1: constructor function
543
      // r4: JSObject
544
      // r5: FixedArray (not tagged)
545
      __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
546
      __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
547

    
548
      // Continue with JSObject being successfully allocated
549
      // r1: constructor function
550
      // r4: JSObject
551
      __ jmp(&allocated);
552

    
553
      // Undo the setting of the new top so that the heap is verifiable. For
554
      // example, the map's unused properties potentially do not match the
555
      // allocated object's unused properties.
      // r4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(r4, r5);
    }

    // Allocate the new receiver object using the runtime call.
    // r1: constructor function
    __ bind(&rt_call);
    __ push(r1);  // argument for Runtime_NewObject
    __ CallRuntime(Runtime::kNewObject, 1);
    __ mov(r4, r0);

    // Receiver for constructor call allocated.
    // r4: JSObject
    __ bind(&allocated);
    __ push(r4);
    __ push(r4);

    // Reload the number of arguments and the constructor from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
    __ ldr(r3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below
    __ SmiUntag(r0, r3);

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r3, r3, Operand(2), SetCC);
    __ b(ge, &loop);
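
    // In the copy loop above, r3 is a smi (argc << 1): shifting it left by
    // (kPointerSizeLog2 - 1) == 1 turns it straight into a byte offset
    // (argc == 3 gives smi 6, and 6 << 1 == 12 == 3 * kPointerSize), and
    // subtracting 2 decrements the smi-tagged count by one argument.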

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    if (is_api_function) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected,
                    RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r0, r1, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ ldr(r0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
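  // (r1 is the smi-tagged argument count reloaded above; the same
  // LSL (kPointerSizeLog2 - 1) trick converts it to a byte count, and the
  // extra kPointerSize drops the receiver.)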
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  __ Jump(lr);
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r7 (if not FLAG_enable_ool_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ mov(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ push(r1);
    __ push(r2);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    if (!FLAG_enable_ool_constant_pool) {
      __ mov(r7, Operand(r4));
    }
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code and pass argc as r0.
    __ mov(r0, Operand(r3));
    if (is_construct) {
      // No type feedback cell is available
      Handle<Object> undefined_sentinel(
          masm->isolate()->heap()->undefined_value(), masm->isolate());
      __ mov(r2, Operand(undefined_sentinel));
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }
    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
  // Do a tail-call of the compiled function.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
  // Do a tail-call of the compiled function.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(1, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ mov(pc, r0);
}
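
// On return from the C function, r0 still holds the address of the freshly
// rewritten code sequence, so the 'mov pc, r0' above resumes execution at
// the spot where the code-age stub had been patched in.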

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
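
// For each age C in CODE_AGE_LIST (e.g. Quadragenarian), the macro above
// expands to the pair Generate_Make<C>CodeYoungAgainEvenMarking and
// Generate_Make<C>CodeYoungAgainOddMarking, both of which simply delegate
// to GenerateMakeCodeYoungAgainCommon.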


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(1, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
        masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());

  // Perform prologue operations usually performed by the young code stub.
  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));

  // Jump to point after the code-age stub.
  __ add(r0, r0, Operand(kNoCodeAgeSequenceLength * Assembler::kInstrSize));
  __ mov(pc, r0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0);
    __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
  }

  __ add(sp, sp, Operand(kPointerSize));  // Ignore state
  __ mov(pc, lr);  // Jump to miss handler
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Lookup and calculate pc offset.
    __ ldr(r1, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
    __ ldr(r2, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
    __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ sub(r1, r1, r2);
    __ SmiTag(r1);

    // Pass both function and pc offset as arguments.
    __ push(r0);
    __ push(r1);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(0)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ldr(r1, MemOperand(r0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ ldr(r1, MemOperand(r1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ add(r0, r0, Operand::SmiUntag(r1));
  __ add(lr, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  { Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label slow, non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ JumpIfSmi(r1, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  __ mov(r4, Operand::Zero());  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Do not transform the receiver for native (Compilerhints already in r3).
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ JumpIfSmi(r2, &convert_to_object);

    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r0);
      __ push(r0);

      __ push(r2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(r2, r0);

      __ pop(r0);
      __ SmiUntag(r0);

      // Exit the internal frame.
    }

    // Restore the function to r1, and the flag to r4.
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ mov(r4, Operand::Zero());
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset));
    __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ mov(r4, Operand(1, RelocInfo::NONE32));  // indicate function proxy
  __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
  __ b(eq, &shift_arguments);
  __ bind(&non_function);
  __ mov(r4, Operand(2, RelocInfo::NONE32));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r1, MemOperand(r2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ tst(r4, r4);
    __ b(eq, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(r2, Operand::Zero());
    __ SetCallKind(r5, CALL_AS_METHOD);
    __ cmp(r4, Operand(1));
    __ b(ne, &non_proxy);

    __ push(r1);  // re-add proxy object as additional argument
    __ add(r0, r0, Operand(1));
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(r2);
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ SetCallKind(r5, CALL_AS_METHOD);
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET,
          ne);

  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    = -5 * kPointerSize;
  const int kLimitOffset    = -4 * kPointerSize;
  const int kArgsOffset     =  2 * kPointerSize;
  const int kRecvOffset     =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;
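  // (Offsets are relative to fp in the INTERNAL frame entered below: the
  // function, receiver and arguments array sit in the caller's slots above
  // fp, while the loop limit and current index are pushed below it by this
  // builtin once APPLY_PREPARE has run.)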

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r0);
    __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    // Make r2 the space we have left. The stack might already be overflowed
    // here which will cause r2 to become negative.
    __ sub(r2, sp, r2);
    // Check if the arguments will overflow the stack.
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(r0));
    __ b(gt, &okay);  // Signed comparison.
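
    // (r0 here is the smi-tagged argument count; PointerOffsetFromSmiKey
    // turns it into the byte size of the arguments, so the branch is taken
    // only when the remaining stack space exceeds what the push loop needs.)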

    // Out of stack space.
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ push(r1);
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ push(r0);  // limit
    __ mov(r1, Operand::Zero());  // initial index
    __ push(r1);

    // Get the receiver.
    __ ldr(r0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r1.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Do not transform the receiver for native (Compilerhints already in r2).
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Compute the receiver in non-strict mode.
    __ JumpIfSmi(r0, &call_to_object);
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);

    // Check if the receiver is already a JavaScript object.
    // r0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &push_receiver);

    // Convert the receiver to a regular object.
    // r0: receiver
    __ bind(&call_to_object);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
    __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    // r0: receiver
    __ bind(&push_receiver);
    __ push(r0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ b(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // r0: current argument index
    __ bind(&loop);
    __ ldr(r1, MemOperand(fp, kArgsOffset));
    __ push(r1);
    __ push(r0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(r0);

    // Advance the current argument index (a smi).
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ add(r0, r0, Operand(1 << kSmiTagSize));
    __ str(r0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ ldr(r1, MemOperand(fp, kLimitOffset));
    __ cmp(r0, r1);
    __ b(ne, &loop);

    // Invoke the function.
    Label call_proxy;
    ParameterCount actual(r0);
    __ SmiUntag(r0);
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &call_proxy);
    __ InvokeFunction(r1, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ add(sp, sp, Operand(3 * kPointerSize));
    __ Jump(lr);

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(r1);  // add function proxy as last argument
    __ add(r0, r0, Operand(1));
    __ mov(r2, Operand::Zero());
    __ SetCallKind(r5, CALL_AS_METHOD);
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r0);
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(3 * kPointerSize));
}
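
// After the stm above, the stack holds (from sp upwards) the smi-tagged
// argument count (r0), the function (r1), the ARGUMENTS_ADAPTOR marker (r4),
// the caller's fp and the return address; fp then points at the saved fp
// slot, so the smi argument count is found at fp[-3 * kPointerSize] when
// the frame is torn down below.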


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -3 * kPointerSize));
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : code entry to call
  //  -- r5 : call kind information
  // -----------------------------------

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    __ sub(r2, r2, Operand(4 * kPointerSize));  // Adjust for frame.

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM