// deps/v8/src/mips/builtins-mips.cc @ f230a1cf

// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.



#include "v8.h"

#if V8_TARGET_ARCH_MIPS

#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
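// The __ macro is the usual V8 shorthand for emitting code through the
// MacroAssembler passed to each generator: ACCESS_MASM(masm) expands to
// (roughly) masm->, so "__ push(a1)" emits a push of a1 into the code
// object being generated.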


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //  --                      extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(a1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects s0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(s0, a0, num_extra_args + 1);
  __ sll(s1, s0, kPointerSizeLog2);
  __ Subu(s1, s1, kPointerSize);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
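// A note on the scaling above, which recurs throughout this file: on MIPS32
// kPointerSize is 4 and kPointerSizeLog2 is 2, so shifting a count left by
// kPointerSizeLog2 converts it into a byte offset. Here s0 ends up holding
// argc plus the extra arguments plus one for the receiver, and s1 holds
// that count scaled to bytes, less one slot.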


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.

  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ lw(result,
         MemOperand(result,
                    Context::SlotOffset(
                        Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}
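// MemOperand(reg, offset) addresses raw memory, while FieldMemOperand is
// used when the register holds a tagged heap-object pointer: it folds the
// -kHeapObjectTag adjustment into the offset so object fields can be read
// directly off the tagged pointer, as in the loads above.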


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.

  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ And(t0, a2, Operand(kSmiTagMask));
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ And(t0, a2, Operand(kSmiTagMask));
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());
  __ li(a2, Operand(undefined_sentinel));
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);

  Register function = a1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
    __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
  }

  // Load the first argument into a0 and get rid of the rest.
  Label no_arguments;
  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
  // First arg = sp[(argc - 1) * 4].
  __ Subu(a0, a0, Operand(1));
  __ sll(a0, a0, kPointerSizeLog2);
  __ Addu(sp, a0, sp);
  __ lw(a0, MemOperand(sp));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = a2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(a0,        // Input.
                             argument,  // Result.
                             a3,        // Scratch.
                             t0,        // Scratch.
                             t1,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- a2     : argument converted to string
  //  -- a1     : constructor function
  //  -- ra     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              v0,  // Result.
              a3,  // Scratch.
              t0,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialising the String Object.
  Register map = a3;
  __ LoadGlobalFunctionInitialMap(function, map, t0);
  if (FLAG_debug_code) {
    __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
        t0, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
        t0, Operand(zero_reg));
  }
  __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));

  __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(a0, &convert_argument);

  // Is it a String?
  __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ And(t0, a3, Operand(kIsNotStringMask));
  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
  __ mov(argument, a0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ Branch(&argument_is_string);

  // Invoke the conversion builtin and put the result into a2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, v0);
  __ Branch(&argument_is_string);

  // Load the empty string into a2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ Branch(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}
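// The STATIC_ASSERT above relies on a JSValue wrapper being exactly four
// pointer-sized fields (map, properties, elements and the wrapped value);
// since all four are stored before the Ret, the new object needs no
// additional filler to be fully initialized.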


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(a1);
  // Push call kind information.
  __ push(t1);
  // Function is also the parameter to the runtime call.
  __ push(a1);

  __ CallRuntime(function_id, 1);
  // Restore call kind information.
  __ pop(t1);
  // Restore the function.
  __ pop(a1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
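// A Code object is itself a tagged heap object, so its first instruction
// lives at code + Code::kHeaderSize - kHeapObjectTag. The Addu above (and
// the same pattern in the lazy-compile and recompile builtins below)
// computes that entry address before jumping to it.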


void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
  // Tail call to returned code.
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  Isolate* isolate = masm->isolate();

  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the two incoming parameters on the stack.
    __ sll(a0, a0, kSmiTagSize);  // Tag arguments count.
    __ MultiPushReversed(a0.bit() | a1.bit());

    // Use t7 to hold undefined, which is used in several places below.
    __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

    Label rt_call, allocated;
    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ li(a2, Operand(debug_step_in_fp));
      __ lw(a2, MemOperand(a2));
      __ Branch(&rt_call, ne, a2, Operand(zero_reg));
#endif

      // Load the initial map and verify that it is in fact a map.
      // a1: constructor function
      __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(a2, &rt_call);
      __ GetObjectType(a2, a3, t4);
      __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
      __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
        MemOperand constructor_count =
           FieldMemOperand(a3, SharedFunctionInfo::kConstructionCountOffset);
        __ lbu(t0, constructor_count);
        __ Subu(t0, t0, Operand(1));
        __ sb(t0, constructor_count);
        __ Branch(&allocate, ne, t0, Operand(zero_reg));

        __ Push(a1, a2);

        __ push(a1);  // Constructor.
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(a2);
        __ pop(a1);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
      __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // a1: constructor function
      // a2: initial map
      // a3: object size
      // t4: JSObject (not tagged)
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(t5, t4);
      __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
      __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
      __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
      __ Addu(t5, t5, Operand(3*kPointerSize));
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);

      // Fill all the in-object properties with appropriate filler.
      // a1: constructor function
      // a2: initial map
      // a3: object size (in words)
      // t4: JSObject (not tagged)
      // t5: First in-object property of JSObject (not tagged)
      __ sll(t0, a3, kPointerSizeLog2);
      __ addu(t6, t4, t0);   // End of object.
      ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
        __ Ext(a0, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
                kBitsPerByte);
        __ sll(t0, a0, kPointerSizeLog2);
        __ addu(a0, t5, t0);
        // a0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
              a0, Operand(t6));
        }
        __ InitializeFieldsWithFiller(t5, a0, t7);
        // To allow for truncation.
        __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
      }
      __ InitializeFieldsWithFiller(t5, t6, t7);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Addu(t4, t4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not; fall through to the runtime call if it is.
      // a1: constructor function
      // t4: JSObject
      // t5: start of next object (not tagged)
      __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
      // The field instance sizes contains both pre-allocated property fields
      // and in-object properties.
      __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
      __ Ext(t6, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
             kBitsPerByte);
      __ Addu(a3, a3, Operand(t6));
      __ Ext(t6, a0, Map::kInObjectPropertiesByte * kBitsPerByte,
              kBitsPerByte);
      __ subu(a3, a3, t6);

      // Done if no extra properties are to be allocated.
      __ Branch(&allocated, eq, a3, Operand(zero_reg));
      __ Assert(greater_equal, kPropertyAllocationCountFailed,
          a3, Operand(zero_reg));

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // a1: constructor
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: start of next object
      __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          a0,
          t5,
          t6,
          a2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // a1: constructor
      // a3: number of elements in properties array (untagged)
      // t4: JSObject
      // t5: start of next object
      __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
      __ mov(a2, t5);
      __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
      __ sll(a0, a3, kSmiTagSize);
      __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
      __ Addu(a2, a2, Operand(2 * kPointerSize));

      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);

      // Initialize the fields to undefined.
      // a1: constructor
      // a2: First element of FixedArray (not tagged)
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ sll(t3, a3, kPointerSizeLog2);
      __ addu(t6, a2, t3);  // End of object.
      ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (count_constructions) {
          __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
          __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t8));
        }
        __ jmp(&entry);
        __ bind(&loop);
        __ sw(t7, MemOperand(a2));
        __ addiu(a2, a2, kPointerSize);
        __ bind(&entry);
        __ Branch(&loop, less, a2, Operand(t6));
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // a1: constructor function
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ Addu(t5, t5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // a1: constructor function
      // t4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      // t4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(t4, t5);
    }

    __ bind(&rt_call);
    // Allocate the new receiver object using the runtime call.
    // a1: constructor function
    __ push(a1);  // Argument for Runtime_NewObject.
    __ CallRuntime(Runtime::kNewObject, 1);
    __ mov(t4, v0);

    // Receiver for constructor call allocated.
    // t4: JSObject
    __ bind(&allocated);
    __ push(t4);
    __ push(t4);

    // Reload the number of arguments from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));
    __ lw(a3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ srl(a0, a3, kSmiTagSize);

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(a3, a3, Operand(-2));
    __ Branch(&loop, greater_equal, a3, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected,
                    RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, a1, a3);
    __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ sll(t0, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  __ Ret();
}
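// A note on the epilogue above: a1 holds the smi-tagged argument count
// loaded just before the construct frame is left. With a smi tag shift of 1
// and kPointerSizeLog2 of 2, shifting by (kPointerSizeLog2 - 1) turns the
// smi directly into a byte count; the extra kPointerSize added afterwards
// drops the receiver slot.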


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();   // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      // No type feedback cell is available
      Handle<Object> undefined_sentinel(
          masm->isolate()->heap()->undefined_value(), masm->isolate());
      __ li(a2, Operand(undefined_sentinel));
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}
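// Each argv slot passed in from the C++ entry code holds a handle (a
// pointer to an object slot) rather than the object itself, which is why
// the copy loop above loads the slot and then dereferences it once more
// before pushing the actual argument.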


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
  // Do a tail-call of the compiled function.
  __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t9);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
  // Do a tail-call of the compiled function.
  __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t9);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  __ mov(a0, ra);
  // Adjust a0 to point to the head of the PlatformCodeAge sequence
  __ Subu(a0, a0,
      Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize));
  // Restore the original return address of the function
  __ mov(ra, at);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(1, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}
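// This helper is reached from the patched (aged) prologue of a code object:
// on entry ra points just past the code-age sequence (a0 is rewound to the
// start of that sequence above) while at carries the function's original
// return address, which is moved back into ra. Once the C helper has made
// the code young again, Jump(a0) resumes execution at the repaired prologue.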

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  __ mov(a0, ra);
  // Adjust a0 to point to the head of the PlatformCodeAge sequence
  __ Subu(a0, a0,
      Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize));
  // Restore the original return address of the function
  __ mov(ra, at);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(1, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(2 * kPointerSize));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand((kNoCodeAgeSequenceLength) * Assembler::kInstrSize));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification; this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ Jump(ra);  // Jump to miss handler.
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}
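// A note on the Ret(USE_DELAY_SLOT) pattern above: on MIPS the instruction
// following a branch executes in the branch delay slot, so the Addu emitted
// right after each Ret still runs before the return completes, dropping the
// state (and, in the TOS_REG case, the slot of the saved accumulator).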


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Lookup and calculate pc offset.
    __ lw(a1, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
    __ lw(a2, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
    __ Subu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Subu(a1, a1, a2);
    __ SmiTag(a1);

    // Pass both function and pc offset as arguments.
    __ push(a0);
    __ push(a1);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  { Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ push(t2);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // a0: actual number of arguments
  Label slow, non_function;
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // 3a. Patch the first argument if necessary when calling a function.
  // a0: actual number of arguments
  // a1: function
  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE32));  // Indicate regular JS_FUNCTION.
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a3).
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Compute the receiver in non-strict mode.
    // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);
    __ lw(a2, MemOperand(a2, -kPointerSize));
    // a0: actual number of arguments
    // a1: function
    // a2: first argument
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_receiver, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_receiver, eq, a2, Operand(a3));

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    __ bind(&convert_to_object);
    // Enter an internal frame in order to preserve argument count.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ push(a0);

      __ push(a2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(a2, v0);

      __ pop(a0);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      // Leave internal frame.
    }
    // Restore the function to a1, and the flag to t0.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a1, MemOperand(at));
    __ li(t0, Operand(0, RelocInfo::NONE32));
    __ Branch(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ lw(a2, FieldMemOperand(cp, kGlobalIndex));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset));
    __ lw(a2, FieldMemOperand(a2, kGlobalIndex));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a3, sp, at);
    __ sw(a2, MemOperand(a3, -kPointerSize));

    __ Branch(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(t0, Operand(1, RelocInfo::NONE32));  // Indicate function proxy.
  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  __ bind(&non_function);
  __ li(t0, Operand(2, RelocInfo::NONE32));  // Indicate non-function.

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(a2, sp, at);
  __ sw(a1, MemOperand(a2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(a2, zero_reg);
    __ SetCallKind(t1, CALL_AS_METHOD);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ push(a1);  // Re-add proxy object as additional argument.
    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register a3 without checking arguments.
  // a0: actual number of arguments
  // a1: function
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
         FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ SetCallKind(t1, CALL_AS_METHOD);
  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    = -5 * kPointerSize;
  const int kLimitOffset    = -4 * kPointerSize;
  const int kArgsOffset     =  2 * kPointerSize;
  const int kRecvOffset     =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgsOffset));  // Get the args array.
    __ push(a0);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    // Make a2 the space we have left. The stack might already be overflowed
    // here which will cause a2 to become negative.
    __ subu(a2, sp, a2);
    // Check if the arguments will overflow the stack.
    __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
    __ Branch(&okay, gt, a2, Operand(t3));  // Signed comparison.

    // Out of stack space.
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ push(a1);
    __ push(v0);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ push(v0);  // Limit.
    __ mov(a1, zero_reg);  // Initial index.
    __ push(a1);

    // Get the receiver.
    __ lw(a0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));

    // Change context eagerly to get the right global object if necessary.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in a1.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a2).
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Compute the receiver in non-strict mode.
    __ JumpIfSmi(a0, &call_to_object);
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ Branch(&use_global_receiver, eq, a0, Operand(a1));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_receiver, eq, a0, Operand(a2));

    // Check if the receiver is already a JavaScript object.
    // a0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a0, a1, a1);
    __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Convert the receiver to a regular object.
    // a0: receiver
    __ bind(&call_to_object);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver.
    __ Branch(&push_receiver);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ lw(a0, FieldMemOperand(cp, kGlobalOffset));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kNativeContextOffset));
    __ lw(a0, FieldMemOperand(a0, kGlobalOffset));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    // a0: receiver
    __ bind(&push_receiver);
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Branch(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // a0: current argument index
    __ bind(&loop);
    __ lw(a1, MemOperand(fp, kArgsOffset));
    __ push(a1);
    __ push(a0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(v0);

    // Use inline caching to access the arguments.
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Addu(a0, a0, Operand(1 << kSmiTagSize));
    __ sw(a0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ lw(a1, MemOperand(fp, kLimitOffset));
    __ Branch(&loop, ne, a0, Operand(a1));

    // Invoke the function.
    Label call_proxy;
    ParameterCount actual(a0);
    __ sra(a0, a0, kSmiTagSize);
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));

    __ InvokeFunction(a1, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ Ret(USE_DELAY_SLOT);
    __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(a1);  // Add function proxy as last argument.
    __ Addu(a0, a0, Operand(1));
    __ li(a2, Operand(0, RelocInfo::NONE32));
    __ SetCallKind(t1, CALL_AS_METHOD);
    __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    // Tear down the internal frame and remove function, receiver and args.
  }

  __ Ret(USE_DELAY_SLOT);
  __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.
}
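// kIndexOffset and kLimitOffset above are fp-relative slots in the internal
// frame holding the current argument index and the number of arguments to
// copy, both kept as smis (the limit is the value APPLY_PREPARE returned);
// adding 1 << kSmiTagSize to the index advances it by one in smi form.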


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp, Operand(3 * kPointerSize));
}
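// MultiPush stores the listed registers with the lowest-numbered one at the
// lowest address, so the adaptor frame built above is, from sp upwards:
// smi-tagged argc, function, ARGUMENTS_ADAPTOR marker, caller fp, ra, with
// fp left pointing at the saved caller fp. That is why
// LeaveArgumentsAdaptorFrame below reloads argc from fp - 3 * kPointerSize.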


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -3 * kPointerSize));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, t0);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: callee code entry
  //  -- t1: call kind information
  // -----------------------------------

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq,
      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into a2.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(a2, a2, kPointerSizeLog2);
    __ subu(a2, a0, a2);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: copy end address
    // a3: code entry to call

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address is fp.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(a2, fp, Operand(t2));
    __ Addu(a2, a2, Operand(-4 * kPointerSize));  // Adjust for frame.

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);

  __ Call(a3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(a3);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS