// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_X64

#include "codegen.h"
#include "deoptimizer.h"
#include "full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
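// ("__" is the conventional V8 shorthand: ACCESS_MASM(masm) expands to
// masm->, so each "__ insn(...)" below emits code through the
// MacroAssembler passed to the generator.)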


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ PopReturnAddressTo(kScratchRegister);
    __ push(rdi);
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(rdi);
  // Push call kind information.
  __ push(rcx);
  // Function is also the parameter to the runtime call.
  __ push(rdi);

  __ CallRuntime(function_id, 1);
  // Restore call kind information.
  __ pop(rcx);
  // Restore function.
  __ pop(rdi);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movq(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
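  // kScratchRegister now points at the first instruction of the
  // SharedFunctionInfo's code object (FieldOperand folds away the heap
  // object tag), so the jmp below tail-calls that code.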
  __ jmp(kScratchRegister);
}


void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
  // Tail call to returned code.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Store a smi-tagged arguments count on the stack.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    // Push the function to invoke on the stack.
    __ push(rdi);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ movq(kScratchRegister, debug_step_in_fp);
      __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
      __ j(not_equal, &rt_call);
#endif

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      // rdi: constructor
      __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      // Will both indicate a NULL and a Smi
      ASSERT(kSmiTag == 0);
      __ JumpIfSmi(rax, &rt_call);
      // rdi: constructor
      // rax: initial map (if proven valid below)
      __ CmpObjectType(rax, MAP_TYPE, rbx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // rdi: constructor
      // rax: initial map
      __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
        __ decb(FieldOperand(rcx,
                             SharedFunctionInfo::kConstructionCountOffset));
        __ j(not_zero, &allocate);

        __ push(rax);
        __ push(rdi);

        __ push(rdi);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(rdi);
        __ pop(rax);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
      __ shl(rdi, Immediate(kPointerSizeLog2));
      // rdi: size of new object
      __ Allocate(rdi,
                  rbx,
                  rdi,
                  no_reg,
                  &rt_call,
                  NO_ALLOCATION_FLAGS);
      // Allocated the JSObject, now initialize the fields.
      // rax: initial map
      // rbx: JSObject (not HeapObject tagged - the actual address).
      // rdi: start of next object
      __ movq(Operand(rbx, JSObject::kMapOffset), rax);
      __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
      __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
      __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
      // Set extra fields in the newly allocated object.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        __ movzxbq(rsi,
                   FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
        __ lea(rsi,
               Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
        // rsi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmpq(rsi, rdi);
          __ Assert(less_equal,
                    kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
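        // While constructions are still being counted, fields past the
        // pre-allocated ones are primed with the one-pointer filler map
        // instead of undefined, so kFinalizeInstanceSize (called above once
        // the countdown ends) can trim the unused slack.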
      }
      __ InitializeFieldsWithFiller(rcx, rdi, rdx);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ or_(rbx, Immediate(kHeapObjectTag));

      // Check if a non-empty properties array is needed.
      // Allocate and initialize a FixedArray if it is.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      // Calculate the total number of properties described by the map.
      __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
      __ movzxbq(rcx,
                 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
      __ addq(rdx, rcx);
      // Calculate unused properties past the end of the in-object properties.
      __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
      __ subq(rdx, rcx);
      // Done if no extra properties are to be allocated.
      __ j(zero, &allocated);
      __ Assert(positive, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // rbx: JSObject
      // rdi: start of next object (will be start of FixedArray)
      // rdx: number of elements in properties array
      __ Allocate(FixedArray::kHeaderSize,
                  times_pointer_size,
                  rdx,
                  rdi,
                  rax,
                  no_reg,
                  &undo_allocation,
                  RESULT_CONTAINS_TOP);

      // Initialize the FixedArray.
      // rbx: JSObject
      // rdi: FixedArray
      // rdx: number of elements
      // rax: start of next object
      __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
      __ movq(Operand(rdi, HeapObject::kMapOffset), rcx);  // setup the map
      __ Integer32ToSmi(rdx, rdx);
      __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

      // Initialize the fields to undefined.
      // rbx: JSObject
      // rdi: FixedArray
      // rax: start of next object
      // rdx: number of elements
      { Label loop, entry;
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
        __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
        __ jmp(&entry);
        __ bind(&loop);
        __ movq(Operand(rcx, 0), rdx);
        __ addq(rcx, Immediate(kPointerSize));
        __ bind(&entry);
        __ cmpq(rcx, rax);
        __ j(below, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject
      // rbx: JSObject
      // rdi: FixedArray
      __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
      __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


      // Continue with JSObject being successfully allocated
      // rbx: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      // rbx: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(rbx);
    }

    // Allocate the new receiver object using the runtime call.
    // rdi: function (constructor)
    __ bind(&rt_call);
    // Must restore rdi (constructor) before calling runtime.
    __ movq(rdi, Operand(rsp, 0));
    __ push(rdi);
    __ CallRuntime(Runtime::kNewObject, 1);
    __ movq(rbx, rax);  // store result in rbx

    // New object allocated.
    // rbx: newly allocated object
    __ bind(&allocated);
    // Retrieve the function from the stack.
    __ pop(rdi);

    // Retrieve smi-tagged arguments count from the stack.
    __ movq(rax, Operand(rsp, 0));
    __ SmiToInteger32(rax, rax);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ push(rbx);
    __ push(rbx);

    // Set up pointer to last argument.
    __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movq(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decq(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
                    CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;
    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movq(rax, Operand(rsp, 0));

    // Restore the arguments count and leave the construct frame.
    __ bind(&exit);
    __ movq(rbx, Operand(rsp, kPointerSize));  // Get arguments count.

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
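  // rbx holds the smi-tagged argument count. SmiToIndex turns it into a
  // byte-scaled index, and the extra 1 * kPointerSize in the lea below also
  // drops the receiver slot.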
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // registers rax and rbx hold the argument count and argument array,
    // while rdi holds the function pointer and rsi the context.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : entry (ignored)
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the function context into rsi.
    __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ push(rdx);
    __ push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movq(rax, r9);
    // Load the previous frame pointer to access the C arguments on the stack.
    __ movq(kScratchRegister, Operand(rbp, 0));
    __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movq(rdi, rdx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : entry (ignored)
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movq(rdi, rsi);
    // rdi : function

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the function and receiver and setup the context.
    __ push(rdi);
    __ push(rdx);
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ movq(rax, rcx);
    __ movq(rbx, r8);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ push(Operand(kScratchRegister, 0));  // dereference handle
    __ addq(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpq(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the code.
    if (is_construct) {
      // No type feedback cell is available
      Handle<Object> undefined_sentinel(
          masm->isolate()->factory()->undefined_value());
      __ Move(rbx, undefined_sentinel);
      // Expects rdi to hold function pointer.
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(rax);
      // Function must be in rdi.
      __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }
    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
  // Do a tail-call of the compiled function.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
  // Do a tail-call of the compiled function.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
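  // The immediate 5 below matches Assembler::kShortCallInstructionLength
  // (used explicitly in Generate_MarkCodeAsExecutedOnce further down):
  // rewinding the return address past the patched short call makes that
  // sequence re-execute when this stub returns.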
  __ subq(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ movq(arg_reg_2,
          ExternalReference::isolate_address(masm->isolate()));
  __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(1);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 1);
  }
  __ Popad();
  __ ret(0);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  __ movq(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(1);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        1);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, 0);
    __ Popad();
    // Tear down internal frame.
  }

  __ pop(MemOperand(rsp, 0));  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ movq(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  // rsp[0]           : Return address
  // rsp[8]           : Argument n
  // rsp[16]          : Argument n-1
  //  ...
  // rsp[8 * n]       : Argument 1
  // rsp[8 * (n + 1)] : Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    __ PopReturnAddressTo(rbx);
    __ Push(masm->isolate()->factory()->undefined_value());
    __ PushReturnAddressFrom(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  StackArgumentsAccessor args(rsp, rax);
  __ movq(rdi, args.GetReceiverOperand());
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Set(rdx, 0);  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &shift_arguments);

    // Do not transform the receiver for natives.
    // SharedFunctionInfo is already loaded into rbx.
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_zero, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ movq(rbx, args.GetArgumentOperand(1));
    __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &shift_arguments);

    __ bind(&convert_to_object);
    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ push(rax);

      __ push(rbx);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ movq(rbx, rax);
      __ Set(rdx, 0);  // indicate regular JS_FUNCTION

      __ pop(rax);
      __ SmiToInteger32(rax, rax);
    }

    // Restore the function to rdi.
    __ movq(rdi, args.GetReceiverOperand());
    __ jmp(&patch_receiver, Label::kNear);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(args.GetArgumentOperand(1), rbx);

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ Set(rdx, 1);  // indicate function proxy
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(equal, &shift_arguments);
  __ bind(&non_function);
  __ Set(rdx, 2);  // indicate non-function

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ movq(args.GetArgumentOperand(1), rdi);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movq(rcx, rax);
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(rbx);  // Discard copy of return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  { Label function, non_proxy;
    __ testq(rdx, rdx);
    __ j(zero, &function);
    __ Set(rbx, 0);
    __ SetCallKind(rcx, CALL_AS_METHOD);
    __ cmpq(rdx, Immediate(1));
    __ j(not_equal, &non_proxy);

    __ PopReturnAddressTo(rdx);
    __ push(rdi);  // re-add proxy object as additional argument
    __ PushReturnAddressFrom(rdx);
    __ incq(rax);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register rdx without checking arguments.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rbx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ SetCallKind(rcx, CALL_AS_METHOD);
  __ cmpq(rax, rbx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  // rsp     : return address
  // rsp[8]  : arguments
  // rsp[16] : receiver ("this")
  // rsp[24] : function
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    // rbp     : Old base pointer
    // rbp[8]  : return address
    // rbp[16] : function arguments
    // rbp[24] : receiver
    // rbp[32] : function
    static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    static const int kFunctionOffset = kReceiverOffset + kPointerSize;

    __ push(Operand(rbp, kFunctionOffset));
    __ push(Operand(rbp, kArgumentsOffset));
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movq(rcx, rsp);
    // Make rcx the space we have left. The stack might already be overflowed
    // here which will cause rcx to become negative.
    __ subq(rcx, kScratchRegister);
    // Make rdx the space we need for the array when it is unrolled onto the
    // stack.
    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmpq(rcx, rdx);
    __ j(greater, &okay);  // Signed comparison.

    // Out of stack space.
    __ push(Operand(rbp, kFunctionOffset));
    __ push(rax);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    __ bind(&okay);
    // End of stack check.

    // Push current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
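    // Both slots live in this frame's expression stack area and hold smis:
    // APPLY_PREPARE left the argument count in rax as a smi, and the index
    // is advanced with SmiAddConstant below.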
    __ push(rax);  // limit
    __ push(Immediate(0));  // index

    // Get the receiver.
    __ movq(rbx, Operand(rbp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ movq(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Do not transform the receiver for natives.
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Compute the receiver in non-strict mode.
    __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    // If given receiver is already a JavaScript object then there's no
    // reason for converting it.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &push_receiver);

    // Convert the receiver to an object.
    __ bind(&call_to_object);
    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);
    __ jmp(&push_receiver, Label::kNear);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    __ bind(&push_receiver);
    __ push(rbx);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ movq(rax, Operand(rbp, kIndexOffset));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

    // Use inline caching to speed up access to arguments.
    Handle<Code> ic =
        masm->isolate()->builtins()->KeyedLoadIC_Initialize();
    __ Call(ic, RelocInfo::CODE_TARGET);
    // It is important that we do not have a test instruction after the
    // call.  A test instruction after the call is used to indicate that
    // we have generated an inline version of the keyed load.  In this
    // case, we know that we are not generating a test instruction next.

    // Push the nth argument.
    __ push(rax);

    // Update the index on the stack and in register rax.
    __ movq(rax, Operand(rbp, kIndexOffset));
    __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    __ movq(Operand(rbp, kIndexOffset), rax);

    __ bind(&entry);
    __ cmpq(rax, Operand(rbp, kLimitOffset));
    __ j(not_equal, &loop);

    // Invoke the function.
    Label call_proxy;
    ParameterCount actual(rax);
    __ SmiToInteger32(rax, rax);
    __ movq(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ ret(3 * kPointerSize);  // remove this, receiver, and arguments

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(rdi);  // add function proxy as last argument
    __ incq(rax);
    __ Set(rbx, 0);
    __ SetCallKind(rcx, CALL_AS_METHOD);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());
  __ Move(rbx, undefined_sentinel);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
    __ cmpq(rdi, rcx);
    __ Assert(equal, kUnexpectedStringFunction);
  }

  // Load the first argument into rax and get rid of the rest
  // (including the receiver).
  StackArgumentsAccessor args(rsp, rax);
  Label no_arguments;
  __ testq(rax, rax);
  __ j(zero, &no_arguments);
  __ movq(rbx, args.GetArgumentOperand(1));
  __ PopReturnAddressTo(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ movq(rax, rbx);

  // Lookup the argument in the number to string cache.
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(rax,  // Input.
                             rbx,  // Result.
                             rcx,  // Scratch 1.
                             rdx,  // Scratch 2.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- rbx    : argument converted to string
  //  -- rdi    : constructor function
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate a JSValue and put the tagged pointer into rax.
  Label gc_required;
  __ Allocate(JSValue::kSize,
              rax,  // Result.
              rcx,  // New allocation top (we ignore it).
              no_reg,
              &gc_required,
              TAG_OBJECT);

  // Set the map.
  __ LoadGlobalFunctionInitialMap(rdi, rcx);
  if (FLAG_debug_code) {
    __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
            Immediate(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
    __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
    __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx);

  // Set properties and elements.
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set the value.
  __ movq(FieldOperand(rax, JSValue::kValueOffset), rbx);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
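  // Map, properties, elements and value are exactly the four pointer-size
  // fields of a JSValue, so every slot has been written above and no filler
  // is required.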

  // We're done. Return.
  __ ret(0);

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(rax, &convert_argument);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
  __ j(NegateCondition(is_string), &convert_argument);
  __ movq(rbx, rax);
  __ IncrementCounter(counters->string_ctor_string_value(), 1);
  __ jmp(&argument_is_string);

  // Invoke the conversion builtin and put the result into rbx.
  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rdi);  // Preserve the function.
    __ push(rax);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
    __ pop(rdi);
  }
  __ movq(rbx, rax);
  __ jmp(&argument_is_string);

  // Load the empty string into rbx, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
  __ PopReturnAddressTo(rcx);
  __ lea(rsp, Operand(rsp, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ jmp(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rbx);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ ret(0);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ push(r8);
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rcx : call kind information
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(r8);
    __ push(kScratchRegister);
    __ cmpq(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Lookup and calculate pc offset.
    __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
    __ movq(rbx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
    __ subq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ subq(rdx, FieldOperand(rbx, SharedFunctionInfo::kCodeOffset));
    __ Integer32ToSmi(rdx, rdx);
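    // rdx now holds the caller's return address as an offset from the first
    // instruction of the unoptimized code object, smi-tagged for the runtime
    // call below.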

    // Pass both function and pc offset as arguments.
    __ push(rax);
    __ push(rdx);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpq(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movq(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ lea(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(Operand(rsp, 0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ ret(0);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64