Revision f230a1cf deps/v8/src/x64/builtins-x64.cc

View differences:

deps/v8/src/x64/builtins-x64.cc
600 600
  // the stub returns.
601 601
  __ subq(Operand(rsp, 0), Immediate(5));
602 602
  __ Pushad();
603
  __ movq(arg_reg_2,
604
          ExternalReference::isolate_address(masm->isolate()));
603 605
  __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
604 606
  {  // NOLINT
605 607
    FrameScope scope(masm, StackFrame::MANUAL);
......
625 627
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
626 628

  
627 629

  
630
// Called via the code-age stub patched over a function's prologue: records
// that the code object has been executed once, then re-executes the standard
// prologue that the stub displaced and returns into the function body.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  // The C callback takes two arguments: the address of the code-age stub call
  // and the isolate.
  __ movq(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  // After Pushad(), the word that was on top of the stack (the stub call's
  // return address) sits kNumSafepointRegisters slots up; back it up by the
  // short-call length to get the address of the call instruction itself.
  __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    // NOTE(review): two argument registers are populated above, but the
    // original declared an argument count of 1 here; declare 2 so the
    // call-preparation logic matches the arguments actually passed.
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}
659

  
660

  
661
// Second-execution notification: simply routes through the common
// make-code-young path. NOTE(review): any "executed twice" bookkeeping is
// presumably handled by the code-aging machinery elsewhere — confirm against
// the code-age stub design; nothing extra is recorded here.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
664

  
665

  
628 666
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
629 667
  // Enter an internal frame.
630 668
  {
......
658 696
  }
659 697

  
660 698
  // Get the full codegen state from the stack and untag it.
661
  __ SmiToInteger32(r10, Operand(rsp, kPCOnStackSize));
699
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));
662 700

  
663 701
  // Switch on the state.
664 702
  Label not_no_registers, not_tos_rax;
665
  __ cmpq(r10, Immediate(FullCodeGenerator::NO_REGISTERS));
703
  __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
666 704
  __ j(not_equal, &not_no_registers, Label::kNear);
667 705
  __ ret(1 * kPointerSize);  // Remove state.
668 706

  
669 707
  __ bind(&not_no_registers);
670 708
  __ movq(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
671
  __ cmpq(r10, Immediate(FullCodeGenerator::TOS_REG));
709
  __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
672 710
  __ j(not_equal, &not_tos_rax, Label::kNear);
673 711
  __ ret(2 * kPointerSize);  // Remove state, rax.
674 712

  
......
692 730
}
693 731

  
694 732

  
695
// Calls Runtime::kNotifyOSR (no arguments) inside an INTERNAL frame with the
// full register file preserved around the call, then returns to the caller.
// (This builtin is removed by the diff under review; documented as-is.)
void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ Pushad();
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyOSR, 0);
  }
  // Restore all registers saved above; the runtime call's result is ignored.
  __ Popad();
  __ ret(0);
}
708

  
709

  
710 733
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
711 734
  // Stack Layout:
712 735
  // rsp[0]           : Return address
......
894 917
    // rbp[16] : function arguments
895 918
    // rbp[24] : receiver
896 919
    // rbp[32] : function
897
    static const int kArgumentsOffset = 2 * kPointerSize;
898
    static const int kReceiverOffset = 3 * kPointerSize;
899
    static const int kFunctionOffset = 4 * kPointerSize;
920
    static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
921
    static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
922
    static const int kFunctionOffset = kReceiverOffset + kPointerSize;
900 923

  
901 924
    __ push(Operand(rbp, kFunctionOffset));
902 925
    __ push(Operand(rbp, kArgumentsOffset));
......
1140 1163

  
1141 1164
  // Lookup the argument in the number to string cache.
1142 1165
  Label not_cached, argument_is_string;
1143
  NumberToStringStub::GenerateLookupNumberStringCache(
1144
      masm,
1145
      rax,  // Input.
1146
      rbx,  // Result.
1147
      rcx,  // Scratch 1.
1148
      rdx,  // Scratch 2.
1149
      &not_cached);
1166
  __ LookupNumberStringCache(rax,  // Input.
1167
                             rbx,  // Result.
1168
                             rcx,  // Scratch 1.
1169
                             rdx,  // Scratch 2.
1170
                             &not_cached);
1150 1171
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1151 1172
  __ bind(&argument_is_string);
1152 1173

  
......
1401 1422
}
1402 1423

  
1403 1424

  
1425
// Emitted after a stack check: if rsp is at or above the stack limit the
// builtin returns immediately; otherwise it invokes the stack guard runtime
// function and then tail-jumps into the OnStackReplacement builtin.
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  // Slow path: hand control to the OnStackReplacement builtin rather than
  // returning (tail jump, so its eventual return goes to our caller).
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  // Fast path: stack limit not hit, nothing to do.
  __ bind(&ok);
  __ ret(0);
}
1440

  
1441

  
1404 1442
#undef __
1405 1443

  
1406 1444
} }  // namespace v8::internal

Also available in: Unified diff