Revision f230a1cf deps/v8/src/x64/full-codegen-x64.cc

--- a/deps/v8/src/x64/full-codegen-x64.cc
+++ b/deps/v8/src/x64/full-codegen-x64.cc
@@ -140,10 +140,9 @@
     Label ok;
     __ testq(rcx, rcx);
     __ j(zero, &ok, Label::kNear);
-    // +1 for return address.
-    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
+    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
     __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
-    __ movq(Operand(rsp, receiver_offset), kScratchRegister);
+    __ movq(args.GetReceiverOperand(), kScratchRegister);
     __ bind(&ok);
   }
 
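The removed offset arithmetic survives inside the new accessor: at this point the frame has not been built yet, so rsp still points at the return address and the receiver sits num_parameters + 1 pointer slots above it. A minimal, self-contained sketch of that calculation (a hypothetical helper for illustration, not V8's actual StackArgumentsAccessor):

    #include <cstdio>

    const int kPointerSize = 8;  // x64 pointer size

    // Before the prologue runs, rsp points at the return address; the
    // num_parameters arguments sit above it and the receiver above those.
    int ReceiverOffsetFromRsp(int num_parameters) {
      return (num_parameters + 1) * kPointerSize;  // skip return address + args
    }

    int main() {
      // A function with two parameters: 8 (return address) + 2 * 8 = 24.
      printf("%d\n", ReceiverOffsetFromRsp(2));  // prints 24
    }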
@@ -153,10 +152,7 @@
   FrameScope frame_scope(masm_, StackFrame::MANUAL);
 
   info->set_prologue_offset(masm_->pc_offset());
-  __ push(rbp);  // Caller's frame pointer.
-  __ movq(rbp, rsp);
-  __ push(rsi);  // Callee's context.
-  __ push(rdi);  // Callee's JS Function.
+  __ Prologue(BUILD_FUNCTION_FRAME);
   info->AddNoFrameRange(0, masm_->pc_offset());
 
   { Comment cmnt(masm_, "[ Allocate locals");
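The four hand-emitted frame-setup instructions are folded into a single macro-assembler call. A sketch of what Prologue(BUILD_FUNCTION_FRAME) presumably emits here, mirroring the removed lines (an assumption; the real helper also covers other frame kinds such as stub frames):

    // Sketch only: the standard JS function frame setup on x64.
    void EmitFunctionFramePrologue(MacroAssembler* masm) {
      masm->push(rbp);       // Save the caller's frame pointer.
      masm->movq(rbp, rsp);  // rbp now marks this frame's base.
      masm->push(rsi);       // Callee's context.
      masm->push(rdi);       // Callee's JSFunction.
    }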
@@ -678,7 +674,8 @@
   int offset = -var->index() * kPointerSize;
   // Adjust by a (parameter or local) base offset.
   if (var->IsParameter()) {
-    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
+    offset += kFPOnStackSize + kPCOnStackSize +
+              (info_->scope()->num_parameters() - 1) * kPointerSize;
   } else {
     offset += JavaScriptFrameConstants::kLocal0Offset;
   }
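On x64 all three constants are 8 bytes, so the new expression, kFPOnStackSize + kPCOnStackSize + (num_parameters - 1) * kPointerSize, works out to 8 + 8 + (n - 1) * 8 = (n + 1) * 8, the same offset the removed line computed. The rewrite only stops assuming that the saved frame pointer and the return address each occupy exactly one kPointerSize slot.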
@@ -1129,7 +1126,7 @@
       Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                      isolate()));
   RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
-  __ LoadHeapObject(rbx, cell);
+  __ Move(rbx, cell);
   __ Move(FieldOperand(rbx, Cell::kValueOffset),
           Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker));
 
@@ -1609,21 +1606,15 @@
       : ObjectLiteral::kNoFlags;
   int properties_count = constant_properties->length() / 2;
   if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
-      expr->depth() > 1) {
-    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-    __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
-    __ Push(Smi::FromInt(expr->literal_index()));
-    __ Push(constant_properties);
-    __ Push(Smi::FromInt(flags));
-    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
-  } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
+      expr->depth() > 1 || Serializer::enabled() ||
+      flags != ObjectLiteral::kFastElements ||
       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
     __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
     __ Push(Smi::FromInt(expr->literal_index()));
     __ Push(constant_properties);
     __ Push(Smi::FromInt(flags));
-    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
+    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
   } else {
     __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ movq(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset));
@@ -2638,7 +2629,8 @@
   }
 
   // Push the receiver of the enclosing function and do runtime call.
-  __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize));
+  StackArgumentsAccessor args(rbp, info_->scope()->num_parameters());
+  __ push(args.GetReceiverOperand());
 
   // Push the language mode.
   __ Push(Smi::FromInt(language_mode()));
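Same substitution as in the Generate() hunk above, but based at rbp now that the frame exists: the receiver sits above the saved frame pointer, the return address, and the num_parameters arguments, i.e. at rbp + (2 + num_parameters) * kPointerSize on x64, which is exactly the operand the removed push computed, so args.GetReceiverOperand() should resolve to the same stack slot.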
@@ -3513,8 +3505,8 @@
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT_EQ(args->length(), 1);
 
-  // Load the argument on the stack and call the stub.
-  VisitForStackValue(args->at(0));
+  // Load the argument into rax and call the stub.
+  VisitForAccumulatorValue(args->at(0));
 
   NumberToStringStub stub;
   __ CallStub(&stub);
@@ -4883,6 +4875,79 @@
 
 #undef __
 
+
+static const byte kJnsInstruction = 0x79;
+static const byte kJnsOffset = 0x1d;
+static const byte kCallInstruction = 0xe8;
+static const byte kNopByteOne = 0x66;
+static const byte kNopByteTwo = 0x90;
+
+
+void BackEdgeTable::PatchAt(Code* unoptimized_code,
+                            Address pc,
+                            BackEdgeState target_state,
+                            Code* replacement_code) {
+  Address call_target_address = pc - kIntSize;
+  Address jns_instr_address = call_target_address - 3;
+  Address jns_offset_address = call_target_address - 2;
+
+  switch (target_state) {
+    case INTERRUPT:
+      //     sub <profiling_counter>, <delta>  ;; Not changed
+      //     jns ok
+      //     call <interrupt stub>
+      //   ok:
+      *jns_instr_address = kJnsInstruction;
+      *jns_offset_address = kJnsOffset;
+      break;
+    case ON_STACK_REPLACEMENT:
+    case OSR_AFTER_STACK_CHECK:
+      //     sub <profiling_counter>, <delta>  ;; Not changed
+      //     nop
+      //     nop
+      //     call <on-stack replacement>
+      //   ok:
+      *jns_instr_address = kNopByteOne;
+      *jns_offset_address = kNopByteTwo;
+      break;
+  }
+
+  Assembler::set_target_address_at(call_target_address,
+                                   replacement_code->entry());
+  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
+      unoptimized_code, call_target_address, replacement_code);
+}
+
+
+BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
+    Isolate* isolate,
+    Code* unoptimized_code,
+    Address pc) {
+  Address call_target_address = pc - kIntSize;
+  Address jns_instr_address = call_target_address - 3;
+  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
+
+  if (*jns_instr_address == kJnsInstruction) {
+    ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
+    ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
+              Assembler::target_address_at(call_target_address));
+    return INTERRUPT;
+  }
+
+  ASSERT_EQ(kNopByteOne, *jns_instr_address);
+  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
+
+  if (Assembler::target_address_at(call_target_address) ==
+      isolate->builtins()->OnStackReplacement()->entry()) {
+    return ON_STACK_REPLACEMENT;
+  }
+
+  ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
+            Assembler::target_address_at(call_target_address));
+  return OSR_AFTER_STACK_CHECK;
+}
+
+
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64
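On the encoding behind the new BackEdgeTable helpers: 0x79 is the jns rel8 opcode, 0xe8 is call rel32, and 0x66 0x90 is the two-byte nop that overwrites the jns when a back edge is armed for on-stack replacement, so the current state can be recovered from the bytes alone. A self-contained sketch of that byte-level check (a hypothetical, simplified helper; the real GetBackEdgeState additionally distinguishes ON_STACK_REPLACEMENT from OSR_AFTER_STACK_CHECK by the call target):

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // Byte layout at a patchable back edge, relative to `pc`, which points
    // just past the call's 32-bit displacement (kIntSize == 4):
    //   pc - 7: 0x79 (jns rel8)         or 0x66 (first nop byte)
    //   pc - 6: 0x1d (jns displacement) or 0x90 (second nop byte)
    //   pc - 5: 0xe8 (call rel32)
    //   pc - 4: 32-bit call displacement
    enum SketchBackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT };

    SketchBackEdgeState StateFromBytes(const uint8_t* pc) {
      assert(pc[-5] == 0xe8);                    // always a call
      if (pc[-7] == 0x79) {                      // jns still in place
        assert(pc[-6] == 0x1d);
        return INTERRUPT;
      }
      assert(pc[-7] == 0x66 && pc[-6] == 0x90);  // two-byte nop
      return ON_STACK_REPLACEMENT;
    }

    int main() {
      const uint8_t interrupt_site[] = {0x79, 0x1d, 0xe8, 0, 0, 0, 0};
      const uint8_t osr_site[]       = {0x66, 0x90, 0xe8, 0, 0, 0, 0};
      printf("%d %d\n", StateFromBytes(interrupt_site + 7),
             StateFromBytes(osr_site + 7));      // prints "0 1"
    }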
