Revision f230a1cf deps/v8/src/x64/lithium-codegen-x64.cc

--- a/deps/v8/src/x64/lithium-codegen-x64.cc
+++ b/deps/v8/src/x64/lithium-codegen-x64.cc
@@ -89,9 +89,7 @@
   ASSERT(is_done());
   code->set_stack_slots(GetStackSlotCount());
   code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
-  if (FLAG_weak_embedded_maps_in_optimized_code) {
-    RegisterDependentCodeForEmbeddedMaps(code);
-  }
+  RegisterDependentCodeForEmbeddedMaps(code);
   PopulateDeoptimizationData(code);
   info()->CommitDependencies(code);
 }
@@ -103,24 +101,6 @@
 }
 
 
-void LCodeGen::Comment(const char* format, ...) {
-  if (!FLAG_code_comments) return;
-  char buffer[4 * KB];
-  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
-  va_list arguments;
-  va_start(arguments, format);
-  builder.AddFormattedList(format, arguments);
-  va_end(arguments);
-
-  // Copy the string before recording it in the assembler to avoid
-  // issues when the stack allocated buffer goes out of scope.
-  int length = builder.position();
-  Vector<char> copy = Vector<char>::New(length + 1);
-  OS::MemCopy(copy.start(), builder.Finalize(), copy.length());
-  masm()->RecordComment(copy.start());
-}
-
-
 #ifdef _MSC_VER
 void LCodeGen::MakeSureStackPagesMapped(int offset) {
   const int kPageSize = 4 * KB;
@@ -152,10 +132,9 @@
       Label ok;
       __ testq(rcx, rcx);
       __ j(zero, &ok, Label::kNear);
-      // +1 for return address.
-      int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
+      StackArgumentsAccessor args(rsp, scope()->num_parameters());
       __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
-      __ movq(Operand(rsp, receiver_offset), kScratchRegister);
+      __ movq(args.GetReceiverOperand(), kScratchRegister);
       __ bind(&ok);
     }
   }
@@ -164,14 +143,7 @@
   if (NeedsEagerFrame()) {
     ASSERT(!frame_is_built_);
     frame_is_built_ = true;
-    __ push(rbp);  // Caller's frame pointer.
-    __ movq(rbp, rsp);
-    __ push(rsi);  // Callee's context.
-    if (info()->IsStub()) {
-      __ Push(Smi::FromInt(StackFrame::STUB));
-    } else {
-      __ push(rdi);  // Callee's JS function.
-    }
+    __ Prologue(info()->IsStub() ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME);
     info()->AddNoFrameRange(0, masm_->pc_offset());
   }
 
@@ -273,36 +245,6 @@
 }
 
 
-bool LCodeGen::GenerateBody() {
-  ASSERT(is_generating());
-  bool emit_instructions = true;
-  for (current_instruction_ = 0;
-       !is_aborted() && current_instruction_ < instructions_->length();
-       current_instruction_++) {
-    LInstruction* instr = instructions_->at(current_instruction_);
-
-    // Don't emit code for basic blocks with a replacement.
-    if (instr->IsLabel()) {
-      emit_instructions = !LLabel::cast(instr)->HasReplacement();
-    }
-    if (!emit_instructions) continue;
-
-    if (FLAG_code_comments && instr->HasInterestingComment(this)) {
-      Comment(";;; <@%d,#%d> %s",
-              current_instruction_,
-              instr->hydrogen_value()->id(),
-              instr->Mnemonic());
-    }
-
-    RecordAndUpdatePosition(instr->position());
-
-    instr->CompileToNative(this);
-  }
-  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
-  return !is_aborted();
-}
-
-
 bool LCodeGen::GenerateJumpTable() {
   Label needs_frame;
   if (jump_table_.length() > 0) {
@@ -350,8 +292,9 @@
     for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
       LDeferredCode* code = deferred_[i];
 
-      int pos = instructions_->at(code->instruction_index())->position();
-      RecordAndUpdatePosition(pos);
+      HValue* value =
+          instructions_->at(code->instruction_index())->hydrogen_value();
+      RecordAndWritePosition(value->position());
 
       Comment(";;; <@%d,#%d> "
               "-------------------- Deferred %s --------------------",
@@ -614,8 +557,6 @@
                                int argc) {
   EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - masm()->CallSize(code));
   ASSERT(instr != NULL);
-  LPointerMap* pointers = instr->pointer_map();
-  RecordPosition(pointers->position());
   __ call(code, mode);
   RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc);
 
@@ -637,13 +578,13 @@
 
 void LCodeGen::CallRuntime(const Runtime::Function* function,
                            int num_arguments,
-                           LInstruction* instr) {
+                           LInstruction* instr,
+                           SaveFPRegsMode save_doubles) {
   ASSERT(instr != NULL);
   ASSERT(instr->HasPointerMap());
-  LPointerMap* pointers = instr->pointer_map();
-  RecordPosition(pointers->position());
 
-  __ CallRuntime(function, num_arguments);
+  __ CallRuntime(function, num_arguments, save_doubles);
+
   RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
 }
 
@@ -754,26 +695,31 @@
 
 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
   ZoneList<Handle<Map> > maps(1, zone());
+  ZoneList<Handle<JSObject> > objects(1, zone());
   int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
   for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
-    RelocInfo::Mode mode = it.rinfo()->rmode();
-    if (mode == RelocInfo::EMBEDDED_OBJECT &&
-        it.rinfo()->target_object()->IsMap()) {
-      Handle<Map> map(Map::cast(it.rinfo()->target_object()));
-      if (map->CanTransition()) {
+    if (Code::IsWeakEmbeddedObject(code->kind(), it.rinfo()->target_object())) {
+      if (it.rinfo()->target_object()->IsMap()) {
+        Handle<Map> map(Map::cast(it.rinfo()->target_object()));
         maps.Add(map, zone());
+      } else if (it.rinfo()->target_object()->IsJSObject()) {
+        Handle<JSObject> object(JSObject::cast(it.rinfo()->target_object()));
+        objects.Add(object, zone());
       }
     }
   }
 #ifdef VERIFY_HEAP
-  // This disables verification of weak embedded maps after full GC.
+  // This disables verification of weak embedded objects after full GC.
   // AddDependentCode can cause a GC, which would observe the state where
   // this code is not yet in the depended code lists of the embedded maps.
-  NoWeakEmbeddedMapsVerificationScope disable_verification_of_embedded_maps;
+  NoWeakObjectVerificationScope disable_verification_of_embedded_objects;
 #endif
   for (int i = 0; i < maps.length(); i++) {
     maps.at(i)->AddDependentCode(DependentCode::kWeaklyEmbeddedGroup, code);
   }
+  for (int i = 0; i < objects.length(); i++) {
+    AddWeakObjectToCodeDependency(isolate()->heap(), objects.at(i), code);
+  }
 }
 
 
@@ -884,7 +830,7 @@
 
 
 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
-  LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
+  LPointerMap empty_pointers(zone());
   RecordSafepoint(&empty_pointers, deopt_mode);
 }
 
@@ -896,17 +842,10 @@
 }
 
 
-void LCodeGen::RecordPosition(int position) {
+void LCodeGen::RecordAndWritePosition(int position) {
   if (position == RelocInfo::kNoPosition) return;
   masm()->positions_recorder()->RecordPosition(position);
-}
-
-
-void LCodeGen::RecordAndUpdatePosition(int position) {
-  if (position >= 0 && position != old_position_) {
-    masm()->positions_recorder()->RecordPosition(position);
-    old_position_ = position;
-  }
+  masm()->positions_recorder()->WriteRecordedPositions();
 }
 
 
@@ -973,11 +912,6 @@
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
-    case CodeStub::NumberToString: {
-      NumberToStringStub stub;
-      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
-      break;
-    }
     case CodeStub::StringCompare: {
       StringCompareStub stub;
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
@@ -1615,8 +1549,7 @@
 
 void LCodeGen::DoConstantT(LConstantT* instr) {
   Handle<Object> value = instr->value(isolate());
-  AllowDeferredHandleDereference smi_check;
-  __ LoadObject(ToRegister(instr->result()), value);
+  __ Move(ToRegister(instr->result()), value);
 }
 
 
@@ -1832,7 +1765,7 @@
     __ jmp(&return_right, Label::kNear);
 
     __ bind(&check_zero);
-    XMMRegister xmm_scratch = xmm0;
+    XMMRegister xmm_scratch = double_scratch0();
     __ xorps(xmm_scratch, xmm_scratch);
     __ ucomisd(left_reg, xmm_scratch);
     __ j(not_equal, &return_left, Label::kNear);  // left == right != 0.
@@ -1878,15 +1811,17 @@
       // when there is a mulsd depending on the result
       __ movaps(left, left);
       break;
-    case Token::MOD:
+    case Token::MOD: {
+      XMMRegister xmm_scratch = double_scratch0();
       __ PrepareCallCFunction(2);
-      __ movaps(xmm0, left);
+      __ movaps(xmm_scratch, left);
       ASSERT(right.is(xmm1));
       __ CallCFunction(
           ExternalReference::double_fp_operation(Token::MOD, isolate()), 2);
       __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
-      __ movaps(result, xmm0);
+      __ movaps(result, xmm_scratch);
       break;
+    }
     default:
       UNREACHABLE();
       break;
@@ -1905,14 +1840,6 @@
 }
 
 
-int LCodeGen::GetNextEmittedBlock() const {
-  for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) {
-    if (!chunk_->GetLabel(i)->HasReplacement()) return i;
-  }
-  return -1;
-}
-
-
 template<class InstrType>
 void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
   int left_block = instr->TrueDestination(chunk_);
@@ -1947,25 +1874,6 @@
 }
 
 
-void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) {
-  Representation r = instr->hydrogen()->value()->representation();
-  if (r.IsSmiOrInteger32() || r.IsDouble()) {
-    EmitBranch(instr, no_condition);
-  } else {
-    ASSERT(r.IsTagged());
-    Register reg = ToRegister(instr->value());
-    HType type = instr->hydrogen()->value()->type();
-    if (type.IsTaggedNumber()) {
-      EmitBranch(instr, no_condition);
-    }
-    __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
-    __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
-                   Heap::kHeapNumberMapRootIndex);
-    EmitBranch(instr, equal);
-  }
-}
-
-
 void LCodeGen::DoBranch(LBranch* instr) {
   Representation r = instr->hydrogen()->value()->representation();
   if (r.IsInteger32()) {
@@ -1981,8 +1889,9 @@
   } else if (r.IsDouble()) {
     ASSERT(!info()->IsStub());
     XMMRegister reg = ToDoubleRegister(instr->value());
-    __ xorps(xmm0, xmm0);
-    __ ucomisd(reg, xmm0);
+    XMMRegister xmm_scratch = double_scratch0();
+    __ xorps(xmm_scratch, xmm_scratch);
+    __ ucomisd(reg, xmm_scratch);
     EmitBranch(instr, not_equal);
   } else {
     ASSERT(r.IsTagged());
@@ -2001,8 +1910,9 @@
       EmitBranch(instr, no_condition);
     } else if (type.IsHeapNumber()) {
       ASSERT(!info()->IsStub());
-      __ xorps(xmm0, xmm0);
-      __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
+      XMMRegister xmm_scratch = double_scratch0();
+      __ xorps(xmm_scratch, xmm_scratch);
+      __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset));
       EmitBranch(instr, not_equal);
     } else if (type.IsString()) {
       ASSERT(!info()->IsStub());
@@ -2083,8 +1993,9 @@
         Label not_heap_number;
         __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
         __ j(not_equal, &not_heap_number, Label::kNear);
-        __ xorps(xmm0, xmm0);
-        __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
+        XMMRegister xmm_scratch = double_scratch0();
+        __ xorps(xmm_scratch, xmm_scratch);
+        __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset));
         __ j(zero, instr->FalseLabel(chunk_));
         __ jmp(instr->TrueLabel(chunk_));
         __ bind(&not_heap_number);
@@ -2119,6 +2030,10 @@
     case Token::EQ_STRICT:
       cond = equal;
      break;
+    case Token::NE:
+    case Token::NE_STRICT:
+      cond = not_equal;
+      break;
     case Token::LT:
       cond = is_unsigned ? below : less;
       break;
@@ -2206,7 +2121,7 @@
 
   if (instr->right()->IsConstantOperand()) {
     Handle<Object> right = ToHandle(LConstantOperand::cast(instr->right()));
-    __ CmpObject(left, right);
+    __ Cmp(left, right);
   } else {
     Register right = ToRegister(instr->right());
     __ cmpq(left, right);
@@ -2574,7 +2489,7 @@
     InstanceofStub stub(flags);
 
     __ push(ToRegister(instr->value()));
-    __ PushHeapObject(instr->function());
+    __ Push(instr->function());
 
     static const int kAdditionalDelta = 10;
     int delta =
@@ -2610,14 +2525,6 @@
 }
 
 
-void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
-  Register object = ToRegister(instr->object());
-  Register result = ToRegister(instr->result());
-  __ movq(result, FieldOperand(object, HeapObject::kMapOffset));
-  __ movzxbq(result, FieldOperand(result, Map::kInstanceSizeOffset));
-}
-
-
 void LCodeGen::DoCmpT(LCmpT* instr) {
   Token::Value op = instr->op();
 
@@ -2682,7 +2589,7 @@
 
 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
   Register result = ToRegister(instr->result());
-  __ LoadGlobalCell(result, instr->hydrogen()->cell());
+  __ LoadGlobalCell(result, instr->hydrogen()->cell().handle());
   if (instr->hydrogen()->RequiresHoleCheck()) {
     __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
     DeoptimizeIf(equal, instr->environment());
@@ -2704,7 +2611,7 @@
 
 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
   Register value = ToRegister(instr->value());
-  Handle<Cell> cell_handle = instr->hydrogen()->cell();
+  Handle<Cell> cell_handle = instr->hydrogen()->cell().handle();
 
   // If the cell we are storing to contains the hole it could have
   // been deleted from the property dictionary. In that case, we need
@@ -2805,7 +2712,7 @@
       __ load_rax(ToExternalReference(LConstantOperand::cast(instr->object())));
     } else {
       Register object = ToRegister(instr->object());
-      __ movq(result, MemOperand(object, offset));
+      __ Load(result, MemOperand(object, offset), access.representation());
     }
     return;
   }
@@ -2819,12 +2726,11 @@
   }
 
   Register result = ToRegister(instr->result());
-  if (access.IsInobject()) {
-    __ movq(result, FieldOperand(object, offset));
-  } else {
+  if (!access.IsInobject()) {
     __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
-    __ movq(result, FieldOperand(result, offset));
+    object = result;
   }
+  __ Load(result, FieldOperand(object, offset), access.representation());
 }
 
 
@@ -2879,6 +2785,12 @@
 }
 
 
+void LCodeGen::DoLoadRoot(LLoadRoot* instr) {
+  Register result = ToRegister(instr->result());
+  __ LoadRoot(result, instr->index());
+}
+
+
 void LCodeGen::DoLoadExternalArrayPointer(
     LLoadExternalArrayPointer* instr) {
   Register result = ToRegister(instr->result());
@@ -2896,8 +2808,9 @@
       instr->index()->IsConstantOperand()) {
     int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
     int32_t const_length = ToInteger32(LConstantOperand::cast(instr->length()));
-    int index = (const_length - const_index) + 1;
-    __ movq(result, Operand(arguments, index * kPointerSize));
+    StackArgumentsAccessor args(arguments, const_length,
+                                ARGUMENTS_DONT_CONTAIN_RECEIVER);
+    __ movq(result, args.GetArgumentOperand(const_index));
   } else {
     Register length = ToRegister(instr->length());
     // There are two words between the frame pointer and the last argument.
@@ -2907,8 +2820,9 @@
     } else {
      __ subl(length, ToOperand(instr->index()));
     }
-    __ movq(result,
-            Operand(arguments, length, times_pointer_size, kPointerSize));
+    StackArgumentsAccessor args(arguments, length,
+                                ARGUMENTS_DONT_CONTAIN_RECEIVER);
+    __ movq(result, args.GetArgumentOperand(0));
   }
 }
 
@@ -3112,7 +3026,7 @@
   Register result = ToRegister(instr->result());
 
   if (instr->hydrogen()->from_inlined()) {
-    __ lea(result, Operand(rsp, -2 * kPointerSize));
+    __ lea(result, Operand(rsp, -kFPOnStackSize + -kPCOnStackSize));
   } else {
     // Check for arguments adapter frame.
     Label done, adapted;
@@ -3234,7 +3148,9 @@
   __ testl(length, length);
   __ j(zero, &invoke, Label::kNear);
   __ bind(&loop);
-  __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
+  StackArgumentsAccessor args(elements, length,
+                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
+  __ push(args.GetArgumentOperand(0));
   __ decl(length);
   __ j(not_zero, &loop);
 
@@ -3242,7 +3158,6 @@
   __ bind(&invoke);
   ASSERT(instr->HasPointerMap());
   LPointerMap* pointers = instr->pointer_map();
-  RecordPosition(pointers->position());
   SafepointGenerator safepoint_generator(
       this, pointers, Safepoint::kLazyDeopt);
   ParameterCount actual(rax);
@@ -3285,7 +3200,7 @@
 
 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
   __ push(rsi);  // The context is the first argument.
-  __ PushHeapObject(instr->hydrogen()->pairs());
+  __ Push(instr->hydrogen()->pairs());
   __ Push(Smi::FromInt(instr->hydrogen()->flags()));
   CallRuntime(Runtime::kDeclareGlobals, 3, instr);
 }
@@ -3316,11 +3231,10 @@
       dont_adapt_arguments || formal_parameter_count == arity;
 
   LPointerMap* pointers = instr->pointer_map();
-  RecordPosition(pointers->position());
 
   if (can_invoke_directly) {
     if (rdi_state == RDI_UNINITIALIZED) {
-      __ LoadHeapObject(rdi, function);
+      __ Move(rdi, function);
     }
 
     // Change context.
@@ -3401,10 +3315,10 @@
   __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
 
   __ bind(&allocated);
-  __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
+  __ MoveDouble(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
   __ shl(tmp2, Immediate(1));
   __ shr(tmp2, Immediate(1));
-  __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
+  __ MoveDouble(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
   __ StoreToSafepointRegisterSlot(input_reg, tmp);
 
   __ bind(&done);
@@ -3451,11 +3365,11 @@
   Representation r = instr->hydrogen()->value()->representation();
 
   if (r.IsDouble()) {
-    XMMRegister scratch = xmm0;
+    XMMRegister scratch = double_scratch0();
     XMMRegister input_reg = ToDoubleRegister(instr->value());
     __ xorps(scratch, scratch);
     __ subsd(scratch, input_reg);
-    __ andpd(input_reg, scratch);
+    __ andps(input_reg, scratch);
   } else if (r.IsInteger32()) {
     EmitIntegerMathAbs(instr);
   } else if (r.IsSmi()) {
@@ -3473,7 +3387,7 @@
 
 
 void LCodeGen::DoMathFloor(LMathFloor* instr) {
-  XMMRegister xmm_scratch = xmm0;
+  XMMRegister xmm_scratch = double_scratch0();
   Register output_reg = ToRegister(instr->result());
   XMMRegister input_reg = ToDoubleRegister(instr->value());
 
@@ -3520,7 +3434,7 @@
     __ bind(&negative_sign);
     // Truncate, then compare and compensate.
     __ cvttsd2si(output_reg, input_reg);
-    __ cvtlsi2sd(xmm_scratch, output_reg);
+    __ Cvtlsi2sd(xmm_scratch, output_reg);
     __ ucomisd(input_reg, xmm_scratch);
     __ j(equal, &done, Label::kNear);
     __ subl(output_reg, Immediate(1));
@@ -3532,7 +3446,7 @@
 
 
 void LCodeGen::DoMathRound(LMathRound* instr) {
-  const XMMRegister xmm_scratch = xmm0;
+  const XMMRegister xmm_scratch = double_scratch0();
   Register output_reg = ToRegister(instr->result());
   XMMRegister input_reg = ToDoubleRegister(instr->value());
   static int64_t one_half = V8_INT64_C(0x3FE0000000000000);  // 0.5
@@ -3569,7 +3483,7 @@
   __ RecordComment("D2I conversion overflow");
   DeoptimizeIf(equal, instr->environment());
 
-  __ cvtlsi2sd(xmm_scratch, output_reg);
+  __ Cvtlsi2sd(xmm_scratch, output_reg);
   __ ucomisd(input_reg, xmm_scratch);
   __ j(equal, &restore, Label::kNear);
   __ subl(output_reg, Immediate(1));
@@ -3600,7 +3514,7 @@
 
 
 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
-  XMMRegister xmm_scratch = xmm0;
+  XMMRegister xmm_scratch = double_scratch0();
   XMMRegister input_reg = ToDoubleRegister(instr->value());
   ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
 
@@ -3717,8 +3631,7 @@
   // by computing:
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
   XMMRegister result = ToDoubleRegister(instr->result());
-  // We use xmm0 as fixed scratch register here.
-  XMMRegister scratch4 = xmm0;
+  XMMRegister scratch4 = double_scratch0();
   __ movq(scratch3, V8_INT64_C(0x4130000000000000),
           RelocInfo::NONE64);  // 1.0 x 2^20 as double
   __ movq(scratch4, scratch3);
@@ -3731,10 +3644,11 @@
 void LCodeGen::DoMathExp(LMathExp* instr) {
   XMMRegister input = ToDoubleRegister(instr->value());
   XMMRegister result = ToDoubleRegister(instr->result());
+  XMMRegister temp0 = double_scratch0();
   Register temp1 = ToRegister(instr->temp1());
   Register temp2 = ToRegister(instr->temp2());
 
-  MathExpGenerator::EmitMathExp(masm(), input, result, xmm0, temp1, temp2);
+  MathExpGenerator::EmitMathExp(masm(), input, result, temp0, temp1, temp2);
 }
 
 
@@ -3741,8 +3655,33 @@
 void LCodeGen::DoMathLog(LMathLog* instr) {
-  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
-  TranscendentalCacheStub stub(TranscendentalCache::LOG,
-                               TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+  ASSERT(instr->value()->Equals(instr->result()));
+  XMMRegister input_reg = ToDoubleRegister(instr->value());
+  XMMRegister xmm_scratch = double_scratch0();
+  Label positive, done, zero;
+  __ xorps(xmm_scratch, xmm_scratch);
+  __ ucomisd(input_reg, xmm_scratch);
+  __ j(above, &positive, Label::kNear);
+  __ j(equal, &zero, Label::kNear);
+  ExternalReference nan =
+      ExternalReference::address_of_canonical_non_hole_nan();
+  Operand nan_operand = masm()->ExternalOperand(nan);
+  __ movsd(input_reg, nan_operand);
+  __ jmp(&done, Label::kNear);
+  __ bind(&zero);
+  ExternalReference ninf =
+      ExternalReference::address_of_negative_infinity();
+  Operand ninf_operand = masm()->ExternalOperand(ninf);
+  __ movsd(input_reg, ninf_operand);
+  __ jmp(&done, Label::kNear);
+  __ bind(&positive);
+  __ fldln2();
+  __ subq(rsp, Immediate(kDoubleSize));
+  __ movsd(Operand(rsp, 0), input_reg);
+  __ fld_d(Operand(rsp, 0));
+  __ fyl2x();
+  __ fstp_d(Operand(rsp, 0));
+  __ movsd(input_reg, Operand(rsp, 0));
+  __ addq(rsp, Immediate(kDoubleSize));
+  __ bind(&done);
 }
 
 
@@ -3777,7 +3716,6 @@
   Handle<JSFunction> known_function = instr->hydrogen()->known_function();
   if (known_function.is_null()) {
     LPointerMap* pointers = instr->pointer_map();
-    RecordPosition(pointers->position());
     SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
     ParameterCount count(instr->arity());
     __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
@@ -3910,7 +3848,7 @@
 
 
 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
-  CallRuntime(instr->function(), instr->arity(), instr);
+  CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles());
 }
 
 
@@ -3940,11 +3878,12 @@
     Register value = ToRegister(instr->value());
     if (instr->object()->IsConstantOperand()) {
       ASSERT(value.is(rax));
+      ASSERT(!access.representation().IsSpecialization());
       LConstantOperand* object = LConstantOperand::cast(instr->object());
       __ store_rax(ToExternalReference(object));
     } else {
       Register object = ToRegister(instr->object());
-      __ movq(MemOperand(object, offset), value);
+      __ Store(MemOperand(object, offset), value, representation);
     }
     return;
   }
@@ -4013,15 +3952,16 @@
   if (instr->value()->IsConstantOperand()) {
     LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
     if (operand_value->IsRegister()) {
-      __ movq(FieldOperand(write_register, offset),
-              ToRegister(operand_value));
+      Register value = ToRegister(operand_value);
+      __ Store(FieldOperand(write_register, offset), value, representation);
     } else {
       Handle<Object> handle_value = ToHandle(operand_value);
       ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
       __ Move(FieldOperand(write_register, offset), handle_value);
     }
   } else {
-    __ movq(FieldOperand(write_register, offset), ToRegister(instr->value()));
+    Register value = ToRegister(instr->value());
+    __ Store(FieldOperand(write_register, offset), value, representation);
   }
 
   if (instr->hydrogen()->NeedsWriteBarrier()) {
@@ -4325,8 +4265,10 @@
 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
   Register object = ToRegister(instr->object());
   Register temp = ToRegister(instr->temp());
-  __ TestJSArrayForAllocationMemento(object, temp);
+  Label no_memento_found;
+  __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found);
   DeoptimizeIf(equal, instr->environment());
+  __ bind(&no_memento_found);
 }
 
 
@@ -4449,9 +4391,9 @@
   LOperand* output = instr->result();
   ASSERT(output->IsDoubleRegister());
   if (input->IsRegister()) {
-    __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
+    __ Cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
   } else {
-    __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
+    __ Cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
   }
 }
 
@@ -4479,6 +4421,22 @@
 }
 
 
+void LCodeGen::DoUint32ToSmi(LUint32ToSmi* instr) {
+  LOperand* input = instr->value();
+  ASSERT(input->IsRegister());
+  LOperand* output = instr->result();
+  if (!instr->hydrogen()->value()->HasRange() ||
+      !instr->hydrogen()->value()->range()->IsInSmiRange() ||
+      instr->hydrogen()->value()->range()->upper() == kMaxInt) {
+    // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
+    // interval, so we treat kMaxInt as a sentinel for this entire interval.
+    __ testl(ToRegister(input), Immediate(0x80000000));
+    DeoptimizeIf(not_zero, instr->environment());
+  }
+  __ Integer32ToSmi(ToRegister(output), ToRegister(input));
+}
+
+
 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
   LOperand* input = instr->value();
   ASSERT(input->IsRegister() && input->Equals(instr->result()));
@@ -4517,15 +4475,17 @@
   Label slow;
   Register reg = ToRegister(instr->value());
   Register tmp = reg.is(rax) ? rcx : rax;
+  XMMRegister temp_xmm = ToDoubleRegister(instr->temp());
 
   // Preserve the value of all registers.
   PushSafepointRegistersScope scope(this);
 
   Label done;
-  // Load value into xmm1 which will be preserved across potential call to
+  // Load value into temp_xmm which will be preserved across potential call to
   // runtime (MacroAssembler::EnterExitFrameEpilogue preserves only allocatable
   // XMM registers on x64).
-  __ LoadUint32(xmm1, reg, xmm0);
+  XMMRegister xmm_scratch = double_scratch0();
+  __ LoadUint32(temp_xmm, reg, xmm_scratch);
 
   if (FLAG_inline_new) {
     __ AllocateHeapNumber(reg, tmp, &slow);
@@ -4543,10 +4503,10 @@
   CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
   if (!reg.is(rax)) __ movq(reg, rax);
 
-  // Done. Put the value in xmm1 into the value of the allocated heap
+  // Done. Put the value in temp_xmm into the value of the allocated heap
   // number.
   __ bind(&done);
-  __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), xmm1);
+  __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm);
   __ StoreToSafepointRegisterSlot(reg, reg);
 }
 
@@ -4623,7 +4583,7 @@
                                 bool deoptimize_on_minus_zero,
                                 LEnvironment* env,
                                 NumberUntagDMode mode) {
-  Label load_smi, done;
+  Label convert, load_smi, done;
 
   if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
     // Smi check.
@@ -4632,27 +4592,19 @@
     // Heap number map check.
     __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                    Heap::kHeapNumberMapRootIndex);
-    if (!can_convert_undefined_to_nan) {
-      DeoptimizeIf(not_equal, env);
-    } else {
-      Label heap_number, convert;
-      __ j(equal, &heap_number, Label::kNear);
 
-      // Convert undefined (and hole) to NaN. Compute NaN as 0/0.
-      __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
-      DeoptimizeIf(not_equal, env);
-
-      __ bind(&convert);
-      __ xorps(result_reg, result_reg);
-      __ divsd(result_reg, result_reg);
-      __ jmp(&done, Label::kNear);
+    // On x64 it is safe to load at heap number offset before evaluating the map
+    // check, since all heap objects are at least two words long.
+    __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
 
-      __ bind(&heap_number);
+    if (can_convert_undefined_to_nan) {
+      __ j(not_equal, &convert);
+    } else {
+      DeoptimizeIf(not_equal, env);
     }
-    // Heap number to XMM conversion.
-    __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
+
     if (deoptimize_on_minus_zero) {
-      XMMRegister xmm_scratch = xmm0;
+      XMMRegister xmm_scratch = double_scratch0();
       __ xorps(xmm_scratch, xmm_scratch);
       __ ucomisd(xmm_scratch, result_reg);
       __ j(not_equal, &done, Label::kNear);
@@ -4661,6 +4613,18 @@
       DeoptimizeIf(not_zero, env);
     }
     __ jmp(&done, Label::kNear);
+
+    if (can_convert_undefined_to_nan) {
+      __ bind(&convert);
+
+      // Convert undefined (and hole) to NaN. Compute NaN as 0/0.
+      __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
+      DeoptimizeIf(not_equal, env);
+
+      __ xorps(result_reg, result_reg);
+      __ divsd(result_reg, result_reg);
+      __ jmp(&done, Label::kNear);
+    }
   } else {
     ASSERT(mode == NUMBER_CANDIDATE_IS_SMI);
   }
@@ -4668,30 +4632,44 @@
   // Smi to XMM conversion
   __ bind(&load_smi);
   __ SmiToInteger32(kScratchRegister, input_reg);
-  __ cvtlsi2sd(result_reg, kScratchRegister);
+  __ Cvtlsi2sd(result_reg, kScratchRegister);
   __ bind(&done);
 }
 
 
 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr, Label* done) {
-  Label heap_number;
   Register input_reg = ToRegister(instr->value());
 
-
   if (instr->truncating()) {
+    Label no_heap_number, check_bools, check_false;
+
     // Heap number map check.
     __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                    Heap::kHeapNumberMapRootIndex);
-    __ j(equal, &heap_number, Label::kNear);
-    // Check for undefined. Undefined is converted to zero for truncating
-    // conversions.
+    __ j(not_equal, &no_heap_number, Label::kNear);
+    __ TruncateHeapNumberToI(input_reg, input_reg);
+    __ jmp(done);
+
+    __ bind(&no_heap_number);
+    // Check for Oddballs. Undefined/False is converted to zero and True to one
+    // for truncating conversions.
     __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
-    DeoptimizeIf(not_equal, instr->environment());
+    __ j(not_equal, &check_bools, Label::kNear);
     __ Set(input_reg, 0);
     __ jmp(done);
 
-    __ bind(&heap_number);
-    __ TruncateHeapNumberToI(input_reg, input_reg);
+    __ bind(&check_bools);
+    __ CompareRoot(input_reg, Heap::kTrueValueRootIndex);
+    __ j(not_equal, &check_false, Label::kNear);
+    __ Set(input_reg, 1);
+    __ jmp(done);
+
+    __ bind(&check_false);
+    __ CompareRoot(input_reg, Heap::kFalseValueRootIndex);
+    __ RecordComment("Deferred TaggedToI: cannot truncate");
+    DeoptimizeIf(not_equal, instr->environment());
+    __ Set(input_reg, 0);
+    __ jmp(done);
   } else {
     Label bailout;
     XMMRegister xmm_temp = ToDoubleRegister(instr->temp());
@@ -4721,12 +4699,16 @@
   LOperand* input = instr->value();
   ASSERT(input->IsRegister());
   ASSERT(input->Equals(instr->result()));
-
   Register input_reg = ToRegister(input);
-  DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
-  __ JumpIfNotSmi(input_reg, deferred->entry());
-  __ SmiToInteger32(input_reg, input_reg);
-  __ bind(deferred->exit());
+
+  if (instr->hydrogen()->value()->representation().IsSmi()) {
+    __ SmiToInteger32(input_reg, input_reg);
+  } else {
+    DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
+    __ JumpIfNotSmi(input_reg, deferred->entry());
+    __ SmiToInteger32(input_reg, input_reg);
+    __ bind(deferred->exit());
+  }
 }
 
 
@@ -4764,7 +4746,8 @@
     __ TruncateDoubleToI(result_reg, input_reg);
   } else {
     Label bailout, done;
-    __ DoubleToI(result_reg, input_reg, xmm0,
+    XMMRegister xmm_scratch = double_scratch0();
+    __ DoubleToI(result_reg, input_reg, xmm_scratch,
         instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear);
 
     __ jmp(&done, Label::kNear);
@@ -4785,7 +4768,8 @@
   Register result_reg = ToRegister(result);
 
   Label bailout, done;
-  __ DoubleToI(result_reg, input_reg, xmm0,
+  XMMRegister xmm_scratch = double_scratch0();
+  __ DoubleToI(result_reg, input_reg, xmm_scratch,
       instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear);
 
   __ jmp(&done, Label::kNear);
@@ -4862,8 +4846,7 @@
 
 void LCodeGen::DoCheckValue(LCheckValue* instr) {
   Register reg = ToRegister(instr->value());
-  Handle<HeapObject> object = instr->hydrogen()->object();
-  __ CmpHeapObject(reg, object);
+  __ Cmp(reg, instr->hydrogen()->object().handle());
   DeoptimizeIf(not_equal, instr->environment());
 }
 
@@ -4903,22 +4886,21 @@
   ASSERT(input->IsRegister());
   Register reg = ToRegister(input);
 
-  SmallMapList* map_set = instr->hydrogen()->map_set();
-
   DeferredCheckMaps* deferred = NULL;
   if (instr->hydrogen()->has_migration_target()) {
     deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
     __ bind(deferred->check_maps());
   }
 
+  UniqueSet<Map> map_set = instr->hydrogen()->map_set();
   Label success;
-  for (int i = 0; i < map_set->length() - 1; i++) {
-    Handle<Map> map = map_set->at(i);
+  for (int i = 0; i < map_set.size() - 1; i++) {
+    Handle<Map> map = map_set.at(i).handle();
     __ CompareMap(reg, map, &success);
     __ j(equal, &success);
   }
 
-  Handle<Map> map = map_set->last();
+  Handle<Map> map = map_set.at(map_set.size() - 1).handle();
   __ CompareMap(reg, map, &success);
   if (instr->hydrogen()->has_migration_target()) {
     __ j(not_equal, deferred->entry());
@@ -4932,8 +4914,9 @@
 
 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
   XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
+  XMMRegister xmm_scratch = double_scratch0();
   Register result_reg = ToRegister(instr->result());
-  __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
+  __ ClampDoubleToUint8(value_reg, xmm_scratch, result_reg);
 }
 
 
@@ -4948,6 +4931,7 @@
   ASSERT(instr->unclamped()->Equals(instr->result()));
   Register input_reg = ToRegister(instr->unclamped());
   XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm());
+  XMMRegister xmm_scratch = double_scratch0();
   Label is_smi, done, heap_number;
 
   __ JumpIfSmi(input_reg, &is_smi);
@@ -4966,8 +4950,8 @@
 
   // Heap number
   __ bind(&heap_number);
-  __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
-  __ ClampDoubleToUint8(xmm0, temp_xmm_reg, input_reg);
+  __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset));
+  __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg);
   __ jmp(&done, Label::kNear);
 
   // smi
@@ -5089,7 +5073,7 @@
   // rax = regexp literal clone.
   int literal_offset =
       FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
-  __ LoadHeapObject(rcx, instr->hydrogen()->literals());
+  __ Move(rcx, instr->hydrogen()->literals());
   __ movq(rbx, FieldOperand(rcx, literal_offset));
   __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
   __ j(not_equal, &materialized, Label::kNear);
@@ -5160,13 +5144,7 @@
 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
   ASSERT(!operand->IsDoubleRegister());
   if (operand->IsConstantOperand()) {
-    Handle<Object> object = ToHandle(LConstantOperand::cast(operand));
-    AllowDeferredHandleDereference smi_check;
-    if (object->IsSmi()) {
-      __ Push(Handle<Smi>::cast(object));
-    } else {
-      __ PushHeapObject(Handle<HeapObject>::cast(object));
-    }
+    __ Push(ToHandle(LConstantOperand::cast(operand)));
   } else if (operand->IsRegister()) {
     __ push(ToRegister(operand));
   } else {
@@ -5280,7 +5258,7 @@
   __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset),
          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(not_equal, &check_frame_marker, Label::kNear);
-  __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
+  __ movq(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));
 
   // Check the marker in the calling frame.
   __ bind(&check_frame_marker);
  __ bind(&check_frame_marker);

Also available in: Unified diff