Revision f230a1cf deps/v8/src/x64/macro-assembler-x64.cc

View differences:

deps/v8/src/x64/macro-assembler-x64.cc
@@ -37,6 +37,7 @@
 #include "serialize.h"
 #include "debug.h"
 #include "heap.h"
+#include "isolate-inl.h"
 
 namespace v8 {
 namespace internal {
......
@@ -605,22 +606,9 @@
 }
 
 
-void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
-  CallRuntime(Runtime::FunctionForId(id), num_arguments);
-}
-
-
-void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
-  const Runtime::Function* function = Runtime::FunctionForId(id);
-  Set(rax, function->nargs);
-  LoadAddress(rbx, ExternalReference(function, isolate()));
-  CEntryStub ces(1, kSaveFPRegs);
-  CallStub(&ces);
-}
-
-
 void MacroAssembler::CallRuntime(const Runtime::Function* f,
-                                 int num_arguments) {
+                                 int num_arguments,
+                                 SaveFPRegsMode save_doubles) {
   // If the expected number of arguments of the runtime function is
   // constant, we check that the actual number of arguments match the
   // expectation.
......
@@ -635,7 +623,7 @@
   // smarter.
   Set(rax, num_arguments);
   LoadAddress(rbx, ExternalReference(f, isolate()));
-  CEntryStub ces(f->result_size);
+  CEntryStub ces(f->result_size, save_doubles);
   CallStub(&ces);
 }
 
......
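The two hunks above fold the separate CallRuntimeSaveDoubles entry point into CallRuntime itself: a SaveFPRegsMode flag now travels with the call and is handed to CEntryStub, while the FunctionId convenience overloads move out of this file. A minimal sketch of the pattern, with illustrative names rather than V8's real declarations:

#include <cstdio>

// Hypothetical stand-ins for V8's SaveFPRegsMode and the stub call.
enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs };

void CallRuntimeImpl(int num_arguments, SaveFPRegsMode save_doubles) {
  // One entry point: the mode flag selects the stub variant instead of
  // duplicating the whole call sequence in a second function.
  std::printf("call runtime, %d args, save doubles: %d\n",
              num_arguments, save_doubles == kSaveFPRegs);
}

int main() {
  CallRuntimeImpl(2, kDontSaveFPRegs);  // former CallRuntime
  CallRuntimeImpl(2, kSaveFPRegs);      // former CallRuntimeSaveDoubles
}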
@@ -691,13 +679,16 @@
 }
 
 
-void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
-                                              Address thunk_address,
-                                              Register thunk_last_arg,
-                                              int stack_space,
-                                              int return_value_offset) {
+void MacroAssembler::CallApiFunctionAndReturn(
+    Address function_address,
+    Address thunk_address,
+    Register thunk_last_arg,
+    int stack_space,
+    Operand return_value_operand,
+    Operand* context_restore_operand) {
   Label prologue;
   Label promote_scheduled_exception;
+  Label exception_handled;
   Label delete_allocated_handles;
   Label leave_exit_frame;
   Label write_back;
......
@@ -750,7 +741,7 @@
 
   bind(&profiler_disabled);
   // Call the api function!
-  movq(rax, reinterpret_cast<int64_t>(function_address),
+  movq(rax, reinterpret_cast<Address>(function_address),
        RelocInfo::EXTERNAL_REFERENCE);
 
   bind(&end_profiler_check);
......
@@ -768,7 +759,7 @@
   }
 
   // Load the value from ReturnValue
-  movq(rax, Operand(rbp, return_value_offset * kPointerSize));
+  movq(rax, return_value_operand);
   bind(&prologue);
 
   // No more valid handles (the result handle was the last one). Restore
......
@@ -783,6 +774,7 @@
   movq(rsi, scheduled_exception_address);
   Cmp(Operand(rsi, 0), factory->the_hole_value());
   j(not_equal, &promote_scheduled_exception);
+  bind(&exception_handled);
 
 #if ENABLE_EXTRA_CHECKS
   // Check if the function returned a valid JavaScript value.
......
@@ -819,11 +811,19 @@
   bind(&ok);
 #endif
 
-  LeaveApiExitFrame();
+  bool restore_context = context_restore_operand != NULL;
+  if (restore_context) {
+    movq(rsi, *context_restore_operand);
+  }
+  LeaveApiExitFrame(!restore_context);
   ret(stack_space * kPointerSize);
 
   bind(&promote_scheduled_exception);
-  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+  {
+    FrameScope frame(this, StackFrame::INTERNAL);
+    CallRuntime(Runtime::kPromoteScheduledException, 0);
+  }
+  jmp(&exception_handled);
 
   // HandleScope limit has changed. Delete allocated extensions.
   bind(&delete_allocated_handles);
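This hunk replaces the tail call to Runtime::kPromoteScheduledException with a regular CallRuntime inside an internal FrameScope, then jumps back to the new exception_handled label so the shared epilogue (optional context restore, LeaveApiExitFrame, ret) still runs. A control-flow sketch in plain C++, with stand-in functions rather than the generated code:

#include <cstdio>

// Illustrative stand-ins; the real code tests a per-isolate slot.
static bool scheduled_exception_pending() { return true; }

static void promote_scheduled_exception() {
  // In the new code this is CallRuntime(kPromoteScheduledException)
  // inside FrameScope(StackFrame::INTERNAL); it returns to the caller
  // instead of tail-calling away.
  std::puts("promote scheduled exception");
}

static void api_return_path() {
  if (scheduled_exception_pending()) {
    promote_scheduled_exception();
    // jmp(&exception_handled): rejoin the shared epilogue below.
  }
  // exception_handled: restore context, LeaveApiExitFrame, ret.
  std::puts("epilogue");
}

int main() { api_return_path(); }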
......
@@ -936,6 +936,42 @@
 }
 
 
+void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
+  xorps(dst, dst);
+  cvtlsi2sd(dst, src);
+}
+
+
+void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
+  xorps(dst, dst);
+  cvtlsi2sd(dst, src);
+}
+
+
+void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
+  ASSERT(!r.IsDouble());
+  if (r.IsByte()) {
+    movzxbl(dst, src);
+  } else if (r.IsInteger32()) {
+    movl(dst, src);
+  } else {
+    movq(dst, src);
+  }
+}
+
+
+void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
+  ASSERT(!r.IsDouble());
+  if (r.IsByte()) {
+    movb(dst, src);
+  } else if (r.IsInteger32()) {
+    movl(dst, src);
+  } else {
+    movq(dst, src);
+  }
+}
+
+
 void MacroAssembler::Set(Register dst, int64_t x) {
   if (x == 0) {
     xorl(dst, dst);
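The new Cvtlsi2sd wrapper zeroes its destination with xorps before the conversion. cvtsi2sd writes only the low lane of the XMM destination, so without the xor the instruction carries a false dependency on the register's previous contents; zeroing first breaks that dependency chain. (The hunk also adds Representation-dispatched Load/Store helpers that pick movzxbl/movl/movq by operand width.) The same idiom is visible from C++ intrinsics, as a sketch assuming SSE2 and <emmintrin.h>:

#include <emmintrin.h>
#include <cstdio>

// Convert a 32-bit int to double the way the new wrapper does:
// start from a zeroed register (xorps dst, dst), then cvtsi2sd.
static double ConvertNoFalseDep(int x) {
  __m128d zero = _mm_setzero_pd();      // xorps dst, dst
  __m128d d = _mm_cvtsi32_sd(zero, x);  // cvtsi2sd: writes low lane only
  return _mm_cvtsd_f64(d);
}

int main() {
  std::printf("%f\n", ConvertNoFalseDep(42));  // prints 42.000000
}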
......
@@ -1423,28 +1459,6 @@
 }
 
 
-void MacroAssembler::SmiTryAddConstant(Register dst,
-                                       Register src,
-                                       Smi* constant,
-                                       Label* on_not_smi_result,
-                                       Label::Distance near_jump) {
-  // Does not assume that src is a smi.
-  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
-  STATIC_ASSERT(kSmiTag == 0);
-  ASSERT(!dst.is(kScratchRegister));
-  ASSERT(!src.is(kScratchRegister));
-
-  JumpIfNotSmi(src, on_not_smi_result, near_jump);
-  Register tmp = (dst.is(src) ? kScratchRegister : dst);
-  LoadSmiConstant(tmp, constant);
-  addq(tmp, src);
-  j(overflow, on_not_smi_result, near_jump);
-  if (dst.is(src)) {
-    movq(dst, tmp);
-  }
-}
-
-
 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
   if (constant->value() == 0) {
     if (!dst.is(src)) {
......
@@ -1513,10 +1527,14 @@
   } else if (dst.is(src)) {
     ASSERT(!dst.is(kScratchRegister));
 
+    Label done;
     LoadSmiConstant(kScratchRegister, constant);
-    addq(kScratchRegister, src);
-    j(overflow, on_not_smi_result, near_jump);
-    movq(dst, kScratchRegister);
+    addq(dst, kScratchRegister);
+    j(no_overflow, &done, Label::kNear);
+    // Restore src.
+    subq(dst, kScratchRegister);
+    jmp(on_not_smi_result, near_jump);
+    bind(&done);
   } else {
     LoadSmiConstant(dst, constant);
     addq(dst, src);
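The rewritten dst.is(src) case no longer computes into the scratch register and moves on success; it adds directly into dst and, on overflow, subtracts the constant back out to restore the original value before jumping to the slow path. The SmiSub hunks below apply the same idea with the operations reversed. The same contract in portable C++, as a sketch using GCC/Clang's checked-arithmetic builtin (not V8 code):

#include <cstdint>
#include <cstdio>

// Mirrors: addq(dst, k); j(no_overflow, &done); subq(dst, k); jmp(slow).
// On failure dst is left exactly as it was, like the restored src.
static bool AddInPlaceChecked(int64_t& dst, int64_t k) {
  int64_t sum;
  if (__builtin_add_overflow(dst, k, &sum)) return false;  // dst untouched
  dst = sum;
  return true;
}

int main() {
  int64_t v = INT64_MAX - 1;
  std::printf("%d\n", AddInPlaceChecked(v, 1));  // 1: fits, v updated
  std::printf("%d\n", AddInPlaceChecked(v, 1));  // 0: would overflow, v kept
}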
......
@@ -1616,6 +1634,29 @@
 }
 
 
+template<class T>
+static void SmiAddHelper(MacroAssembler* masm,
+                         Register dst,
+                         Register src1,
+                         T src2,
+                         Label* on_not_smi_result,
+                         Label::Distance near_jump) {
+  if (dst.is(src1)) {
+    Label done;
+    masm->addq(dst, src2);
+    masm->j(no_overflow, &done, Label::kNear);
+    // Restore src1.
+    masm->subq(dst, src2);
+    masm->jmp(on_not_smi_result, near_jump);
+    masm->bind(&done);
+  } else {
+    masm->movq(dst, src1);
+    masm->addq(dst, src2);
+    masm->j(overflow, on_not_smi_result, near_jump);
+  }
+}
+
+
 void MacroAssembler::SmiAdd(Register dst,
                             Register src1,
                             Register src2,
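SmiAddHelper exists so the Register and Operand overloads of SmiAdd can share one body: addq, subq, and friends are overloaded for both operand kinds, so a template over the second-operand type removes the duplicated register/memory sequences. A toy illustration of the factoring, with stand-in types rather than V8's:

#include <cstdio>

struct Register { long value; };        // stand-in for a CPU register
struct Operand  { const long* slot; };  // stand-in for a memory operand

static long load(Register r) { return r.value; }
static long load(Operand m)  { return *m.slot; }

// One body serves both "register + register" and "register + memory",
// the way SmiAddHelper<Register> / SmiAddHelper<Operand> do below.
template <class T>
static long AddHelper(Register src1, T src2) {
  return load(src1) + load(src2);
}

int main() {
  long mem = 7;
  std::printf("%ld %ld\n",
              AddHelper(Register{5}, Register{3}),     // 8
              AddHelper(Register{5}, Operand{&mem}));  // 12
}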
......
@@ -1623,16 +1664,7 @@
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
   ASSERT(!dst.is(src2));
-  if (dst.is(src1)) {
-    movq(kScratchRegister, src1);
-    addq(kScratchRegister, src2);
-    j(overflow, on_not_smi_result, near_jump);
-    movq(dst, kScratchRegister);
-  } else {
-    movq(dst, src1);
-    addq(dst, src2);
-    j(overflow, on_not_smi_result, near_jump);
-  }
+  SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
 }
 
 
......
@@ -1642,17 +1674,8 @@
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
-  if (dst.is(src1)) {
-    movq(kScratchRegister, src1);
-    addq(kScratchRegister, src2);
-    j(overflow, on_not_smi_result, near_jump);
-    movq(dst, kScratchRegister);
-  } else {
-    ASSERT(!src2.AddressUsesRegister(dst));
-    movq(dst, src1);
-    addq(dst, src2);
-    j(overflow, on_not_smi_result, near_jump);
-  }
+  ASSERT(!src2.AddressUsesRegister(dst));
+  SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
 }
 
 
......
@@ -1675,34 +1698,37 @@
 }
 
 
-void MacroAssembler::SmiSub(Register dst,
-                            Register src1,
-                            Register src2,
-                            Label* on_not_smi_result,
-                            Label::Distance near_jump) {
-  ASSERT_NOT_NULL(on_not_smi_result);
-  ASSERT(!dst.is(src2));
+template<class T>
+static void SmiSubHelper(MacroAssembler* masm,
+                         Register dst,
+                         Register src1,
+                         T src2,
+                         Label* on_not_smi_result,
+                         Label::Distance near_jump) {
   if (dst.is(src1)) {
-    cmpq(dst, src2);
-    j(overflow, on_not_smi_result, near_jump);
-    subq(dst, src2);
+    Label done;
+    masm->subq(dst, src2);
+    masm->j(no_overflow, &done, Label::kNear);
+    // Restore src1.
+    masm->addq(dst, src2);
+    masm->jmp(on_not_smi_result, near_jump);
+    masm->bind(&done);
   } else {
-    movq(dst, src1);
-    subq(dst, src2);
-    j(overflow, on_not_smi_result, near_jump);
+    masm->movq(dst, src1);
+    masm->subq(dst, src2);
+    masm->j(overflow, on_not_smi_result, near_jump);
   }
 }
 
 
-void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
-  // No overflow checking. Use only when it's known that
-  // overflowing is impossible (e.g., subtracting two positive smis).
+void MacroAssembler::SmiSub(Register dst,
+                            Register src1,
+                            Register src2,
+                            Label* on_not_smi_result,
+                            Label::Distance near_jump) {
+  ASSERT_NOT_NULL(on_not_smi_result);
   ASSERT(!dst.is(src2));
-  if (!dst.is(src1)) {
-    movq(dst, src1);
-  }
-  subq(dst, src2);
-  Assert(no_overflow, kSmiSubtractionOverflow);
+  SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
 }
 
 
......
@@ -1712,29 +1738,36 @@
                             Label* on_not_smi_result,
                             Label::Distance near_jump) {
   ASSERT_NOT_NULL(on_not_smi_result);
-  if (dst.is(src1)) {
-    movq(kScratchRegister, src2);
-    cmpq(src1, kScratchRegister);
-    j(overflow, on_not_smi_result, near_jump);
-    subq(src1, kScratchRegister);
-  } else {
-    movq(dst, src1);
-    subq(dst, src2);
-    j(overflow, on_not_smi_result, near_jump);
-  }
+  ASSERT(!src2.AddressUsesRegister(dst));
+  SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
 }
 
 
-void MacroAssembler::SmiSub(Register dst,
-                            Register src1,
-                            const Operand& src2) {
+template<class T>
+static void SmiSubNoOverflowHelper(MacroAssembler* masm,
+                                   Register dst,
+                                   Register src1,
+                                   T src2) {
   // No overflow checking. Use only when it's known that
   // overflowing is impossible (e.g., subtracting two positive smis).
   if (!dst.is(src1)) {
-    movq(dst, src1);
+    masm->movq(dst, src1);
   }
-  subq(dst, src2);
-  Assert(no_overflow, kSmiSubtractionOverflow);
+  masm->subq(dst, src2);
+  masm->Assert(no_overflow, kSmiSubtractionOverflow);
+}
+
+
+void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
+  ASSERT(!dst.is(src2));
+  SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
+}
+
+
+void MacroAssembler::SmiSub(Register dst,
+                            Register src1,
+                            const Operand& src2) {
+  SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);
 }
 
 
......
@@ -2240,6 +2273,90 @@
 // ----------------------------------------------------------------------------
 
 
+void MacroAssembler::LookupNumberStringCache(Register object,
+                                             Register result,
+                                             Register scratch1,
+                                             Register scratch2,
+                                             Label* not_found) {
+  // Use of registers. Register result is used as a temporary.
+  Register number_string_cache = result;
+  Register mask = scratch1;
+  Register scratch = scratch2;
+
+  // Load the number string cache.
+  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
+
+  // Make the hash mask from the length of the number string cache. It
+  // contains two elements (number and string) for each cache entry.
+  SmiToInteger32(
+      mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
+  shrl(mask, Immediate(1));
+  subq(mask, Immediate(1));  // Make mask.
+
+  // Calculate the entry in the number string cache. The hash value in the
+  // number string cache for smis is just the smi value, and the hash for
+  // doubles is the xor of the upper and lower words. See
+  // Heap::GetNumberStringCache.
+  Label is_smi;
+  Label load_result_from_cache;
+  JumpIfSmi(object, &is_smi);
+  CheckMap(object,
+           isolate()->factory()->heap_number_map(),
+           not_found,
+           DONT_DO_SMI_CHECK);
+
+  STATIC_ASSERT(8 == kDoubleSize);
+  movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
+  xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
+  and_(scratch, mask);
+  // Each entry in string cache consists of two pointer sized fields,
+  // but times_twice_pointer_size (multiplication by 16) scale factor
+  // is not supported by addrmode on x64 platform.
+  // So we have to premultiply entry index before lookup.
+  shl(scratch, Immediate(kPointerSizeLog2 + 1));
+
+  Register index = scratch;
+  Register probe = mask;
+  movq(probe,
+       FieldOperand(number_string_cache,
+                    index,
+                    times_1,
+                    FixedArray::kHeaderSize));
+  JumpIfSmi(probe, not_found);
+  movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
+  ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
+  j(parity_even, not_found);  // Bail out if NaN is involved.
+  j(not_equal, not_found);  // The cache did not contain this value.
+  jmp(&load_result_from_cache);
+
+  bind(&is_smi);
+  SmiToInteger32(scratch, object);
+  and_(scratch, mask);
+  // Each entry in string cache consists of two pointer sized fields,
+  // but times_twice_pointer_size (multiplication by 16) scale factor
+  // is not supported by addrmode on x64 platform.
+  // So we have to premultiply entry index before lookup.
+  shl(scratch, Immediate(kPointerSizeLog2 + 1));
+
+  // Check if the entry is the smi we are looking for.
+  cmpq(object,
+       FieldOperand(number_string_cache,
+                    index,
+                    times_1,
+                    FixedArray::kHeaderSize));
+  j(not_equal, not_found);
+
+  // Get the result from the cache.
+  bind(&load_result_from_cache);
+  movq(result,
+       FieldOperand(number_string_cache,
+                    index,
+                    times_1,
+                    FixedArray::kHeaderSize + kPointerSize));
+  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
+}
+
+
 void MacroAssembler::JumpIfNotString(Register object,
                                      Register object_map,
                                      Label* not_string,
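The cache lookup above hashes a heap number by xoring the upper and lower 32-bit words of its IEEE-754 bit pattern and masking by the (power-of-two) entry count; smis hash as their integer value. The double hash in plain C++, as a sketch of the scheme the comments attribute to Heap::GetNumberStringCache:

#include <cstdint>
#include <cstring>
#include <cstdio>

// xor of the upper and lower words of the double's bit pattern,
// masked to an entry index, as in the movl/xor_/and_ sequence above.
static uint32_t DoubleHash(double v, uint32_t mask) {
  uint64_t bits;
  std::memcpy(&bits, &v, sizeof bits);  // bit-cast without aliasing UB
  uint32_t lo = static_cast<uint32_t>(bits);
  uint32_t hi = static_cast<uint32_t>(bits >> 32);
  return (hi ^ lo) & mask;
}

int main() {
  // mask = capacity/2 - 1 for a cache of (number, string) pairs.
  std::printf("%u\n", DoubleHash(2.5, 63));
}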
......
@@ -2376,8 +2493,7 @@
   if (source->IsSmi()) {
     Move(dst, Smi::cast(*source));
   } else {
-    ASSERT(source->IsHeapObject());
-    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
+    MoveHeapObject(dst, source);
   }
 }
 
......
@@ -2387,8 +2503,7 @@
   if (source->IsSmi()) {
     Move(dst, Smi::cast(*source));
   } else {
-    ASSERT(source->IsHeapObject());
-    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
+    MoveHeapObject(kScratchRegister, source);
     movq(dst, kScratchRegister);
   }
 }
......
@@ -2399,8 +2514,7 @@
   if (source->IsSmi()) {
     Cmp(dst, Smi::cast(*source));
   } else {
-    ASSERT(source->IsHeapObject());
-    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
+    MoveHeapObject(kScratchRegister, source);
     cmpq(dst, kScratchRegister);
   }
 }
......
@@ -2411,8 +2525,7 @@
   if (source->IsSmi()) {
     Cmp(dst, Smi::cast(*source));
   } else {
-    ASSERT(source->IsHeapObject());
-    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
+    MoveHeapObject(kScratchRegister, source);
     cmpq(dst, kScratchRegister);
   }
 }
......
@@ -2423,47 +2536,22 @@
   if (source->IsSmi()) {
     Push(Smi::cast(*source));
   } else {
-    ASSERT(source->IsHeapObject());
-    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
+    MoveHeapObject(kScratchRegister, source);
     push(kScratchRegister);
   }
 }
 
 
-void MacroAssembler::LoadHeapObject(Register result,
-                                    Handle<HeapObject> object) {
+void MacroAssembler::MoveHeapObject(Register result,
+                                    Handle<Object> object) {
   AllowDeferredHandleDereference using_raw_address;
+  ASSERT(object->IsHeapObject());
   if (isolate()->heap()->InNewSpace(*object)) {
     Handle<Cell> cell = isolate()->factory()->NewCell(object);
     movq(result, cell, RelocInfo::CELL);
     movq(result, Operand(result, 0));
   } else {
-    Move(result, object);
-  }
-}
-
-
-void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
-  AllowDeferredHandleDereference using_raw_address;
-  if (isolate()->heap()->InNewSpace(*object)) {
-    Handle<Cell> cell = isolate()->factory()->NewCell(object);
-    movq(kScratchRegister, cell, RelocInfo::CELL);
-    cmpq(reg, Operand(kScratchRegister, 0));
-  } else {
-    Cmp(reg, object);
-  }
-}
-
-
-void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
-  AllowDeferredHandleDereference using_raw_address;
-  if (isolate()->heap()->InNewSpace(*object)) {
-    Handle<Cell> cell = isolate()->factory()->NewCell(object);
-    movq(kScratchRegister, cell, RelocInfo::CELL);
-    movq(kScratchRegister, Operand(kScratchRegister, 0));
-    push(kScratchRegister);
-  } else {
-    Push(object);
+    movq(result, object, RelocInfo::EMBEDDED_OBJECT);
   }
 }
 
......
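The MoveHeapObject consolidation above keeps the long-standing new-space rule: objects in new-space can move under GC, so rather than embedding the object pointer in generated code, the assembler embeds the address of a Cell that the GC keeps current and loads through it; old-space objects are embedded directly. A toy model of that indirection, with illustrative names:

#include <cstdio>

struct Cell { void* value; };  // stand-in for V8's Handle<Cell> slot

// Code that loads through the cell (movq result, Operand(result, 0))
// stays valid when the GC moves the object and rewrites cell.value.
static void* LoadThroughCell(const Cell& cell) { return cell.value; }

int main() {
  int object = 42;      // pretend heap object
  Cell cell{&object};
  int moved = 42;       // the GC "moves" the object...
  cell.value = &moved;  // ...and updates the cell, not the code
  std::printf("%d\n", *static_cast<int*>(LoadThroughCell(cell)));
}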
@@ -2548,7 +2636,8 @@
 #ifdef DEBUG
   int end_position = pc_offset() + CallSize(code_object);
 #endif
-  ASSERT(RelocInfo::IsCodeTarget(rmode));
+  ASSERT(RelocInfo::IsCodeTarget(rmode) ||
+      rmode == RelocInfo::CODE_AGE_SEQUENCE);
   call(code_object, rmode, ast_id);
 #ifdef DEBUG
   CHECK_EQ(end_position, pc_offset());
......
@@ -2651,7 +2740,8 @@
 void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                     int handler_index) {
   // Adjust this code if not the case.
-  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
+                                                kFPOnStackSize);
   STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
   STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
   STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
......
@@ -2710,7 +2800,8 @@
 
 void MacroAssembler::Throw(Register value) {
   // Adjust this code if not the case.
-  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
+                                                kFPOnStackSize);
   STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
   STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
   STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
......
@@ -2750,7 +2841,8 @@
 
 void MacroAssembler::ThrowUncatchable(Register value) {
   // Adjust this code if not the case.
-  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize +
+                                                kFPOnStackSize);
   STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
   STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
   STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
......
@@ -2917,7 +3009,7 @@
   // Value is a smi. convert to a double and store.
   // Preserve original value.
   SmiToInteger32(kScratchRegister, maybe_number);
-  cvtlsi2sd(xmm_scratch, kScratchRegister);
+  Cvtlsi2sd(xmm_scratch, kScratchRegister);
   movsd(FieldOperand(elements, index, times_8,
                      FixedDoubleArray::kHeaderSize - elements_offset),
         xmm_scratch);
......
@@ -3050,7 +3142,7 @@
                                Label* conversion_failed,
                                Label::Distance dst) {
   cvttsd2si(result_reg, input_reg);
-  cvtlsi2sd(xmm0, result_reg);
+  Cvtlsi2sd(xmm0, result_reg);
   ucomisd(xmm0, input_reg);
   j(not_equal, conversion_failed, dst);
   j(parity_even, conversion_failed, dst);  // NaN.
......
@@ -3087,7 +3179,7 @@
 
   movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
   cvttsd2si(result_reg, xmm0);
-  cvtlsi2sd(temp, result_reg);
+  Cvtlsi2sd(temp, result_reg);
   ucomisd(xmm0, temp);
   RecordComment("Deferred TaggedToI: lost precision");
   j(not_equal, lost_precision, dst);
......
@@ -3472,7 +3564,7 @@
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
   // Get the function and setup the context.
-  LoadHeapObject(rdi, function);
+  Move(rdi, function);
   movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
 
   // We call indirectly through the code field in the function to
......
@@ -3559,6 +3651,30 @@
 }
 
 
+void MacroAssembler::Prologue(PrologueFrameMode frame_mode) {
+  if (frame_mode == BUILD_STUB_FRAME) {
+    push(rbp);  // Caller's frame pointer.
+    movq(rbp, rsp);
+    push(rsi);  // Callee's context.
+    Push(Smi::FromInt(StackFrame::STUB));
+  } else {
+    PredictableCodeSizeScope predictible_code_size_scope(this,
+        kNoCodeAgeSequenceLength);
+    if (isolate()->IsCodePreAgingActive()) {
+        // Pre-age the code.
+      Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
+           RelocInfo::CODE_AGE_SEQUENCE);
+      Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
+    } else {
+      push(rbp);  // Caller's frame pointer.
+      movq(rbp, rsp);
+      push(rsi);  // Callee's context.
+      push(rdi);  // Callee's JS function.
+    }
+  }
+}
+
+
 void MacroAssembler::EnterFrame(StackFrame::Type type) {
   push(rbp);
   movq(rbp, rsp);
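The new Prologue emits either a stub frame, the usual function prologue (push rbp; mov rbp,rsp; push rsi; push rdi), or, when code pre-aging is active, a short call to the MarkCodeAsExecutedOnce builtin padded with Nop so the whole sequence occupies exactly kNoCodeAgeSequenceLength bytes. A fixed-size site is what lets the code-aging machinery patch the prologue in place later; this also explains the relaxed ASSERT in Call above, which now accepts CODE_AGE_SEQUENCE relocation. The padding arithmetic as a compile-time sketch, with assumed byte counts rather than the real header constants:

// Illustrative constants; the real values live in V8's assembler headers.
constexpr int kNoCodeAgeSequenceLength = 6;     // assumed total size
constexpr int kShortCallInstructionLength = 5;  // call rel32 on x64

// Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength)
constexpr int kPadding =
    kNoCodeAgeSequenceLength - kShortCallInstructionLength;
static_assert(kPadding >= 0, "the call must fit in the age sequence");

int main() { return kPadding; }  // one nop byte in this sketch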
......
@@ -3590,9 +3706,10 @@
 void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
   // Set up the frame structure on the stack.
   // All constants are relative to the frame pointer of the exit frame.
-  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
-  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
-  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
+  ASSERT(ExitFrameConstants::kCallerSPDisplacement ==
+         kFPOnStackSize + kPCOnStackSize);
+  ASSERT(ExitFrameConstants::kCallerPCOffset == kFPOnStackSize);
+  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
   push(rbp);
   movq(rbp, rsp);
 
......
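The rewritten asserts above (and the StackHandlerConstants ones earlier) stop assuming a saved frame pointer or return address is kPointerSize wide and state offsets via kFPOnStackSize and kPCOnStackSize. On plain x64 the numbers are unchanged; the distinction matters for layouts where pointers are narrower than a stack slot. The arithmetic, assuming 8-byte stack slots:

// Sketch with assumed values: on x64 a pushed rbp or a return address
// always occupies a full 8-byte stack slot.
constexpr int kFPOnStackSize = 8;  // saved caller rbp
constexpr int kPCOnStackSize = 8;  // return address pushed by call

// kCallerSPDisplacement == kFPOnStackSize + kPCOnStackSize
static_assert(kFPOnStackSize + kPCOnStackSize == 16,
              "caller SP sits two slots above the saved rbp");

int main() {}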
@@ -3620,7 +3737,7 @@
 #endif
   // Optionally save all XMM registers.
   if (save_doubles) {
-    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
+    int space = XMMRegister::kMaxNumAllocatableRegisters * kDoubleSize +
         arg_stack_space * kPointerSize;
     subq(rsp, Immediate(space));
    int offset = -2 * kPointerSize;
......
@@ -3683,23 +3800,25 @@
 
   PushReturnAddressFrom(rcx);
 
-  LeaveExitFrameEpilogue();
+  LeaveExitFrameEpilogue(true);
 }
 
 
-void MacroAssembler::LeaveApiExitFrame() {
+void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
   movq(rsp, rbp);
   pop(rbp);
 
-  LeaveExitFrameEpilogue();
+  LeaveExitFrameEpilogue(restore_context);
 }
 
 
-void MacroAssembler::LeaveExitFrameEpilogue() {
+void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
   // Restore current context from top and clear it in debug mode.
   ExternalReference context_address(Isolate::kContextAddress, isolate());
   Operand context_operand = ExternalOperand(context_address);
-  movq(rsi, context_operand);
+  if (restore_context) {
+    movq(rsi, context_operand);
+  }
 #ifdef DEBUG
   movq(context_operand, Immediate(0));
 #endif
......
@@ -3971,6 +4090,10 @@
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);
 
+  if (isolate()->heap_profiler()->is_tracking_allocations()) {
+    RecordObjectAllocation(isolate(), result, object_size);
+  }
+
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
......
@@ -4050,6 +4173,10 @@
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);
 
+  if (isolate()->heap_profiler()->is_tracking_allocations()) {
+    RecordObjectAllocation(isolate(), result, object_size);
+  }
+
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
......
@@ -4791,8 +4918,8 @@
 
 void MacroAssembler::TestJSArrayForAllocationMemento(
     Register receiver_reg,
-    Register scratch_reg) {
-  Label no_memento_available;
+    Register scratch_reg,
+    Label* no_memento_found) {
   ExternalReference new_space_start =
       ExternalReference::new_space_start(isolate());
   ExternalReference new_space_allocation_top =
......
@@ -4802,12 +4929,43 @@
       JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
   movq(kScratchRegister, new_space_start);
   cmpq(scratch_reg, kScratchRegister);
-  j(less, &no_memento_available);
+  j(less, no_memento_found);
   cmpq(scratch_reg, ExternalOperand(new_space_allocation_top));
-  j(greater, &no_memento_available);
+  j(greater, no_memento_found);
   CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
               Heap::kAllocationMementoMapRootIndex);
-  bind(&no_memento_available);
+}
+
+
+void MacroAssembler::RecordObjectAllocation(Isolate* isolate,
+                                            Register object,
+                                            Register object_size) {
+  FrameScope frame(this, StackFrame::EXIT);
+  PushSafepointRegisters();
+  PrepareCallCFunction(3);
+  // In case object is rdx
+  movq(kScratchRegister, object);
+  movq(arg_reg_3, object_size);
+  movq(arg_reg_2, kScratchRegister);
+  movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE);
+  CallCFunction(
+      ExternalReference::record_object_allocation_function(isolate), 3);
+  PopSafepointRegisters();
+}
+
+
+void MacroAssembler::RecordObjectAllocation(Isolate* isolate,
+                                            Register object,
+                                            int object_size) {
+  FrameScope frame(this, StackFrame::EXIT);
+  PushSafepointRegisters();
+  PrepareCallCFunction(3);
+  movq(arg_reg_2, object);
+  movq(arg_reg_3, Immediate(object_size));
+  movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE);
+  CallCFunction(
+      ExternalReference::record_object_allocation_function(isolate), 3);
+  PopSafepointRegisters();
 }
 
 

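TestJSArrayForAllocationMemento above now takes its failure label from the caller instead of binding a local no_memento_available, so callers can branch anywhere on a miss, while the new RecordObjectAllocation helpers report allocations to the heap profiler from a safepoint. The memento probe itself is a range check: the word just past the array must lie inside new-space, below the allocation top, and hold the memento map. The range part in plain C++, as a sketch over raw addresses rather than V8 code:

#include <cstdint>
#include <cstdio>

// Probe the address right after an array for an AllocationMemento,
// mirroring the new_space_start <= p <= top test emitted above.
static bool MayHaveMemento(uintptr_t after_array,  // array end + memento size
                           uintptr_t new_space_start,
                           uintptr_t new_space_top) {
  return after_array >= new_space_start && after_array <= new_space_top;
}

int main() {
  std::printf("%d\n", MayHaveMemento(0x2000, 0x1000, 0x3000));  // 1
  std::printf("%d\n", MayHaveMemento(0x4000, 0x1000, 0x3000));  // 0
}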