Revision f230a1cf deps/v8/src/arm/builtins-arm.cc

--- a/deps/v8/src/arm/builtins-arm.cc
+++ b/deps/v8/src/arm/builtins-arm.cc
@@ -193,14 +193,12 @@
 
   Register argument = r2;
   Label not_cached, argument_is_string;
-  NumberToStringStub::GenerateLookupNumberStringCache(
-      masm,
-      r0,        // Input.
-      argument,  // Result.
-      r3,        // Scratch.
-      r4,        // Scratch.
-      r5,        // Scratch.
-      &not_cached);
+  __ LookupNumberStringCache(r0,        // Input.
+                             argument,  // Result.
+                             r3,        // Scratch.
+                             r4,        // Scratch.
+                             r5,        // Scratch.
+                             &not_cached);
   __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
   __ bind(&argument_is_string);
 
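Note: the hunk above swaps a NumberToStringStub helper for the LookupNumberStringCache macro-assembler instruction. Its contract, as used here, is: take the input number in r0, leave the cached string in the result register, and branch to &not_cached on a miss, so the surrounding String-constructor code can fall through to its slow path. The stand-alone C++ sketch below only illustrates that direct-mapped lookup-or-miss shape; the hashing and entry layout are hypothetical and do not match V8's actual number string cache.

// Hypothetical sketch of a direct-mapped number-to-string cache with an
// explicit miss path; not V8 code.
#include <cstddef>
#include <cstdio>
#include <functional>
#include <string>

struct CacheEntry {
  double key = 0.0;
  std::string value;
  bool valid = false;
};

constexpr std::size_t kCacheSize = 64;  // power of two so the hash can be masked
static CacheEntry cache[kCacheSize];

// Returns true and fills *result on a hit; returning false plays the role of
// branching to the &not_cached label in the generated code.
bool LookupNumberStringCacheSketch(double input, std::string* result) {
  const std::size_t index = std::hash<double>{}(input) & (kCacheSize - 1);
  const CacheEntry& entry = cache[index];
  if (!entry.valid || entry.key != input) return false;  // cache miss
  *result = entry.value;                                  // cache hit
  return true;
}

int main() {
  cache[std::hash<double>{}(42.0) & (kCacheSize - 1)] = {42.0, "42", true};
  std::string s;
  std::printf("42 -> %s\n",
              LookupNumberStringCacheSketch(42.0, &s) ? s.c_str() : "<not cached>");
  std::printf("7  -> %s\n",
              LookupNumberStringCacheSketch(7.0, &s) ? s.c_str() : "<not cached>");
}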
@@ -447,9 +445,8 @@
       // r3: object size (in words)
       // r4: JSObject (not tagged)
       // r5: First in-object property of JSObject (not tagged)
-      __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
       ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
-      __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
+      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
       if (count_constructions) {
         __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
         __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
@@ -457,14 +454,16 @@
         __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
         // r0: offset of first field after pre-allocated fields
         if (FLAG_debug_code) {
-          __ cmp(r0, r6);
+          __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
+          __ cmp(r0, ip);
           __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
         }
-        __ InitializeFieldsWithFiller(r5, r0, r7);
+        __ InitializeFieldsWithFiller(r5, r0, r6);
         // To allow for truncation.
-        __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
+        __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
       }
-      __ InitializeFieldsWithFiller(r5, r6, r7);
+      __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
+      __ InitializeFieldsWithFiller(r5, r0, r6);
 
       // Add the object tag to make the JSObject real, so that we can continue
       // and jump into the continuation code at any time from now on. Any
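Note: the two hunks above stop pinning the end-of-object address in r6 and the undefined filler in r7; the end address is recomputed into ip or r0 where needed and r6 now holds the filler, which matters because r7 is only touched when FLAG_enable_ool_constant_pool is off (see the register-clearing hunks further down in this diff). InitializeFieldsWithFiller(start, end, filler) itself just stores the filler word by word up to the end pointer, the same pattern the next hunk spells out as an explicit post-indexed store loop. A minimal stand-alone C++ model of that behaviour, with made-up values (not the MacroAssembler implementation):

// Hypothetical model of the fill loop; the real code operates on raw heap words.
#include <cstdint>
#include <cstdio>

using Word = std::uintptr_t;

void InitializeFieldsWithFillerSketch(Word* current, Word* end, Word filler) {
  while (current < end) {
    *current++ = filler;  // like: str filler, [current], #kPointerSize (post-indexed)
  }
}

int main() {
  Word object[8] = {};
  const Word kUndefinedFiller = 0x0D1E;  // stand-in for the undefined-value root
  // Fill the in-object property slots (here: slots 3..7) with the filler value.
  InitializeFieldsWithFillerSketch(object + 3, object + 8, kUndefinedFiller);
  for (Word w : object) std::printf("%llx ", static_cast<unsigned long long>(w));
  std::printf("\n");
}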
@@ -529,16 +528,10 @@
       __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
       ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
       { Label loop, entry;
-        if (count_constructions) {
-          __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
-        } else if (FLAG_debug_code) {
-          __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
-          __ cmp(r7, r8);
-          __ Assert(eq, kUndefinedValueNotLoaded);
-        }
+        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
         __ b(&entry);
         __ bind(&loop);
-        __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
+        __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
         __ bind(&entry);
         __ cmp(r2, r6);
         __ b(lt, &loop);
@@ -702,7 +695,7 @@
   // r2: receiver
   // r3: argc
   // r4: argv
-  // r5-r7, cp may be clobbered
+  // r5-r6, r7 (if not FLAG_enable_ool_constant_pool) and cp may be clobbered
   ProfileEntryHookStub::MaybeCallEntryHook(masm);
 
   // Clear the context before we push it when entering the internal frame.
@@ -742,7 +735,9 @@
     __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
     __ mov(r5, Operand(r4));
     __ mov(r6, Operand(r4));
-    __ mov(r7, Operand(r4));
+    if (!FLAG_enable_ool_constant_pool) {
+      __ mov(r7, Operand(r4));
+    }
     if (kR9Available == 1) {
       __ mov(r9, Operand(r4));
     }
@@ -807,12 +802,13 @@
   // The following registers must be saved and restored when calling through to
   // the runtime:
   //   r0 - contains return address (beginning of patch sequence)
-  //   r1 - function object
+  //   r1 - isolate
   FrameScope scope(masm, StackFrame::MANUAL);
   __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
-  __ PrepareCallCFunction(1, 0, r1);
+  __ PrepareCallCFunction(1, 0, r2);
+  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
   __ CallCFunction(
-      ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
   __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
   __ mov(pc, r0);
 }
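Note: GenerateMakeCodeYoungAgainCommon now passes the isolate to the C++ helper explicitly: r1 is loaded with the isolate address and CallCFunction is told there are two arguments, while r0 still carries the return address at the start of the patch sequence. Under the ARM procedure call standard (AAPCS) the first two integer arguments arrive in r0 and r1, which is all the trampoline relies on. A hypothetical two-argument callee, shown only to make the register-to-parameter mapping concrete (it is not the real function behind get_make_code_young_function):

// Hypothetical callee; demonstrates how r0/r1 map to the two C parameters.
#include <cstdio>

struct Isolate;  // opaque stand-in for the real isolate type

extern "C" void MakeCodeYoungSketch(unsigned char* sequence_address,  // passed in r0
                                    Isolate* isolate) {               // passed in r1
  std::printf("re-patch code-age sequence at %p for isolate %p\n",
              static_cast<void*>(sequence_address), static_cast<void*>(isolate));
}

int main() {
  unsigned char dummy_sequence[4] = {};
  MakeCodeYoungSketch(dummy_sequence, nullptr);
}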
@@ -830,6 +826,39 @@
 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
 
 
+void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
+  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
+  // that make_code_young doesn't do any garbage collection which allows us to
+  // save/restore the registers without worrying about which of them contain
+  // pointers.
+
+  // The following registers must be saved and restored when calling through to
+  // the runtime:
+  //   r0 - contains return address (beginning of patch sequence)
+  //   r1 - isolate
+  FrameScope scope(masm, StackFrame::MANUAL);
+  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
+  __ PrepareCallCFunction(1, 0, r2);
+  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
+  __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
+        masm->isolate()), 2);
+  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
+
+  // Perform prologue operations usually performed by the young code stub.
+  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+  __ add(fp, sp, Operand(2 * kPointerSize));
+
+  // Jump to point after the code-age stub.
+  __ add(r0, r0, Operand(kNoCodeAgeSequenceLength * Assembler::kInstrSize));
+  __ mov(pc, r0);
+}
+
+
+void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
+  GenerateMakeCodeYoungAgainCommon(masm);
+}
+
+
 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
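Note: after returning from the C call, the new Generate_MarkCodeAsExecutedOnce builtin re-executes the prologue that the patched-over young-code stub would normally have run: it pushes r1, cp, fp and lr and then points fp two words above the new sp. The small self-contained model below, assuming 32-bit pointers, mimics that stm/add pair to show which saved slot the new frame pointer ends up addressing:

// Models "stm db_w, sp, {r1, cp, fp, lr}" followed by "add fp, sp, #8";
// purely illustrative, no real registers involved.
#include <cstdint>
#include <cstdio>

constexpr std::uint32_t kPointerSize = 4;  // 32-bit ARM

int main() {
  const char* slots[16] = {};            // pretend stack memory, one label per word
  std::uint32_t sp = 16 * kPointerSize;  // empty descending stack

  // STMDB stores the lowest-numbered register at the lowest address, so after
  // the push the block reads r1, cp, fp, lr from sp upwards.
  const char* regs[] = {"r1", "cp", "saved fp", "lr"};
  for (int i = 3; i >= 0; --i) {
    sp -= kPointerSize;
    slots[sp / kPointerSize] = regs[i];
  }

  const std::uint32_t fp = sp + 2 * kPointerSize;  // add fp, sp, Operand(2 * kPointerSize)
  std::printf("fp addresses the slot holding: %s\n", slots[fp / kPointerSize]);
}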
@@ -895,21 +924,6 @@
 }
 
 
-void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
-  // For now, we are relying on the fact that Runtime::NotifyOSR
-  // doesn't do any garbage collection which allows us to save/restore
-  // the registers without worrying about which of them contain
-  // pointers. This seems a bit fragile.
-  __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
-  {
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ CallRuntime(Runtime::kNotifyOSR, 0);
-  }
-  __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
-  __ Ret();
-}
-
-
 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   // Lookup the function in the JavaScript frame.
   __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
@@ -956,6 +970,24 @@
 }
 
 
+void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
+  // We check the stack limit as indicator that recompilation might be done.
+  Label ok;
+  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+  __ cmp(sp, Operand(ip));
+  __ b(hs, &ok);
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ CallRuntime(Runtime::kStackGuard, 0);
+  }
+  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
+          RelocInfo::CODE_TARGET);
+
+  __ bind(&ok);
+  __ Ret();
+}
+
+
 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
   // 1. Make sure we have at least one argument.
   // r0: actual number of arguments
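Note: the new Generate_OsrAfterStackCheck builtin compares sp against the stack-limit root. If sp is still above the limit the stack guard has nothing pending and the builtin simply returns; otherwise it calls Runtime::kStackGuard inside an internal frame and then tail-jumps into the OnStackReplacement builtin. The same control flow written as ordinary C++, with stand-in functions replacing the runtime call and the tail-jump (a hypothetical sketch, not V8 code):

// Mirrors the cmp / b(hs, &ok) / CallRuntime / Jump sequence above.
#include <cstdint>
#include <cstdio>

static std::uintptr_t sp = 0x1000;           // pretend stack pointer
static std::uintptr_t stack_limit = 0x2000;  // pretend value of the stack-limit root

static void CallRuntimeStackGuard() { std::puts("Runtime::kStackGuard"); }
static void JumpToOnStackReplacement() { std::puts("-> OnStackReplacement builtin"); }

static void OsrAfterStackCheckSketch() {
  if (sp >= stack_limit) {      // __ cmp(sp, ip); __ b(hs, &ok); ... __ bind(&ok); __ Ret();
    return;                     // limit not hit: nothing pending, nothing to do
  }
  CallRuntimeStackGuard();      // service the pending stack-guard request
  JumpToOnStackReplacement();   // then try to enter optimized code via OSR
}

int main() {
  OsrAfterStackCheckSketch();
}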
