The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.

Please select the desired protocol below to get the URL.

This URL has Read-Only access.

Statistics
| Branch: | Revision:

main_repo / deps / v8 / src / arm / assembler-arm-inl.h @ f230a1cf

History | View | Annotate | Download (16.4 KB)

1
// Copyright (c) 1994-2006 Sun Microsystems Inc.
2
// All Rights Reserved.
3
//
4
// Redistribution and use in source and binary forms, with or without
5
// modification, are permitted provided that the following conditions
6
// are met:
7
//
8
// - Redistributions of source code must retain the above copyright notice,
9
// this list of conditions and the following disclaimer.
10
//
11
// - Redistribution in binary form must reproduce the above copyright
12
// notice, this list of conditions and the following disclaimer in the
13
// documentation and/or other materials provided with the
14
// distribution.
15
//
16
// - Neither the name of Sun Microsystems or the names of contributors may
17
// be used to endorse or promote products derived from this software without
18
// specific prior written permission.
19
//
20
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31
// OF THE POSSIBILITY OF SUCH DAMAGE.
32

    
33
// The original source code covered by the above license above has been modified
34
// significantly by Google Inc.
35
// Copyright 2012 the V8 project authors. All rights reserved.
36

    
37
#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
38
#define V8_ARM_ASSEMBLER_ARM_INL_H_
39

    
40
#include "arm/assembler-arm.h"
41

    
42
#include "cpu.h"
43
#include "debug.h"
44

    
45

    
46
namespace v8 {
47
namespace internal {
48

    
49

    
50
// Returns the number of general-purpose registers available to the
// register allocator (a compile-time constant on ARM).
int Register::NumAllocatableRegisters() {
  return kMaxNumAllocatableRegisters;
}
53

    
54

    
55
int DwVfpRegister::NumRegisters() {
56
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
57
}
58

    
59

    
60
// Returns the number of double registers usable by the allocator:
// all available registers minus the ones reserved by the VM
// (kDoubleRegZero and the scratch register).
int DwVfpRegister::NumAllocatableRegisters() {
  return NumRegisters() - kNumReservedRegisters;
}
63

    
64

    
65
// Maps a double register code to its dense allocation index by
// compressing out the reserved registers. The reserved pair
// (kDoubleRegZero, kScratchDoubleReg) must never be passed in.
int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
  ASSERT(!reg.is(kDoubleRegZero));
  ASSERT(!reg.is(kScratchDoubleReg));
  const int code = reg.code();
  // Codes above the reserved range shift down to fill the gap.
  return (code > kDoubleRegZero.code()) ? code - kNumReservedRegisters : code;
}
73

    
74

    
75
// Inverse of ToAllocationIndex: maps a dense allocation index back to
// the register, skipping over the reserved registers. Relies on the
// reserved registers being contiguous (checked by the second ASSERT).
DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
  ASSERT(index >= 0 && index < NumAllocatableRegisters());
  ASSERT(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
         kNumReservedRegisters - 1);
  const int code = (index >= kDoubleRegZero.code())
      ? index + kNumReservedRegisters
      : index;
  return from_code(code);
}
84

    
85

    
86
// Relocates this entry after the containing code object moved by
// `delta` bytes. Only absolute internal references need fixing up;
// ARM does not otherwise use pc-relative addressing in code, so every
// other relocation mode needs no adjustment.
void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // An absolute code pointer inside the code object moves with it.
    int32_t* entry = reinterpret_cast<int32_t*>(pc_);
    *entry += delta;
  }
}
95

    
96

    
97
// Returns the call/jump target encoded at this reloc entry. Only
// valid for code targets and runtime entries.
Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_);
}
101

    
102

    
103
// Returns the address of the memory cell that holds the target
// pointer (i.e. the constant pool slot the instruction loads from),
// as opposed to the target value itself.
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(Assembler::target_pointer_address_at(pc_));
}
109

    
110

    
111
// Size in bytes of an encoded target address: one pointer on ARM,
// since targets live in a constant pool slot.
int RelocInfo::target_address_size() {
  return kPointerSize;
}
114

    
115

    
116
// Patches the target address of this reloc entry and, when requested,
// records the update with the incremental marker so the GC sees the
// new code-to-code pointer.
void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, target);
  if (mode != UPDATE_WRITE_BARRIER) return;
  if (host() == NULL || !IsCodeTarget(rmode_)) return;
  Object* target_code = Code::GetCodeFromTargetAddress(target);
  host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
      host(), this, HeapObject::cast(target_code));
}
125

    
126

    
127
// Returns the heap object embedded at this reloc entry.
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
}
131

    
132

    
133
// Returns the embedded object as a handle. The pointer read from the
// code is reinterpreted as a handle location; `origin` is unused on ARM.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_pointer_at(pc_)));
}
138

    
139

    
140
// Provide a "natural pointer" to the embedded object, which can be
// de-referenced during heap iteration. The reconstructed pointer is
// cached in a member field because the caller needs its address.
Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  reconstructed_obj_ptr_ =
      reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
  return &reconstructed_obj_ptr_;
}
148

    
149

    
150
// Patches the embedded object pointer and, when requested, records
// the write with the incremental marker. Cons strings are forbidden
// as embedded objects.
void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Assembler::set_target_pointer_at(pc_, reinterpret_cast<Address>(target));
  if (mode != UPDATE_WRITE_BARRIER) return;
  if (host() == NULL) return;
  if (!target->IsHeapObject()) return;
  host()->GetHeap()->incremental_marking()->RecordWrite(
      host(), &Memory::Object_at(pc_), HeapObject::cast(target));
}
161

    
162

    
163
// Returns the address of the external reference target. The value is
// reconstructed into a member field so its address can be returned.
Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
  return &reconstructed_adr_ptr_;
}
168

    
169

    
170
// Returns the runtime-entry target address; `origin` is unused on ARM.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return target_address();
}
174

    
175

    
176
// Patches the runtime-entry target. Skips the write (and the write
// barrier work in set_target_address) when the target is unchanged.
void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}
181

    
182

    
183
// Returns the referenced property cell as a handle. The code stores
// the address of the cell's value slot at pc_.
Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address value_address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(value_address));
}
188

    
189

    
190
// Returns the referenced property cell, recovered from the value-slot
// address stored at pc_.
Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address value_address = Memory::Address_at(pc_);
  return Cell::FromValueAddress(value_address);
}
194

    
195

    
196
// Patches the cell reference: stores the address of the cell's value
// slot at pc_ and notifies the incremental marker when requested.
void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address value_slot = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = value_slot;
  if (mode != UPDATE_WRITE_BARRIER || host() == NULL) return;
  // TODO(1550) We are passing NULL as a slot because cell can never be on
  // evacuation candidate.
  host()->GetHeap()->incremental_marking()->RecordWrite(
      host(), NULL, cell);
}
207

    
208

    
209
static const int kNoCodeAgeSequenceLength = 3;
210

    
211

    
212
// Handle-returning variant of code_age_stub(); not used on ARM, so any
// call is a bug. Returns an empty handle to satisfy the signature.
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on ARM.
  return Handle<Object>();
}
216

    
217

    
218
// Returns the code-aging stub referenced by this code-age sequence.
// The stub address is stored one pointer slot after the first
// (kNoCodeAgeSequenceLength - 1) instructions of the sequence.
Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Address stub_slot =
      pc_ + Assembler::kInstrSize * (kNoCodeAgeSequenceLength - 1);
  return Code::GetCodeFromTargetAddress(Memory::Address_at(stub_slot));
}
224

    
225

    
226
// Patches the code-age sequence to point at `stub`'s first
// instruction; mirrors the slot layout read by code_age_stub().
void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Address stub_slot =
      pc_ + Assembler::kInstrSize * (kNoCodeAgeSequenceLength - 1);
  Memory::Address_at(stub_slot) = stub->instruction_start();
}
232

    
233

    
234
// Returns the call target embedded in a patched return sequence or
// debug break slot.
Address RelocInfo::call_address() {
  // The 2 instructions offset assumes patched debug break slot or return
  // sequence.
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
}
241

    
242

    
243
// Patches the call target inside a patched return sequence or debug
// break slot, and records the write for incremental marking when this
// reloc info is attached to a host code object.
void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
  if (host() == NULL) return;
  Object* target_code = Code::GetCodeFromTargetAddress(target);
  host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
      host(), this, HeapObject::cast(target_code));
}
253

    
254

    
255
// Reads the object stored in the call sequence's object slot.
Object* RelocInfo::call_object() {
  Object** slot = call_object_address();
  return *slot;
}
258

    
259

    
260
// Stores `target` into the call sequence's object slot.
void RelocInfo::set_call_object(Object* target) {
  Object** slot = call_object_address();
  *slot = target;
}
263

    
264

    
265
// Returns the address of the object slot inside a patched return
// sequence / debug break slot (two instructions past pc_).
Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}
270

    
271

    
272
bool RelocInfo::IsPatchedReturnSequence() {
273
  Instr current_instr = Assembler::instr_at(pc_);
274
  Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
275
  // A patched return sequence is:
276
  //  ldr ip, [pc, #0]
277
  //  blx ip
278
  return ((current_instr & kLdrPCMask) == kLdrPCPattern)
279
          && ((next_instr & kBlxRegMask) == kBlxRegPattern);
280
}
281

    
282

    
283
// True when a debug break slot has been patched: an unpatched slot
// starts with the DEBUG_BREAK_NOP marker instruction.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}
287

    
288

    
289
// Dispatches this reloc entry to the matching ObjectVisitor callback,
// used by the GC and other heap walkers. The mode tests are mutually
// exclusive; debug targets additionally require that the sequence at
// pc_ was actually patched and that break points are active.
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
313

    
314

    
315
// Static-dispatch twin of Visit(Isolate*, ObjectVisitor*): routes this
// reloc entry to the StaticVisitor callback matching its mode. Keep
// the mode ordering in sync with the dynamic version above.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
340

    
341

    
342
// Immediate operand, optionally tagged with a relocation mode.
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode)  {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}
347

    
348

    
349
// Immediate operand holding the address of an external (C++) reference;
// tagged EXTERNAL_REFERENCE so the serializer can relocate it.
Operand::Operand(const ExternalReference& f)  {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}
354

    
355

    
356
// Immediate operand holding a tagged Smi value; needs no relocation.
Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ =  reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}
361

    
362

    
363
// Plain register operand: no shift register and LSL #0 (identity shift).
Operand::Operand(Register rm) {
  rm_ = rm;
  rs_ = no_reg;
  shift_op_ = LSL;
  shift_imm_ = 0;
}
369

    
370

    
371
bool Operand::is_reg() const {
372
  return rm_.is_valid() &&
373
         rs_.is(no_reg) &&
374
         shift_op_ == LSL &&
375
         shift_imm_ == 0;
376
}
377

    
378

    
379
void Assembler::CheckBuffer() {
380
  if (buffer_space() <= kGap) {
381
    GrowBuffer();
382
  }
383
  if (pc_offset() >= next_buffer_check_) {
384
    CheckConstPool(false, true);
385
  }
386
}
387

    
388

    
389
// Appends one instruction word to the buffer, ensuring capacity first.
void Assembler::emit(Instr x) {
  CheckBuffer();
  Instr* slot = reinterpret_cast<Instr*>(pc_);
  *slot = x;
  pc_ += kInstrSize;
}
394

    
395

    
396
// Returns the address of the constant pool slot that feeds the target
// pointer used at `pc`. Walks backwards over a trailing bx/blx so the
// relevant ldr-from-pc instruction is found, then decodes its signed
// 12-bit offset. The +8 accounts for ARM's pc being two instructions
// ahead of the executing instruction.
Address Assembler::target_pointer_address_at(Address pc) {
  Address target_pc = pc;
  Instr instr = Memory::int32_at(target_pc);
  // If we have a bx instruction, the instruction before the bx is
  // what we need to patch.
  static const int32_t kBxInstMask = 0x0ffffff0;
  static const int32_t kBxInstPattern = 0x012fff10;
  if ((instr & kBxInstMask) == kBxInstPattern) {
    target_pc -= kInstrSize;
    instr = Memory::int32_at(target_pc);
  }

  // With a blx instruction, the instruction before is what needs to be patched.
  if ((instr & kBlxRegMask) == kBlxRegPattern) {
    target_pc -= kInstrSize;
    instr = Memory::int32_at(target_pc);
  }

  ASSERT(IsLdrPcImmediateOffset(instr));
  int offset = instr & 0xfff;  // offset_12 is unsigned
  if ((instr & (1 << 23)) == 0) offset = -offset;  // U bit defines offset sign
  // Verify that the constant pool comes after the instruction referencing it.
  ASSERT(offset >= -4);
  return target_pc + offset + 8;
}
421

    
422

    
423
// Reads the target pointer used at `pc`. Two encodings exist: a
// movw/movt pair holding the 32-bit value as two 16-bit immediates,
// or an ldr from a constant pool slot.
Address Assembler::target_pointer_at(Address pc) {
  if (IsMovW(Memory::int32_at(pc))) {
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    Instruction* instr = Instruction::At(pc);
    Instruction* next_instr = Instruction::At(pc + kInstrSize);
    // movw holds the low half, movt the high half.
    return reinterpret_cast<Address>(
        (next_instr->ImmedMovwMovtValue() << 16) |
        instr->ImmedMovwMovtValue());
  }
  return Memory::Address_at(target_pointer_address_at(pc));
}
434

    
435

    
436
Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence on V7 or later is :
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                      @ return address
  // Or pre-V7 or cases that need frequent patching:
  //  ldr   ip, [pc, #...] @ call address
  //  blx   ip
  //                      @ return address
  // Try the shorter (ldr/blx) sequence first; fall back to the
  // three-instruction movw/movt/blx sequence otherwise.
  Address candidate = pc - 2 * Assembler::kInstrSize;
  Instr candidate_instr(Memory::int32_at(candidate));
  if (IsLdrPcImmediateOffset(candidate_instr)) {
    return candidate;
  }
  candidate = pc - 3 * Assembler::kInstrSize;
  ASSERT(IsMovW(Memory::int32_at(candidate)) &&
         IsMovT(Memory::int32_at(candidate + kInstrSize)));
  return candidate;
}
458

    
459

    
460
// Inverse of target_address_from_return_address: given the start of a
// call sequence, returns the address execution resumes at after the
// call. The ldr/blx form is two instructions; movw/movt/blx is three.
Address Assembler::return_address_from_call_start(Address pc) {
  if (IsLdrPcImmediateOffset(Memory::int32_at(pc))) {
    return pc + 2 * kInstrSize;
  }
  ASSERT(IsMovW(Memory::int32_at(pc)));
  ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
  return pc + 3 * kInstrSize;
}
469

    
470

    
471
// Used by the deserializer: writes `target` directly into the given
// constant pool slot. No instructions change, so no icache flush.
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}
475

    
476

    
477
// Writes an external target directly into its constant pool slot;
// like deserialization_set_special_target_at, no icache flush needed.
void Assembler::set_external_target_at(Address constant_pool_entry,
                                       Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}
481

    
482

    
483
// Encodes a 16-bit immediate into the split imm4:imm12 field layout
// used by the ARM movw/movt instructions.
static Instr EncodeMovwImmediate(uint32_t immediate) {
  ASSERT(immediate < 0x10000);
  const uint32_t imm4 = (immediate & 0xf000) << 4;  // top nibble -> bits 19:16
  const uint32_t imm12 = immediate & 0xfff;         // low 12 bits -> bits 11:0
  return imm4 | imm12;
}
487

    
488

    
489
// Patches the target pointer used at `pc`. For a movw/movt pair the
// immediate fields of both instructions are rewritten in place and the
// icache flushed; for the ldr-from-constant-pool form only the pool
// slot is updated, which needs no flush.
void Assembler::set_target_pointer_at(Address pc, Address target) {
  if (IsMovW(Memory::int32_at(pc))) {
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = reinterpret_cast<uint32_t>(target);
    // Rewrite the movw (low half) immediate field.
    uint32_t intermediate = instr_ptr[0];
    intermediate &= ~EncodeMovwImmediate(0xFFFF);
    intermediate |= EncodeMovwImmediate(immediate & 0xFFFF);
    instr_ptr[0] = intermediate;
    // Rewrite the movt (high half) immediate field.
    intermediate = instr_ptr[1];
    intermediate &= ~EncodeMovwImmediate(0xFFFF);
    intermediate |= EncodeMovwImmediate(immediate >> 16);
    instr_ptr[1] = intermediate;
    ASSERT(IsMovW(Memory::int32_at(pc)));
    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
    CPU::FlushICache(pc, 2 * kInstrSize);
  } else {
    ASSERT(IsLdrPcImmediateOffset(Memory::int32_at(pc)));
    Memory::Address_at(target_pointer_address_at(pc)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   CPU::FlushICache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    // ldr   ip, [pc, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  }
}
518

    
519

    
520
// On ARM, target addresses and target pointers share one encoding.
Address Assembler::target_address_at(Address pc) {
  return target_pointer_at(pc);
}
523

    
524

    
525
// On ARM, setting a target address is identical to setting a pointer.
void Assembler::set_target_address_at(Address pc, Address target) {
  set_target_pointer_at(pc, target);
}
528

    
529

    
530
} }  // namespace v8::internal
531

    
532
#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_