The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.

Please select the desired protocol below to get the URL.

This URL has Read-Only access.

Statistics
| Branch: | Revision:

main_repo / deps / v8 / src / mips / assembler-mips-inl.h @ f230a1cf

History | View | Annotate | Download (13.5 KB)

1

    
2
// Copyright (c) 1994-2006 Sun Microsystems Inc.
3
// All Rights Reserved.
4
//
5
// Redistribution and use in source and binary forms, with or without
6
// modification, are permitted provided that the following conditions are
7
// met:
8
//
9
// - Redistributions of source code must retain the above copyright notice,
10
// this list of conditions and the following disclaimer.
11
//
12
// - Redistribution in binary form must reproduce the above copyright
13
// notice, this list of conditions and the following disclaimer in the
14
// documentation and/or other materials provided with the distribution.
15
//
16
// - Neither the name of Sun Microsystems or the names of contributors may
17
// be used to endorse or promote products derived from this software without
18
// specific prior written permission.
19
//
20
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
21
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
22
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
23
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
24
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
25
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
26
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
27
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
28
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
29
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
30
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31

    
32
// The original source code covered by the above license above has been
33
// modified significantly by Google Inc.
34
// Copyright 2012 the V8 project authors. All rights reserved.
35

    
36

    
37
#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
38
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_
39

    
40
#include "mips/assembler-mips.h"
41

    
42
#include "cpu.h"
43
#include "debug.h"
44

    
45

    
46
namespace v8 {
47
namespace internal {
48

    
49
// -----------------------------------------------------------------------------
50
// Operand and MemOperand.
51

    
52
// Immediate operand; |rmode| records how (if at all) the immediate
// must be relocated when the code moves.
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  imm32_ = immediate;
  rmode_ = rmode;
  rm_ = no_reg;  // Pure immediate: no register component.
}
57

    
58

    
59
// Immediate operand holding the raw address of an external (C++)
// reference, tagged for relocation as EXTERNAL_REFERENCE.
Operand::Operand(const ExternalReference& f) {
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
  rm_ = no_reg;  // Pure immediate: no register component.
}
64

    
65

    
66
// Immediate operand from a tagged Smi.  Smis are immune to GC moves,
// so no relocation is recorded (NONE32).
Operand::Operand(Smi* value) {
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
  rm_ = no_reg;  // Pure immediate: no register component.
}
71

    
72

    
73
// Register operand; immediate fields are left untouched.
Operand::Operand(Register rm) : rm_(rm) {}
76

    
77

    
78
bool Operand::is_reg() const {
79
  return rm_.is_valid();
80
}
81

    
82

    
83
int Register::NumAllocatableRegisters() {
84
    return kMaxNumAllocatableRegisters;
85
}
86

    
87

    
88
int DoubleRegister::NumRegisters() {
89
    return FPURegister::kMaxNumRegisters;
90
}
91

    
92

    
93
int DoubleRegister::NumAllocatableRegisters() {
94
    return FPURegister::kMaxNumAllocatableRegisters;
95
}
96

    
97

    
98
// Map an allocatable double register to its dense allocation index.
// Allocatable doubles use even FPU register codes, so the index is
// simply code/2.  kDoubleRegZero and kLithiumScratchDouble are
// reserved and must never be handed to the allocator.
int FPURegister::ToAllocationIndex(FPURegister reg) {
  ASSERT(reg.is_valid());
  ASSERT(!reg.is(kDoubleRegZero));
  ASSERT(!reg.is(kLithiumScratchDouble));
  const int code = reg.code();
  ASSERT(code % 2 == 0);
  ASSERT(code / 2 < kMaxNumAllocatableRegisters);
  return code / 2;
}
106

    
107

    
108
// -----------------------------------------------------------------------------
109
// RelocInfo.
110

    
111
// Relocate this entry after its code object has moved by |delta| bytes.
void RelocInfo::apply(intptr_t delta) {
  if (IsCodeTarget(rmode_)) {
    // Compare the upper (non-kImm28Mask) address bits of the target and
    // of the new pc.  If they differ, a direct jump can no longer encode
    // the target, so rewrite it as a jump through a register.
    uint32_t scope1 = (uint32_t) target_address() & ~kImm28Mask;
    uint32_t scope2 = reinterpret_cast<uint32_t>(pc_) & ~kImm28Mask;

    if (scope1 != scope2) {
      Assembler::JumpLabelToJumpRegister(pc_);
    }
  }
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(p, delta);
    // The patched instructions must be flushed from the instruction cache.
    CPU::FlushICache(p, count * sizeof(uint32_t));
  }
}
127

    
128

    
129
// The code-target/runtime-entry address currently encoded at pc_.
Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_);
}
133

    
134

    
135
// Address used by the serializer to delimit the instructions that
// encode the target; see the long comment below for why this is the
// *end* of the patched sequence on MIPS.
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows J/JAL/JR/JALR
  // instruction.
  return reinterpret_cast<Address>(
    pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
}
157

    
158

    
159
// Size in bytes the serializer steps over for a target at this entry.
int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}
162

    
163

    
164
// Patch the encoded target to |target|.  Unless the barrier is
// suppressed via |mode|, record the new code reference with the
// incremental marker so the GC tracks it.
void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
173

    
174

    
175
// Given the return address of a call, locate the start of the
// instructions that encode the call target.
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}
178

    
179

    
180
// The embedded object pointer, decoded from the instruction stream at pc_.
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
}
184

    
185

    
186
// Handle wrapping the embedded object pointer; |origin| is unused on MIPS.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_)));
}
191

    
192

    
193
Object** RelocInfo::target_object_address() {
  // Provide a "natural pointer" to the embedded object,
  // which can be de-referenced during heap iteration.
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  // The pointer is mixed into the instruction bits, so there is no word
  // in memory to point at; reconstruct the value into a member field and
  // hand out that field's address.  NOTE(review): the returned slot is
  // only valid until the next call on this RelocInfo.
  reconstructed_obj_ptr_ =
      reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
  return &reconstructed_obj_ptr_;
}
201

    
202

    
203
// Patch the embedded object pointer to |target| and, unless suppressed,
// notify the incremental marker (only needed for heap objects).
void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}
214

    
215

    
216
// Like target_object_address(): the external address is encoded in the
// instructions, so reconstruct it into a member and return that slot.
Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
  return &reconstructed_adr_ptr_;
}
221

    
222

    
223
// Runtime-entry target address; |origin| is unused on MIPS.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return target_address();
}
227

    
228

    
229
// Repoint a runtime entry; skips the patch (and icache flush it implies)
// when the target is already correct.
void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}
234

    
235

    
236
// Handle to the referenced Cell; the cell pointer is stored as a raw
// word at pc_ (not mixed into instruction bits).
Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Cell** cell_slot = reinterpret_cast<Cell**>(Memory::Address_at(pc_));
  return Handle<Cell>(cell_slot);
}
241

    
242

    
243
// The referenced Cell, recovered from the value address stored at pc_.
Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}
247

    
248

    
249
// Point this entry at |cell| by storing the address of the cell's value
// field, then record the reference with the incremental marker unless
// the barrier is suppressed.
void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}
260

    
261

    
262
// Length (in instructions) of the code-age prologue sequence; the stub
// address lives in the sequence's last instruction slot (see
// code_age_stub() / set_code_age_stub() below).
static const int kNoCodeAgeSequenceLength = 7;
263

    
264

    
265
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on MIPS.
  return Handle<Object>();
}
269

    
270

    
271
// The code-aging stub, whose entry address is stored in the last slot of
// the kNoCodeAgeSequenceLength-instruction sequence starting at pc_.
Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Memory::Address_at(pc_ + Assembler::kInstrSize *
                         (kNoCodeAgeSequenceLength - 1)));
}
277

    
278

    
279
// Install |stub| as the code-aging stub by writing its entry point into
// the last slot of the age sequence (mirror of code_age_stub()).
void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Memory::Address_at(pc_ + Assembler::kInstrSize *
                     (kNoCodeAgeSequenceLength - 1)) =
      stub->instruction_start();
}
285

    
286

    
287
// Target of a patched debug call (JS return site or debug-break slot).
Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes mips patched return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  return Assembler::target_address_at(pc_);
}
295

    
296

    
297
// Patch the debug-call target to |target| and unconditionally record the
// code reference with the incremental marker (debug patching always
// needs the barrier).
void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes mips patched return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  Assembler::set_target_address_at(pc_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
310

    
311

    
312
// The object stored in the patched call sequence's object slot.
Object* RelocInfo::call_object() {
  return *call_object_address();
}
315

    
316

    
317
// Slot holding the call object: two instructions past the start of the
// patched debug sequence.
Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  byte* slot = pc_ + 2 * Assembler::kInstrSize;
  return reinterpret_cast<Object**>(slot);
}
322

    
323

    
324
// Store |target| into the patched call sequence's object slot.
void RelocInfo::set_call_object(Object* target) {
  Object** slot = call_object_address();
  *slot = target;
}
327

    
328

    
329
// A JS return site counts as "patched" when its first three instructions
// are LUI/ORI (building the call target) followed by either JAL or a
// SPECIAL-encoded JALR.
bool RelocInfo::IsPatchedReturnSequence() {
  Instr instr0 = Assembler::instr_at(pc_);
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
  Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize);
  bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
                         (instr1 & kOpcodeMask) == ORI &&
                         ((instr2 & kOpcodeMask) == JAL ||
                          ((instr2 & kOpcodeMask) == SPECIAL &&
                           (instr2 & kFunctionFieldMask) == JALR)));
  return patched_return;
}
340

    
341

    
342
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
343
  Instr current_instr = Assembler::instr_at(pc_);
344
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
345
}
346

    
347

    
348
// Dispatch this reloc entry to the matching ObjectVisitor callback,
// based on its relocation mode.
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Debug targets are only visited when break points are active and the
  // site has actually been patched with a debug call.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
372

    
373

    
374
// Static-dispatch twin of Visit(Isolate*, ObjectVisitor*): routes this
// reloc entry to the matching StaticVisitor callback.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Debug targets are only visited when break points are active and the
  // site has actually been patched with a debug call.
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
399

    
400

    
401
// -----------------------------------------------------------------------------
402
// Assembler.
403

    
404

    
405
void Assembler::CheckBuffer() {
406
  if (buffer_space() <= kGap) {
407
    GrowBuffer();
408
  }
409
}
410

    
411

    
412
void Assembler::CheckTrampolinePoolQuick() {
413
  if (pc_offset() >= next_buffer_check_) {
414
    CheckTrampolinePool();
415
  }
416
}
417

    
418

    
419
// Append one 32-bit instruction to the buffer.  The buffer is grown
// beforehand (unless growth is blocked) and the trampoline pool is
// checked afterwards.
void Assembler::emit(Instr x) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}
427

    
428

    
429
} }  // namespace v8::internal
430

    
431
#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_