The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.

Please select the desired protocol below to get the URL.

This URL has Read-Only access.

Statistics
| Branch: | Revision:

main_repo / deps / v8 / src / x64 / assembler-x64-inl.h @ f230a1cf

History | View | Annotate | Download (16.4 KB)

1
// Copyright 2012 the V8 project authors. All rights reserved.
2
// Redistribution and use in source and binary forms, with or without
3
// modification, are permitted provided that the following conditions are
4
// met:
5
//
6
//     * Redistributions of source code must retain the above copyright
7
//       notice, this list of conditions and the following disclaimer.
8
//     * Redistributions in binary form must reproduce the above
9
//       copyright notice, this list of conditions and the following
10
//       disclaimer in the documentation and/or other materials provided
11
//       with the distribution.
12
//     * Neither the name of Google Inc. nor the names of its
13
//       contributors may be used to endorse or promote products derived
14
//       from this software without specific prior written permission.
15
//
16
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27

    
28
#ifndef V8_X64_ASSEMBLER_X64_INL_H_
29
#define V8_X64_ASSEMBLER_X64_INL_H_
30

    
31
#include "x64/assembler-x64.h"
32

    
33
#include "cpu.h"
34
#include "debug.h"
35
#include "v8memory.h"
36

    
37
namespace v8 {
38
namespace internal {
39

    
40

    
41
// -----------------------------------------------------------------------------
42
// Implementation of Assembler
43

    
44

    
45
// Opcode byte of the 32-bit pc-relative CALL instruction (used below to
// recognize code-age call sequences).
static const byte kCallOpcode = 0xE8;
// Byte length of the sequence emitted for not-yet-aged code.
// NOTE(review): presumably matches the codegen's young-code prologue --
// confirm against the code-aging emitter.
static const int kNoCodeAgeSequenceLength = 6;
47

    
48

    
49
// Writes a 32-bit little-endian value at the current emission position
// and advances pc_ past it.
void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}


// Writes a pointer-sized value and, when rmode is a real relocation mode,
// records relocation info so the value can be patched on code movement.
void Assembler::emitp(void* x, RelocInfo::Mode rmode) {
  uintptr_t value = reinterpret_cast<uintptr_t>(x);
  Memory::uintptr_at(pc_) = value;
  if (!RelocInfo::IsNone(rmode)) {
    RecordRelocInfo(rmode, value);
  }
  pc_ += sizeof(uintptr_t);
}


// Writes a 64-bit little-endian value and advances pc_.
void Assembler::emitq(uint64_t x) {
  Memory::uint64_at(pc_) = x;
  pc_ += sizeof(uint64_t);
}


// Writes a 16-bit little-endian value and advances pc_.
void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}
75

    
76

    
77
// Emits a 32-bit index into code_targets_ identifying |target| and records
// relocation info for the site (tagged with |ast_id| when type feedback is
// attached). Consecutive emissions of the same target reuse the last slot.
void Assembler::emit_code_target(Handle<Code> target,
                                 RelocInfo::Mode rmode,
                                 TypeFeedbackId ast_id) {
  ASSERT(RelocInfo::IsCodeTarget(rmode) ||
      rmode == RelocInfo::CODE_AGE_SEQUENCE);
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
  } else {
    RecordRelocInfo(rmode);
  }
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}
96

    
97

    
98
// Emits a runtime entry as a 32-bit offset from the start of the isolate's
// code range; requires the code range to exist so the offset fits in 32 bits.
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  ASSERT(isolate()->code_range()->exists());
  RecordRelocInfo(rmode);
  emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
}
104

    
105

    
106
// REX.W prefix helpers. Base byte 0x48 = REX with the W bit set (64-bit
// operand size); bit 2 (R) extends the ModR/M reg field, bit 0 (B) extends
// the rm/base field, and Operand carries its precomputed X/B bits in rex_.

// REX.W for a reg, rm_reg register pair.
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


// REX.W with an XMM register in the reg field and a general register in rm.
void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


// REX.W with a general register in the reg field and an XMM register in rm.
void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


// REX.W for a register and a memory operand (operand supplies X/B bits).
void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


// REX.W for an XMM register and a memory operand.
void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


// REX.W for a single rm register (opcode-embedded or one-operand forms).
void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


// REX.W for a bare memory operand.
void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}
140

    
141

    
142
// REX prefix helpers without the W bit (base byte 0x40): 32-bit operand
// size, but still carrying the R/X/B register-extension bits. These always
// emit a prefix even when no extension bit is set (needed e.g. to address
// the low byte of certain registers).

// REX for a reg, rm_reg register pair.
void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


// REX for a register and a memory operand.
void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2  | op.rex_);
}


// REX for a single rm register.
void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


// REX for a bare memory operand.
void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}
160

    
161

    
162
// Optional REX helpers: emit a REX prefix only when at least one
// register-extension bit (R/X/B) is actually needed, saving a byte for
// the common low-register case.

// Optional REX for a reg, rm_reg register pair.
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


// Optional REX for a register and a memory operand.
void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits =  reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


// Optional REX for an XMM register and a memory operand.
void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


// Optional REX for two XMM registers.
void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


// Optional REX for an XMM register and a general register.
void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


// Optional REX for a general register and an XMM register.
void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


// Optional REX for a single rm register (only REX.B can be needed).
void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


// Optional REX for a bare memory operand.
void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}
206

    
207

    
208
// Returns the absolute target of the pc-relative 32-bit displacement
// stored at |pc| (relative to the end of the 4-byte field).
Address Assembler::target_address_at(Address pc) {
  return Memory::int32_at(pc) + pc + 4;
}


// Stores |target| as a pc-relative 32-bit displacement at |pc| and flushes
// the instruction cache for the patched word.
void Assembler::set_target_address_at(Address pc, Address target) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  CPU::FlushICache(pc, sizeof(int32_t));
}


// Maps a return address (the instruction after a call) back to the address
// of the call's target field.
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


// Looks up the code-target handle for the 32-bit table index stored at |pc|
// (the index written by emit_code_target above).
Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}


// Reconstructs a runtime entry address from the 32-bit code-range-relative
// offset stored at |pc| (the inverse of emit_runtime_entry).
Address Assembler::runtime_entry_at(Address pc) {
  ASSERT(isolate()->code_range()->exists());
  return Memory::int32_at(pc) + isolate()->code_range()->start();
}
233

    
234
// -----------------------------------------------------------------------------
235
// Implementation of RelocInfo
236

    
237
// The modes possibly affected by apply must be in kApplyMask.
// Fixes up the value at pc_ after the enclosing code object moved by
// |delta| bytes, keeping both absolute and pc-relative encodings valid.
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // pc-relative displacement: the site itself moved by delta, so the
    // stored displacement must shrink by the same amount.
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(int32_t));
  } else if (rmode_ == CODE_AGE_SEQUENCE) {
    // Code-age sequences start with a CALL; relocate its displacement.
    if (*pc_ == kCallOpcode) {
      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
      *p -= static_cast<int32_t>(delta);  // Relocate entry.
      CPU::FlushICache(p, sizeof(uint32_t));
    }
  }
}
254

    
255

    
256
// Returns the absolute target address encoded at this relocation site.
Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_);
}


// Returns the address of the cell that holds (or pc-relatively encodes)
// the target -- on x64 that is the reloc site itself.
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}
268

    
269

    
270
int RelocInfo::target_address_size() {
271
  if (IsCodedSpecially()) {
272
    return Assembler::kSpecialTargetSize;
273
  } else {
274
    return kPointerSize;
275
  }
276
}
277

    
278

    
279
// Repoints this code-target/runtime-entry site at |target| and, when
// requested, notifies the incremental marker so the embedded code
// reference stays tracked by the GC.
void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
288

    
289

    
290
// Returns the object stored as an absolute pointer at pc_.
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


// Returns a handle to the target: the embedded object itself, or -- for
// code targets -- the handle stored in the assembler's code-target table.
Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


// Returns the slot holding the embedded object pointer.
Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}


// Returns the slot holding an external (C++) reference.
Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}
316

    
317

    
318
// Stores |target| at pc_, flushes the patched word from the instruction
// cache, and (when requested) fires the incremental-marking write barrier
// for heap objects.
void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Memory::Object_at(pc_) = target;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}
330

    
331

    
332
// Returns the runtime entry this reloc site points to, decoded via the
// assembler's code-range-relative encoding.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}


// Repoints this runtime-entry site at |target|; no-op when unchanged.
void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}
343

    
344

    
345
// Returns a handle to the cell whose value address is stored at pc_.
Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


// Returns the cell itself, recovered from the stored value address.
Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


// Stores the address of |cell|'s value field at pc_, flushes the patched
// word, and fires the write barrier for the new cell reference.
void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}
371

    
372

    
373
// Returns true when the return sequence at pc_ has been patched by the
// debugger into a call sequence.
bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, address); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[Assembler::kMoveAddressIntoScratchRegisterInstructionLength] !=
         0xCC;
#else
  return false;
#endif
}


// A debug-break slot is patched when it no longer holds its original nop.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}
392

    
393

    
394
// Returns a handle to the code-age stub targeted by the call at pc_
// (the call's code-target table index sits right after the opcode byte).
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return origin->code_target_object_handle_at(pc_ + 1);
}


// Returns the code-age stub, resolved from the call's pc-relative target.
Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + 1));
}


// Repoints the code-age call at pc_ to |stub|'s entry.
void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(*pc_ == kCallOpcode);
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
}
414

    
415

    
416
// Returns the debugger-call target stored inside a patched return /
// debug-break-slot sequence.
Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}


// Rewrites the debugger-call target in a patched sequence, flushes the
// patched word, and records the embedded code reference for the GC.
void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
  CPU::FlushICache(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset,
                   sizeof(Address));
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


// Reads the object slot associated with a patched call sequence.
Object* RelocInfo::call_object() {
  return *call_object_address();
}


// Writes the object slot associated with a patched call sequence.
void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


// Returns the object slot inside a patched sequence.
Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}
455

    
456

    
457
// Dispatches this reloc entry to the matching ObjectVisitor callback,
// flushing the instruction cache where the visitor may have rewritten an
// embedded value in place.
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Patched return/debug-break sites are only visited while breakpoints
  // are active.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
483

    
484

    
485
// Static-visitor twin of Visit(Isolate*, ObjectVisitor*): same dispatch,
// but the callbacks are resolved at compile time via StaticVisitor.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Patched return/debug-break sites are only visited while breakpoints
  // are active.
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
512

    
513

    
514
// -----------------------------------------------------------------------------
515
// Implementation of Operand
516

    
517
// Encodes the ModR/M byte in buf_[0]: mod in bits 7-6, rm in bits 2-0
// (the reg field is filled in later by the instruction emitter).
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}


// Encodes the SIB byte in buf_[1]: scale, index and base; the registers'
// extension bits go into REX.X and REX.B respectively.
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}


// Appends an 8-bit displacement after the ModR/M (and optional SIB) byte.
void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}


// Appends a 32-bit displacement after the ModR/M (and optional SIB) byte.
void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}
550

    
551

    
552
} }  // namespace v8::internal
553

    
554
#endif  // V8_X64_ASSEMBLER_X64_INL_H_