// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license above has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

// A light-weight IA32 Assembler.

#ifndef V8_IA32_ASSEMBLER_IA32_INL_H_
#define V8_IA32_ASSEMBLER_IA32_INL_H_

#include "ia32/assembler-ia32.h"

#include "cpu.h"
#include "debug.h"

namespace v8 {
namespace internal {


static const byte kCallOpcode = 0xE8;
static const int kNoCodeAgeSequenceLength = 5;
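// 0xE8 is the opcode byte of the five-byte "call rel32" instruction: one
// opcode byte followed by a 32-bit pc-relative displacement, which matches
// the five-byte kNoCodeAgeSequenceLength. The code-age and debug patching
// code below relies on that layout when it reads or writes the displacement
// at pc_ + 1.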


// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (IsRuntimeEntry(rmode_) || IsCodeTarget(rmode_)) {
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p -= delta;  // Relocate entry.
    CPU::FlushICache(p, sizeof(uint32_t));
  } else if (rmode_ == CODE_AGE_SEQUENCE) {
    if (*pc_ == kCallOpcode) {
      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
      *p -= delta;  // Relocate entry.
      CPU::FlushICache(p, sizeof(uint32_t));
    }
  } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
    // Special handling of js_return when a break point is set (call
    // instruction has been inserted).
    int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
    *p -= delta;  // Relocate entry.
    CPU::FlushICache(p, sizeof(uint32_t));
  } else if (rmode_ == DEBUG_BREAK_SLOT && IsPatchedDebugBreakSlotSequence()) {
    // Special handling of a debug break slot when a break point is set (call
    // instruction has been inserted).
    int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
    *p -= delta;  // Relocate entry.
    CPU::FlushICache(p, sizeof(uint32_t));
  } else if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // Relocate entry.
    CPU::FlushICache(p, sizeof(uint32_t));
  }
}
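// Note on apply() above: entries that encode a pc-relative displacement
// (code targets, runtime entries, and patched call sites) are adjusted in
// the opposite direction from absolute internal references. When the code
// object moves, pc moves with it, so a stored "target - pc" displacement has
// to be compensated, whereas an absolute pointer into the same object simply
// moves along with the object.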


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}


void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  Assembler::set_target_address_at(pc_, target);
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_Handle_at(pc_);
}


Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return &Memory::Object_at(pc_);
}


void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  ASSERT(!target->IsConsString());
  Memory::Object_at(pc_) = target;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return reinterpret_cast<Address>(*reinterpret_cast<int32_t*>(pc_));
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) set_target_address(target, mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because cell can never be on
    // evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return Memory::Object_Handle_at(pc_ + 1);
}


Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + 1));
}


void RelocInfo::set_code_age_stub(Code* stub) {
  ASSERT(*pc_ == kCallOpcode);
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
}


Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Assembler::target_address_at(pc_ + 1);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Assembler::set_target_address_at(pc_ + 1, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 1);
}


bool RelocInfo::IsPatchedReturnSequence() {
  return *pc_ == kCallOpcode;
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}


void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  #ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}



Immediate::Immediate(int x)  {
  x_ = x;
  rmode_ = RelocInfo::NONE32;
}


Immediate::Immediate(const ExternalReference& ext) {
  x_ = reinterpret_cast<int32_t>(ext.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Immediate::Immediate(Label* internal_offset) {
  x_ = reinterpret_cast<int32_t>(internal_offset);
  rmode_ = RelocInfo::INTERNAL_REFERENCE;
}


Immediate::Immediate(Handle<Object> handle) {
  AllowDeferredHandleDereference using_raw_address;
  // Verify all Objects referred by code are NOT in new space.
  Object* obj = *handle;
  if (obj->IsHeapObject()) {
    ASSERT(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
    x_ = reinterpret_cast<intptr_t>(handle.location());
    rmode_ = RelocInfo::EMBEDDED_OBJECT;
  } else {
    // no relocation needed
    x_ =  reinterpret_cast<intptr_t>(obj);
    rmode_ = RelocInfo::NONE32;
  }
}


Immediate::Immediate(Smi* value) {
  x_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Immediate::Immediate(Address addr) {
  x_ = reinterpret_cast<int32_t>(addr);
  rmode_ = RelocInfo::NONE32;
}


void Assembler::emit(uint32_t x) {
  *reinterpret_cast<uint32_t*>(pc_) = x;
  pc_ += sizeof(uint32_t);
}


void Assembler::emit(Handle<Object> handle) {
  AllowDeferredHandleDereference heap_object_check;
  // Verify all Objects referred by code are NOT in new space.
  Object* obj = *handle;
  ASSERT(!isolate()->heap()->InNewSpace(obj));
  if (obj->IsHeapObject()) {
    emit(reinterpret_cast<intptr_t>(handle.location()),
         RelocInfo::EMBEDDED_OBJECT);
  } else {
    // no relocation needed
    emit(reinterpret_cast<intptr_t>(obj));
  }
}


void Assembler::emit(uint32_t x, RelocInfo::Mode rmode, TypeFeedbackId id) {
  if (rmode == RelocInfo::CODE_TARGET && !id.IsNone()) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, id.ToInt());
  } else if (!RelocInfo::IsNone(rmode)
      && rmode != RelocInfo::CODE_AGE_SEQUENCE) {
    RecordRelocInfo(rmode);
  }
  emit(x);
}


void Assembler::emit(Handle<Code> code,
                     RelocInfo::Mode rmode,
                     TypeFeedbackId id) {
  AllowDeferredHandleDereference embedding_raw_address;
  emit(reinterpret_cast<intptr_t>(code.location()), rmode, id);
}


void Assembler::emit(const Immediate& x) {
  if (x.rmode_ == RelocInfo::INTERNAL_REFERENCE) {
    Label* label = reinterpret_cast<Label*>(x.x_);
    emit_code_relative_offset(label);
    return;
  }
  if (!RelocInfo::IsNone(x.rmode_)) RecordRelocInfo(x.rmode_);
  emit(x.x_);
}


void Assembler::emit_code_relative_offset(Label* label) {
  if (label->is_bound()) {
    int32_t pos;
    pos = label->pos() + Code::kHeaderSize - kHeapObjectTag;
    emit(pos);
  } else {
    emit_disp(label, Displacement::CODE_RELATIVE);
  }
}


void Assembler::emit_w(const Immediate& x) {
  ASSERT(RelocInfo::IsNone(x.rmode_));
  uint16_t value = static_cast<uint16_t>(x.x_);
  reinterpret_cast<uint16_t*>(pc_)[0] = value;
  pc_ += sizeof(uint16_t);
}


Address Assembler::target_address_at(Address pc) {
  return pc + sizeof(int32_t) + *reinterpret_cast<int32_t*>(pc);
}


void Assembler::set_target_address_at(Address pc, Address target) {
  int32_t* p = reinterpret_cast<int32_t*>(pc);
  *p = target - (pc + sizeof(int32_t));
  CPU::FlushICache(p, sizeof(int32_t));
}
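// Note on target_address_at() and set_target_address_at() above: the target
// is stored as a signed 32-bit displacement relative to the first byte after
// the displacement field itself (the operand encoding used by "call"/"jmp"
// rel32). Reading therefore adds pc + sizeof(int32_t); writing stores
// target - (pc + sizeof(int32_t)) and then flushes the instruction cache for
// the patched word.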


Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


Displacement Assembler::disp_at(Label* L) {
  return Displacement(long_at(L->pos()));
}


void Assembler::disp_at_put(Label* L, Displacement disp) {
  long_at_put(L->pos(), disp.data());
}


void Assembler::emit_disp(Label* L, Displacement::Type type) {
  Displacement disp(L, type);
  L->link_to(pc_offset());
  emit(static_cast<int>(disp.data()));
}


void Assembler::emit_near_disp(Label* L) {
  byte disp = 0x00;
  if (L->is_near_linked()) {
    int offset = L->near_link_pos() - pc_offset();
    ASSERT(is_int8(offset));
    disp = static_cast<byte>(offset & 0xFF);
  }
  L->link_to(pc_offset(), Label::kNear);
  *pc_++ = disp;
}


void Operand::set_modrm(int mod, Register rm) {
  ASSERT((mod & -4) == 0);
  buf_[0] = mod << 6 | rm.code();
  len_ = 1;
}


void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT((scale & -4) == 0);
  // Use SIB with no index register only for base esp.
  ASSERT(!index.is(esp) || base.is(esp));
  buf_[1] = scale << 6 | index.code() << 3 | base.code();
  len_ = 2;
}
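// Note on set_modrm() and set_sib() above: buf_[0] is the ModR/M byte
// (mod in bits 7..6, /reg in bits 5..3, r/m in bits 2..0) and buf_[1] is the
// optional SIB byte (scale, index, base). The /reg field is left as zero
// here; it is presumably combined with the instruction's register operand
// when the operand is actually emitted. As a hypothetical example, an
// [esp + disp8] operand would use set_modrm(1, esp) giving 0x44 and
// set_sib(times_1, esp, esp) giving 0x24 (index == esp meaning "no index"),
// followed by the 8-bit displacement.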


void Operand::set_disp8(int8_t disp) {
  ASSERT(len_ == 1 || len_ == 2);
  *reinterpret_cast<int8_t*>(&buf_[len_++]) = disp;
}


void Operand::set_dispr(int32_t disp, RelocInfo::Mode rmode) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
  rmode_ = rmode;
}

Operand::Operand(Register reg) {
  // reg
  set_modrm(3, reg);
}


Operand::Operand(XMMRegister xmm_reg) {
  Register reg = { xmm_reg.code() };
  set_modrm(3, reg);
}


Operand::Operand(int32_t disp, RelocInfo::Mode rmode) {
  // [disp/r]
  set_modrm(0, ebp);
  set_dispr(disp, rmode);
}

} }  // namespace v8::internal

#endif  // V8_IA32_ASSEMBLER_IA32_INL_H_