// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_IA32_LITHIUM_CODEGEN_IA32_H_
#define V8_IA32_LITHIUM_CODEGEN_IA32_H_

#include "ia32/lithium-ia32.h"

#include "checks.h"
#include "deoptimizer.h"
#include "ia32/lithium-gap-resolver-ia32.h"
#include "lithium-codegen.h"
#include "safepoint-table.h"
#include "scopes.h"
#include "v8utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class LGapNode;
class SafepointGenerator;

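// LCodeGen generates IA32 code for a compiled Lithium chunk.  Code is
// emitted by the Do<Instruction> methods declared below, one for each
// concrete Lithium instruction.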
class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        dynamic_frame_alignment_(false),
        support_aligned_spilled_doubles_(false),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        x87_stack_(assembler),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

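  // An eager frame is built in the prologue; a deferred frame is only built
  // on entry to deferred code that needs one.  Code stubs, which normally
  // run frameless, rely on the deferred variant.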
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Operand ToOperand(LOperand* op) const;
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;
  X87Register ToX87Register(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Immediate ToImmediate(LOperand* op, const Representation& r) const {
    return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
  }
  double ToDouble(LConstantOperand* op) const;

  // Support for non-SSE2 (x87) floating point stack handling.
  // These functions maintain the mapping of physical stack registers to our
  // virtual registers between instructions.
  enum X87OperandType { kX87DoubleOperand, kX87FloatOperand, kX87IntOperand };

  void X87Mov(X87Register reg, Operand src,
      X87OperandType operand = kX87DoubleOperand);
  void X87Mov(Operand dst, X87Register reg,
      X87OperandType operand = kX87DoubleOperand);

  void X87PrepareBinaryOp(
      X87Register left, X87Register right, X87Register result);

  void X87LoadForUsage(X87Register reg);
  void X87LoadForUsage(X87Register reg1, X87Register reg2);
  void X87PrepareToWrite(X87Register reg) { x87_stack_.PrepareToWrite(reg); }
  void X87CommitWrite(X87Register reg) { x87_stack_.CommitWrite(reg); }

  void X87Fxch(X87Register reg, int other_slot = 0) {
    x87_stack_.Fxch(reg, other_slot);
  }
  void X87Free(X87Register reg) {
    x87_stack_.Free(reg);
  }

  bool X87StackEmpty() {
    return x87_stack_.depth() == 0;
  }

  Handle<Object> ToHandle(LConstantOperand* op) const;

  // The operand denoting the second word (the one with a higher address) of
  // a double stack slot.
  Operand HighOperand(LOperand* op);

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagI(LInstruction* instr,
                            LOperand* value,
                            IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

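  // Reserves relocation-info space needed when calls are patched for lazy
  // deoptimization (inferred from the name; see lithium-codegen-ia32.cc).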
  void EnsureRelocSpaceForDeoptimization();

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  StrictModeFlag strict_mode_flag() const {
    return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
  }

  Scope* scope() const { return scope_; }

  XMMRegister double_scratch0() const { return xmm0; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void Abort(BailoutReason reason);

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Code generation passes.  The passes returning a bool return true if
  // code generation should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  void GenerateBodyInstructionPost(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* fun,
                   int argc,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int argc,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, argc, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  enum EDIState {
    EDI_UNINITIALIZED,
    EDI_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in edi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         CallKind call_kind,
                         EDIState edi_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LEnvironment* environment);
  void ApplyCheckIf(Condition cc, LBoundsCheck* check);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  X87Register ToX87Register(int index) const;
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;

  Operand BuildFastArrayOperand(LOperand* elements_pointer,
                                LOperand* key,
                                Representation key_representation,
                                ElementsKind elements_kind,
                                uint32_t offset,
                                uint32_t additional_index = 0);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(
      Register input,
      Register temp,
      XMMRegister result,
      bool allow_undefined_as_nan,
      bool deoptimize_on_minus_zero,
      LEnvironment* env,
      NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);

  void EmitNumberUntagDNoSSE2(
      Register input,
      Register temp,
      X87Register res_reg,
      bool allow_undefined_as_nan,
      bool deoptimize_on_minus_zero,
      LEnvironment* env,
      NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  void EmitReturn(LReturn* instr, bool dynamic_frame_alignment);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  void X87Fld(Operand src, X87OperandType opts);

  void EmitFlushX87ForDeopt();
  void FlushX87StackIfNecessary(LInstruction* instr) {
    x87_stack_.FlushIfNecessary(instr, this);
  }
  friend class LGapResolver;

#ifdef _MSC_VER
  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in range
  // esp + offset - page_size .. esp in turn.
  void MakeSureStackPagesMapped(int offset);
#endif

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  bool dynamic_frame_alignment_;
  bool support_aligned_spilled_doubles_;
  int osr_pc_offset_;
  bool frame_is_built_;

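  // Bookkeeping for the x87 FPU stack: records which virtual X87Register
  // occupies each physical stack slot.  Copies (taken for deferred code)
  // are immutable snapshots; only the original, mutable stack may be
  // pushed to or popped from.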
  class X87Stack {
   public:
    explicit X87Stack(MacroAssembler* masm)
        : stack_depth_(0), is_mutable_(true), masm_(masm) { }
    explicit X87Stack(const X87Stack& other)
        : stack_depth_(other.stack_depth_),
          is_mutable_(false),
          masm_(other.masm_) {
      for (int i = 0; i < stack_depth_; i++) {
        stack_[i] = other.stack_[i];
      }
    }
    bool operator==(const X87Stack& other) const {
      if (stack_depth_ != other.stack_depth_) return false;
      for (int i = 0; i < stack_depth_; i++) {
        if (!stack_[i].is(other.stack_[i])) return false;
      }
      return true;
    }
    bool Contains(X87Register reg);
    void Fxch(X87Register reg, int other_slot = 0);
    void Free(X87Register reg);
    void PrepareToWrite(X87Register reg);
    void CommitWrite(X87Register reg);
    void FlushIfNecessary(LInstruction* instr, LCodeGen* cgen);
    void LeavingBlock(int current_block_id, LGoto* goto_instr);
    int depth() const { return stack_depth_; }
    void pop() {
      ASSERT(is_mutable_);
      stack_depth_--;
    }
    void push(X87Register reg) {
      ASSERT(is_mutable_);
      ASSERT(stack_depth_ < X87Register::kNumAllocatableRegisters);
      stack_[stack_depth_] = reg;
      stack_depth_++;
    }

    MacroAssembler* masm() const { return masm_; }

   private:
    int ArrayIndex(X87Register reg);
    int st2idx(int pos);

    X87Register stack_[X87Register::kNumAllocatableRegisters];
    int stack_depth_;
    bool is_mutable_;
    MacroAssembler* masm_;
  };
  X87Stack x87_stack_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

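  // RAII scope that pushes all registers to the safepoint slots on entry
  // and pops them on exit, switching expected_safepoint_kind_ between
  // kSimple and kWithRegisters.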
  class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      ASSERT(codegen_->info()->is_calling());
    }

    ~PushSafepointRegistersScope() {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


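// Base class for a piece of out-of-line code, emitted after the main
// instruction body.  Subclasses implement Generate(); control enters at
// entry() and leaves through exit() (or an external exit label set with
// SetExit()).  The x87 stack state at the point of deferral is recorded so
// deferred code sees a consistent FPU stack.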
class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen, const LCodeGen::X87Stack& x87_stack)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_),
        x87_stack_(x87_stack) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }
  const LCodeGen::X87Stack& x87_stack() const { return x87_stack_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  Label done_;
  int instruction_index_;
  LCodeGen::X87Stack x87_stack_;
};

} }  // namespace v8::internal

#endif  // V8_IA32_LITHIUM_CODEGEN_IA32_H_