The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.

Please select the desired protocol below to get the URL.

This URL has Read-Only access.

Statistics
| Branch: | Revision:

main_repo / deps / v8 / src / x64 / lithium-codegen-x64.h @ f230a1cf

History | View | Annotate | Download (14.8 KB)

// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

    
28
#ifndef V8_X64_LITHIUM_CODEGEN_X64_H_
29
#define V8_X64_LITHIUM_CODEGEN_X64_H_
30

    
31
#include "x64/lithium-x64.h"
32

    
33
#include "checks.h"
34
#include "deoptimizer.h"
35
#include "lithium-codegen.h"
36
#include "safepoint-table.h"
37
#include "scopes.h"
38
#include "v8utils.h"
39
#include "x64/lithium-gap-resolver-x64.h"
40

    
41
namespace v8 {
42
namespace internal {
43

    
44
// Forward declarations.
45
class LDeferredCode;
46
class SafepointGenerator;
47

    
48
class LCodeGen: public LCodeGenBase {
49
 public:
50
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
51
      : LCodeGenBase(chunk, assembler, info),
52
        deoptimizations_(4, info->zone()),
53
        jump_table_(4, info->zone()),
54
        deoptimization_literals_(8, info->zone()),
55
        inlined_function_count_(0),
56
        scope_(info->scope()),
57
        translations_(info->zone()),
58
        deferred_(8, info->zone()),
59
        osr_pc_offset_(-1),
60
        frame_is_built_(false),
61
        safepoints_(info->zone()),
62
        resolver_(this),
63
        expected_safepoint_kind_(Safepoint::kSimple) {
64
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
65
  }
66

    
67
  int LookupDestination(int block_id) const {
68
    return chunk()->LookupDestination(block_id);
69
  }
70

    
71
  bool IsNextEmittedBlock(int block_id) const {
72
    return LookupDestination(block_id) == GetNextEmittedBlock();
73
  }
74

    
75
  bool NeedsEagerFrame() const {
76
    return GetStackSlotCount() > 0 ||
77
        info()->is_non_deferred_calling() ||
78
        !info()->IsStub() ||
79
        info()->requires_frame();
80
  }
81
  bool NeedsDeferredFrame() const {
82
    return !NeedsEagerFrame() && info()->is_deferred_calling();
83
  }
84

    
85
  // Support for converting LOperands to assembler types.
86
  Register ToRegister(LOperand* op) const;
87
  XMMRegister ToDoubleRegister(LOperand* op) const;
88
  bool IsInteger32Constant(LConstantOperand* op) const;
89
  bool IsSmiConstant(LConstantOperand* op) const;
90
  int32_t ToInteger32(LConstantOperand* op) const;
91
  Smi* ToSmi(LConstantOperand* op) const;
92
  double ToDouble(LConstantOperand* op) const;
93
  ExternalReference ToExternalReference(LConstantOperand* op) const;
94
  bool IsTaggedConstant(LConstantOperand* op) const;
95
  Handle<Object> ToHandle(LConstantOperand* op) const;
96
  Operand ToOperand(LOperand* op) const;
97

    
98
  // Try to generate code for the entire chunk, but it may fail if the
99
  // chunk contains constructs we cannot handle. Returns true if the
100
  // code generation attempt succeeded.
101
  bool GenerateCode();
102

    
103
  // Finish the code by setting stack height, safepoint, and bailout
104
  // information on it.
105
  void FinishCode(Handle<Code> code);
106

    
107
  // Deferred code support.
108
  void DoDeferredNumberTagD(LNumberTagD* instr);
109
  void DoDeferredNumberTagU(LNumberTagU* instr);
110
  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
111
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
112
  void DoDeferredStackCheck(LStackCheck* instr);
113
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
114
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
115
  void DoDeferredAllocate(LAllocate* instr);
116
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
117
                                       Label* map_check);
118
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
119

    
120
// Parallel move support.
121
  void DoParallelMove(LParallelMove* move);
122
  void DoGap(LGap* instr);
123

    
124
  // Emit frame translation commands for an environment.
125
  void WriteTranslation(LEnvironment* environment, Translation* translation);
126

    
127
  // Declare methods that deal with the individual node types.
128
#define DECLARE_DO(type) void Do##type(L##type* node);
129
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
130
#undef DECLARE_DO
131

    
132
 private:
133
  StrictModeFlag strict_mode_flag() const {
134
    return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
135
  }
136

    
137
  LPlatformChunk* chunk() const { return chunk_; }
138
  Scope* scope() const { return scope_; }
139
  HGraph* graph() const { return chunk()->graph(); }
140

    
141
  XMMRegister double_scratch0() const { return xmm0; }
142

    
143
  void EmitClassOfTest(Label* if_true,
144
                       Label* if_false,
145
                       Handle<String> class_name,
146
                       Register input,
147
                       Register temporary,
148
                       Register scratch);
149

    
150
  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
151

    
152
  void Abort(BailoutReason reason);
153

    
154
  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
155

    
156
  // Code generation passes.  Returns true if code generation should
157
  // continue.
158
  bool GeneratePrologue();
159
  bool GenerateDeferredCode();
160
  bool GenerateJumpTable();
161
  bool GenerateSafepointTable();
162

    
163
  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
164
  void GenerateOsrPrologue();
165

    
166
  enum SafepointMode {
167
    RECORD_SIMPLE_SAFEPOINT,
168
    RECORD_SAFEPOINT_WITH_REGISTERS
169
  };
170

    
171
  void CallCodeGeneric(Handle<Code> code,
172
                       RelocInfo::Mode mode,
173
                       LInstruction* instr,
174
                       SafepointMode safepoint_mode,
175
                       int argc);
176

    
177

    
178
  void CallCode(Handle<Code> code,
179
                RelocInfo::Mode mode,
180
                LInstruction* instr);
181

    
182
  void CallRuntime(const Runtime::Function* function,
183
                   int num_arguments,
184
                   LInstruction* instr,
185
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);
186

    
187
  void CallRuntime(Runtime::FunctionId id,
188
                   int num_arguments,
189
                   LInstruction* instr) {
190
    const Runtime::Function* function = Runtime::FunctionForId(id);
191
    CallRuntime(function, num_arguments, instr);
192
  }
193

    
194
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
195
                               int argc,
196
                               LInstruction* instr);
197

    
198
  enum RDIState {
199
    RDI_UNINITIALIZED,
200
    RDI_CONTAINS_TARGET
201
  };
202

    
203
  // Generate a direct call to a known function.  Expects the function
204
  // to be in rdi.
205
  void CallKnownFunction(Handle<JSFunction> function,
206
                         int formal_parameter_count,
207
                         int arity,
208
                         LInstruction* instr,
209
                         CallKind call_kind,
210
                         RDIState rdi_state);
211

    
212
  void RecordSafepointWithLazyDeopt(LInstruction* instr,
213
                                    SafepointMode safepoint_mode,
214
                                    int argc);
215
  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
216
                                            Safepoint::DeoptMode mode);
217
  void DeoptimizeIf(Condition cc,
218
                    LEnvironment* environment,
219
                    Deoptimizer::BailoutType bailout_type);
220
  void DeoptimizeIf(Condition cc, LEnvironment* environment);
221
  void ApplyCheckIf(Condition cc, LBoundsCheck* check);
222

    
223
  void AddToTranslation(LEnvironment* environment,
224
                        Translation* translation,
225
                        LOperand* op,
226
                        bool is_tagged,
227
                        bool is_uint32,
228
                        int* object_index_pointer,
229
                        int* dematerialized_index_pointer);
230
  void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code);
231
  void PopulateDeoptimizationData(Handle<Code> code);
232
  int DefineDeoptimizationLiteral(Handle<Object> literal);
233

    
234
  void PopulateDeoptimizationLiteralsWithInlinedFunctions();
235

    
236
  Register ToRegister(int index) const;
237
  XMMRegister ToDoubleRegister(int index) const;
238
  Operand BuildFastArrayOperand(
239
      LOperand* elements_pointer,
240
      LOperand* key,
241
      ElementsKind elements_kind,
242
      uint32_t offset,
243
      uint32_t additional_index = 0);
244

    
245
  void EmitIntegerMathAbs(LMathAbs* instr);
246
  void EmitSmiMathAbs(LMathAbs* instr);
247

    
248
  // Support for recording safepoint and position information.
249
  void RecordSafepoint(LPointerMap* pointers,
250
                       Safepoint::Kind kind,
251
                       int arguments,
252
                       Safepoint::DeoptMode mode);
253
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
254
  void RecordSafepoint(Safepoint::DeoptMode mode);
255
  void RecordSafepointWithRegisters(LPointerMap* pointers,
256
                                    int arguments,
257
                                    Safepoint::DeoptMode mode);
258
  void RecordAndWritePosition(int position) V8_OVERRIDE;
259

    
260
  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
261
  void EmitGoto(int block);
262
  template<class InstrType>
263
  void EmitBranch(InstrType instr, Condition cc);
264
  template<class InstrType>
265
  void EmitFalseBranch(InstrType instr, Condition cc);
266
  void EmitNumberUntagD(
267
      Register input,
268
      XMMRegister result,
269
      bool allow_undefined_as_nan,
270
      bool deoptimize_on_minus_zero,
271
      LEnvironment* env,
272
      NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);
273

    
274
  // Emits optimized code for typeof x == "y".  Modifies input register.
275
  // Returns the condition on which a final split to
276
  // true and false label should be made, to optimize fallthrough.
277
  Condition EmitTypeofIs(Label* true_label,
278
                         Label* false_label,
279
                         Register input,
280
                         Handle<String> type_name);
281

    
282
  // Emits optimized code for %_IsObject(x).  Preserves input register.
283
  // Returns the condition on which a final split to
284
  // true and false label should be made, to optimize fallthrough.
285
  Condition EmitIsObject(Register input,
286
                         Label* is_not_object,
287
                         Label* is_object);
288

    
289
  // Emits optimized code for %_IsString(x).  Preserves input register.
290
  // Returns the condition on which a final split to
291
  // true and false label should be made, to optimize fallthrough.
292
  Condition EmitIsString(Register input,
293
                         Register temp1,
294
                         Label* is_not_string,
295
                         SmiCheck check_needed);
296

    
297
  // Emits optimized code for %_IsConstructCall().
298
  // Caller should branch on equal condition.
299
  void EmitIsConstructCall(Register temp);
300

    
301
  // Emits code for pushing either a tagged constant, a (non-double)
302
  // register, or a stack slot operand.
303
  void EmitPushTaggedOperand(LOperand* operand);
304

    
305
  // Emits optimized code to deep-copy the contents of statically known
306
  // object graphs (e.g. object literal boilerplate).
307
  void EmitDeepCopy(Handle<JSObject> object,
308
                    Register result,
309
                    Register source,
310
                    int* offset,
311
                    AllocationSiteMode mode);
312

    
313
  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
314
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
315
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
316
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
317
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
318
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
319
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);
320
#ifdef _MSC_VER
321
  // On windows, you may not access the stack more than one page below
322
  // the most recently mapped page. To make the allocated area randomly
323
  // accessible, we write an arbitrary value to each page in range
324
  // rsp + offset - page_size .. rsp in turn.
325
  void MakeSureStackPagesMapped(int offset);
326
#endif
327

    
328
  ZoneList<LEnvironment*> deoptimizations_;
329
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
330
  ZoneList<Handle<Object> > deoptimization_literals_;
331
  int inlined_function_count_;
332
  Scope* const scope_;
333
  TranslationBuffer translations_;
334
  ZoneList<LDeferredCode*> deferred_;
335
  int osr_pc_offset_;
336
  bool frame_is_built_;
337

    
338
  // Builder that keeps track of safepoints in the code. The table
339
  // itself is emitted at the end of the generated code.
340
  SafepointTableBuilder safepoints_;
341

    
342
  // Compiler from a set of parallel moves to a sequential list of moves.
343
  LGapResolver resolver_;
344

    
345
  Safepoint::Kind expected_safepoint_kind_;
346

    
347
  class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
348
   public:
349
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
350
        : codegen_(codegen) {
351
      ASSERT(codegen_->info()->is_calling());
352
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
353
      codegen_->masm_->PushSafepointRegisters();
354
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
355
    }
356

    
357
    ~PushSafepointRegistersScope() {
358
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
359
      codegen_->masm_->PopSafepointRegisters();
360
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
361
    }
362

    
363
   private:
364
    LCodeGen* codegen_;
365
  };
366

    
367
  friend class LDeferredCode;
368
  friend class LEnvironment;
369
  friend class SafepointGenerator;
370
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
371
};
372

    
373

    
374
class LDeferredCode: public ZoneObject {
375
 public:
376
  explicit LDeferredCode(LCodeGen* codegen)
377
      : codegen_(codegen),
378
        external_exit_(NULL),
379
        instruction_index_(codegen->current_instruction_) {
380
    codegen->AddDeferredCode(this);
381
  }
382

    
383
  virtual ~LDeferredCode() {}
384
  virtual void Generate() = 0;
385
  virtual LInstruction* instr() = 0;
386

    
387
  void SetExit(Label* exit) { external_exit_ = exit; }
388
  Label* entry() { return &entry_; }
389
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
390
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
391
  int instruction_index() const { return instruction_index_; }
392

    
393
 protected:
394
  LCodeGen* codegen() const { return codegen_; }
395
  MacroAssembler* masm() const { return codegen_->masm(); }
396

    
397
 private:
398
  LCodeGen* codegen_;
399
  Label entry_;
400
  Label exit_;
401
  Label done_;
402
  Label* external_exit_;
403
  int instruction_index_;
404
};
405

    
406
} }  // namespace v8::internal
407

    
408
#endif  // V8_X64_LITHIUM_CODEGEN_X64_H_