// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
#define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_

#include "deoptimizer.h"
#include "mips/lithium-gap-resolver-mips.h"
#include "mips/lithium-mips.h"
#include "lithium-codegen.h"
#include "safepoint-table.h"
#include "scopes.h"
#include "v8utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        deopt_jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }


  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  RAStatus GetRAState() const {
    return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DoubleRegister EmitLoadDoubleRegister(LOperand* op,
                                        FloatRegister flt_scratch,
                                        DoubleRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagI(LInstruction* instr,
                            LOperand* value,
                            IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);

  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int additional_index,
                                 int additional_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  StrictModeFlag strict_mode_flag() const {
    return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
  }

  Scope* scope() const { return scope_; }

  Register scratch0() { return kLithiumScratchReg; }
  Register scratch1() { return kLithiumScratchReg2; }
  DoubleRegister double_scratch0() { return kLithiumScratchDouble; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void Abort(BailoutReason reason);

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Code generation passes.  Returns true if code generation should
  // continue.
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateDeoptJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  enum A1State {
    A1_UNINITIALIZED,
    A1_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         CallKind call_kind,
                         A1State a1_state);

  void LoadHeapObject(Register result, Handle<HeapObject> object);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
  void DeoptimizeIf(Condition condition,
                    LEnvironment* environment,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
  void ApplyCheckIf(Condition condition,
                    LBoundsCheck* check,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
                                              int arguments,
                                              Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  template<class InstrType>
  void EmitBranch(InstrType instr,
                  Condition condition,
                  Register src1,
                  const Operand& src2);
  template<class InstrType>
  void EmitBranchF(InstrType instr,
                   Condition condition,
                   FPURegister src1,
                   FPURegister src2);
  template<class InstrType>
  void EmitFalseBranchF(InstrType instr,
                        Condition condition,
                        FPURegister src1,
                        FPURegister src2);
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(Register input,
                        DoubleRegister result,
                        bool allow_undefined_as_nan,
                        bool deoptimize_on_minus_zero,
                        LEnvironment* env,
                        NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  // Returns two registers in cmp1 and cmp2 that can be used in the
  // Branch instruction after EmitTypeofIs.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register& cmp1,
                         Operand& cmp2);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Register temp2,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp1, Register temp2);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);
  // Emit optimized code for integer division.
  // Inputs are signed.
  // All registers are clobbered.
  // If 'remainder' is no_reg, it is not computed.
  void EmitSignedIntegerDivisionByConstant(Register result,
                                           Register dividend,
                                           int32_t divisor,
                                           Register remainder,
                                           Register scratch,
                                           LEnvironment* environment);


  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope V8_FINAL  BASE_EMBEDDED {
   public:
    PushSafepointRegistersScope(LCodeGen* codegen,
                                Safepoint::Kind kind)
        : codegen_(codegen) {
      ASSERT(codegen_->info()->is_calling());
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = kind;

      switch (codegen_->expected_safepoint_kind_) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PushSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PushSafepointRegistersAndDoubles();
          break;
        default:
          UNREACHABLE();
      }
    }

    ~PushSafepointRegistersScope() {
      Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
      ASSERT((kind & Safepoint::kWithRegisters) != 0);
      switch (kind) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PopSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PopSafepointRegistersAndDoubles();
          break;
        default:
          UNREACHABLE();
      }
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
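
  // Illustrative usage note (not part of the original header): call sites in
  // the matching lithium-codegen-mips.cc that need registers saved across a
  // call typically bracket the call with this RAII scope, e.g.
  //
  //   {
  //     PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  //     // ... set up arguments, emit the call, record the safepoint ...
  //   }  // registers popped, expected_safepoint_kind_ restored to kSimple
  //
  // The exact call sequence is instruction-specific; this sketch only shows
  // how the scope brackets such code.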

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
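
// Illustrative sketch (not part of the original header): deferred code paths
// are usually expressed as small LDeferredCode subclasses in the matching
// lithium-codegen-mips.cc whose Generate() forwards to one of the DoDeferred*
// helpers declared on LCodeGen above. The subclass name below is hypothetical;
// the pattern and the DoDeferredNumberTagD() signature come from this header.
//
//   class DeferredExample V8_FINAL : public LDeferredCode {
//    public:
//     DeferredExample(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     virtual void Generate() V8_OVERRIDE {
//       codegen()->DoDeferredNumberTagD(instr_);
//     }
//     virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
//    private:
//     LNumberTagD* instr_;
//   };
//
// The code generator allocates such an object in its zone (the LDeferredCode
// constructor registers it via AddDeferredCode), branches to entry() on the
// slow path, and binds exit() where execution should resume.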

} }  // namespace v8::internal

#endif  // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_