deps/v8/src/lithium.h @ f230a1cf


// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_LITHIUM_H_
#define V8_LITHIUM_H_

#include "allocation.h"
#include "hydrogen.h"
#include "safepoint-table.h"

namespace v8 {
namespace internal {

#define LITHIUM_OPERAND_LIST(V)         \
  V(ConstantOperand, CONSTANT_OPERAND)  \
  V(StackSlot,       STACK_SLOT)        \
  V(DoubleStackSlot, DOUBLE_STACK_SLOT) \
  V(Register,        REGISTER)          \
  V(DoubleRegister,  DOUBLE_REGISTER)


class LOperand : public ZoneObject {
 public:
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT_OPERAND,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    REGISTER,
    DOUBLE_REGISTER,
    ARGUMENT
  };

  LOperand() : value_(KindField::encode(INVALID)) { }

  Kind kind() const { return KindField::decode(value_); }
  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
#define LITHIUM_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
  LITHIUM_OPERAND_PREDICATE(Argument, ARGUMENT)
  LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
  LITHIUM_OPERAND_PREDICATE(Ignored, INVALID)
#undef LITHIUM_OPERAND_PREDICATE
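  // The list macro above expands to one predicate per listed operand kind,
  // e.g.
  //   bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
  //   bool IsRegister() const { return kind() == REGISTER; }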
  bool Equals(LOperand* other) const { return value_ == other->value_; }

  void PrintTo(StringStream* stream);
  void ConvertTo(Kind kind, int index) {
    value_ = KindField::encode(kind);
    value_ |= index << kKindFieldWidth;
    ASSERT(this->index() == index);
  }
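  // Note: index() casts value_ to int before shifting so that a negative
  // index stored by ConvertTo() round-trips correctly via sign extension.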

  // Calls SetUpCache()/TearDownCache() for each subclass.
  static void SetUpCaches();
  static void TearDownCaches();

 protected:
  static const int kKindFieldWidth = 3;
  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };

  LOperand(Kind kind, int index) { ConvertTo(kind, index); }

  unsigned value_;
};


class LUnallocated : public LOperand {
 public:
  enum BasicPolicy {
    FIXED_SLOT,
    EXTENDED_POLICY
  };

  enum ExtendedPolicy {
    NONE,
    ANY,
    FIXED_REGISTER,
    FIXED_DOUBLE_REGISTER,
    MUST_HAVE_REGISTER,
    WRITABLE_REGISTER,
    SAME_AS_FIRST_INPUT
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // A USED_AT_START operand is guaranteed to be live only at
    // instruction start. The register allocator is free to assign the same
    // register to some other operand used inside the instruction (i.e. a
    // temporary or the output).
    USED_AT_START,

    // A USED_AT_END operand is treated as live until the end of the
    // instruction. This means that the register allocator will not reuse
    // its register for any other operand inside the instruction.
    USED_AT_END
  };

  explicit LUnallocated(ExtendedPolicy policy) : LOperand(UNALLOCATED, 0) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
  }

  LUnallocated(BasicPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
    ASSERT(policy == FIXED_SLOT);
    value_ |= BasicPolicyField::encode(policy);
    value_ |= index << FixedSlotIndexField::kShift;
    ASSERT(this->fixed_slot_index() == index);
  }

  LUnallocated(ExtendedPolicy policy, int index) : LOperand(UNALLOCATED, 0) {
    ASSERT(policy == FIXED_REGISTER || policy == FIXED_DOUBLE_REGISTER);
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
    value_ |= FixedRegisterField::encode(index);
  }

  LUnallocated(ExtendedPolicy policy, Lifetime lifetime)
      : LOperand(UNALLOCATED, 0) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
  }

  LUnallocated* CopyUnconstrained(Zone* zone) {
    LUnallocated* result = new(zone) LUnallocated(ANY);
    result->set_virtual_register(virtual_register());
    return result;
  }

  static LUnallocated* cast(LOperand* op) {
    ASSERT(op->IsUnallocated());
    return reinterpret_cast<LUnallocated*>(op);
  }

  // The encoding used for LUnallocated operands depends on the policy that is
  // stored within the operand. The FIXED_SLOT policy uses a compact encoding
  // because it accommodates a larger payload.
  //
  // For FIXED_SLOT policy:
  //     +------------------------------------------+
  //     |       slot_index      |  vreg  | 0 | 001 |
  //     +------------------------------------------+
  //
  // For all other (extended) policies:
  //     +------------------------------------------+
  //     |  reg_index  | L | PPP |  vreg  | 1 | 001 |    L ... Lifetime
  //     +------------------------------------------+    P ... Policy
  //
  // The slot index is a signed value which requires us to decode it manually
  // instead of using the BitField utility class.

  // The superclass has a KindField.
  STATIC_ASSERT(kKindFieldWidth == 3);

  // BitFields for all unallocated operands.
  class BasicPolicyField     : public BitField<BasicPolicy,     3,  1> {};
  class VirtualRegisterField : public BitField<unsigned,        4, 18> {};

  // BitFields specific to BasicPolicy::FIXED_SLOT.
  class FixedSlotIndexField  : public BitField<int,            22, 10> {};

  // BitFields specific to BasicPolicy::EXTENDED_POLICY.
  class ExtendedPolicyField  : public BitField<ExtendedPolicy, 22,  3> {};
  class LifetimeField        : public BitField<Lifetime,       25,  1> {};
  class FixedRegisterField   : public BitField<int,            26,  6> {};

  static const int kMaxVirtualRegisters = VirtualRegisterField::kMax + 1;
  static const int kFixedSlotIndexWidth = FixedSlotIndexField::kSize;
  static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1;
  static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1));
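
  // For example, an operand created as LUnallocated(FIXED_REGISTER, 3) is
  // encoded with kind() == UNALLOCATED (the low 001 bits), basic_policy() ==
  // EXTENDED_POLICY, extended_policy() == FIXED_REGISTER,
  // fixed_register_index() == 3 and lifetime USED_AT_END, matching the second
  // diagram above; the virtual register can be set afterwards via
  // set_virtual_register().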

  // Predicates for the operand policy.
  bool HasAnyPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
        extended_policy() == ANY;
  }
  bool HasFixedPolicy() const {
    return basic_policy() == FIXED_SLOT ||
        extended_policy() == FIXED_REGISTER ||
        extended_policy() == FIXED_DOUBLE_REGISTER;
  }
  bool HasRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY && (
        extended_policy() == WRITABLE_REGISTER ||
        extended_policy() == MUST_HAVE_REGISTER);
  }
  bool HasSameAsInputPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
        extended_policy() == SAME_AS_FIRST_INPUT;
  }
  bool HasFixedSlotPolicy() const {
    return basic_policy() == FIXED_SLOT;
  }
  bool HasFixedRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
        extended_policy() == FIXED_REGISTER;
  }
  bool HasFixedDoubleRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
        extended_policy() == FIXED_DOUBLE_REGISTER;
  }
  bool HasWritableRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
        extended_policy() == WRITABLE_REGISTER;
  }

  // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
  BasicPolicy basic_policy() const {
    return BasicPolicyField::decode(value_);
  }

  // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
  ExtendedPolicy extended_policy() const {
    ASSERT(basic_policy() == EXTENDED_POLICY);
    return ExtendedPolicyField::decode(value_);
  }

  // [fixed_slot_index]: Only for FIXED_SLOT.
  int fixed_slot_index() const {
    ASSERT(HasFixedSlotPolicy());
    return static_cast<int>(value_) >> FixedSlotIndexField::kShift;
  }

  // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_DOUBLE_REGISTER.
  int fixed_register_index() const {
    ASSERT(HasFixedRegisterPolicy() || HasFixedDoubleRegisterPolicy());
    return FixedRegisterField::decode(value_);
  }

  // [virtual_register]: The virtual register ID for this operand.
  int virtual_register() const {
    return VirtualRegisterField::decode(value_);
  }
  void set_virtual_register(unsigned id) {
    value_ = VirtualRegisterField::update(value_, id);
  }

  // [lifetime]: Only for non-FIXED_SLOT.
  bool IsUsedAtStart() {
    ASSERT(basic_policy() == EXTENDED_POLICY);
    return LifetimeField::decode(value_) == USED_AT_START;
  }
};
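
// In the architecture-specific chunk builders (lithium-<arch>.cc), operand
// constraints are typically expressed by creating LUnallocated operands,
// for example (illustrative):
//   new(zone) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
//   new(zone) LUnallocated(LUnallocated::FIXED_REGISTER, reg_index);
// The register allocator later replaces them with allocated operands such as
// LRegister or LStackSlot.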


class LMoveOperands V8_FINAL BASE_EMBEDDED {
 public:
  LMoveOperands(LOperand* source, LOperand* destination)
      : source_(source), destination_(destination) {
  }

  LOperand* source() const { return source_; }
  void set_source(LOperand* operand) { source_ = operand; }

  LOperand* destination() const { return destination_; }
  void set_destination(LOperand* operand) { destination_ = operand; }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const {
    return destination_ == NULL && source_ != NULL;
  }

  // True if this move blocks a move into the given destination operand.
  bool Blocks(LOperand* operand) const {
    return !IsEliminated() && source()->Equals(operand);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || source_->Equals(destination_) || IsIgnored();
  }

  bool IsIgnored() const {
    return destination_ != NULL && destination_->IsIgnored();
  }

  // We clear both operands to indicate a move that's been eliminated.
  void Eliminate() { source_ = destination_ = NULL; }
  bool IsEliminated() const {
    ASSERT(source_ != NULL || destination_ == NULL);
    return source_ == NULL;
  }

 private:
  LOperand* source_;
  LOperand* destination_;
};


class LConstantOperand V8_FINAL : public LOperand {
 public:
  static LConstantOperand* Create(int index, Zone* zone) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new(zone) LConstantOperand(index);
  }

  static LConstantOperand* cast(LOperand* op) {
    ASSERT(op->IsConstantOperand());
    return reinterpret_cast<LConstantOperand*>(op);
  }

  static void SetUpCache();
  static void TearDownCache();

 private:
  static const int kNumCachedOperands = 128;
  static LConstantOperand* cache;

  LConstantOperand() : LOperand() { }
  explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
};


class LArgument V8_FINAL : public LOperand {
 public:
  explicit LArgument(int index) : LOperand(ARGUMENT, index) { }

  static LArgument* cast(LOperand* op) {
    ASSERT(op->IsArgument());
    return reinterpret_cast<LArgument*>(op);
  }
};


class LStackSlot V8_FINAL : public LOperand {
 public:
  static LStackSlot* Create(int index, Zone* zone) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new(zone) LStackSlot(index);
  }

  static LStackSlot* cast(LOperand* op) {
    ASSERT(op->IsStackSlot());
    return reinterpret_cast<LStackSlot*>(op);
  }

  static void SetUpCache();
  static void TearDownCache();

 private:
  static const int kNumCachedOperands = 128;
  static LStackSlot* cache;

  LStackSlot() : LOperand() { }
  explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
};


class LDoubleStackSlot V8_FINAL : public LOperand {
 public:
  static LDoubleStackSlot* Create(int index, Zone* zone) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new(zone) LDoubleStackSlot(index);
  }

  static LDoubleStackSlot* cast(LOperand* op) {
    ASSERT(op->IsDoubleStackSlot());
    return reinterpret_cast<LDoubleStackSlot*>(op);
  }

  static void SetUpCache();
  static void TearDownCache();

 private:
  static const int kNumCachedOperands = 128;
  static LDoubleStackSlot* cache;

  LDoubleStackSlot() : LOperand() { }
  explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
};


class LRegister V8_FINAL : public LOperand {
 public:
  static LRegister* Create(int index, Zone* zone) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new(zone) LRegister(index);
  }

  static LRegister* cast(LOperand* op) {
    ASSERT(op->IsRegister());
    return reinterpret_cast<LRegister*>(op);
  }

  static void SetUpCache();
  static void TearDownCache();

 private:
  static const int kNumCachedOperands = 16;
  static LRegister* cache;

  LRegister() : LOperand() { }
  explicit LRegister(int index) : LOperand(REGISTER, index) { }
};


class LDoubleRegister V8_FINAL : public LOperand {
 public:
  static LDoubleRegister* Create(int index, Zone* zone) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new(zone) LDoubleRegister(index);
  }

  static LDoubleRegister* cast(LOperand* op) {
    ASSERT(op->IsDoubleRegister());
    return reinterpret_cast<LDoubleRegister*>(op);
  }

  static void SetUpCache();
  static void TearDownCache();

 private:
  static const int kNumCachedOperands = 16;
  static LDoubleRegister* cache;

  LDoubleRegister() : LOperand() { }
  explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
};


class LParallelMove V8_FINAL : public ZoneObject {
 public:
  explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { }

  void AddMove(LOperand* from, LOperand* to, Zone* zone) {
    move_operands_.Add(LMoveOperands(from, to), zone);
  }

  bool IsRedundant() const;

  const ZoneList<LMoveOperands>* move_operands() const {
    return &move_operands_;
  }

  void PrintDataTo(StringStream* stream) const;

 private:
  ZoneList<LMoveOperands> move_operands_;
};
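
// A parallel move groups gap moves that are semantically simultaneous; the
// gap resolver orders them (using IsPending()/Blocks() above) so they can be
// emitted one at a time. For example (illustrative), swapping two values is
// recorded as two moves in the same LParallelMove:
//   move->AddMove(op_a, op_b, zone);
//   move->AddMove(op_b, op_a, zone);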


class LPointerMap V8_FINAL : public ZoneObject {
 public:
  explicit LPointerMap(Zone* zone)
      : pointer_operands_(8, zone),
        untagged_operands_(0, zone),
        lithium_position_(-1) { }

  const ZoneList<LOperand*>* GetNormalizedOperands() {
    for (int i = 0; i < untagged_operands_.length(); ++i) {
      RemovePointer(untagged_operands_[i]);
    }
    untagged_operands_.Clear();
    return &pointer_operands_;
  }
  int lithium_position() const { return lithium_position_; }

  void set_lithium_position(int pos) {
    ASSERT(lithium_position_ == -1);
    lithium_position_ = pos;
  }

  void RecordPointer(LOperand* op, Zone* zone);
  void RemovePointer(LOperand* op);
  void RecordUntagged(LOperand* op, Zone* zone);
  void PrintTo(StringStream* stream);

 private:
  ZoneList<LOperand*> pointer_operands_;
  ZoneList<LOperand*> untagged_operands_;
  int lithium_position_;
};


class LEnvironment V8_FINAL : public ZoneObject {
 public:
  LEnvironment(Handle<JSFunction> closure,
               FrameType frame_type,
               BailoutId ast_id,
               int parameter_count,
               int argument_count,
               int value_count,
               LEnvironment* outer,
               HEnterInlined* entry,
               Zone* zone)
      : closure_(closure),
        frame_type_(frame_type),
        arguments_stack_height_(argument_count),
        deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
        translation_index_(-1),
        ast_id_(ast_id),
        translation_size_(value_count),
        parameter_count_(parameter_count),
        pc_offset_(-1),
        values_(value_count, zone),
        is_tagged_(value_count, zone),
        is_uint32_(value_count, zone),
        object_mapping_(0, zone),
        outer_(outer),
        entry_(entry),
        zone_(zone) { }

  Handle<JSFunction> closure() const { return closure_; }
  FrameType frame_type() const { return frame_type_; }
  int arguments_stack_height() const { return arguments_stack_height_; }
  int deoptimization_index() const { return deoptimization_index_; }
  int translation_index() const { return translation_index_; }
  BailoutId ast_id() const { return ast_id_; }
  int translation_size() const { return translation_size_; }
  int parameter_count() const { return parameter_count_; }
  int pc_offset() const { return pc_offset_; }
  const ZoneList<LOperand*>* values() const { return &values_; }
  LEnvironment* outer() const { return outer_; }
  HEnterInlined* entry() { return entry_; }
  Zone* zone() const { return zone_; }

  void AddValue(LOperand* operand,
                Representation representation,
                bool is_uint32) {
    values_.Add(operand, zone());
    if (representation.IsSmiOrTagged()) {
      ASSERT(!is_uint32);
      is_tagged_.Add(values_.length() - 1, zone());
    }

    if (is_uint32) {
      is_uint32_.Add(values_.length() - 1, zone());
    }
  }

  bool HasTaggedValueAt(int index) const {
    return is_tagged_.Contains(index);
  }

  bool HasUint32ValueAt(int index) const {
    return is_uint32_.Contains(index);
  }

  void AddNewObject(int length, bool is_arguments) {
    uint32_t encoded = LengthOrDupeField::encode(length) |
                       IsArgumentsField::encode(is_arguments) |
                       IsDuplicateField::encode(false);
    object_mapping_.Add(encoded, zone());
  }

  void AddDuplicateObject(int dupe_of) {
    uint32_t encoded = LengthOrDupeField::encode(dupe_of) |
                       IsDuplicateField::encode(true);
    object_mapping_.Add(encoded, zone());
  }

  int ObjectDuplicateOfAt(int index) {
    ASSERT(ObjectIsDuplicateAt(index));
    return LengthOrDupeField::decode(object_mapping_[index]);
  }

  int ObjectLengthAt(int index) {
    ASSERT(!ObjectIsDuplicateAt(index));
    return LengthOrDupeField::decode(object_mapping_[index]);
  }

  bool ObjectIsArgumentsAt(int index) {
    ASSERT(!ObjectIsDuplicateAt(index));
    return IsArgumentsField::decode(object_mapping_[index]);
  }

  bool ObjectIsDuplicateAt(int index) {
    return IsDuplicateField::decode(object_mapping_[index]);
  }

  void Register(int deoptimization_index,
                int translation_index,
                int pc_offset) {
    ASSERT(!HasBeenRegistered());
    deoptimization_index_ = deoptimization_index;
    translation_index_ = translation_index;
    pc_offset_ = pc_offset;
  }
  bool HasBeenRegistered() const {
    return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
  }

  void PrintTo(StringStream* stream);

  // Marker value indicating a de-materialized object.
  static LOperand* materialization_marker() { return NULL; }

  // Encoding used for the object_mapping map below.
  class LengthOrDupeField : public BitField<int,   0, 30> { };
  class IsArgumentsField  : public BitField<bool, 30,  1> { };
  class IsDuplicateField  : public BitField<bool, 31,  1> { };
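
  // For example, AddNewObject(2, false) appends an entry whose
  // LengthOrDupeField is 2 with both flags false, while AddDuplicateObject(0)
  // appends an entry whose LengthOrDupeField is 0 and whose IsDuplicateField
  // is set, referring back to the object recorded at index 0.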

 private:
  Handle<JSFunction> closure_;
  FrameType frame_type_;
  int arguments_stack_height_;
  int deoptimization_index_;
  int translation_index_;
  BailoutId ast_id_;
  int translation_size_;
  int parameter_count_;
  int pc_offset_;

  // Value array: [parameters] [locals] [expression stack] [de-materialized].
  //              |>--------- translation_size ---------<|
  ZoneList<LOperand*> values_;
  GrowableBitVector is_tagged_;
  GrowableBitVector is_uint32_;

  // Map with encoded information about materialization_marker operands.
  ZoneList<uint32_t> object_mapping_;

  LEnvironment* outer_;
  HEnterInlined* entry_;
  Zone* zone_;
};


// Iterates over the non-null, non-constant operands in an environment.
class ShallowIterator V8_FINAL BASE_EMBEDDED {
 public:
  explicit ShallowIterator(LEnvironment* env)
      : env_(env),
        limit_(env != NULL ? env->values()->length() : 0),
        current_(0) {
    SkipUninteresting();
  }

  bool Done() { return current_ >= limit_; }

  LOperand* Current() {
    ASSERT(!Done());
    ASSERT(env_->values()->at(current_) != NULL);
    return env_->values()->at(current_);
  }

  void Advance() {
    ASSERT(!Done());
    ++current_;
    SkipUninteresting();
  }

  LEnvironment* env() { return env_; }

 private:
  bool ShouldSkip(LOperand* op) {
    return op == NULL || op->IsConstantOperand() || op->IsArgument();
  }

  // Skip until something interesting, beginning with and including current_.
  void SkipUninteresting() {
    while (current_ < limit_ && ShouldSkip(env_->values()->at(current_))) {
      ++current_;
    }
  }

  LEnvironment* env_;
  int limit_;
  int current_;
};


// Iterator for non-null, non-constant operands incl. outer environments.
class DeepIterator V8_FINAL BASE_EMBEDDED {
 public:
  explicit DeepIterator(LEnvironment* env)
      : current_iterator_(env) {
    SkipUninteresting();
  }

  bool Done() { return current_iterator_.Done(); }

  LOperand* Current() {
    ASSERT(!current_iterator_.Done());
    ASSERT(current_iterator_.Current() != NULL);
    return current_iterator_.Current();
  }

  void Advance() {
    current_iterator_.Advance();
    SkipUninteresting();
  }

 private:
  void SkipUninteresting() {
    while (current_iterator_.env() != NULL && current_iterator_.Done()) {
      current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
    }
  }

  ShallowIterator current_iterator_;
};
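
// Typical use (illustrative, with an assumed LEnvironment* env): walk every
// interesting operand in an environment and all of its outer environments:
//   for (DeepIterator it(env); !it.Done(); it.Advance()) {
//     LOperand* op = it.Current();
//     // ... process op ...
//   }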


class LPlatformChunk;
class LGap;
class LLabel;

// Superclass providing data and behavior common to all the
// arch-specific LPlatformChunk classes.
class LChunk : public ZoneObject {
 public:
  static LChunk* NewChunk(HGraph* graph);

  void AddInstruction(LInstruction* instruction, HBasicBlock* block);
  LConstantOperand* DefineConstantOperand(HConstant* constant);
  HConstant* LookupConstant(LConstantOperand* operand) const;
  Representation LookupLiteralRepresentation(LConstantOperand* operand) const;

  int ParameterAt(int index);
  int GetParameterStackSlot(int index) const;
  int spill_slot_count() const { return spill_slot_count_; }
  CompilationInfo* info() const { return info_; }
  HGraph* graph() const { return graph_; }
  Isolate* isolate() const { return graph_->isolate(); }
  const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
  void AddGapMove(int index, LOperand* from, LOperand* to);
  LGap* GetGapAt(int index) const;
  bool IsGapAt(int index) const;
  int NearestGapPos(int index) const;
  void MarkEmptyBlocks();
  const ZoneList<LPointerMap*>* pointer_maps() const { return &pointer_maps_; }
  LLabel* GetLabel(int block_id) const;
  int LookupDestination(int block_id) const;
  Label* GetAssemblyLabel(int block_id) const;

  const ZoneList<Handle<JSFunction> >* inlined_closures() const {
    return &inlined_closures_;
  }

  void AddInlinedClosure(Handle<JSFunction> closure) {
    inlined_closures_.Add(closure, zone());
  }

  Zone* zone() const { return info_->zone(); }

  Handle<Code> Codegen();

  void set_allocated_double_registers(BitVector* allocated_registers);
  BitVector* allocated_double_registers() {
    return allocated_double_registers_;
  }

 protected:
  LChunk(CompilationInfo* info, HGraph* graph);

  int spill_slot_count_;

 private:
  CompilationInfo* info_;
  HGraph* const graph_;
  BitVector* allocated_double_registers_;
  ZoneList<LInstruction*> instructions_;
  ZoneList<LPointerMap*> pointer_maps_;
  ZoneList<Handle<JSFunction> > inlined_closures_;
};


int ElementsKindToShiftSize(ElementsKind elements_kind);
int StackSlotOffset(int index);

enum NumberUntagDMode {
  NUMBER_CANDIDATE_IS_SMI,
  NUMBER_CANDIDATE_IS_ANY_TAGGED
};


class LPhase : public CompilationPhase {
 public:
  LPhase(const char* name, LChunk* chunk)
      : CompilationPhase(name, chunk->info()),
        chunk_(chunk) { }
  ~LPhase();

 private:
  LChunk* chunk_;

  DISALLOW_COPY_AND_ASSIGN(LPhase);
};


} }  // namespace v8::internal

#endif  // V8_LITHIUM_H_