The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.

Please select the desired protocol below to get the URL.

This URL has Read-Only access.

Statistics
| Branch: | Revision:

main_repo / deps / v8 / src / objects-inl.h @ 40c0f755

History | View | Annotate | Download (68.6 KB)

1
// Copyright 2006-2008 the V8 project authors. All rights reserved.
2
// Redistribution and use in source and binary forms, with or without
3
// modification, are permitted provided that the following conditions are
4
// met:
5
//
6
//     * Redistributions of source code must retain the above copyright
7
//       notice, this list of conditions and the following disclaimer.
8
//     * Redistributions in binary form must reproduce the above
9
//       copyright notice, this list of conditions and the following
10
//       disclaimer in the documentation and/or other materials provided
11
//       with the distribution.
12
//     * Neither the name of Google Inc. nor the names of its
13
//       contributors may be used to endorse or promote products derived
14
//       from this software without specific prior written permission.
15
//
16
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
//
28
// Review notes:
29
//
30
// - The use of macros in these inline functions may seem superfluous
31
// but it is absolutely needed to make sure gcc generates optimal
32
// code. gcc is not happy when attempting to inline too deep.
33
//
34

    
35
#ifndef V8_OBJECTS_INL_H_
36
#define V8_OBJECTS_INL_H_
37

    
38
#include "objects.h"
39
#include "contexts.h"
40
#include "conversions-inl.h"
41
#include "property.h"
42

    
43
namespace v8 { namespace internal {
44

    
45
PropertyDetails::PropertyDetails(Smi* smi) {
46
  value_ = smi->value();
47
}
48

    
49

    
50
Smi* PropertyDetails::AsSmi() {
51
  return Smi::FromInt(value_);
52
}
53

    
54

    
55
// Defines type::cast(Object*) with a debug-mode dynamic type check.
#define CAST_ACCESSOR(type)                 \
  type* type::cast(Object* object) {        \
    ASSERT(object->Is##type());             \
    return reinterpret_cast<type*>(object); \
  }


// Raw int field getter/setter pair for 'holder'.
#define INT_ACCESSORS(holder, name, offset)                           \
  int holder::name() { return READ_INT_FIELD(this, offset); }         \
  void holder::set_##name(int value) {                                \
    WRITE_INT_FIELD(this, offset, value);                             \
  }


// Tagged-pointer field getter/setter pair; the setter issues the write
// barrier after the store, as required by CONDITIONAL_WRITE_BARRIER.
#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(this, offset, mode);                      \
  }


// Getter/setter pair for an int stored in the field as a Smi.
#define SMI_ACCESSORS(holder, name, offset)           \
  int holder::name() {                                \
    Object* value = READ_FIELD(this, offset);         \
    return Smi::cast(value)->value();                 \
  }                                                   \
  void holder::set_##name(int value) {                \
    WRITE_FIELD(this, offset, Smi::FromInt(value));   \
  }


// Getter/setter pair for a single bit ('offset') inside an existing
// integer field accessed via field()/set_field().
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
93

    
94

    
95
bool Object::IsSmi() {
96
  return HAS_SMI_TAG(this);
97
}
98

    
99

    
100
bool Object::IsHeapObject() {
101
  return HAS_HEAP_OBJECT_TAG(this);
102
}
103

    
104

    
105
bool Object::IsHeapNumber() {
106
  return Object::IsHeapObject()
107
    && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
108
}
109

    
110

    
111
bool Object::IsString() {
112
  return Object::IsHeapObject()
113
    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
114
}
115

    
116

    
117
bool Object::IsSymbol() {
118
  if (!this->IsHeapObject()) return false;
119
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
120
  return (type & (kIsNotStringMask | kIsSymbolMask)) ==
121
         (kStringTag | kSymbolTag);
122
}
123

    
124

    
125
bool Object::IsConsString() {
126
  if (!this->IsHeapObject()) return false;
127
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
128
  return (type & (kIsNotStringMask | kStringRepresentationMask)) ==
129
         (kStringTag | kConsStringTag);
130
}
131

    
132

    
133
#ifdef DEBUG
// These are for cast checks.  If you need one of these in release
// mode you should consider using a StringShape before moving it out
// of the ifdef.

bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqAsciiString() {
  if (!IsString()) return false;
  StringShape shape(String::cast(this));
  return shape.IsSequential() && shape.IsAsciiRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  StringShape shape(String::cast(this));
  return shape.IsSequential() && shape.IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  StringShape shape(String::cast(this));
  return shape.IsExternal() && shape.IsAsciiRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  StringShape shape(String::cast(this));
  return shape.IsExternal() && shape.IsTwoByteRepresentation();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


#endif  // DEBUG
185

    
186

    
187
// Snapshot the instance type of a live string.  Each constructor asserts
// the captured type really is a string type.
StringShape::StringShape(String* str)
    : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


// Snapshot the instance type directly from a map.
StringShape::StringShape(Map* map)
    : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


// Wrap an already-known instance type.
StringShape::StringShape(InstanceType t)
    : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}
206

    
207

    
208
bool StringShape::IsSymbol() {
209
  ASSERT(valid());
210
  return (type_ & kIsSymbolMask) == kSymbolTag;
211
}
212

    
213

    
214
bool StringShape::IsAsciiRepresentation() {
215
  return (type_ & kStringEncodingMask) == kAsciiStringTag;
216
}
217

    
218

    
219
bool StringShape::IsTwoByteRepresentation() {
220
  return (type_ & kStringEncodingMask) == kTwoByteStringTag;
221
}
222

    
223

    
224
bool StringShape::IsCons() {
225
  return (type_ & kStringRepresentationMask) == kConsStringTag;
226
}
227

    
228

    
229
bool StringShape::IsSliced() {
230
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
231
}
232

    
233

    
234
bool StringShape::IsExternal() {
235
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
236
}
237

    
238

    
239
bool StringShape::IsSequential() {
240
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
241
}
242

    
243

    
244
StringRepresentationTag StringShape::representation_tag() {
245
  uint32_t tag = (type_ & kStringRepresentationMask);
246
  return static_cast<StringRepresentationTag>(tag);
247
}
248

    
249

    
250
uint32_t StringShape::full_representation_tag() {
251
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
252
}
253

    
254

    
255
uint32_t StringShape::size_tag() {
256
  return (type_ & kStringSizeMask);
257
}
258

    
259

    
260
bool StringShape::IsSequentialAscii() {
261
  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
262
}
263

    
264

    
265
bool StringShape::IsSequentialTwoByte() {
266
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
267
}
268

    
269

    
270
bool StringShape::IsExternalAscii() {
271
  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
272
}
273

    
274

    
275
bool StringShape::IsExternalTwoByte() {
276
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
277
}
278

    
279

    
280
// Read one character from the flattened string data.
// NOTE(review): the assert permits index == length_ (one past the last
// character) — confirm whether any caller relies on that bound.
uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  }
  return static_cast<const uc16*>(start_)[index];
}
288

    
289

    
290
bool Object::IsNumber() {
291
  return IsSmi() || IsHeapNumber();
292
}
293

    
294

    
295
bool Object::IsByteArray() {
296
  return Object::IsHeapObject()
297
    && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE;
298
}
299

    
300

    
301
bool Object::IsFailure() {
302
  return HAS_FAILURE_TAG(this);
303
}
304

    
305

    
306
bool Object::IsRetryAfterGC() {
307
  return HAS_FAILURE_TAG(this)
308
    && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
309
}
310

    
311

    
312
bool Object::IsOutOfMemoryFailure() {
313
  return HAS_FAILURE_TAG(this)
314
    && Failure::cast(this)->IsOutOfMemoryException();
315
}
316

    
317

    
318
bool Object::IsException() {
319
  return this == Failure::Exception();
320
}
321

    
322

    
323
bool Object::IsJSObject() {
324
  return IsHeapObject()
325
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
326
}
327

    
328

    
329
bool Object::IsJSContextExtensionObject() {
330
  return IsHeapObject()
331
    && (HeapObject::cast(this)->map()->instance_type() ==
332
        JS_CONTEXT_EXTENSION_OBJECT_TYPE);
333
}
334

    
335

    
336
bool Object::IsMap() {
337
  return Object::IsHeapObject()
338
    && HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
339
}
340

    
341

    
342
bool Object::IsFixedArray() {
343
  return Object::IsHeapObject()
344
    && HeapObject::cast(this)->map()->instance_type() == FIXED_ARRAY_TYPE;
345
}
346

    
347

    
348
bool Object::IsDescriptorArray() {
349
  return IsFixedArray();
350
}
351

    
352

    
353
bool Object::IsContext() {
354
  return Object::IsHeapObject()
355
    && (HeapObject::cast(this)->map() == Heap::context_map() ||
356
        HeapObject::cast(this)->map() == Heap::catch_context_map() ||
357
        HeapObject::cast(this)->map() == Heap::global_context_map());
358
}
359

    
360

    
361
bool Object::IsCatchContext() {
362
  return Object::IsHeapObject()
363
    && HeapObject::cast(this)->map() == Heap::catch_context_map();
364
}
365

    
366

    
367
bool Object::IsGlobalContext() {
368
  return Object::IsHeapObject()
369
    && HeapObject::cast(this)->map() == Heap::global_context_map();
370
}
371

    
372

    
373
bool Object::IsJSFunction() {
374
  return Object::IsHeapObject()
375
    && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
376
}
377

    
378

    
379
template <> inline bool Is<JSFunction>(Object* obj) {
380
  return obj->IsJSFunction();
381
}
382

    
383

    
384
bool Object::IsCode() {
385
  return Object::IsHeapObject()
386
    && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
387
}
388

    
389

    
390
bool Object::IsOddball() {
391
  return Object::IsHeapObject()
392
    && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
393
}
394

    
395

    
396
bool Object::IsSharedFunctionInfo() {
397
  return Object::IsHeapObject() &&
398
      (HeapObject::cast(this)->map()->instance_type() ==
399
       SHARED_FUNCTION_INFO_TYPE);
400
}
401

    
402

    
403
bool Object::IsJSValue() {
404
  return Object::IsHeapObject()
405
    && HeapObject::cast(this)->map()->instance_type() == JS_VALUE_TYPE;
406
}
407

    
408

    
409
bool Object::IsStringWrapper() {
410
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
411
}
412

    
413

    
414
bool Object::IsProxy() {
415
  return Object::IsHeapObject()
416
    && HeapObject::cast(this)->map()->instance_type() == PROXY_TYPE;
417
}
418

    
419

    
420
bool Object::IsBoolean() {
421
  return IsTrue() || IsFalse();
422
}
423

    
424

    
425
bool Object::IsJSArray() {
426
  return Object::IsHeapObject()
427
    && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE;
428
}
429

    
430

    
431
bool Object::IsJSRegExp() {
432
  return Object::IsHeapObject()
433
    && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE;
434
}
435

    
436

    
437
template <> inline bool Is<JSArray>(Object* obj) {
438
  return obj->IsJSArray();
439
}
440

    
441

    
442
bool Object::IsHashTable() {
443
  return Object::IsHeapObject()
444
    && HeapObject::cast(this)->map() == Heap::hash_table_map();
445
}
446

    
447

    
448
bool Object::IsDictionary() {
449
  return IsHashTable() && this != Heap::symbol_table();
450
}
451

    
452

    
453
bool Object::IsSymbolTable() {
454
  return IsHashTable() && this == Heap::symbol_table();
455
}
456

    
457

    
458
bool Object::IsCompilationCacheTable() {
459
  return IsHashTable();
460
}
461

    
462

    
463
bool Object::IsMapCache() {
464
  return IsHashTable();
465
}
466

    
467

    
468
bool Object::IsLookupCache() {
469
  return IsHashTable();
470
}
471

    
472

    
473
bool Object::IsPrimitive() {
474
  return IsOddball() || IsNumber() || IsString();
475
}
476

    
477

    
478
bool Object::IsJSGlobalProxy() {
479
  bool result = IsHeapObject() &&
480
                (HeapObject::cast(this)->map()->instance_type() ==
481
                 JS_GLOBAL_PROXY_TYPE);
482
  ASSERT(!result || IsAccessCheckNeeded());
483
  return result;
484
}
485

    
486

    
487
bool Object::IsGlobalObject() {
488
  if (!IsHeapObject()) return false;
489

    
490
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
491
  return type == JS_GLOBAL_OBJECT_TYPE ||
492
         type == JS_BUILTINS_OBJECT_TYPE;
493
}
494

    
495

    
496
bool Object::IsJSGlobalObject() {
497
  return IsHeapObject() &&
498
      (HeapObject::cast(this)->map()->instance_type() ==
499
       JS_GLOBAL_OBJECT_TYPE);
500
}
501

    
502

    
503
bool Object::IsJSBuiltinsObject() {
504
  return IsHeapObject() &&
505
      (HeapObject::cast(this)->map()->instance_type() ==
506
       JS_BUILTINS_OBJECT_TYPE);
507
}
508

    
509

    
510
bool Object::IsUndetectableObject() {
511
  return IsHeapObject()
512
    && HeapObject::cast(this)->map()->is_undetectable();
513
}
514

    
515

    
516
bool Object::IsAccessCheckNeeded() {
517
  return IsHeapObject()
518
    && HeapObject::cast(this)->map()->is_access_check_needed();
519
}
520

    
521

    
522
bool Object::IsStruct() {
523
  if (!IsHeapObject()) return false;
524
  switch (HeapObject::cast(this)->map()->instance_type()) {
525
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
526
  STRUCT_LIST(MAKE_STRUCT_CASE)
527
#undef MAKE_STRUCT_CASE
528
    default: return false;
529
  }
530
}
531

    
532

    
533
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
534
  bool Object::Is##Name() {                                      \
535
    return Object::IsHeapObject()                                \
536
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
537
  }
538
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
539
#undef MAKE_STRUCT_PREDICATE
540

    
541

    
542
bool Object::IsUndefined() {
543
  return this == Heap::undefined_value();
544
}
545

    
546

    
547
bool Object::IsTheHole() {
548
  return this == Heap::the_hole_value();
549
}
550

    
551

    
552
bool Object::IsNull() {
553
  return this == Heap::null_value();
554
}
555

    
556

    
557
bool Object::IsTrue() {
558
  return this == Heap::true_value();
559
}
560

    
561

    
562
bool Object::IsFalse() {
563
  return this == Heap::false_value();
564
}
565

    
566

    
567
double Object::Number() {
568
  ASSERT(IsNumber());
569
  return IsSmi()
570
    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
571
    : reinterpret_cast<HeapNumber*>(this)->value();
572
}
573

    
574

    
575

    
576
Object* Object::ToSmi() {
577
  if (IsSmi()) return this;
578
  if (IsHeapNumber()) {
579
    double value = HeapNumber::cast(this)->value();
580
    int int_value = FastD2I(value);
581
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
582
      return Smi::FromInt(int_value);
583
    }
584
  }
585
  return Failure::Exception();
586
}
587

    
588

    
589
bool Object::HasSpecificClassOf(String* name) {
590
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
591
}
592

    
593

    
594
Object* Object::GetElement(uint32_t index) {
595
  return GetElementWithReceiver(this, index);
596
}
597

    
598

    
599
Object* Object::GetProperty(String* key) {
600
  PropertyAttributes attributes;
601
  return GetPropertyWithReceiver(this, key, &attributes);
602
}
603

    
604

    
605
Object* Object::GetProperty(String* key, PropertyAttributes* attributes) {
606
  return GetPropertyWithReceiver(this, key, attributes);
607
}
608

    
609

    
610
// Byte address of the field at 'offset' inside tagged heap pointer 'p'.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)


// Unconditionally record the write in the remembered set.  Retains its
// historical trailing semicolon so existing call sites keep compiling.
#define WRITE_BARRIER(object, offset) \
  Heap::RecordWrite(object->address(), offset);

// CONDITIONAL_WRITE_BARRIER must be issued after the actual
// write due to the assert validating the written value.  The expansion
// is wrapped in do { ... } while (false) so it behaves as a single
// statement and cannot mis-bind in an unbraced if/else at a call site.
#define CONDITIONAL_WRITE_BARRIER(object, offset, mode)       \
  do {                                                        \
    if (mode == UPDATE_WRITE_BARRIER) {                       \
      Heap::RecordWrite(object->address(), offset);           \
    } else {                                                  \
      ASSERT(mode == SKIP_WRITE_BARRIER);                     \
      ASSERT(Heap::InNewSpace(object) ||                      \
             !Heap::InNewSpace(READ_FIELD(object, offset)));  \
    }                                                         \
  } while (false)

#define READ_DOUBLE_FIELD(p, offset) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
663

    
664

    
665
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
666
  return &READ_FIELD(obj, byte_offset);
667
}
668

    
669

    
670
int Smi::value() {
671
  return reinterpret_cast<int>(this) >> kSmiTagSize;
672
}
673

    
674

    
675
Smi* Smi::FromInt(int value) {
676
  ASSERT(Smi::IsValid(value));
677
  return reinterpret_cast<Smi*>((value << kSmiTagSize) | kSmiTag);
678
}
679

    
680

    
681
Failure::Type Failure::type() const {
682
  return static_cast<Type>(value() & kFailureTypeTagMask);
683
}
684

    
685

    
686
bool Failure::IsInternalError() const {
687
  return type() == INTERNAL_ERROR;
688
}
689

    
690

    
691
bool Failure::IsOutOfMemoryException() const {
692
  return type() == OUT_OF_MEMORY_EXCEPTION;
693
}
694

    
695

    
696
int Failure::requested() const {
697
  const int kShiftBits =
698
      kFailureTypeTagSize + kSpaceTagSize - kObjectAlignmentBits;
699
  STATIC_ASSERT(kShiftBits >= 0);
700
  ASSERT(type() == RETRY_AFTER_GC);
701
  return value() >> kShiftBits;
702
}
703

    
704

    
705
AllocationSpace Failure::allocation_space() const {
706
  ASSERT_EQ(RETRY_AFTER_GC, type());
707
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
708
                                      & kSpaceTagMask);
709
}
710

    
711

    
712
Failure* Failure::InternalError() {
713
  return Construct(INTERNAL_ERROR);
714
}
715

    
716

    
717
Failure* Failure::Exception() {
718
  return Construct(EXCEPTION);
719
}
720

    
721
Failure* Failure::OutOfMemoryException() {
722
  return Construct(OUT_OF_MEMORY_EXCEPTION);
723
}
724

    
725

    
726
int Failure::value() const {
727
  return reinterpret_cast<int>(this) >> kFailureTagSize;
728
}
729

    
730

    
731
Failure* Failure::RetryAfterGC(int requested_bytes) {
732
  int requested = requested_bytes >> kObjectAlignmentBits;
733
  int value = (requested << kSpaceTagSize) | NEW_SPACE;
734
  ASSERT(value >> kSpaceTagSize == requested);
735
  ASSERT(Smi::IsValid(value));
736
  ASSERT(value == ((value << kFailureTypeTagSize) >> kFailureTypeTagSize));
737
  ASSERT(Smi::IsValid(value << kFailureTypeTagSize));
738
  return Construct(RETRY_AFTER_GC, value);
739
}
740

    
741

    
742
Failure* Failure::Construct(Type type, int value) {
743
  int info = (value << kFailureTypeTagSize) | type;
744
  ASSERT(Smi::IsValid(info));  // Same validation check as in Smi
745
  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
746
}
747

    
748

    
749
bool Smi::IsValid(int value) {
750
#ifdef DEBUG
751
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
752
#endif
753
  // To be representable as an tagged small integer, the two
754
  // most-significant bits of 'value' must be either 00 or 11 due to
755
  // sign-extension. To check this we add 01 to the two
756
  // most-significant bits, and check if the most-significant bit is 0
757
  //
758
  // CAUTION: The original code below:
759
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
760
  // may lead to incorrect results according to the C language spec, and
761
  // in fact doesn't work correctly with gcc4.1.1 in some cases: The
762
  // compiler may produce undefined results in case of signed integer
763
  // overflow. The computation must be done w/ unsigned ints.
764
  bool result =
765
      ((static_cast<unsigned int>(value) + 0x40000000U) & 0x80000000U) == 0;
766
  ASSERT(result == in_range);
767
  return result;
768
}
769

    
770

    
771
MapWord MapWord::FromMap(Map* map) {
772
  return MapWord(reinterpret_cast<uintptr_t>(map));
773
}
774

    
775

    
776
Map* MapWord::ToMap() {
777
  return reinterpret_cast<Map*>(value_);
778
}
779

    
780

    
781
bool MapWord::IsForwardingAddress() {
782
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
783
}
784

    
785

    
786
MapWord MapWord::FromForwardingAddress(HeapObject* object) {
787
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
788
  return MapWord(reinterpret_cast<uintptr_t>(raw));
789
}
790

    
791

    
792
HeapObject* MapWord::ToForwardingAddress() {
793
  ASSERT(IsForwardingAddress());
794
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
795
}
796

    
797

    
798
bool MapWord::IsMarked() {
799
  return (value_ & kMarkingMask) == 0;
800
}
801

    
802

    
803
void MapWord::SetMark() {
804
  value_ &= ~kMarkingMask;
805
}
806

    
807

    
808
void MapWord::ClearMark() {
809
  value_ |= kMarkingMask;
810
}
811

    
812

    
813
bool MapWord::IsOverflowed() {
814
  return (value_ & kOverflowMask) != 0;
815
}
816

    
817

    
818
void MapWord::SetOverflow() {
819
  value_ |= kOverflowMask;
820
}
821

    
822

    
823
void MapWord::ClearOverflow() {
824
  value_ &= ~kOverflowMask;
825
}
826

    
827

    
828
MapWord MapWord::EncodeAddress(Address map_address, int offset) {
829
  // Offset is the distance in live bytes from the first live object in the
830
  // same page. The offset between two objects in the same page should not
831
  // exceed the object area size of a page.
832
  ASSERT(0 <= offset && offset < Page::kObjectAreaSize);
833

    
834
  int compact_offset = offset >> kObjectAlignmentBits;
835
  ASSERT(compact_offset < (1 << kForwardingOffsetBits));
836

    
837
  Page* map_page = Page::FromAddress(map_address);
838
  ASSERT_MAP_PAGE_INDEX(map_page->mc_page_index);
839

    
840
  int map_page_offset =
841
      map_page->Offset(map_address) >> kObjectAlignmentBits;
842

    
843
  uintptr_t encoding =
844
      (compact_offset << kForwardingOffsetShift) |
845
      (map_page_offset << kMapPageOffsetShift) |
846
      (map_page->mc_page_index << kMapPageIndexShift);
847
  return MapWord(encoding);
848
}
849

    
850

    
851
Address MapWord::DecodeMapAddress(MapSpace* map_space) {
852
  int map_page_index = (value_ & kMapPageIndexMask) >> kMapPageIndexShift;
853
  ASSERT_MAP_PAGE_INDEX(map_page_index);
854

    
855
  int map_page_offset =
856
      ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift)
857
      << kObjectAlignmentBits;
858

    
859
  return (map_space->PageAddress(map_page_index) + map_page_offset);
860
}
861

    
862

    
863
int MapWord::DecodeOffset() {
864
  // The offset field is represented in the kForwardingOffsetBits
865
  // most-significant bits.
866
  int offset = (value_ >> kForwardingOffsetShift) << kObjectAlignmentBits;
867
  ASSERT(0 <= offset && offset < Page::kObjectAreaSize);
868
  return offset;
869
}
870

    
871

    
872
MapWord MapWord::FromEncodedAddress(Address address) {
873
  return MapWord(reinterpret_cast<uintptr_t>(address));
874
}
875

    
876

    
877
Address MapWord::ToEncodedAddress() {
878
  return reinterpret_cast<Address>(value_);
879
}
880

    
881

    
882
#ifdef DEBUG
// Debug-only: check that the tagged field at 'offset' holds a valid pointer.
void HeapObject::VerifyObjectField(int offset) {
  Object* field = READ_FIELD(this, offset);
  VerifyPointer(field);
}
#endif
887

    
888

    
889
Map* HeapObject::map() {
890
  return map_word().ToMap();
891
}
892

    
893

    
894
void HeapObject::set_map(Map* value) {
895
  set_map_word(MapWord::FromMap(value));
896
}
897

    
898

    
899
MapWord HeapObject::map_word() {
900
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
901
}
902

    
903

    
904
void HeapObject::set_map_word(MapWord map_word) {
905
  // WRITE_FIELD does not update the remembered set, but there is no need
906
  // here.
907
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
908
}
909

    
910

    
911
HeapObject* HeapObject::FromAddress(Address address) {
912
  ASSERT_TAG_ALIGNED(address);
913
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
914
}
915

    
916

    
917
Address HeapObject::address() {
918
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
919
}
920

    
921

    
922
int HeapObject::Size() {
923
  return SizeFromMap(map());
924
}
925

    
926

    
927
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
928
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
929
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
930
}
931

    
932

    
933
void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
934
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
935
}
936

    
937

    
938
bool HeapObject::IsMarked() {
939
  return map_word().IsMarked();
940
}
941

    
942

    
943
void HeapObject::SetMark() {
944
  ASSERT(!IsMarked());
945
  MapWord first_word = map_word();
946
  first_word.SetMark();
947
  set_map_word(first_word);
948
}
949

    
950

    
951
void HeapObject::ClearMark() {
952
  ASSERT(IsMarked());
953
  MapWord first_word = map_word();
954
  first_word.ClearMark();
955
  set_map_word(first_word);
956
}
957

    
958

    
959
bool HeapObject::IsOverflowed() {
960
  return map_word().IsOverflowed();
961
}
962

    
963

    
964
void HeapObject::SetOverflow() {
965
  MapWord first_word = map_word();
966
  first_word.SetOverflow();
967
  set_map_word(first_word);
968
}
969

    
970

    
971
void HeapObject::ClearOverflow() {
972
  ASSERT(IsOverflowed());
973
  MapWord first_word = map_word();
974
  first_word.ClearOverflow();
975
  set_map_word(first_word);
976
}
977

    
978

    
979
double HeapNumber::value() {
980
  return READ_DOUBLE_FIELD(this, kValueOffset);
981
}
982

    
983

    
984
void HeapNumber::set_value(double value) {
985
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
986
}
987

    
988

    
989
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
990
ACCESSORS(JSObject, elements, FixedArray, kElementsOffset)
991

    
992

    
993
void JSObject::initialize_properties() {
994
  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
995
  WRITE_FIELD(this, kPropertiesOffset, Heap::empty_fixed_array());
996
}
997

    
998

    
999
void JSObject::initialize_elements() {
1000
  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
1001
  WRITE_FIELD(this, kElementsOffset, Heap::empty_fixed_array());
1002
}
1003

    
1004

    
1005
ACCESSORS(Oddball, to_string, String, kToStringOffset)
1006
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1007

    
1008

    
1009
int JSObject::GetHeaderSize() {
1010
  switch (map()->instance_type()) {
1011
    case JS_GLOBAL_PROXY_TYPE:
1012
      return JSGlobalProxy::kSize;
1013
    case JS_GLOBAL_OBJECT_TYPE:
1014
      return JSGlobalObject::kSize;
1015
    case JS_BUILTINS_OBJECT_TYPE:
1016
      return JSBuiltinsObject::kSize;
1017
    case JS_FUNCTION_TYPE:
1018
      return JSFunction::kSize;
1019
    case JS_VALUE_TYPE:
1020
      return JSValue::kSize;
1021
    case JS_ARRAY_TYPE:
1022
      return JSValue::kSize;
1023
    case JS_REGEXP_TYPE:
1024
      return JSValue::kSize;
1025
    case JS_OBJECT_TYPE:
1026
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
1027
      return JSObject::kHeaderSize;
1028
    default:
1029
      UNREACHABLE();
1030
      return 0;
1031
  }
1032
}
1033

    
1034

    
1035
// Number of internal (embedder) fields: total words past the header
// minus the words used by in-object properties.
int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}
1042

    
1043

    
1044
// Reads internal field |index| (bounds-checked in debug builds).
Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}
1051

    
1052

    
1053
// Writes internal field |index| and records it with the write barrier.
void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1062

    
1063

    
1064
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::FastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    // Negative adjusted index: the property lives inside the object,
    // at a negative offset from the end of the instance.
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    // Otherwise it lives in the out-of-object properties array.
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}
1078

    
1079

    
1080
// Stores a fast-case property at |index|; counterpart of FastPropertyAt.
// Returns |value| for caller convenience.
Object* JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    // In-object slot: write directly and record the write barrier.
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(this, offset);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}
1093

    
1094

    
1095
// Reads a property that is known to be stored in-object (the adjusted
// index must be negative — asserted below).
Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}
1102

    
1103

    
1104
// Writes a property known to be stored in-object; the caller chooses
// whether the write barrier runs via |mode|. Returns |value|.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(this, offset, mode);
  return value;
}
1115

    
1116

    
1117

    
1118
// Fills every pointer-sized slot after the header with undefined.
void JSObject::InitializeBody(int object_size) {
  Object* value = Heap::undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1124

    
1125

    
1126
// Fills every pointer-sized slot after the header with undefined
// (same scheme as JSObject::InitializeBody).
void Struct::InitializeBody(int object_size) {
  Object* value = Heap::undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
1132

    
1133

    
1134
// Fast-mode objects keep properties in a plain array; slow mode uses a
// dictionary.
bool JSObject::HasFastProperties() {
  return !properties()->IsDictionary();
}
1137

    
1138

    
1139
// Converts |object| to an array index if it exactly represents one.
// Smis must be non-negative; heap numbers must round-trip through
// uint32_t without loss. Returns true and stores into *index on success.
bool Array::IndexFromObject(Object* object, uint32_t* index) {
  if (object->IsSmi()) {
    int smi_value = Smi::cast(object)->value();
    if (smi_value < 0) return false;
    *index = smi_value;
    return true;
  }
  if (!object->IsHeapNumber()) return false;
  double number = HeapNumber::cast(object)->value();
  uint32_t truncated = static_cast<uint32_t>(number);
  if (static_cast<double>(truncated) != number) return false;
  *index = truncated;
  return true;
}
1156

    
1157

    
1158
// Returns true if this is a String wrapper object (JSValue) whose
// wrapped string has a character at position |index|.
bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  // static_cast instead of a C-style cast; length() is non-negative so
  // the widening to uint32_t is value-preserving.
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}
1169

    
1170

    
1171
// Reads element |index| (bounds-checked in debug builds).
Object* FixedArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}
1175

    
1176

    
1177
void FixedArray::set(int index, Smi* value) {
1178
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1179
  int offset = kHeaderSize + index * kPointerSize;
1180
  WRITE_FIELD(this, offset, value);
1181
}
1182

    
1183

    
1184
// Stores an arbitrary element and records it with the write barrier.
void FixedArray::set(int index, Object* value) {
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}
1190

    
1191

    
1192
// Objects in new space never need a write barrier (the barrier tracks
// old-to-new pointers), so callers may skip it for them.
WriteBarrierMode HeapObject::GetWriteBarrierMode() {
  if (Heap::InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
1196

    
1197

    
1198
// Stores an element with a caller-selected write barrier mode; callers
// typically pass the result of GetWriteBarrierMode().
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(this, offset, mode);
}
1206

    
1207

    
1208
// Raw store with NO write barrier. NOTE(review): presumably only safe
// when |array| is in new space or |value| is not — confirm at call sites.
void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
  ASSERT(index >= 0 && index < array->length());
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
1212

    
1213

    
1214
// Stores undefined without a barrier; the ASSERT checks that undefined
// lives outside new space, which makes the barrier unnecessary.
void FixedArray::set_undefined(int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!Heap::InNewSpace(Heap::undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              Heap::undefined_value());
}
1220

    
1221

    
1222
// Stores null without a barrier (null is never in new space).
void FixedArray::set_null(int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!Heap::InNewSpace(Heap::null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value());
}
1227

    
1228

    
1229
// Stores the hole marker without a barrier (the hole is never in new space).
void FixedArray::set_the_hole(int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!Heap::InNewSpace(Heap::the_hole_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::the_hole_value());
}
1234

    
1235

    
1236
// Empty descriptor arrays are represented by one canonical object; any
// non-canonical array must have length > 2 (header slots plus content).
bool DescriptorArray::IsEmpty() {
  ASSERT(this == Heap::empty_descriptor_array() ||
         this->length() > 2);
  return this == Heap::empty_descriptor_array();
}
1241

    
1242

    
1243
// Exchanges two elements using barrier-free stores; both values were
// already in the array, so no new old-to-new pointers can be created.
void DescriptorArray::fast_swap(FixedArray* array, int first, int second) {
  Object* first_value = array->get(first);
  Object* second_value = array->get(second);
  fast_set(array, first, second_value);
  fast_set(array, second, first_value);
}
1248

    
1249

    
1250
// Finds the descriptor index for |name|, or kNotFound. Uses linear
// search for small symbol-keyed arrays, binary search otherwise.
int DescriptorArray::Search(String* name) {
  SLOW_ASSERT(IsSortedNoDuplicates());

  // Check for empty descriptor array.
  int nof = number_of_descriptors();
  if (nof == 0) return kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
    return LinearSearch(name, nof);
  }

  // Slow case: perform binary search.
  return BinarySearch(name, 0, nof - 1);
}
1266

    
1267

    
1268

    
1269
// Returns the key (a String) of descriptor |descriptor_number|.
String* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return String::cast(get(ToKeyIndex(descriptor_number)));
}
1273

    
1274

    
1275
// Returns the value of descriptor |descriptor_number| from the content array.
Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return GetContentArray()->get(ToValueIndex(descriptor_number));
}
1279

    
1280

    
1281
// Returns the details word (a Smi-encoded PropertyDetails) of a descriptor.
Smi* DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
}
1285

    
1286

    
1287
// Copies descriptor |descriptor_number| (key/value/details) into |desc|.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             GetDetails(descriptor_number));
}
1292

    
1293

    
1294
// Writes |desc| into slot |descriptor_number| using barrier-free stores;
// the asserts below justify skipping the write barrier.
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  // Make sure none of the elements in desc are in new space.
  ASSERT(!Heap::InNewSpace(desc->GetKey()));
  ASSERT(!Heap::InNewSpace(desc->GetValue()));

  fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
  FixedArray* content_array = GetContentArray();
  fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue());
  fast_set(content_array, ToDetailsIndex(descriptor_number),
           desc->GetDetails().AsSmi());
}
1308

    
1309

    
1310
// Swaps two whole descriptors: keys in this array, value and details in
// the content array.
void DescriptorArray::Swap(int first, int second) {
  fast_swap(this, ToKeyIndex(first), ToKeyIndex(second));
  FixedArray* content_array = GetContentArray();
  fast_swap(content_array, ToValueIndex(first), ToValueIndex(second));
  fast_swap(content_array, ToDetailsIndex(first),  ToDetailsIndex(second));
}
1316

    
1317

    
1318
// True if the slow-elements flag bit is set in the max-number-key slot.
bool Dictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}
1324

    
1325

    
1326
// Returns the largest numeric key seen, decoded from the tagged Smi slot
// (0 if the slot is not a Smi). Only valid while elements are fast.
uint32_t Dictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}
1333

    
1334

    
1335
// Marks the dictionary as requiring slow elements; Smi store, so the
// write barrier is safely skipped.
void Dictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex,
      Smi::FromInt(kRequiresSlowElementsMask),
      SKIP_WRITE_BARRIER);
}
1340

    
1341

    
1342
// ------------------------------------
// Cast operations

// Generated Type::cast(Object*) helpers (debug-checked downcasts).
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(Dictionary)
CAST_ACCESSOR(SymbolTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(LookupCache)
CAST_ACCESSOR(String)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqAsciiString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalAsciiString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(Failure)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(Proxy)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(Struct)
1381

    
1382

    
1383
// Generate cast accessors for every Struct subtype in STRUCT_LIST.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
1386

    
1387
// Debug-checked downcast from Object* to the templated hash table type.
template <int prefix_size, int elem_size>
HashTable<prefix_size, elem_size>* HashTable<prefix_size, elem_size>::cast(
    Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}
1393

    
1394

    
1395
// Generated int getter/setter for the array length field.
INT_ACCESSORS(Array, length, kLengthOffset)
1396

    
1397

    
1398
// Content equality. Two distinct symbols are never equal (symbols are
// uniquified), so that case short-circuits to false before SlowEquals.
bool String::Equals(String* other) {
  if (other == this) return true;
  if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
    return false;
  }
  return SlowEquals(other);
}
1405

    
1406

    
1407
// Decodes the length from the tagged length field; the shift amount
// depends on the string's size class (short/medium/long).
int String::length() {
  uint32_t len = READ_INT_FIELD(this, kLengthOffset);

  ASSERT(kShortStringTag + kLongLengthShift == kShortLengthShift);
  ASSERT(kMediumStringTag + kLongLengthShift == kMediumLengthShift);
  ASSERT(kLongStringTag == 0);

  return len >> (StringShape(this).size_tag() + kLongLengthShift);
}
1416

    
1417

    
1418
// Encodes |value| into the tagged length field (inverse of length()).
void String::set_length(int value) {
  ASSERT(kShortStringTag + kLongLengthShift == kShortLengthShift);
  ASSERT(kMediumStringTag + kLongLengthShift == kMediumLengthShift);
  ASSERT(kLongStringTag == 0);

  WRITE_INT_FIELD(this,
                  kLengthOffset,
                  value << (StringShape(this).size_tag() + kLongLengthShift));
}
1427

    
1428

    
1429
// Raw (still-encoded) length field, e.g. for hashing/copying.
uint32_t String::length_field() {
  return READ_UINT32_FIELD(this, kLengthOffset);
}
1432

    
1433

    
1434
// Raw store of an already-encoded length field.
void String::set_length_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kLengthOffset, value);
}
1437

    
1438

    
1439
// Returns this string unchanged when it is already flat; otherwise
// delegates to TryFlatten. Inlined so the common flat case avoids a call.
Object* String::TryFlattenIfNotFlat() {
  if (IsFlat()) return this;
  return TryFlatten();
}
1447

    
1448

    
1449
// Returns the character at |index| by dispatching on the full
// representation tag (sequential/cons/sliced/external x ascii/two-byte).
uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kAsciiStringTag:
      return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kAsciiStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kSlicedStringTag | kAsciiStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    case kExternalStringTag | kAsciiStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
1473

    
1474

    
1475
// Writes character |value| at |index|; only valid on sequential strings.
void String::Set(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  ASSERT(StringShape(this).IsSequential());

  return StringShape(this).IsAsciiRepresentation()
      ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}
1483

    
1484

    
1485
// A string is flat when its characters are directly addressable:
// sequential/external strings always are; cons strings only when
// already flattened; slices only over flat buffers.
bool String::IsFlat() {
  switch (StringShape(this).representation_tag()) {
    case kConsStringTag: {
      String* second = ConsString::cast(this)->second();
      // Only flattened strings have second part empty.
      return second->length() == 0;
    }
    case kSlicedStringTag: {
      StringRepresentationTag tag =
          StringShape(SlicedString::cast(this)->buffer()).representation_tag();
      return tag == kSeqStringTag || tag == kExternalStringTag;
    }
    default:
      return true;
  }
}
1501

    
1502

    
1503
// Reads one byte-sized character (returned widened to uint16_t).
uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}
1507

    
1508

    
1509
// Writes one character; the value must fit in the ASCII range.
void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}
1514

    
1515

    
1516
// Address of the first character (immediately after the header).
Address SeqAsciiString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}
1519

    
1520

    
1521
// Character data viewed as a char* (not NUL-terminated).
char* SeqAsciiString::GetChars() {
  return reinterpret_cast<char*>(GetCharsAddress());
}
1524

    
1525

    
1526
// Address of the first character (immediately after the header).
Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}
1529

    
1530

    
1531
// Character data viewed as a uc16* (not NUL-terminated). Goes through
// GetCharsAddress() for consistency with SeqAsciiString::GetChars().
uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(GetCharsAddress());
}
1534

    
1535

    
1536
// Reads one 16-bit character.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}
1540

    
1541

    
1542
// Writes one 16-bit character.
void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}
1546

    
1547

    
1548
// Object size in bytes, decoding the length with |instance_type| instead
// of this object's map (which may be encoded during GC).
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  uint32_t length = READ_INT_FIELD(this, kLengthOffset);

  ASSERT(kShortStringTag + kLongLengthShift == kShortLengthShift);
  ASSERT(kMediumStringTag + kLongLengthShift == kMediumLengthShift);
  ASSERT(kLongStringTag == 0);

  // Use the map (and not 'this') to compute the size tag, since
  // TwoByteStringSize is called during GC when maps are encoded.
  length >>= StringShape(instance_type).size_tag() + kLongLengthShift;

  return SizeFor(length);
}
1561

    
1562

    
1563
// Object size in bytes; same GC-safe length decoding as the two-byte case.
int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
  uint32_t length = READ_INT_FIELD(this, kLengthOffset);

  ASSERT(kShortStringTag + kLongLengthShift == kShortLengthShift);
  ASSERT(kMediumStringTag + kLongLengthShift == kMediumLengthShift);
  ASSERT(kLongStringTag == 0);

  // Use the map (and not 'this') to compute the size tag, since
  // AsciiStringSize is called during GC when maps are encoded.
  length >>= StringShape(instance_type).size_tag() + kLongLengthShift;

  return SizeFor(length);
}
1576

    
1577

    
1578
// First half of the cons pair. The ASSERT checks the invariant that a
// flattened cons (empty second) has a first part matching this string's
// ASCII-ness.
String* ConsString::first() {
  ASSERT(String::cast(READ_FIELD(this, kSecondOffset))->length() != 0 ||
      StringShape(
          String::cast(
              READ_FIELD(this, kFirstOffset))).IsAsciiRepresentation()
          == StringShape(this).IsAsciiRepresentation());
  return String::cast(READ_FIELD(this, kFirstOffset));
}
1586

    
1587

    
1588
// Raw first field without type checks (safe to use during GC).
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}
1591

    
1592

    
1593
// Stores the first half with a caller-selected write barrier mode.
void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, kFirstOffset, mode);
}
1597

    
1598

    
1599
// Second half of the cons pair (empty once the string is flattened).
String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}
1602

    
1603

    
1604
// Raw second field without type checks (safe to use during GC).
Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}
1607

    
1608

    
1609
// Stores the second half with a caller-selected write barrier mode.
void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, kSecondOffset, mode);
}
1613

    
1614

    
1615
// The underlying string this slice views; must match our ASCII-ness.
String* SlicedString::buffer() {
  ASSERT(
      StringShape(
          String::cast(READ_FIELD(this, kBufferOffset))).IsAsciiRepresentation()
      == StringShape(this).IsAsciiRepresentation());
  return String::cast(READ_FIELD(this, kBufferOffset));
}
1622

    
1623

    
1624
// Stores the underlying buffer string and records the write barrier.
void SlicedString::set_buffer(String* buffer) {
  WRITE_FIELD(this, kBufferOffset, buffer);
  WRITE_BARRIER(this, kBufferOffset);
}
1628

    
1629

    
1630
// Start offset of the slice within its buffer string.
int SlicedString::start() {
  return READ_INT_FIELD(this, kStartOffset);
}
1633

    
1634

    
1635
// Stores the start offset (plain int, no barrier needed).
void SlicedString::set_start(int start) {
  WRITE_INT_FIELD(this, kStartOffset, start);
}
1638

    
1639

    
1640
// Embedder-supplied resource pointer stored as a raw (untagged) field.
ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}
1643

    
1644

    
1645
// Stores the raw resource pointer; not a heap pointer, so no barrier.
void ExternalAsciiString::set_resource(
    ExternalAsciiString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}
1649

    
1650

    
1651
// Selects the short/medium/long external ASCII string map by length.
Map* ExternalAsciiString::StringMap(int length) {
  if (length <= String::kMaxShortStringSize) {
    return Heap::short_external_ascii_string_map();
  }
  if (length <= String::kMaxMediumStringSize) {
    return Heap::medium_external_ascii_string_map();
  }
  return Heap::long_external_ascii_string_map();
}
1663

    
1664

    
1665
// Selects the short/medium/long external ASCII symbol map by length.
Map* ExternalAsciiString::SymbolMap(int length) {
  if (length <= String::kMaxShortStringSize) {
    return Heap::short_external_ascii_symbol_map();
  }
  if (length <= String::kMaxMediumStringSize) {
    return Heap::medium_external_ascii_symbol_map();
  }
  return Heap::long_external_ascii_symbol_map();
}
1677

    
1678

    
1679
// Embedder-supplied resource pointer stored as a raw (untagged) field.
ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}
1682

    
1683

    
1684
// Stores the raw resource pointer; not a heap pointer, so no barrier.
void ExternalTwoByteString::set_resource(
    ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
}
1688

    
1689

    
1690
// Selects the short/medium/long external two-byte string map by length.
Map* ExternalTwoByteString::StringMap(int length) {
  if (length <= String::kMaxShortStringSize) {
    return Heap::short_external_string_map();
  }
  if (length <= String::kMaxMediumStringSize) {
    return Heap::medium_external_string_map();
  }
  return Heap::long_external_string_map();
}
1702

    
1703

    
1704
// Selects the short/medium/long external two-byte symbol map by length.
Map* ExternalTwoByteString::SymbolMap(int length) {
  if (length <= String::kMaxShortStringSize) {
    return Heap::short_external_symbol_map();
  }
  if (length <= String::kMaxMediumStringSize) {
    return Heap::medium_external_symbol_map();
  }
  return Heap::long_external_symbol_map();
}
1716

    
1717

    
1718
// Reads byte |index| (bounds-checked in debug builds).
byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}
1722

    
1723

    
1724
// Writes byte |index| (bounds-checked in debug builds).
void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}
1728

    
1729

    
1730
// Reads the int at int-index |index|.
// NOTE(review): the assert only bounds the start byte (index * kIntSize),
// not the full kIntSize span read — confirm callers keep reads in range.
int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}
1734

    
1735

    
1736
// Recovers the tagged ByteArray pointer from its data start address
// (inverse of GetDataStartAddress).
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}
1740

    
1741

    
1742
// Untagged address of the first data byte (just past the header).
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
1745

    
1746

    
1747
// Instance size in bytes; stored compressed as a word count in one byte.
int Map::instance_size() {
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}
1750

    
1751

    
1752
// Number of in-object property slots for instances of this map.
int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}
1755

    
1756

    
1757
// Object size given its map; inlines the two hottest cases and falls
// back to SlowSizeFromMap for everything else.
int HeapObject::SizeFromMap(Map* map) {
  InstanceType instance_type = map->instance_type();
  // Only inline the two most frequent cases.
  if (instance_type == JS_OBJECT_TYPE) return  map->instance_size();
  if (instance_type == FIXED_ARRAY_TYPE) {
    return reinterpret_cast<FixedArray*>(this)->FixedArraySize();
  }
  // Otherwise do the general size computation.
  return SlowSizeFromMap(map);
}
1767

    
1768

    
1769
// Stores the instance size, compressed to a pointer-word count that
// must fit in one byte; the value must be pointer-aligned.
void Map::set_instance_size(int value) {
  ASSERT((value & ~(kPointerSize - 1)) == value);
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}
1775

    
1776

    
1777
// Stores the in-object property count (must fit in one byte).
void Map::set_inobject_properties(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}
1781

    
1782

    
1783
// Instance type of objects described by this map (one-byte field).
InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}
1786

    
1787

    
1788
// Stores the instance type (must fit in one byte).
void Map::set_instance_type(InstanceType value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}
1792

    
1793

    
1794
// Number of unused property slots remaining in instances of this map.
int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}
1797

    
1798

    
1799
// Stores the unused-field count, clamped to the one-byte field's maximum.
void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}
1802

    
1803

    
1804
// Raw map flag byte (non-instance prototype, access checks, ...).
byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}
1807

    
1808

    
1809
// Stores the raw map flag byte.
void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}
1812

    
1813

    
1814
// Sets or clears the has-non-instance-prototype flag bit.
void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}
1821

    
1822

    
1823
// Tests the has-non-instance-prototype flag bit.
bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}
1826

    
1827

    
1828
// Sets or clears the access-check-needed flag bit.
void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}
1835

    
1836

    
1837
// Tests the access-check-needed flag bit.
bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}
1840

    
1841

    
1842
// Packed flags word (kind, IC state, property type, argument count).
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
1845

    
1846

    
1847
// Stores the packed flags word; checks the kind field is wide enough and
// that call ICs carry an argument count.
void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= (kFlagsKindMask >> kFlagsKindShift)+1);
  // Make sure that all call stubs have an arguments count.
  ASSERT(ExtractKindFromFlags(flags) != CALL_IC ||
         ExtractArgumentsCountFromFlags(flags) >= 0);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}
1854

    
1855

    
1856
// Code kind decoded from the flags word.
Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}
1859

    
1860

    
1861
// Inline-cache state decoded from the flags word.
InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_BREAK ||
         result == DEBUG_PREPARE_STEP_IN);
  return result;
}
1872

    
1873

    
1874
// Property type from the flags word; only meaningful for monomorphic ICs.
PropertyType Code::type() {
  ASSERT(ic_state() == MONOMORPHIC);
  return ExtractTypeFromFlags(flags());
}
1878

    
1879

    
1880
// Argument count from the flags word; only valid for call stubs/STUBs.
int Code::arguments_count() {
  ASSERT(is_call_stub() || kind() == STUB);
  return ExtractArgumentsCountFromFlags(flags());
}
1884

    
1885

    
1886
// Major key byte identifying which CodeStub generated this STUB code.
CodeStub::Major Code::major_key() {
  ASSERT(kind() == STUB);
  return static_cast<CodeStub::Major>(READ_BYTE_FIELD(this,
                                                      kStubMajorKeyOffset));
}
1891

    
1892

    
1893
// Stores the stub major key (must fit in one byte; STUB kind only).
void Code::set_major_key(CodeStub::Major major) {
  ASSERT(kind() == STUB);
  ASSERT(0 <= major && major < 256);
  WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
}
1898

    
1899

    
1900
// True when the kind falls inside the contiguous IC kind range.
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
}
1904

    
1905

    
1906
// Packs kind, IC state, type and argument count into one flags word;
// the asserts verify the encoding round-trips losslessly.
Code::Flags Code::ComputeFlags(Kind kind,
                               InlineCacheState ic_state,
                               PropertyType type,
                               int argc) {
  // Compute the bit mask.
  int bits = kind << kFlagsKindShift;
  bits |= ic_state << kFlagsICStateShift;
  bits |= type << kFlagsTypeShift;
  bits |= argc << kFlagsArgumentsCountShift;
  // Cast to flags and validate result before returning it.
  Flags result = static_cast<Flags>(bits);
  ASSERT(ExtractKindFromFlags(result) == kind);
  ASSERT(ExtractICStateFromFlags(result) == ic_state);
  ASSERT(ExtractTypeFromFlags(result) == type);
  ASSERT(ExtractArgumentsCountFromFlags(result) == argc);
  return result;
}
1923

    
1924

    
1925
// Convenience wrapper: ComputeFlags with the MONOMORPHIC IC state.
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          PropertyType type,
                                          int argc) {
  return ComputeFlags(kind, MONOMORPHIC, type, argc);
}
1930

    
1931

    
1932
// Decodes the kind field from a flags word.
Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  int bits = (flags & kFlagsKindMask) >> kFlagsKindShift;
  return static_cast<Kind>(bits);
}
1936

    
1937

    
1938
// Decodes the IC state field from a flags word.
InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  int bits = (flags & kFlagsICStateMask) >> kFlagsICStateShift;
  return static_cast<InlineCacheState>(bits);
}
1942

    
1943

    
1944
// Decodes the property type field from a flags word.
PropertyType Code::ExtractTypeFromFlags(Flags flags) {
  int bits = (flags & kFlagsTypeMask) >> kFlagsTypeShift;
  return static_cast<PropertyType>(bits);
}
1948

    
1949

    
1950
int Code::ExtractArgumentsCountFromFlags(Flags flags) {
1951
  return (flags & kFlagsArgumentsCountMask) >> kFlagsArgumentsCountShift;
1952
}
1953

    
1954

    
1955
Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
1956
  int bits = flags & ~kFlagsTypeMask;
1957
  return static_cast<Flags>(bits);
1958
}
1959

    
1960

    
1961
// Maps the address of the first instruction back to the enclosing Code
// object by subtracting the header size.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
1970

    
1971

    
1972
// The prototype object shared by all instances described by this map.
Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


// Sets the prototype; only null or a JSObject is a valid prototype here.
// The write barrier is applied conditionally so GC-safe callers can skip it.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSObject());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, mode);
}
1982

    
1983

    
1984
// Macro-generated field accessors.  Each ACCESSORS(holder, name, type,
// offset) invocation expands into a getter and setter pair for a tagged
// field; BOOL_ACCESSORS exposes a single bit of a Smi-encoded flags field;
// INT_ACCESSORS reads and writes a raw (untagged) integer field.

// Map.
ACCESSORS(Map, instance_descriptors, DescriptorArray,
          kInstanceDescriptorsOffset)
ACCESSORS(Map, code_cache, FixedArray, kCodeCacheOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

// JSFunction.
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)

// Global objects.
ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)

// API callback data.
ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

// Templates.
ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
          kPropertyAccessorsOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

// Script.
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Object, kIdOffset)
ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
ACCESSORS(Script, wrapper, Proxy, kWrapperOffset)
ACCESSORS(Script, type, Smi, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)

// Debugger support.  Note that DebugInfo/BreakPointInfo use element
// indices (k...Index) rather than byte offsets.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

// SharedFunctionInfo.
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object,
          kExternalReferenceDataOffset)
ACCESSORS(SharedFunctionInfo, lazy_load_data, Object, kLazyLoadDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)

// Single-bit flags packed into Smi fields.
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

// Raw integer fields on SharedFunctionInfo.
INT_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
INT_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
INT_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
INT_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
INT_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
INT_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
2100

    
2101

    
2102
// Stores the kDontAdaptArgumentsSentinel as the formal parameter count.
// Only valid for builtins (enforced by the ASSERT); presumably callers use
// the sentinel to skip argument adaptation for this function.
void SharedFunctionInfo::DontAdaptArguments() {
  ASSERT(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}
2106

    
2107

    
2108
// The source start position shares an integer field with the type bits;
// the position occupies the high bits above kStartPositionShift.
int SharedFunctionInfo::start_position() {
  return start_position_and_type() >> kStartPositionShift;
}


// Replaces the position bits while preserving the low flag bits
// (everything outside kStartPositionMask).
void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}
2117

    
2118

    
2119
// The compiled (or placeholder) code object for this function.
Code* SharedFunctionInfo::code() {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Installs a new code object; the write barrier is conditional so callers
// that know the value is not in new space can skip it.
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, kCodeOffset, mode);
}


// Uncompiled functions hold a stub as their code, so a non-STUB code kind
// means the function has been compiled.
bool SharedFunctionInfo::is_compiled() {
  // TODO(1242782): Create a code kind for uncompiled code.
  return code()->kind() != Code::STUB;
}
2134

    
2135

    
2136
// A boilerplate function is identified by having the dedicated
// boilerplate function map.
bool JSFunction::IsBoilerplate() {
  return map() == Heap::boilerplate_function_map();
}


// A function is considered loaded once its lazy-load data has been
// cleared back to undefined.
bool JSFunction::IsLoaded() {
  return shared()->lazy_load_data() == Heap::undefined_value();
}


// Code lives on the shared function info; these simply delegate.
Code* JSFunction::code() {
  return shared()->code();
}


void JSFunction::set_code(Code* value) {
  shared()->set_code(value);
}
2154

    
2155

    
2156
// The context the function was created in (checked cast).
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


// Raw read without the Context cast — usable when the value may still be
// undefined or when casts are unsafe (e.g. during GC).
Object* JSFunction::unchecked_context() {
  return READ_FIELD(this, kContextOffset);
}


// Only undefined (not-yet-initialized) or a real context may be stored.
void JSFunction::set_context(Object* value) {
  ASSERT(value == Heap::undefined_value() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(this, kContextOffset);
}
2171

    
2172
// One field doubles as either the initial map (once instances have been
// created) or the explicitly-set prototype object.
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


// Only valid when has_initial_map() is true.
Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


// The shared field holds a Map exactly when an initial map exists.
bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


// An instance prototype exists if there is an initial map, or if the
// shared field holds anything other than the hole (i.e. a prototype was
// stored directly).
bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


// Non-instance prototypes (non-JSObject values) are flagged on the map
// and stored elsewhere — see JSFunction::prototype() below.
bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}
2199

    
2200

    
2201
// Returns the prototype that instances created from this function get.
Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


// Returns the value of the function's "prototype" property.
Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}
2217

    
2218

    
2219
// Delegates to the shared function info's compilation state.
bool JSFunction::is_compiled() {
  return shared()->is_compiled();
}


// Number of literal entries in this function's literals array.
int JSFunction::NumberOfLiterals() {
  return literals()->length();
}
2227

    
2228

    
2229
// JavaScript builtins are stored as a fixed run of pointer-sized slots
// starting at kJSBuiltinsOffset, indexed by builtin id.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(0 <= id && id < kJSBuiltinsCount);
  return READ_FIELD(this, kJSBuiltinsOffset + (id * kPointerSize));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(0 <= id && id < kJSBuiltinsCount);
  WRITE_FIELD(this, kJSBuiltinsOffset + (id * kPointerSize), value);
  WRITE_BARRIER(this, kJSBuiltinsOffset + (id * kPointerSize));
}
2241

    
2242

    
2243
// A Proxy wraps a raw external address, stored as an untagged integer so
// the GC does not treat it as a heap pointer.
Address Proxy::proxy() {
  return AddressFrom<Address>(READ_INT_FIELD(this, kProxyOffset));
}


void Proxy::set_proxy(Address value) {
  WRITE_INT_FIELD(this, kProxyOffset, OffsetFrom(value));
}


// Reports the wrapped address to the visitor as an external reference
// (not an object pointer), so it is not relocated as a heap reference.
void Proxy::ProxyIterateBody(ObjectVisitor* visitor) {
  visitor->VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
}
2257

    
2258

    
2259
// The primitive value wrapped by this JSValue.
ACCESSORS(JSValue, value, Object, kValueOffset)


// Checked downcast; also verifies the object has the expected fixed size.
JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}
2267

    
2268

    
2269
// Raw sizes of the code object's three variable-length parts.
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, relocation_size, kRelocationSizeOffset)
INT_ACCESSORS(Code, sinfo_size, kSInfoSizeOffset)


// IC target state, stored as a single byte in the header.
Code::ICTargetState Code::ic_flag() {
  return static_cast<ICTargetState>(READ_BYTE_FIELD(this, kICFlagOffset));
}


void Code::set_ic_flag(ICTargetState value) {
  WRITE_BYTE_FIELD(this, kICFlagOffset, value);
}
2282

    
2283

    
2284
// Layout of a Code object: [header | instructions | relocation info |
// scope info], with the instruction+relocation part rounded up to object
// alignment (see body_size).

// First byte of the generated machine code, immediately after the header.
byte* Code::instruction_start()  {
  return FIELD_ADDR(this, kHeaderSize);
}


// Combined size of instructions and relocation info, padded to alignment.
int Code::body_size() {
  return RoundUp(instruction_size() + relocation_size(), kObjectAlignment);
}


// Relocation info follows the instructions directly.
byte* Code::relocation_start() {
  return FIELD_ADDR(this, kHeaderSize + instruction_size());
}


// The entry point is simply the start of the instructions.
byte* Code::entry() {
  return instruction_start();
}
2302

    
2303

    
2304
// Returns true iff pc lies inside this code object's instruction area,
// i.e. in the half-open range [instruction_start, instruction_start +
// instruction_size).
bool Code::contains(byte* pc) {
  byte* start = instruction_start();
  byte* limit = start + instruction_size();
  return start <= pc && pc < limit;
}
2308

    
2309

    
2310
// Scope info is stored after the (aligned) instruction + relocation body.
byte* Code::sinfo_start() {
  return FIELD_ADDR(this, kHeaderSize + body_size());
}
2313

    
2314

    
2315
ACCESSORS(JSArray, length, Object, kLengthOffset)


// The regexp's implementation data: undefined until compiled, afterwards
// a FixedArray whose slots are addressed by the k...Index constants.
ACCESSORS(JSRegExp, data, Object, kDataOffset)


// Returns NOT_COMPILED while data is still undefined; otherwise decodes
// the type tag Smi stored at kTagIndex of the data array.
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}
2327

    
2328

    
2329
// Number of capture groups: atoms never capture; Irregexp-compiled
// regexps store their count in the data array.  Any other tag is a bug.
int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}
2340

    
2341

    
2342
// Decodes the regexp flags from the Smi at kFlagsIndex of the data array.
// Requires the regexp to be compiled (data must be a FixedArray).
JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}
2348

    
2349

    
2350
// Returns the regexp source pattern, stored at kSourceIndex of the data
// array.  Requires the regexp to be compiled (data must be a FixedArray).
String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  FixedArray* fields = FixedArray::cast(this->data());
  return String::cast(fields->get(kSourceIndex));
}
2356

    
2357

    
2358
// Reads an implementation-data slot; only valid once compiled.
Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


// Writes an implementation-data slot.  Slots below kDataIndex (tag,
// source, flags) are fixed at construction and must not be set here.
void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
2369

    
2370

    
2371
// Elements are "fast" (a plain FixedArray) unless they have been
// normalized into a dictionary.
bool JSObject::HasFastElements() {
  return !elements()->IsDictionary();
}


// Interceptor presence is recorded on the object's map.
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


// The properties backing store, viewed as a dictionary.  Only valid for
// objects in slow (dictionary) property mode.
Dictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return Dictionary::cast(properties());
}


// The elements backing store, viewed as a dictionary.  Only valid for
// objects in slow (dictionary) element mode.
Dictionary* JSObject::element_dictionary() {
  ASSERT(!HasFastElements());
  return Dictionary::cast(elements());
}
2396

    
2397

    
2398
// The hash code is cached in the length field; kHashComputedMask marks
// whether it has been computed yet.
bool String::HasHashCode() {
  return (length_field() & kHashComputedMask) != 0;
}


// Returns the cached hash if present, otherwise computes and caches it.
uint32_t String::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = length_field();
  if (field & kHashComputedMask) return field >> kHashShift;
  // Slow case: compute hash code and set it.
  return ComputeAndSetHash();
}
2410

    
2411

    
2412
// Incremental string hasher.  Besides the running hash it tracks whether
// the characters seen so far could still form an array index (a decimal
// number of at most kMaxArrayIndexSize digits).
StringHasher::StringHasher(int length)
  : length_(length),
    raw_running_hash_(0),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) { }
2419

    
2420

    
2421
// Strings longer than the medium-string limit get a trivial hash
// (presumably derived from the length by the caller — this only tests
// the threshold).
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxMediumStringSize;
}
2424

    
2425

    
2426
// Feeds one character into the hash and into the incremental array-index
// parse.
void StringHasher::AddCharacter(uc32 c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      // Non-digit: the string cannot be an array index.
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        // A leading zero is only allowed for the single-character
        // string "0"; "01" etc. are not canonical array indices.
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      // Overflow guard for array_index_ * 10 + d against the 32-bit
      // limit: 429496729 == floor(2^32 / 10), and ((d + 2) >> 3) is 1
      // exactly when d >= 6, tightening the bound so that the maximum
      // accepted value is 429496729 * 10 + 5 == 2^32 - 1.
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}
2453

    
2454

    
2455
// Same Jenkins one-at-a-time hash step as AddCharacter, but skips the
// array-index bookkeeping.  Only legal once the string is known not to
// be an array index.
void StringHasher::AddCharacterNoIndex(uc32 c) {
  ASSERT(!is_array_index());
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}
2461

    
2462

    
2463
uint32_t StringHasher::GetHash() {
2464
  // Get the calculated raw hash value and do some more bit ops to distribute
2465
  // the hash further. Ensure that we never return zero as the hash value.
2466
  uint32_t result = raw_running_hash_;
2467
  result += (result << 3);
2468
  result ^= (result >> 11);
2469
  result += (result << 15);
2470
  if (result == 0) {
2471
    result = 27;
2472
  }
2473
  return result;
2474
}
2475

    
2476

    
2477
// Converts the string to an array index if possible.  If the hash has
// already been computed and its array-index bit is clear we can reject
// immediately; otherwise fall back to the slow parse.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = length_field();
  if ((field & kHashComputedMask) && !(field & kIsArrayIndexMask)) return false;
  return SlowAsArrayIndex(index);
}
2482

    
2483

    
2484
// The object's prototype is stored on its map.
Object* JSObject::GetPrototype() {
  return JSObject::cast(this)->map()->prototype();
}


// Delegates to the receiver-aware variant with this object as receiver.
PropertyAttributes JSObject::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}


// Delegates to the receiver-aware variant with this object as receiver.
bool JSObject::HasElement(uint32_t index) {
  return HasElementWithReceiver(this, index);
}
2497

    
2498

    
2499
// AccessorInfo packs several booleans and the property attributes into
// its Smi-valued flag field; BooleanBit and AttributesField define the
// bit layout.

bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


// Property attributes occupy a multi-bit field inside the same flag Smi.
PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


// Replaces only the attributes bits, preserving the boolean flags above.
void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  ASSERT(AttributesField::is_valid(attributes));
  int rest_value = flag()->value() & ~AttributesField::mask();
  set_flag(Smi::FromInt(rest_value | AttributesField::encode(attributes)));
}
2539

    
2540
// Writes one dictionary entry: key, value and details occupy three
// consecutive slots starting at EntryToIndex(entry).  String keys must
// carry a positive enumeration index in their details.
void Dictionary::SetEntry(int entry,
                          Object* key,
                          Object* value,
                          PropertyDetails details) {
  ASSERT(!key->IsString() || details.index() > 0);
  int index = EntryToIndex(entry);
  WriteBarrierMode mode = GetWriteBarrierMode();
  set(index, key, mode);
  set(index+1, value, mode);
  // Details are a Smi, so fast_set needs no write barrier.
  fast_set(this, index+2, details.AsSmi());
}
2551

    
2552

    
2553
// Resets the code cache to the shared empty fixed array.
void Map::ClearCodeCache() {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, Heap::empty_fixed_array());
}
2560

    
2561

    
2562
// Replaces the array's elements with the given storage and updates the
// length to match.  The length is a Smi, so its write barrier is skipped.
void JSArray::SetContent(FixedArray* storage) {
  set_length(Smi::FromInt(storage->length()), SKIP_WRITE_BARRIER);
  set_elements(storage);
}


// Copies this fixed array; the canonical empty array is shared, so an
// empty receiver is returned as-is without allocating.
Object* FixedArray::Copy() {
  if (length() == 0) return this;
  return Heap::CopyFixedArray(this);
}
2572

    
2573

    
2574
#undef CAST_ACCESSOR
2575
#undef INT_ACCESSORS
2576
#undef SMI_ACCESSORS
2577
#undef ACCESSORS
2578
#undef FIELD_ADDR
2579
#undef READ_FIELD
2580
#undef WRITE_FIELD
2581
#undef WRITE_BARRIER
2582
#undef CONDITIONAL_WRITE_BARRIER
2583
#undef READ_MEMADDR_FIELD
2584
#undef WRITE_MEMADDR_FIELD
2585
#undef READ_DOUBLE_FIELD
2586
#undef WRITE_DOUBLE_FIELD
2587
#undef READ_INT_FIELD
2588
#undef WRITE_INT_FIELD
2589
#undef READ_SHORT_FIELD
2590
#undef WRITE_SHORT_FIELD
2591
#undef READ_BYTE_FIELD
2592
#undef WRITE_BYTE_FIELD
2593

    
2594

    
2595
} }  // namespace v8::internal
2596

    
2597
#endif  // V8_OBJECTS_INL_H_