main_repo / deps / v8 / src / objects-inl.h @ f230a1cf

// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
// but it is absolutely needed to make sure gcc generates optimal
// code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "elements.h"
#include "objects.h"
#include "contexts.h"
#include "conversions-inl.h"
#include "heap.h"
#include "isolate.h"
#include "property.h"
#include "spaces.h"
#include "store-buffer.h"
#include "v8memory.h"
#include "factory.h"
#include "incremental-marking.h"
#include "transitions-inl.h"

namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
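
// Illustrative note (added commentary, not in the original source): shifting
// left then arithmetically right by one copies bit 30 into bit 31, so the
// value survives the Smi tagging that consumes the top bit.  A minimal
// sketch of the round trip, assuming a 31-bit payload:
//
//   PropertyDetails details(Smi::FromInt(raw));
//   Smi* smi = details.AsSmi();   // bits 31 and 30 now agree
//   PropertyDetails copy(smi);    // recovers the original low 31 bits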


PropertyDetails PropertyDetails::AsDeleted() {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}


#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() {                                             \
  return Object::IsHeapObject() &&                                      \
      HeapObject::cast(this)->map()->instance_type() == instancetype;   \
  }
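
// For illustration (added commentary): TYPE_CHECKER(HeapNumber,
// HEAP_NUMBER_TYPE), used below, expands to
//
//   bool Object::IsHeapNumber() {
//     return Object::IsHeapObject() &&
//         HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
//   }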


#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    SLOW_ASSERT(object->Is##type());            \
    return reinterpret_cast<type*>(object);     \
  }


#define INT_ACCESSORS(holder, name, offset)                             \
  int holder::name() { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);    \
  }
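
// For illustration (added commentary): ACCESSORS(JSObject, properties,
// FixedArray, kPropertiesOffset), used further down in this file, expands
// to a getter that casts the raw field and a setter that writes the field
// and then notifies the GC via the conditional write barrier:
//
//   FixedArray* JSObject::properties() {
//     return FixedArray::cast(READ_FIELD(this, kPropertiesOffset));
//   }
//   void JSObject::set_properties(FixedArray* value, WriteBarrierMode mode) {
//     WRITE_FIELD(this, kPropertiesOffset, value);
//     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPropertiesOffset,
//                               value, mode);
//   }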


// Getter that returns a tagged Smi and setter that writes a tagged Smi.
#define ACCESSORS_TO_SMI(holder, name, offset)                          \
  Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); }   \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {          \
    WRITE_FIELD(this, offset, value);                                   \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() {                                  \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }
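
// Illustrative note (added commentary): both Smi-based accessor families
// deliberately omit the write barrier.  A Smi is encoded entirely in the
// tagged word and never points into the heap, so the GC has nothing to
// track when one is stored.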


#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \


#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() {                                    \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }


bool Object::IsFixedArrayBase() {
  return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray();
}


// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->external_map();
}


bool Object::IsAccessorInfo() {
  return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
}


bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
  // There is a constraint on the object; check.
  if (!this->IsJSObject()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = JSObject::cast(this)->map()->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == expected) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}


bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


bool Object::NonFailureIsHeapObject() {
  ASSERT(!this->IsFailure());
  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
}


TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)


bool Object::IsString() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsName() {
  return IsString() || IsSymbol();
}


bool Object::IsUniqueName() {
  return IsInternalizedString() || IsSymbol();
}


bool Object::IsSpecObject() {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}


bool Object::IsSpecFunction() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
}


bool Object::IsInternalizedString() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
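
// Illustrative note (added commentary): the masked comparison answers two
// questions at once -- that the instance type is in the string range (the
// kIsNotStringMask bits match kStringTag) and that the not-internalized bit
// is clear (the kIsNotInternalizedMask bits match kInternalizedTag).  Two
// predicates collapsed into a single AND and compare.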


bool Object::IsConsString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqOneByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray();
}


MaybeObject* Object::AllocateNewStorageFor(Heap* heap,
                                           Representation representation) {
  if (!FLAG_track_double_fields) return this;
  if (!representation.IsDouble()) return this;
  if (IsUninitialized()) {
    return heap->AllocateHeapNumber(0);
  }
  return heap->AllocateHeapNumber(Number());
}


StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsInternalized() {
  ASSERT(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}


bool String::IsOneByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}


bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  ASSERT(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}


bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);

STATIC_CHECK(static_cast<uint32_t>(kStringEncodingMask) ==
             Internals::kStringEncodingMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_CHECK((kExternalStringTag | kOneByteStringTag) ==
             Internals::kExternalAsciiRepresentationTag);

STATIC_CHECK(v8::String::ASCII_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);

STATIC_CHECK(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);

uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}


bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


bool Object::IsFiller() {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}


TYPE_CHECKER(ExternalPixelArray, EXTERNAL_PIXEL_ARRAY_TYPE)


bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


TYPE_CHECKER(ExternalByteArray, EXTERNAL_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedByteArray, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
TYPE_CHECKER(ExternalShortArray, EXTERNAL_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedShortArray, EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
TYPE_CHECKER(ExternalIntArray, EXTERNAL_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalUnsignedIntArray, EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
TYPE_CHECKER(ExternalFloatArray, EXTERNAL_FLOAT_ARRAY_TYPE)
TYPE_CHECKER(ExternalDoubleArray, EXTERNAL_DOUBLE_ARRAY_TYPE)


bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
    && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


bool MaybeObject::IsException() {
  return this == Failure::Exception();
}


bool MaybeObject::IsTheHole() {
  return !IsFailure() && ToObjectUnchecked()->IsTheHole();
}


bool MaybeObject::IsUninitialized() {
  return !IsFailure() && ToObjectUnchecked()->IsUninitialized();
}


Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}


bool Object::IsJSReceiver() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSProxy() {
  if (!Object::IsHeapObject()) return false;
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
}


TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)


bool Object::IsJSWeakCollection() {
  return IsJSWeakMap() || IsJSWeakSet();
}


bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsTransitionArray() {
  return IsFixedArray();
}


bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsDependentCode() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}


bool Object::IsTypeFeedbackCells() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a cache cells array.  Since this is used for asserts we can check that
  // the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}

    
636

    
637
bool Object::IsContext() {
638
  if (!Object::IsHeapObject()) return false;
639
  Map* map = HeapObject::cast(this)->map();
640
  Heap* heap = map->GetHeap();
641
  return (map == heap->function_context_map() ||
642
      map == heap->catch_context_map() ||
643
      map == heap->with_context_map() ||
644
      map == heap->native_context_map() ||
645
      map == heap->block_context_map() ||
646
      map == heap->module_context_map() ||
647
      map == heap->global_context_map());
648
}
649

    
650

    
651
bool Object::IsNativeContext() {
652
  return Object::IsHeapObject() &&
653
      HeapObject::cast(this)->map() ==
654
      HeapObject::cast(this)->GetHeap()->native_context_map();
655
}
656

    
657

    
658
bool Object::IsScopeInfo() {
659
  return Object::IsHeapObject() &&
660
      HeapObject::cast(this)->map() ==
661
      HeapObject::cast(this)->GetHeap()->scope_info_map();
662
}
663

    
664

    
665
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
666

    
667

    
668
template <> inline bool Is<JSFunction>(Object* obj) {
669
  return obj->IsJSFunction();
670
}
671

    
672

    
673
TYPE_CHECKER(Code, CODE_TYPE)
674
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
675
TYPE_CHECKER(Cell, CELL_TYPE)
676
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
677
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
678
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
679
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
680
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
681
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
682
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
683

    
684

    
685
bool Object::IsStringWrapper() {
686
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
687
}
688

    
689

    
690
TYPE_CHECKER(Foreign, FOREIGN_TYPE)
691

    
692

    
693
bool Object::IsBoolean() {
694
  return IsOddball() &&
695
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
696
}
697

    
698

    
699
TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
700
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
701
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
702
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)
703

    
704

    
705
bool Object::IsJSArrayBufferView() {
706
  return IsJSDataView() || IsJSTypedArray();
707
}
708

    
709

    
710
TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)
711

    
712

    
713
template <> inline bool Is<JSArray>(Object* obj) {
714
  return obj->IsJSArray();
715
}
716

    
717

    
718
bool Object::IsHashTable() {
719
  return Object::IsHeapObject() &&
720
      HeapObject::cast(this)->map() ==
721
      HeapObject::cast(this)->GetHeap()->hash_table_map();
722
}
723

    
724

    
725
bool Object::IsDictionary() {
726
  return IsHashTable() &&
727
      this != HeapObject::cast(this)->GetHeap()->string_table();
728
}
729

    
730

    
731
bool Object::IsStringTable() {
732
  return IsHashTable() &&
733
      this == HeapObject::cast(this)->GetHeap()->raw_unchecked_string_table();
734
}
735

    
736

    
737
bool Object::IsJSFunctionResultCache() {
738
  if (!IsFixedArray()) return false;
739
  FixedArray* self = FixedArray::cast(this);
740
  int length = self->length();
741
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
742
  if ((length - JSFunctionResultCache::kEntriesIndex)
743
      % JSFunctionResultCache::kEntrySize != 0) {
744
    return false;
745
  }
746
#ifdef VERIFY_HEAP
747
  if (FLAG_verify_heap) {
748
    reinterpret_cast<JSFunctionResultCache*>(this)->
749
        JSFunctionResultCacheVerify();
750
  }
751
#endif
752
  return true;
753
}
754

    
755

    
756
bool Object::IsNormalizedMapCache() {
757
  if (!IsFixedArray()) return false;
758
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
759
    return false;
760
  }
761
#ifdef VERIFY_HEAP
762
  if (FLAG_verify_heap) {
763
    reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
764
  }
765
#endif
766
  return true;
767
}
768

    
769

    
770
bool Object::IsCompilationCacheTable() {
771
  return IsHashTable();
772
}
773

    
774

    
775
bool Object::IsCodeCacheHashTable() {
776
  return IsHashTable();
777
}
778

    
779

    
780
bool Object::IsPolymorphicCodeCacheHashTable() {
781
  return IsHashTable();
782
}
783

    
784

    
785
bool Object::IsMapCache() {
786
  return IsHashTable();
787
}
788

    
789

    
790
bool Object::IsObjectHashTable() {
791
  return IsHashTable();
792
}
793

    
794

    
795
bool Object::IsPrimitive() {
796
  return IsOddball() || IsNumber() || IsString();
797
}
798

    
799

    
800
bool Object::IsJSGlobalProxy() {
801
  bool result = IsHeapObject() &&
802
                (HeapObject::cast(this)->map()->instance_type() ==
803
                 JS_GLOBAL_PROXY_TYPE);
804
  ASSERT(!result || IsAccessCheckNeeded());
805
  return result;
806
}
807

    
808

    
809
bool Object::IsGlobalObject() {
810
  if (!IsHeapObject()) return false;
811

    
812
  InstanceType type = HeapObject::cast(this)->map()->instance_type();
813
  return type == JS_GLOBAL_OBJECT_TYPE ||
814
         type == JS_BUILTINS_OBJECT_TYPE;
815
}
816

    
817

    
818
TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
819
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)
820

    
821

    
822
bool Object::IsUndetectableObject() {
823
  return IsHeapObject()
824
    && HeapObject::cast(this)->map()->is_undetectable();
825
}
826

    
827

    
828
bool Object::IsAccessCheckNeeded() {
829
  return IsHeapObject()
830
    && HeapObject::cast(this)->map()->is_access_check_needed();
831
}
832

    
833

    
834
bool Object::IsStruct() {
835
  if (!IsHeapObject()) return false;
836
  switch (HeapObject::cast(this)->map()->instance_type()) {
837
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
838
  STRUCT_LIST(MAKE_STRUCT_CASE)
839
#undef MAKE_STRUCT_CASE
840
    default: return false;
841
  }
842
}
843

    
844

    
845
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
846
  bool Object::Is##Name() {                                      \
847
    return Object::IsHeapObject()                                \
848
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
849
  }
850
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
851
#undef MAKE_STRUCT_PREDICATE
852

    
853

    
854
bool Object::IsUndefined() {
855
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
856
}
857

    
858

    
859
bool Object::IsNull() {
860
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
861
}
862

    
863

    
864
bool Object::IsTheHole() {
865
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
866
}
867

    
868

    
869
bool Object::IsUninitialized() {
870
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
871
}
872

    
873

    
874
bool Object::IsTrue() {
875
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
876
}
877

    
878

    
879
bool Object::IsFalse() {
880
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
881
}
882

    
883

    
884
bool Object::IsArgumentsMarker() {
885
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
886
}
887

    
888

    
889
double Object::Number() {
890
  ASSERT(IsNumber());
891
  return IsSmi()
892
    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
893
    : reinterpret_cast<HeapNumber*>(this)->value();
894
}
895

    
896

    
897
bool Object::IsNaN() {
898
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
899
}
900

    
901

    
902
MaybeObject* Object::ToSmi() {
903
  if (IsSmi()) return this;
904
  if (IsHeapNumber()) {
905
    double value = HeapNumber::cast(this)->value();
906
    int int_value = FastD2I(value);
907
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
908
      return Smi::FromInt(int_value);
909
    }
910
  }
911
  return Failure::Exception();
912
}
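
// Illustrative note (added commentary): the round trip through FastD2I and
// FastI2D rejects any double that is not exactly a Smi-range integer.  3.0
// converts because 3.0 == FastI2D(3); 3.5 fails the equality check, and a
// value outside Smi range fails Smi::IsValid(), so both fall through to
// Failure::Exception().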


bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeObject* Object::GetElement(Isolate* isolate, uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This ASSERT is here to catch
  // leftover incorrect uses.
  ASSERT(AllowHeapAllocation::IsAllowed());
  return GetElementWithReceiver(isolate, this, index);
}


Object* Object::GetElementNoExceptionThrown(Isolate* isolate, uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(isolate, this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}


MaybeObject* Object::GetProperty(Name* key) {
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}


MaybeObject* Object::GetProperty(Name* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}


#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
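
// Illustrative note (added commentary): HeapObject pointers carry
// kHeapObjectTag in their low bits (see HeapObject::FromAddress below), so
// FIELD_ADDR subtracts the tag to recover the real address before adding
// the field offset.  Sketch, assuming kHeapObjectTag == 1: an object at
// address 0x1000 is referenced as 0x1001, and READ_FIELD(p, 8) dereferences
// 0x1001 + 8 - 1 == 0x1008.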

#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode == UPDATE_WRITE_BARRIER) {                                   \
    heap->incremental_marking()->RecordWrite(                           \
      object, HeapObject::RawField(object, offset), value);             \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  }

#ifndef V8_TARGET_ARCH_MIPS
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline double read_double_field(void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
    c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS


#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)


Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}


Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}


Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}
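
// Illustrative note (added commentary): a Smi is the integer shifted into
// the upper bits of the word with the (zero) Smi tag in the low bits.
// Sketch, assuming kSmiTag == 0, kSmiTagSize == 1 and kSmiShiftSize == 0 as
// on 32-bit targets: Smi::FromIntptr(5) yields the word 0b1010, and
// Smi::value() recovers 5 with an arithmetic shift right by one.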


Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}


Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException(intptr_t value) {
  return Construct(OUT_OF_MEMORY_EXCEPTION, value);
}


intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}


Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}


Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  // Fill the unused bits with a pattern that's easy to recognize in crash
  // dumps.
  static const int kFailureMagicPattern = 0x0BAD0000;
  return reinterpret_cast<Failure*>(
      (info << kFailureTagSize) | kFailureTag | kFailureMagicPattern);
}
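
// Illustrative note (added commentary): a Failure is never a real pointer;
// Construct() packs [payload | failure type | failure tag] into one word,
// and Failure::type() and Failure::value() above are simply the inverse
// shifts and masks.  The 0x0BAD0000 pattern ORed into the unused bits makes
// these words easy to spot in crash dumps.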


bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}


MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
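
// Illustrative note (added commentary): during evacuation the map slot is
// reused to store the forwarding address.  A map pointer carries the heap
// object tag, while FromForwardingAddress() strips that tag, so the stored
// word looks like a Smi -- which is exactly what IsForwardingAddress()
// tests with HAS_SMI_TAG.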


#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif


Heap* HeapObject::GetHeap() {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
  SLOW_ASSERT(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() {
  return GetHeap()->isolate();
}


Map* HeapObject::map() {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke write barrier, but there is no need
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}


void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}


ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}


bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}


FixedArrayBase* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}


void JSObject::ValidateElements() {
#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = GetElementsAccessor();
    accessor->Validate(this);
  }
#endif
}


bool JSObject::ShouldTrackAllocationInfo() {
  if (AllocationSite::CanTrack(map()->instance_type())) {
    if (!IsJSArray()) {
      return true;
    }

    return AllocationSite::GetMode(GetElementsKind()) ==
        TRACK_ALLOCATION_SITE;
  }
  return false;
}


void AllocationSite::Initialize() {
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}


// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (FLAG_track_allocation_sites &&
      IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
                                           ElementsKind to) {
  if (FLAG_track_allocation_sites &&
      IsFastSmiElementsKind(from) &&
      IsMoreGeneralElementsKindTransition(from, to)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}


inline bool AllocationSite::CanTrack(InstanceType type) {
  return type == JS_ARRAY_TYPE;
}


void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  object->ValidateElements();
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}


MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
                                                uint32_t count,
                                                EnsureElementsMode mode) {
  ElementsKind current_kind = map()->elements_kind();
  ElementsKind target_kind = current_kind;
  ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
  bool is_holey = IsFastHoleyElementsKind(current_kind);
  if (current_kind == FAST_HOLEY_ELEMENTS) return this;
  Heap* heap = GetHeap();
  Object* the_hole = heap->the_hole_value();
  for (uint32_t i = 0; i < count; ++i) {
    Object* current = *objects++;
    if (current == the_hole) {
      is_holey = true;
      target_kind = GetHoleyElementsKind(target_kind);
    } else if (!current->IsSmi()) {
      if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
        if (IsFastSmiElementsKind(target_kind)) {
          if (is_holey) {
            target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
          } else {
            target_kind = FAST_DOUBLE_ELEMENTS;
          }
        }
      } else if (is_holey) {
        target_kind = FAST_HOLEY_ELEMENTS;
        break;
      } else {
        target_kind = FAST_ELEMENTS;
      }
    }
  }

  if (target_kind != current_kind) {
    return TransitionElementsKind(target_kind);
  }
  return this;
}
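
// Illustrative note (added commentary): the loop widens target_kind
// monotonically along the elements-kind lattice -- smi elements generalize
// to double elements (for numbers) or to object elements (for anything
// else), and seeing the hole switches to the holey variant.  Once
// FAST_HOLEY_ELEMENTS is reached nothing can generalize further, which is
// why the scan may break early.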
1421

    
1422

    
1423
MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
1424
                                                uint32_t length,
1425
                                                EnsureElementsMode mode) {
1426
  if (elements->map() != GetHeap()->fixed_double_array_map()) {
1427
    ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
1428
           elements->map() == GetHeap()->fixed_cow_array_map());
1429
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
1430
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
1431
    }
1432
    Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
1433
    return EnsureCanContainElements(objects, length, mode);
1434
  }
1435

    
1436
  ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
1437
  if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
1438
    return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
1439
  } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
1440
    FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
1441
    for (uint32_t i = 0; i < length; ++i) {
1442
      if (double_array->is_the_hole(i)) {
1443
        return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
1444
      }
1445
    }
1446
    return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
1447
  }
1448

    
1449
  return this;
1450
}
1451

    
1452

    
1453
MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
1454
                                                ElementsKind to_kind) {
1455
  Map* current_map = map();
1456
  ElementsKind from_kind = current_map->elements_kind();
1457
  if (from_kind == to_kind) return current_map;
1458

    
1459
  Context* native_context = isolate->context()->native_context();
1460
  Object* maybe_array_maps = native_context->js_array_maps();
1461
  if (maybe_array_maps->IsFixedArray()) {
1462
    FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
1463
    if (array_maps->get(from_kind) == current_map) {
1464
      Object* maybe_transitioned_map = array_maps->get(to_kind);
1465
      if (maybe_transitioned_map->IsMap()) {
1466
        return Map::cast(maybe_transitioned_map);
1467
      }
1468
    }
1469
  }
1470

    
1471
  return GetElementsTransitionMapSlow(to_kind);
1472
}
1473

    
1474

    
1475
void JSObject::set_map_and_elements(Map* new_map,
1476
                                    FixedArrayBase* value,
1477
                                    WriteBarrierMode mode) {
1478
  ASSERT(value->HasValidElements());
1479
  if (new_map != NULL) {
1480
    if (mode == UPDATE_WRITE_BARRIER) {
1481
      set_map(new_map);
1482
    } else {
1483
      ASSERT(mode == SKIP_WRITE_BARRIER);
1484
      set_map_no_write_barrier(new_map);
1485
    }
1486
  }
1487
  ASSERT((map()->has_fast_smi_or_object_elements() ||
1488
          (value == GetHeap()->empty_fixed_array())) ==
1489
         (value->map() == GetHeap()->fixed_array_map() ||
1490
          value->map() == GetHeap()->fixed_cow_array_map()));
1491
  ASSERT((value == GetHeap()->empty_fixed_array()) ||
1492
         (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
1493
  WRITE_FIELD(this, kElementsOffset, value);
1494
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1495
}
1496

    
1497

    
1498
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1499
  set_map_and_elements(NULL, value, mode);
1500
}
1501

    
1502

    
1503
void JSObject::initialize_properties() {
1504
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1505
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1506
}
1507

    
1508

    
1509
void JSObject::initialize_elements() {
1510
  if (map()->has_fast_smi_or_object_elements() ||
1511
      map()->has_fast_double_elements()) {
1512
    ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1513
    WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1514
  } else if (map()->has_external_array_elements()) {
1515
    ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(map());
1516
    ASSERT(!GetHeap()->InNewSpace(empty_array));
1517
    WRITE_FIELD(this, kElementsOffset, empty_array);
1518
  } else {
1519
    UNREACHABLE();
1520
  }
1521
}
1522

    
1523

    
1524
MaybeObject* JSObject::ResetElements() {
1525
  if (map()->is_observed()) {
1526
    // Maintain invariant that observed elements are always in dictionary mode.
1527
    SeededNumberDictionary* dictionary;
1528
    MaybeObject* maybe = SeededNumberDictionary::Allocate(GetHeap(), 0);
1529
    if (!maybe->To(&dictionary)) return maybe;
1530
    if (map() == GetHeap()->non_strict_arguments_elements_map()) {
1531
      FixedArray::cast(elements())->set(1, dictionary);
1532
    } else {
1533
      set_elements(dictionary);
1534
    }
1535
    return this;
1536
  }
1537

    
1538
  ElementsKind elements_kind = GetInitialFastElementsKind();
1539
  if (!FLAG_smi_only_arrays) {
1540
    elements_kind = FastSmiToObjectElementsKind(elements_kind);
1541
  }
1542
  MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
1543
  Map* map;
1544
  if (!maybe->To(&map)) return maybe;
1545
  set_map(map);
1546
  initialize_elements();
1547

    
1548
  return this;
1549
}
1550

    
1551

    
1552
Handle<String> JSObject::ExpectedTransitionKey(Handle<Map> map) {
1553
  DisallowHeapAllocation no_gc;
1554
  if (!map->HasTransitionArray()) return Handle<String>::null();
1555
  TransitionArray* transitions = map->transitions();
1556
  if (!transitions->IsSimpleTransition()) return Handle<String>::null();
1557
  int transition = TransitionArray::kSimpleTransitionIndex;
1558
  PropertyDetails details = transitions->GetTargetDetails(transition);
1559
  Name* name = transitions->GetKey(transition);
1560
  if (details.type() != FIELD) return Handle<String>::null();
1561
  if (details.attributes() != NONE) return Handle<String>::null();
1562
  if (!name->IsString()) return Handle<String>::null();
1563
  return Handle<String>(String::cast(name));
1564
}
1565

    
1566

    
1567
Handle<Map> JSObject::ExpectedTransitionTarget(Handle<Map> map) {
1568
  ASSERT(!ExpectedTransitionKey(map).is_null());
1569
  return Handle<Map>(map->transitions()->GetTarget(
1570
      TransitionArray::kSimpleTransitionIndex));
1571
}
1572

    
1573

    
1574
Handle<Map> JSObject::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
1575
  DisallowHeapAllocation no_allocation;
1576
  if (!map->HasTransitionArray()) return Handle<Map>::null();
1577
  TransitionArray* transitions = map->transitions();
1578
  int transition = transitions->Search(*key);
1579
  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
1580
  PropertyDetails target_details = transitions->GetTargetDetails(transition);
1581
  if (target_details.type() != FIELD) return Handle<Map>::null();
1582
  if (target_details.attributes() != NONE) return Handle<Map>::null();
1583
  return Handle<Map>(transitions->GetTarget(transition));
1584
}


ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


byte Oddball::kind() {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


Object* Cell::value() {
  return READ_FIELD(this, kValueOffset);
}


void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);
}

ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)

Object* PropertyCell::type_raw() {
  return READ_FIELD(this, kTypeOffset);
}


void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
}


int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
      // UNREACHABLE();
      return 0;
  }
}


int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}
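

// Worked example (illustrative numbers, not from the original source): on a
// 64-bit build (kPointerSize == 8), an object with Size() == 0x40, a header
// of 0x20 and 2 in-object properties has (0x40 - 0x20) / 8 - 2 == 2 internal
// fields.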


int JSObject::GetInternalFieldOffset(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


void JSObject::SetInternalField(int index, Smi* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}


MaybeObject* JSObject::FastPropertyAt(Representation representation,
                                      int index) {
  Object* raw_value = RawFastPropertyAt(index);
  return raw_value->AllocateNewStorageFor(GetHeap(), representation);
}


// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}
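

// Worked example (illustrative numbers): with 2 in-object properties,
// RawFastPropertyAt(0) computes adjusted index -2 and reads the in-object
// field at instance_size() - 2 * kPointerSize, while RawFastPropertyAt(3)
// computes adjusted index 1 and reads properties()->get(1) from the
// out-of-line backing store.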


void JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
}


int JSObject::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  return map()->instance_size() + (index * kPointerSize);
}


Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}



void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  ASSERT(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  ASSERT(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated = map->pre_allocated_property_fields();
    ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}


bool JSObject::HasFastProperties() {
  ASSERT(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();
}


bool JSObject::TooManyFastProperties(StoreFromKeyed store_mode) {
  // Allow extra fast properties if the object has more than
  // kFastPropertiesSoftLimit in-object properties. When this is the case, it is
  // very unlikely that the object is being used as a dictionary and there is a
  // good chance that allowing more map transitions will be worth it.
  Map* map = this->map();
  if (map->unused_property_fields() != 0) return false;

  int inobject = map->inobject_properties();

  int limit;
  if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED) {
    limit = Max(inobject, kMaxFastProperties);
  } else {
    limit = Max(inobject, kFastPropertiesSoftLimit);
  }
  return properties()->length() > limit;
}


void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
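

// Illustrative behavior (follows directly from the checks above): a Smi 5
// yields index 5, a HeapNumber 3.0 yields index 3, while 3.5 and any negative
// value fail because they do not round-trip through the uint32_t cast.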


bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}



void Object::VerifyApiCallResultType() {
#if ENABLE_EXTRA_CHECKS
  if (!(IsSmi() ||
        IsString() ||
        IsSpecObject() ||
        IsHeapNumber() ||
        IsUndefined() ||
        IsTrue() ||
        IsFalse() ||
        IsNull())) {
    FATAL("API call returned invalid object");
  }
#endif  // ENABLE_EXTRA_CHECKS
}


FixedArrayBase* FixedArrayBase::cast(Object* object) {
  ASSERT(object->IsFixedArray() || object->IsFixedDoubleArray() ||
         object->IsConstantPoolArray());
  return reinterpret_cast<FixedArrayBase*>(object);
}


Object* FixedArray::get(int index) {
  SLOW_ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}


bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}


void FixedArray::set(int index, Smi* value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set(int index, Object* value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return BitCast<uint64_t, double>(value) == kHoleNanInt64;
}


inline double FixedDoubleArray::hole_nan_as_double() {
  return BitCast<double, uint64_t>(kHoleNanInt64);
}


inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  ASSERT(BitCast<uint64_t>(OS::nan_value()) != kHoleNanInt64);
  ASSERT((BitCast<uint64_t>(OS::nan_value()) >> 32) != kHoleNanUpper32);
  return OS::nan_value();
}
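

// Note (restating the encoding above): the hole is one specific NaN bit
// pattern (kHoleNanInt64), so it must be told apart from ordinary NaNs by
// comparing raw bits rather than with a floating-point compare (NaN != NaN),
// and set() below canonicalizes incoming NaNs so a stored NaN can never alias
// the hole pattern. A minimal sketch of the two tests:
//
//   bool is_hole = FixedDoubleArray::is_the_hole_nan(d);  // bitwise test
//   bool is_nan  = std::isnan(d);                         // any NaN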


double FixedDoubleArray::get_scalar(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  ASSERT(!is_the_hole_nan(result));
  return result;
}

int64_t FixedDoubleArray::get_representation(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  ASSERT(index >= 0 && index < this->length());
  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
}

MaybeObject* FixedDoubleArray::get(int index) {
  if (is_the_hole(index)) {
    return GetHeap()->the_hole_value();
  } else {
    return GetHeap()->NumberFromDouble(get_scalar(index));
  }
}


void FixedDoubleArray::set(int index, double value) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);
}


void FixedDoubleArray::set_the_hole(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
}


bool FixedDoubleArray::is_the_hole(int index) {
  int offset = kHeaderSize + index * kDoubleSize;
  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
}


SMI_ACCESSORS(ConstantPoolArray, first_ptr_index, kFirstPointerIndexOffset)
SMI_ACCESSORS(ConstantPoolArray, first_int32_index, kFirstInt32IndexOffset)


int ConstantPoolArray::first_int64_index() {
  return 0;
}


int ConstantPoolArray::count_of_int64_entries() {
  return first_ptr_index();
}


int ConstantPoolArray::count_of_ptr_entries() {
  return first_int32_index() - first_ptr_index();
}


int ConstantPoolArray::count_of_int32_entries() {
  return length() - first_int32_index();
}


void ConstantPoolArray::SetEntryCounts(int number_of_int64_entries,
                                       int number_of_ptr_entries,
                                       int number_of_int32_entries) {
  set_first_ptr_index(number_of_int64_entries);
  set_first_int32_index(number_of_int64_entries + number_of_ptr_entries);
  set_length(number_of_int64_entries + number_of_ptr_entries +
             number_of_int32_entries);
}
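

// Worked example (illustrative counts): SetEntryCounts(2, 3, 4) lays the
// array out as [2 x int64 | 3 x ptr | 4 x int32], so first_ptr_index() == 2,
// first_int32_index() == 5 and length() == 9, which is exactly what the
// count_of_*_entries() accessors above recover.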


int64_t ConstantPoolArray::get_int64_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= 0 && index < first_ptr_index());
  return READ_INT64_FIELD(this, OffsetOfElementAt(index));
}

double ConstantPoolArray::get_int64_entry_as_double(int index) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= 0 && index < first_ptr_index());
  return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
}


Object* ConstantPoolArray::get_ptr_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_ptr_index() && index < first_int32_index());
  return READ_FIELD(this, OffsetOfElementAt(index));
}


int32_t ConstantPoolArray::get_int32_entry(int index) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_int32_index() && index < length());
  return READ_INT32_FIELD(this, OffsetOfElementAt(index));
}


void ConstantPoolArray::set(int index, Object* value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_ptr_index() && index < first_int32_index());
  WRITE_FIELD(this, OffsetOfElementAt(index), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
}


void ConstantPoolArray::set(int index, int64_t value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_int64_index() && index < first_ptr_index());
  WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
}


void ConstantPoolArray::set(int index, double value) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= first_int64_index() && index < first_ptr_index());
  WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
}


void ConstantPoolArray::set(int index, int32_t value) {
  ASSERT(map() == GetHeap()->constant_pool_array_map());
  ASSERT(index >= this->first_int32_index() && index < length());
  WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
}


WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
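

// Usage sketch (a hypothetical helper, assembled only from accessors defined
// in this file): once allocation is provably disallowed, the barrier mode can
// be computed once and hoisted out of a fill loop.
static inline void ExampleFillWithHoistedBarrierMode(FixedArray* array,
                                                     Object* value,
                                                     int count) {
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < count; i++) {
    array->set(i, value, mode);  // Barrier-aware setter defined below.
  }
}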


void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}


void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    heap->RecordWrite(array->address(), offset);
  }
}


void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}


void FixedArray::set_undefined(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}


void FixedArray::set_null(int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());
}


void FixedArray::set_the_hole(int index) {
  ASSERT(map() != GetHeap()->fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}


double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}


Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}


bool DescriptorArray::IsEmpty() {
  ASSERT(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}


void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}


// Perform a binary search in a fixed array. Low and high are entry indices. If
// there are three entries in this array it should be called with low=0 and
// high=2.
template<SearchMode search_mode, typename T>
int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
  uint32_t hash = name->Hash();
  int limit = high;

  ASSERT(low <= high);

  while (low != high) {
    int mid = (low + high) / 2;
    Name* mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name->Hash();

    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }

  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name* entry = array->GetKey(sort_index);
    if (entry->Hash() != hash) break;
    if (entry->Equals(name)) {
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }

  return T::kNotFound;
}
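

// Worked example (illustrative hashes): for sorted key hashes [3, 7, 7, 9]
// and a query hash of 7, the loop above converges on the first sorted
// position whose hash is >= 7 (position 1), and the trailing for-loop then
// walks positions 1..2, calling Equals() to resolve the hash collision.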


// Perform a linear search in this fixed array. len is the number of entry
// indices that are valid.
template<SearchMode search_mode, typename T>
int LinearSearch(T* array, Name* name, int len, int valid_entries) {
  uint32_t hash = name->Hash();
  if (search_mode == ALL_ENTRIES) {
    for (int number = 0; number < len; number++) {
      int sorted_index = array->GetSortedKeyIndex(number);
      Name* entry = array->GetKey(sorted_index);
      uint32_t current_hash = entry->Hash();
      if (current_hash > hash) break;
      if (current_hash == hash && entry->Equals(name)) return sorted_index;
    }
  } else {
    ASSERT(len >= valid_entries);
    for (int number = 0; number < valid_entries; number++) {
      Name* entry = array->GetKey(number);
      uint32_t current_hash = entry->Hash();
      if (current_hash == hash && entry->Equals(name)) return number;
    }
  }
  return T::kNotFound;
}


template<SearchMode search_mode, typename T>
int Search(T* array, Name* name, int valid_entries) {
  if (search_mode == VALID_ENTRIES) {
    SLOW_ASSERT(array->IsSortedNoDuplicates(valid_entries));
  } else {
    SLOW_ASSERT(array->IsSortedNoDuplicates());
  }

  int nof = array->number_of_entries();
  if (nof == 0) return T::kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if ((search_mode == ALL_ENTRIES &&
       nof <= kMaxElementsForLinearSearch) ||
      (search_mode == VALID_ENTRIES &&
       valid_entries <= (kMaxElementsForLinearSearch * 3))) {
    return LinearSearch<search_mode>(array, name, nof, valid_entries);
  }

  // Slow case: perform binary search.
  return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
}
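

// Illustrative thresholds (read off the constants above): in ALL_ENTRIES mode
// an array of 6 entries is scanned linearly, while one of 40 entries falls
// through to BinarySearch over sorted positions 0..39; VALID_ENTRIES mode
// stretches the linear-search cutoff to 24 valid entries.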


int DescriptorArray::Search(Name* name, int valid_descriptors) {
  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
}


int DescriptorArray::SearchWithCache(Name* name, Map* map) {
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors == 0) return kNotFound;

  DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
  int number = cache->Lookup(map, name);

  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name, number_of_own_descriptors);
    cache->Update(map, name, number);
  }

  return number;
}


void Map::LookupDescriptor(JSObject* holder,
                           Name* name,
                           LookupResult* result) {
  DescriptorArray* descriptors = this->instance_descriptors();
  int number = descriptors->SearchWithCache(name, this);
  if (number == DescriptorArray::kNotFound) return result->NotFound();
  result->DescriptorResult(holder, descriptors->GetDetails(number), number);
}


void Map::LookupTransition(JSObject* holder,
                           Name* name,
                           LookupResult* result) {
  if (HasTransitionArray()) {
    TransitionArray* transition_array = transitions();
    int number = transition_array->Search(name);
    if (number != TransitionArray::kNotFound) {
      return result->TransitionResult(holder, number);
    }
  }
  result->NotFound();
}


Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return HeapObject::RawField(
      reinterpret_cast<HeapObject*>(this),
      OffsetOfElementAt(ToKeyIndex(descriptor_number)));
}


Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}


Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}


Name* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}


int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}


Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}


void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}


void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  ASSERT(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}


void DescriptorArray::InitializeRepresentations(Representation representation) {
  int length = number_of_descriptors();
  for (int i = 0; i < length; i++) {
    SetRepresentation(i, representation);
  }
}


Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return HeapObject::RawField(
      reinterpret_cast<HeapObject*>(this),
      OffsetOfElementAt(ToValueIndex(descriptor_number)));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}


PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}


PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}


int DescriptorArray::GetFieldIndex(int descriptor_number) {
  ASSERT(GetDetails(descriptor_number).type() == FIELD);
  return GetDetails(descriptor_number).field_index();
}


Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}


Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}


void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             GetDetails(descriptor_number));
}


void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               desc->GetKey());
  NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
                               desc->GetValue());
  NoIncrementalWriteBarrierSet(this,
                               ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}


void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), desc->GetKey());
  set(ToValueIndex(descriptor_number), desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}


void DescriptorArray::Append(Descriptor* desc,
                             const WhitenessWitness& witness) {
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc, witness);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}


void DescriptorArray::Append(Descriptor* desc) {
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}


void DescriptorArray::SwapSortedKeys(int first, int second) {
  int first_key = GetSortedKeyIndex(first);
  SetSortedKey(first, GetSortedKeyIndex(second));
  SetSortedKey(second, first_key);
}


DescriptorArray::WhitenessWitness::WhitenessWitness(FixedArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
}


DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
}


template<typename Shape, typename Key>
int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
  if (capacity < kMinCapacity) {
    capacity = kMinCapacity;  // Guarantee min capacity.
  }
  return capacity;
}
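

// Worked example (follows from the code above): at_least_space_for == 20
// doubles to 40 and rounds up to the next power of two, giving capacity 64;
// any request that computes below kMinCapacity is clamped up to 32.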


template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


// Find entry for key otherwise return kNotFound.
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
    if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
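

// Note (FirstProbe/NextProbe are defined elsewhere in V8, so the exact probe
// shape is an assumption here): this is open addressing -- the loop walks a
// probe sequence modulo the power-of-two capacity, skips deleted entries (the
// hole) but keeps probing past them, and terminates only on an undefined
// (never-used) slot, which is why EnsureCapacity must keep the table from
// ever becoming completely full.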


bool SeededNumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}

uint32_t SeededNumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}

void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}


// ------------------------------------
// Cast operations


CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(ConstantPoolArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(TypeFeedbackCells)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(String)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalAsciiString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(FreeSpace)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalByteArray)
CAST_ACCESSOR(ExternalUnsignedByteArray)
CAST_ACCESSOR(ExternalShortArray)
CAST_ACCESSOR(ExternalUnsignedShortArray)
CAST_ACCESSOR(ExternalIntArray)
CAST_ACCESSOR(ExternalUnsignedIntArray)
CAST_ACCESSOR(ExternalFloatArray)
CAST_ACCESSOR(ExternalDoubleArray)
CAST_ACCESSOR(ExternalPixelArray)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(AccessorInfo)


#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST


template <typename Shape, typename Key>
HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
  ASSERT(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}


SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)


uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}


bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
    return false;
  }
  return String::cast(this)->SlowEquals(String::cast(other));
}


ACCESSORS(Symbol, name, Object, kNameOffset)


bool String::Equals(String* other) {
  if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(other);
}


MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
  if (!StringShape(this).IsCons()) return this;
  ConsString* cons = ConsString::cast(this);
  if (cons->IsFlat()) return cons->first();
  return SlowTryFlatten(pretenure);
}


String* String::TryFlattenGetString(PretenureFlag pretenure) {
  MaybeObject* flat = TryFlatten(pretenure);
  Object* successfully_flattened;
  if (!flat->ToObject(&successfully_flattened)) return this;
  return String::cast(successfully_flattened);
}
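

// Usage sketch (a hypothetical helper using only functions from this file):
// flattening before a character loop turns a deep ConsString tree into a flat
// string, so the Get(i) calls below do not repeatedly recurse through
// ConsStringGet.
static inline void ExampleVisitCharacters(String* str) {
  String* flat = str->TryFlattenGetString(NOT_TENURED);
  for (int i = 0; i < flat->length(); i++) {
    uint16_t c = flat->Get(i);
    (void) c;  // ... use c ...
  }
}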


uint16_t String::Get(int index) {
  ASSERT(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}


void String::Set(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  ASSERT(StringShape(this).IsSequential());

  return this->IsOneByteRepresentation()
      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}


bool String::IsFlat() {
  if (!StringShape(this).IsCons()) return true;
  return ConsString::cast(this)->second()->length() == 0;
}


String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  ASSERT(this->IsFlat());
  ASSERT(StringShape(this).IsIndirect());
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}


template<class Visitor, class ConsOp>
void String::Visit(
    String* string,
    unsigned offset,
    Visitor& visitor,
    ConsOp& cons_op,
    int32_t type,
    unsigned length) {
  ASSERT(length == static_cast<unsigned>(string->length()));
  ASSERT(offset <= length);
  unsigned slice_offset = offset;
  while (true) {
    ASSERT(type == string->map()->instance_type());

    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor.VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return;

      case kSeqStringTag | kTwoByteStringTag:
        visitor.VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return;

      case kExternalStringTag | kOneByteStringTag:
        visitor.VisitOneByteString(
            ExternalAsciiString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return;

      case kExternalStringTag | kTwoByteStringTag:
        visitor.VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return;

      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        type = string->map()->instance_type();
        continue;
      }

      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        string = cons_op.Operate(string, &offset, &type, &length);
        if (string == NULL) return;
        slice_offset = offset;
        ASSERT(length == static_cast<unsigned>(string->length()));
        continue;

      default:
        UNREACHABLE();
        return;
    }
  }
}


// TODO(dcarney): Remove this class after conversion to VisitFlat.
class ConsStringCaptureOp {
 public:
  inline ConsStringCaptureOp() : cons_string_(NULL) {}
  inline String* Operate(String* string, unsigned*, int32_t*, unsigned*) {
    cons_string_ = ConsString::cast(string);
    return NULL;
  }
  ConsString* cons_string_;
};


template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              int offset,
                              int length,
                              int32_t type) {
  ASSERT(length >= 0 && length == string->length());
  ASSERT(offset >= 0 && offset <= length);
  ConsStringCaptureOp op;
  Visit(string, offset, *visitor, op, type, static_cast<unsigned>(length));
  return op.cons_string_;
}


uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}


Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}


uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  ASSERT(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  ASSERT(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}


SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)


String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}


String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}


bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}


const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalAsciiString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalAsciiString::set_resource(
    const ExternalAsciiString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint8_t* ExternalAsciiString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}


uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return GetChars()[index];
}


const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalTwoByteString::set_resource(
    const ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}


uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  ASSERT(index >= 0 && index < length());
  return GetChars()[index];
}


const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
      unsigned start) {
  return GetChars() + start;
}


String* ConsStringNullOp::Operate(String*, unsigned*, int32_t*, unsigned*) {
  return NULL;
}


unsigned ConsStringIteratorOp::OffsetForDepth(unsigned depth) {
  return depth & kDepthMask;
}


void ConsStringIteratorOp::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}


void ConsStringIteratorOp::PushRight(ConsString* string) {
  // In-place update.
  frames_[(depth_-1) & kDepthMask] = string;
}


void ConsStringIteratorOp::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}


void ConsStringIteratorOp::Pop() {
  ASSERT(depth_ > 0);
  ASSERT(depth_ <= maximum_depth_);
  depth_--;
}


bool ConsStringIteratorOp::HasMore() {
  return depth_ != 0;
}


void ConsStringIteratorOp::Reset() {
  depth_ = 0;
}


String* ConsStringIteratorOp::ContinueOperation(int32_t* type_out,
                                                unsigned* length_out) {
  bool blew_stack = false;
  String* string = NextLeaf(&blew_stack, type_out, length_out);
  // String found.
  if (string != NULL) {
    // Verify output.
    ASSERT(*length_out == static_cast<unsigned>(string->length()));
    ASSERT(*type_out == string->map()->instance_type());
    return string;
  }
  // Traversal complete.
  if (!blew_stack) return NULL;
  // Restart search from root.
  unsigned offset_out;
  string = Search(&offset_out, type_out, length_out);
  // Verify output.
  ASSERT(string == NULL || offset_out == 0);
  ASSERT(string == NULL ||
         *length_out == static_cast<unsigned>(string->length()));
  ASSERT(string == NULL || *type_out == string->map()->instance_type());
  return string;
}


uint16_t StringCharacterStream::GetNext() {
  ASSERT(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  // TODO(dcarney): Ensure uses of the api call HasMore first and avoid this.
  if (buffer8_ == end_) HasMore();
  ASSERT(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}


StringCharacterStream::StringCharacterStream(String* string,
                                             ConsStringIteratorOp* op,
                                             unsigned offset)
  : is_one_byte_(false),
    op_(op) {
  Reset(string, offset);
}


void StringCharacterStream::Reset(String* string, unsigned offset) {
  op_->Reset();
  buffer8_ = NULL;
  end_ = NULL;
  int32_t type = string->map()->instance_type();
  unsigned length = string->length();
  String::Visit(string, offset, *this, *op_, type, length);
}


bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  if (!op_->HasMore()) return false;
  unsigned length;
  int32_t type;
  String* string = op_->ContinueOperation(&type, &length);
  if (string == NULL) return false;
  ASSERT(!string->IsConsString());
  ASSERT(string->length() != 0);
  ConsStringNullOp null_op;
  String::Visit(string, 0, *this, null_op, type, length);
  ASSERT(buffer8_ != end_);
  return true;
}


void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, unsigned length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}


void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, unsigned length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}


void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}


byte ByteArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void ByteArray::set(int index, byte value) {
  ASSERT(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


int ByteArray::get_int(int index) {
  ASSERT(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}


ByteArray* ByteArray::FromDataStartAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
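

// Worked example (follows from the two functions above): GetDataStartAddress()
// is the tagged object pointer minus kHeapObjectTag plus kHeaderSize, and
// FromDataStartAddress() inverts that exactly, so for any byte array b,
// ByteArray::FromDataStartAddress(b->GetDataStartAddress()) == b.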
3235

    
3236

    
3237
uint8_t* ExternalPixelArray::external_pixel_pointer() {
3238
  return reinterpret_cast<uint8_t*>(external_pointer());
3239
}
3240

    
3241

    
3242
uint8_t ExternalPixelArray::get_scalar(int index) {
3243
  ASSERT((index >= 0) && (index < this->length()));
3244
  uint8_t* ptr = external_pixel_pointer();
3245
  return ptr[index];
3246
}
3247

    
3248

    
3249
MaybeObject* ExternalPixelArray::get(int index) {
3250
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
3251
}
3252

    
3253

    
3254
void ExternalPixelArray::set(int index, uint8_t value) {
3255
  ASSERT((index >= 0) && (index < this->length()));
3256
  uint8_t* ptr = external_pixel_pointer();
3257
  ptr[index] = value;
3258
}
3259

    
3260

    
3261
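

// ExternalArray and its typed subclasses below wrap an off-heap backing
// store: get_scalar()/set() touch the raw buffer through external_pointer(),
// while get() boxes the value as a Smi or heap number.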
void* ExternalArray::external_pointer() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}


int8_t ExternalByteArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalByteArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalByteArray::set(int index, int8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}


uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalUnsignedByteArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalUnsignedByteArray::set(int index, uint8_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}


int16_t ExternalShortArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalShortArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalShortArray::set(int index, int16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}


uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalUnsignedShortArray::get(int index) {
  return Smi::FromInt(static_cast<int>(get_scalar(index)));
}


void ExternalUnsignedShortArray::set(int index, uint16_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}


int32_t ExternalIntArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalIntArray::get(int index) {
  return GetHeap()->NumberFromInt32(get_scalar(index));
}


void ExternalIntArray::set(int index, int32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}


uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalUnsignedIntArray::get(int index) {
  return GetHeap()->NumberFromUint32(get_scalar(index));
}


void ExternalUnsignedIntArray::set(int index, uint32_t value) {
  ASSERT((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}


float ExternalFloatArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalFloatArray::get(int index) {
  return GetHeap()->NumberFromDouble(get_scalar(index));
}


void ExternalFloatArray::set(int index, float value) {
  ASSERT((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}


double ExternalDoubleArray::get_scalar(int index) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}


MaybeObject* ExternalDoubleArray::get(int index) {
  return GetHeap()->NumberFromDouble(get_scalar(index));
}


void ExternalDoubleArray::set(int index, double value) {
  ASSERT((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}


int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  ASSERT(0 <= id && id < 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


int Map::instance_size() {
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}


int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}
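

// Returns the object size in bytes. Fixed-size instance types store the size
// in the map; variable-sized types derive it from the instance itself
// (string length, array length, code size, and so on).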
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  int instance_type = static_cast<int>(map->instance_type());
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ASCII_STRING_TYPE ||
      instance_type == ASCII_INTERNALIZED_STRING_TYPE) {
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
    return ConstantPoolArray::SizeFor(
        reinterpret_cast<ConstantPoolArray*>(this)->count_of_int64_entries(),
        reinterpret_cast<ConstantPoolArray*>(this)->count_of_ptr_entries(),
        reinterpret_cast<ConstantPoolArray*>(this)->count_of_int32_entries());
  }
  ASSERT(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}


void Map::set_instance_size(int value) {
  ASSERT_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}


void Map::set_inobject_properties(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}


void Map::set_pre_allocated_property_fields(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}


InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}


void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}


bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}


void Map::set_function_with_prototype(bool value) {
  set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
}


bool Map::function_with_prototype() {
  return FunctionWithPrototype::decode(bit_field3());
}


void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}


bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}


void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}


void Map::set_attached_to_shared_function_info(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
  }
}

bool Map::attached_to_shared_function_info() {
  return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
}


void Map::set_is_shared(bool value) {
  set_bit_field3(IsShared::update(bit_field3(), value));
}


bool Map::is_shared() {
  return IsShared::decode(bit_field3());
}


void Map::set_dictionary_map(bool value) {
  if (value) mark_unstable();
  set_bit_field3(DictionaryMap::update(bit_field3(), value));
}


bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}


Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}


void Map::set_owns_descriptors(bool is_shared) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
}


bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}


void Map::set_is_observed(bool is_observed) {
  ASSERT(instance_type() < FIRST_JS_OBJECT_TYPE ||
         instance_type() > LAST_JS_OBJECT_TYPE ||
         has_slow_elements_kind() || has_external_array_elements());
  set_bit_field3(IsObserved::update(bit_field3(), is_observed));
}


bool Map::is_observed() {
  return IsObserved::decode(bit_field3());
}
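

// A deprecated map is an old version left behind after its field
// representations were generalized; objects still using it are migrated to
// the current map when they are next touched.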
void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}


bool Map::is_deprecated() {
  if (!FLAG_track_fields) return false;
  return Deprecated::decode(bit_field3());
}


void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}


bool Map::is_migration_target() {
  if (!FLAG_track_fields) return false;
  return IsMigrationTarget::decode(bit_field3());
}


void Map::freeze() {
  set_bit_field3(IsFrozen::update(bit_field3(), true));
}


bool Map::is_frozen() {
  return IsFrozen::decode(bit_field3());
}


void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}


bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}


bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}
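

// A map can only become deprecated if one of its fields is tracked with a
// representation (or constant) that may later be generalized.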
bool Map::CanBeDeprecated() {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (FLAG_track_fields && details.representation().IsNone()) {
      return true;
    }
    if (FLAG_track_fields && details.representation().IsSmi()) {
      return true;
    }
    if (FLAG_track_double_fields && details.representation().IsDouble()) {
      return true;
    }
    if (FLAG_track_heap_object_fields &&
        details.representation().IsHeapObject()) {
      return true;
    }
    if (FLAG_track_fields && details.type() == CONSTANT) {
      return true;
    }
  }
  return false;
}


void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}


bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}


int DependentCode::number_of_entries(DependencyGroup group) {
  if (length() == 0) return 0;
  return Smi::cast(get(group))->value();
}


void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
  set(group, Smi::FromInt(value));
}


bool DependentCode::is_code_at(int i) {
  return get(kCodesStartIndex + i)->IsCode();
}

Code* DependentCode::code_at(int i) {
  return Code::cast(get(kCodesStartIndex + i));
}


CompilationInfo* DependentCode::compilation_info_at(int i) {
  return reinterpret_cast<CompilationInfo*>(
      Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
}


void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}


Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}


Object** DependentCode::slot_at(int i) {
  return HeapObject::RawField(
      this, FixedArray::OffsetOfElementAt(kCodesStartIndex + i));
}


void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}


void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}
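

// Opens up one slot in front of the groups that follow |group|: walking from
// the last group down, the first element of each later group is moved to just
// past that group's end, shifting each boundary by one.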
void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}


void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  // Make sure that all call stubs have an arguments count.
  ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
          ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
         ExtractArgumentsCountFromFlags(flags) >= 0);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}


Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}


InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to a code object has been replaced with a debug break call.
  ASSERT(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}


Code::ExtraICState Code::extra_ic_state() {
  ASSERT((is_inline_cache_stub() && !needs_extended_extra_ic_state(kind()))
         || ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}


Code::ExtraICState Code::extended_extra_ic_state() {
  ASSERT(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  ASSERT(needs_extended_extra_ic_state(kind()));
  return ExtractExtendedExtraICStateFromFlags(flags());
}


Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}


int Code::arguments_count() {
  ASSERT(is_call_stub() || is_keyed_call_stub() ||
         kind() == STUB || is_handler());
  return ExtractArgumentsCountFromFlags(flags());
}


inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
int Code::major_key() {
  ASSERT(kind() == STUB ||
         kind() == HANDLER ||
         kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == STORE_IC ||
         kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == TO_BOOLEAN_IC);
  return StubMajorKeyField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_major_key(int major) {
  ASSERT(kind() == STUB ||
         kind() == HANDLER ||
         kind() == BINARY_OP_IC ||
         kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == KEYED_STORE_IC ||
         kind() == TO_BOOLEAN_IC);
  ASSERT(0 <= major && major < 256);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = StubMajorKeyField::update(previous, major);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


bool Code::is_pregenerated() {
  return (kind() == STUB && IsPregeneratedField::decode(flags()));
}


void Code::set_is_pregenerated(bool value) {
  ASSERT(kind() == STUB);
  Flags f = flags();
  f = static_cast<Flags>(IsPregeneratedField::update(f, value));
  set_flags(f);
}


bool Code::optimizable() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}


void Code::set_optimizable(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}


bool Code::has_deoptimization_support() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_debug_break_slots() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


bool Code::is_compiled_optimizable() {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}


void Code::set_compiled_optimizable(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}


int Code::allow_osr_at_loop_nesting_level() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
  WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}


int Code::profiler_ticks() {
  ASSERT_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}


void Code::set_profiler_ticks(int ticks) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(ticks < 256);
  WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
}


unsigned Code::stack_slots() {
  ASSERT(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_stack_slots(unsigned slots) {
  CHECK(slots <= (1 << kStackSlotsBitCount));
  ASSERT(is_crankshafted());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = StackSlotsField::update(previous, slots);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


unsigned Code::safepoint_table_offset() {
  ASSERT(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_safepoint_table_offset(unsigned offset) {
  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
  ASSERT(is_crankshafted());
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = SafepointTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


unsigned Code::back_edge_table_offset() {
  ASSERT_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_back_edge_table_offset(unsigned offset) {
  ASSERT_EQ(FUNCTION, kind());
  ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


bool Code::back_edges_patched_for_osr() {
  ASSERT_EQ(FUNCTION, kind());
  return BackEdgesPatchedForOSRField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_back_edges_patched_for_osr(bool value) {
  ASSERT_EQ(FUNCTION, kind());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgesPatchedForOSRField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


CheckType Code::check_type() {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
  return static_cast<CheckType>(type);
}


void Code::set_check_type(CheckType value) {
  ASSERT(is_call_stub() || is_keyed_call_stub());
  WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
}


byte Code::to_boolean_state() {
  return extended_extra_ic_state();
}


bool Code::has_function_cache() {
  ASSERT(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_has_function_cache(bool flag) {
  ASSERT(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::marked_for_deoptimization() {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_marked_for_deoptimization(bool flag) {
  ASSERT(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}


bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub() || is_keyed_call_stub();
}


bool Code::is_debug_stub() {
  return ic_state() == DEBUG_STUB;
}


Code::Flags Code::ComputeFlags(Kind kind,
                               InlineCacheState ic_state,
                               ExtraICState extra_ic_state,
                               StubType type,
                               int argc,
                               InlineCacheHolderFlag holder) {
  ASSERT(argc <= Code::kMaxArguments);
  // Since the extended extra ic state overlaps with the argument count
  // for CALL_ICs, do some checks to make sure that they don't interfere.
  ASSERT((kind != Code::CALL_IC &&
          kind != Code::KEYED_CALL_IC) ||
         (ExtraICStateField::encode(extra_ic_state) | true));
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind)
      | ICStateField::encode(ic_state)
      | TypeField::encode(type)
      | ExtendedExtraICStateField::encode(extra_ic_state)
      | CacheHolderField::encode(holder);
  if (!Code::needs_extended_extra_ic_state(kind)) {
    bits |= (argc << kArgumentsCountShift);
  }
  return static_cast<Flags>(bits);
}


Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          StubType type,
                                          int argc,
                                          InlineCacheHolderFlag holder) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, argc, holder);
}


Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}


Code::ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


Code::ExtraICState Code::ExtractExtendedExtraICStateFromFlags(
    Flags flags) {
  return ExtendedExtraICStateField::decode(flags);
}


Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}


int Code::ExtractArgumentsCountFromFlags(Flags flags) {
  return (flags & kArgumentsCountMask) >> kArgumentsCountShift;
}


InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}


Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}


Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}


Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}


Object* Map::prototype() {
  return READ_FIELD(this, kPrototypeOffset);
}


void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  ASSERT(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}


// If the map has no transition array yet, install a new empty one; if it has
// only a simple transition, extend it to a full transition array that has
// room for an element transition.
static MaybeObject* EnsureHasTransitionArray(Map* map) {
  TransitionArray* transitions;
  MaybeObject* maybe_transitions;
  if (!map->HasTransitionArray()) {
    maybe_transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
    if (!maybe_transitions->To(&transitions)) return maybe_transitions;
    transitions->set_back_pointer_storage(map->GetBackPointer());
  } else if (!map->transitions()->IsFullTransitionArray()) {
    maybe_transitions = map->transitions()->ExtendToFullTransitionArray();
    if (!maybe_transitions->To(&transitions)) return maybe_transitions;
  } else {
    return map;
  }
  map->set_transitions(transitions);
  return transitions;
}


void Map::InitializeDescriptors(DescriptorArray* descriptors) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);
}


ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)


void Map::set_bit_field3(uint32_t bits) {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit.
  int value = bits << 1;
  WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
}


uint32_t Map::bit_field3() {
  Object* value = READ_FIELD(this, kBitField3Offset);
  return Smi::cast(value)->value();
}


void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
  Object* back_pointer = GetBackPointer();

  if (Heap::ShouldZapGarbage() && HasTransitionArray()) {
    ZapTransitions();
  }

  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, back_pointer);
  CONDITIONAL_WRITE_BARRIER(
      heap, this, kTransitionsOrBackPointerOffset, back_pointer, mode);
}


void Map::AppendDescriptor(Descriptor* desc,
                           const DescriptorArray::WhitenessWitness& witness) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc, witness);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
}
Object* Map::GetBackPointer() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsDescriptorArray()) {
    return TransitionArray::cast(object)->back_pointer_storage();
  } else {
    ASSERT(object->IsMap() || object->IsUndefined());
    return object;
  }
}


bool Map::HasElementsTransition() {
  return HasTransitionArray() && transitions()->HasElementsTransition();
}


bool Map::HasTransitionArray() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return object->IsTransitionArray();
}


Map* Map::elements_transition_map() {
  int index = transitions()->Search(GetHeap()->elements_transition_symbol());
  return transitions()->GetTarget(index);
}


bool Map::CanHaveMoreTransitions() {
  if (!HasTransitionArray()) return true;
  return FixedArray::SizeFor(transitions()->length() +
                             TransitionArray::kTransitionSize)
      <= Page::kMaxNonCodeHeapObjectSize;
}


MaybeObject* Map::AddTransition(Name* key,
                                Map* target,
                                SimpleTransitionFlag flag) {
  if (HasTransitionArray()) return transitions()->CopyInsert(key, target);
  return TransitionArray::NewWith(flag, key, target, GetBackPointer());
}


void Map::SetTransition(int transition_index, Map* target) {
  transitions()->SetTarget(transition_index, target);
}


Map* Map::GetTransition(int transition_index) {
  return transitions()->GetTarget(transition_index);
}


MaybeObject* Map::set_elements_transition_map(Map* transitioned_map) {
  TransitionArray* transitions;
  MaybeObject* maybe_transitions = AddTransition(
      GetHeap()->elements_transition_symbol(),
      transitioned_map,
      FULL_TRANSITION);
  if (!maybe_transitions->To(&transitions)) return maybe_transitions;
  set_transitions(transitions);
  return transitions;
}


FixedArray* Map::GetPrototypeTransitions() {
  if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
  if (!transitions()->HasPrototypeTransitions()) {
    return GetHeap()->empty_fixed_array();
  }
  return transitions()->GetPrototypeTransitions();
}


MaybeObject* Map::SetPrototypeTransitions(FixedArray* proto_transitions) {
  MaybeObject* allow_prototype = EnsureHasTransitionArray(this);
  if (allow_prototype->IsFailure()) return allow_prototype;
  int old_number_of_transitions = NumberOfProtoTransitions();
#ifdef DEBUG
  if (HasPrototypeTransitions()) {
    ASSERT(GetPrototypeTransitions() != proto_transitions);
    ZapPrototypeTransitions();
  }
#endif
  transitions()->SetPrototypeTransitions(proto_transitions);
  SetNumberOfProtoTransitions(old_number_of_transitions);
  return this;
}


bool Map::HasPrototypeTransitions() {
  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
}


TransitionArray* Map::transitions() {
  ASSERT(HasTransitionArray());
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return TransitionArray::cast(object);
}


void Map::set_transitions(TransitionArray* transition_array,
                          WriteBarrierMode mode) {
  // Transition arrays are not shared. When one is replaced, it should not
  // keep referenced objects alive, so we zap it.
  // When there is another reference to the array somewhere (e.g. a handle),
  // not zapping turns from a waste of memory into a source of crashes.
  if (HasTransitionArray()) {
    ASSERT(transitions() != transition_array);
    ZapTransitions();
  }

  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}


void Map::init_back_pointer(Object* undefined) {
  ASSERT(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}


void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    TransitionArray::cast(object)->set_back_pointer_storage(value);
  } else {
    WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
    CONDITIONAL_WRITE_BARRIER(
        GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
  }
}


// Can either be Smi (no transitions), normal transition array, or a transition
// array with the header overwritten as a Smi (thus iterating).
TransitionArray* Map::unchecked_transition_array() {
  Object* object = *HeapObject::RawField(this,
                                         Map::kTransitionsOrBackPointerOffset);
  TransitionArray* transition_array = static_cast<TransitionArray*>(object);
  return transition_array;
}


HeapObject* Map::UncheckedPrototypeTransitions() {
  ASSERT(HasTransitionArray());
  ASSERT(unchecked_transition_array()->HasPrototypeTransitions());
  return unchecked_transition_array()->UncheckedPrototypeTransitions();
}


ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
          kSerializedDataOffset)

ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
          kDescriptorOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS_TO_SMI(AccessorPair, access_flags, kAccessFlagsOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)

Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}
void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}
Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}
void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}


#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)


SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)


#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)

#else
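
// On 64-bit platforms a pair of int fields shares one pointer-sized slot.
// The low half mimics Smi encoding (value shifted left by one with the heap
// object tag bit clear) so the GC never mistakes the slot for a pointer; the
// high half is a plain int.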
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() {                                            \
    int value = READ_INT_FIELD(this, offset);                     \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    ASSERT(kHeapObjectTag == 1);                                  \
    ASSERT((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x000000000);                  \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)

PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

#endif


int SharedFunctionInfo::construction_count() {
  return READ_BYTE_FIELD(this, kConstructionCountOffset);
}


void SharedFunctionInfo::set_construction_count(int value) {
  ASSERT(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
}


BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               live_objects_may_exist,
               kLiveObjectsMayExist)


bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
  return initial_map() != GetHeap()->undefined_value();
}


BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If we are disabling optimizations, reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}


int SharedFunctionInfo::profiler_ticks() {
  if (code()->kind() != Code::FUNCTION) return 0;
  return code()->profiler_ticks();
}
LanguageMode SharedFunctionInfo::language_mode() {
  int hints = compiler_hints();
  if (BooleanBit::get(hints, kExtendedModeFunction)) {
    ASSERT(BooleanBit::get(hints, kStrictModeFunction));
    return EXTENDED_MODE;
  }
  return BooleanBit::get(hints, kStrictModeFunction)
      ? STRICT_MODE : CLASSIC_MODE;
}


void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  //   CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
  ASSERT(this->language_mode() == CLASSIC_MODE ||
         this->language_mode() == language_mode ||
         language_mode == EXTENDED_MODE);
  int hints = compiler_hints();
  hints = BooleanBit::set(
      hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
  hints = BooleanBit::set(
      hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
  set_compiler_hints(hints);
}


bool SharedFunctionInfo::is_classic_mode() {
  return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
}

BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
            kExtendedModeFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
               kDontOptimize)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)

void SharedFunctionInfo::BeforeVisitingPointers() {
  if (IsInobjectSlackTrackingInProgress()) DetachInitialMap();
}


ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
bool Script::HasValidSource() {
4834
  Object* src = this->source();
4835
  if (!src->IsString()) return true;
4836
  String* src_str = String::cast(src);
4837
  if (!StringShape(src_str).IsExternal()) return true;
4838
  if (src_str->IsOneByteRepresentation()) {
4839
    return ExternalAsciiString::cast(src)->resource() != NULL;
4840
  } else if (src_str->IsTwoByteRepresentation()) {
4841
    return ExternalTwoByteString::cast(src)->resource() != NULL;
4842
  }
4843
  return true;
4844
}
4845

    
4846

    
4847
void SharedFunctionInfo::DontAdaptArguments() {
4848
  ASSERT(code()->kind() == Code::BUILTIN);
4849
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
4850
}
4851

    
4852

    
4853
int SharedFunctionInfo::start_position() {
4854
  return start_position_and_type() >> kStartPositionShift;
4855
}
4856

    
4857

    
4858
void SharedFunctionInfo::set_start_position(int start_position) {
4859
  set_start_position_and_type((start_position << kStartPositionShift)
4860
    | (start_position_and_type() & ~kStartPositionMask));
4861
}
4862

    
4863

    
4864
Code* SharedFunctionInfo::code() {
4865
  return Code::cast(READ_FIELD(this, kCodeOffset));
4866
}
4867

    
4868

    
4869
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
4870
  WRITE_FIELD(this, kCodeOffset, value);
4871
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
4872
}
4873

    
4874

    
4875
void SharedFunctionInfo::ReplaceCode(Code* value) {
4876
  // If the GC metadata field is already used then the function was
4877
  // enqueued as a code flushing candidate and we remove it now.
4878
  if (code()->gc_metadata() != NULL) {
4879
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
4880
    flusher->EvictCandidate(this);
4881
  }
4882

    
4883
  ASSERT(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
4884
  set_code(value);
4885
}
4886

    
4887

    
4888
ScopeInfo* SharedFunctionInfo::scope_info() {
4889
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
4890
}
4891

    
4892

    
4893
void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
4894
                                        WriteBarrierMode mode) {
4895
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
4896
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
4897
                            this,
4898
                            kScopeInfoOffset,
4899
                            reinterpret_cast<Object*>(value),
4900
                            mode);
4901
}
4902

    
4903

    
4904
bool SharedFunctionInfo::is_compiled() {
4905
  return code() !=
4906
      GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
4907
}
4908

    
4909

    
4910
bool SharedFunctionInfo::IsApiFunction() {
4911
  return function_data()->IsFunctionTemplateInfo();
4912
}
4913

    
4914

    
4915
FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
4916
  ASSERT(IsApiFunction());
4917
  return FunctionTemplateInfo::cast(function_data());
4918
}
4919

    
4920

    
4921
bool SharedFunctionInfo::HasBuiltinFunctionId() {
4922
  return function_data()->IsSmi();
4923
}
4924

    
4925

    
4926
BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
4927
  ASSERT(HasBuiltinFunctionId());
4928
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
4929
}
4930

    
4931

    
4932
int SharedFunctionInfo::ic_age() {
4933
  return ICAgeBits::decode(counters());
4934
}
4935

    
4936

    
4937
void SharedFunctionInfo::set_ic_age(int ic_age) {
4938
  set_counters(ICAgeBits::update(counters(), ic_age));
4939
}
4940

    
4941

    
4942
int SharedFunctionInfo::deopt_count() {
4943
  return DeoptCountBits::decode(counters());
4944
}
4945

    
4946

    
4947
void SharedFunctionInfo::set_deopt_count(int deopt_count) {
4948
  set_counters(DeoptCountBits::update(counters(), deopt_count));
4949
}
4950

    
4951

    
4952
void SharedFunctionInfo::increment_deopt_count() {
4953
  int value = counters();
4954
  int deopt_count = DeoptCountBits::decode(value);
4955
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
4956
  set_counters(DeoptCountBits::update(value, deopt_count));
4957
}
4958

    
4959

    
4960
int SharedFunctionInfo::opt_reenable_tries() {
4961
  return OptReenableTriesBits::decode(counters());
4962
}
4963

    
4964

    
4965
void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
4966
  set_counters(OptReenableTriesBits::update(counters(), tries));
4967
}
4968

    
4969

    
4970
int SharedFunctionInfo::opt_count() {
4971
  return OptCountBits::decode(opt_count_and_bailout_reason());
4972
}
4973

    
4974

    
4975
void SharedFunctionInfo::set_opt_count(int opt_count) {
4976
  set_opt_count_and_bailout_reason(
4977
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
4978
}
4979

    
4980

    
4981
BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
4982
  BailoutReason reason = static_cast<BailoutReason>(
4983
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
4984
  return reason;
4985
}
4986

    
4987

    
4988
bool SharedFunctionInfo::has_deoptimization_support() {
4989
  Code* code = this->code();
4990
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
4991
}
4992

    
4993

    
4994
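// Note that (tries & (tries - 1)) == 0 holds exactly when tries is a power
// of two, so the check below retries optimization after 16, 32, 64, ...
// attempts: an exponential back-off.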
void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}


bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}


bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}


bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}


bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}


bool JSFunction::IsMarkedForLazyRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}


bool JSFunction::IsMarkedForConcurrentRecompilation() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kConcurrentRecompile);
}


bool JSFunction::IsInRecompileQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInRecompileQueue);
}

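// A JSFunction does not store its Code as a tagged pointer: the field at
// kCodeEntryOffset holds the raw entry address of the code, and the Code
// object is recovered from that address below.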
Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


void JSFunction::set_code_no_write_barrier(Code* value) {
  ASSERT(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}


void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}


Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


void JSFunction::set_context(Object* value) {
  ASSERT(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)


Map* JSFunction::initial_map() {
  return Map::cast(prototype_or_initial_map());
}


void JSFunction::set_initial_map(Map* value) {
  set_prototype_or_initial_map(value);
}


bool JSFunction::has_initial_map() {
  return prototype_or_initial_map()->IsMap();
}


bool JSFunction::has_instance_prototype() {
  return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
}


bool JSFunction::has_prototype() {
  return map()->has_non_instance_prototype() || has_instance_prototype();
}


Object* JSFunction::instance_prototype() {
  ASSERT(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


Object* JSFunction::prototype() {
  ASSERT(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}


bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}


FixedArray* JSFunction::literals() {
  ASSERT(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  ASSERT(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  ASSERT(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  ASSERT(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  ASSERT(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}


int JSFunction::NumberOfLiterals() {
  ASSERT(!shared()->bound());
  return literals()->length();
}


Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}

Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!GetHeap()->InNewSpace(value));
}


ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)


void JSProxy::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


ACCESSORS(JSSet, table, Object, kTableOffset)
ACCESSORS(JSMap, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)


Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}


ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)


JSGeneratorObject* JSGeneratorObject::cast(Object* obj) {
  ASSERT(obj->IsJSGeneratorObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSGeneratorObject::kSize);
  return reinterpret_cast<JSGeneratorObject*>(obj);
}


ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


JSModule* JSModule::cast(Object* obj) {
  ASSERT(obj->IsJSModule());
  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
  return reinterpret_cast<JSModule*>(obj);
}


ACCESSORS(JSValue, value, Object, kValueOffset)


JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}


ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


JSDate* JSDate::cast(Object* obj) {
  ASSERT(obj->IsJSDate());
  ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
  return reinterpret_cast<JSDate*>(obj);
}


ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_trace, Object, kStackTraceOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


JSMessageObject* JSMessageObject::cast(Object* obj) {
  ASSERT(obj->IsJSMessageObject());
  ASSERT(HeapObject::cast(obj)->Size() == JSMessageObject::kSize);
  return reinterpret_cast<JSMessageObject*>(obj);
}


INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)

// Type feedback slot: type_feedback_info for FUNCTIONs, stub_info for STUBs
// and ICs, and next_code_link for OPTIMIZED_FUNCTIONs; the accessors below
// multiplex the same field on the code kind.
void Code::InitializeTypeFeedbackInfoNoWriteBarrier(Object* value) {
  WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
}


Object* Code::type_feedback_info() {
  ASSERT(kind() == FUNCTION);
  return Object::cast(READ_FIELD(this, kTypeFeedbackInfoOffset));
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  ASSERT(kind() == FUNCTION);
  WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


Object* Code::next_code_link() {
  CHECK(kind() == OPTIMIZED_FUNCTION);
  return Object::cast(READ_FIELD(this, kTypeFeedbackInfoOffset));
}


void Code::set_next_code_link(Object* value, WriteBarrierMode mode) {
  CHECK(kind() == OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


int Code::stub_info() {
  ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC || kind() == LOAD_IC);
  Object* value = READ_FIELD(this, kTypeFeedbackInfoOffset);
  return Smi::cast(value)->value();
}


void Code::set_stub_info(int value) {
  ASSERT(kind() == COMPARE_IC ||
         kind() == COMPARE_NIL_IC ||
         kind() == BINARY_OP_IC ||
         kind() == STUB ||
         kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == KEYED_STORE_IC);
  WRITE_FIELD(this, kTypeFeedbackInfoOffset, Smi::FromInt(value));
}


ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}


int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}


ACCESSORS(JSArray, length, Object, kLengthOffset)


void* JSArrayBuffer::backing_store() {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)


bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}


void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}


ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)


JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}


ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}


ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}


bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasNonStrictArgumentsElements() {
  return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  ASSERT(array != NULL);
  return array->IsExternalArray();
}


#define EXTERNAL_ELEMENTS_CHECK(name, type)          \
bool JSObject::HasExternal##name##Elements() {       \
  HeapObject* array = elements();                    \
  ASSERT(array != NULL);                             \
  if (!array->IsHeapObject())                        \
    return false;                                    \
  return array->map()->instance_type() == type;      \
}


EXTERNAL_ELEMENTS_CHECK(Byte, EXTERNAL_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedByte, EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Short, EXTERNAL_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedShort,
                        EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Int, EXTERNAL_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(UnsignedInt,
                        EXTERNAL_UNSIGNED_INT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Float,
                        EXTERNAL_FLOAT_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Double,
                        EXTERNAL_DOUBLE_ARRAY_TYPE)
EXTERNAL_ELEMENTS_CHECK(Pixel, EXTERNAL_PIXEL_ARRAY_TYPE)


bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}

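// Copy-on-write element backing stores are tagged with fixed_cow_array_map
// and must be copied into a plain FixedArray before they can be mutated;
// element stores that are already writable are returned unchanged below.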
MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastSmiOrObjectElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
      elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}


NameDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return NameDictionary::cast(properties());
}


SeededNumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}


bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}


StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true) {
  ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}

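// Finalization steps of the Jenkins one-at-a-time hash. A result whose
// hash bits are all zero is remapped to kZeroHash, so a computed hash is
// never zero.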
uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}


void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}

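// The constant 429496729 below is 2^32 / 10: the check rejects any index
// for which array_index_ * 10 + d would overflow 32 bits. The (d + 2) >> 3
// term lowers the bound by one exactly for the digits 6..9.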
bool StringHasher::UpdateIndex(uint16_t c) {
  ASSERT(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}


template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  ASSERT(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    ASSERT(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}


bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


Object* JSReceiver::GetPrototype() {
  return map()->prototype();
}


Object* JSReceiver::GetConstructor() {
  return map()->constructor();
}


bool JSReceiver::HasProperty(Handle<JSReceiver> object,
                             Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return object->GetPropertyAttribute(*name) != ABSENT;
}


bool JSReceiver::HasLocalProperty(Handle<JSReceiver> object,
                                  Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  return object->GetLocalPropertyAttribute(*name) != ABSENT;
}


PropertyAttributes JSReceiver::GetPropertyAttribute(Name* key) {
  uint32_t index;
  if (IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(index);
  }
  return GetPropertyAttributeWithReceiver(this, key);
}


PropertyAttributes JSReceiver::GetElementAttribute(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, true);
}


// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return GetHeap()->undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}


MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash(flag)
      : JSObject::cast(this)->GetIdentityHash(flag);
}


bool JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
      *object, index, true) != ABSENT;
}


bool JSReceiver::HasLocalElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  return Handle<JSObject>::cast(object)->GetElementAttributeWithReceiver(
      *object, index, false) != ABSENT;
}


PropertyAttributes JSReceiver::GetLocalElementAttribute(uint32_t index) {
  if (IsJSProxy()) {
    return JSProxy::cast(this)->GetElementAttributeWithHandler(this, index);
  }
  return JSObject::cast(this)->GetElementAttributeWithReceiver(
      this, index, false);
}


bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
}


bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  Object* function_template = expected_receiver_type();
  if (!function_template->IsFunctionTemplateInfo()) return true;
  return receiver->IsInstanceOf(FunctionTemplateInfo::cast(function_template));
}


void AccessorPair::set_access_flags(v8::AccessControl access_control) {
  int current = access_flags()->value();
  current = BooleanBit::set(current,
                            kProhibitsOverwritingBit,
                            access_control & PROHIBITS_OVERWRITING);
  current = BooleanBit::set(current,
                            kAllCanReadBit,
                            access_control & ALL_CAN_READ);
  current = BooleanBit::set(current,
                            kAllCanWriteBit,
                            access_control & ALL_CAN_WRITE);
  set_access_flags(Smi::FromInt(current));
}


bool AccessorPair::all_can_read() {
  return BooleanBit::get(access_flags(), kAllCanReadBit);
}


bool AccessorPair::all_can_write() {
  return BooleanBit::get(access_flags(), kAllCanWriteBit);
}


bool AccessorPair::prohibits_overwriting() {
  return BooleanBit::get(access_flags(), kProhibitsOverwritingBit);
}


template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value) {
  SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
}

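// Each dictionary entry occupies three consecutive FixedArray slots:
// the key, the value, and the PropertyDetails encoded as a Smi.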
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsName() ||
         details.IsDeleted() ||
         details.dictionary_index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index + 1, value, mode);
  FixedArray::set(index + 2, details.AsSmi());
}


bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return key == static_cast<uint32_t>(other->Number());
}


uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}


uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
                                                      Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
}

uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}

uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
                                                          uint32_t seed,
                                                          Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
}

MaybeObject* NumberDictionaryShape::AsObject(Heap* heap, uint32_t key) {
  return heap->NumberFromUint32(key);
}


bool NameDictionaryShape::IsMatch(Name* key, Object* other) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to fail fast.
  if (key->Hash() != Name::cast(other)->Hash()) return false;
  return key->Equals(Name::cast(other));
}


uint32_t NameDictionaryShape::Hash(Name* key) {
  return key->Hash();
}


uint32_t NameDictionaryShape::HashForObject(Name* key, Object* other) {
  return Name::cast(other)->Hash();
}


MaybeObject* NameDictionaryShape::AsObject(Heap* heap, Name* key) {
  ASSERT(key->IsUniqueName());
  return key;
}


template <int entrysize>
bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
  MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
                                                        Object* other) {
  MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
  return Smi::cast(maybe_hash->ToObjectChecked())->value();
}


template <int entrysize>
MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Heap* heap,
                                                       Object* key) {
  return key;
}


template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
  return key->SameValue(other);
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Object* key) {
  intptr_t hash = reinterpret_cast<intptr_t>(key);
  return (uint32_t)(hash & 0xFFFFFFFF);
}


template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Object* key,
                                                      Object* other) {
  intptr_t hash = reinterpret_cast<intptr_t>(other);
  return (uint32_t)(hash & 0xFFFFFFFF);
}


template <int entrysize>
MaybeObject* WeakHashTableShape<entrysize>::AsObject(Heap* heap,
                                                     Object* key) {
  return key;
}


void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  //  - IncrementalMarking::Step
  ASSERT(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}


void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastSmiOrObjectElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}


void JSArray::set_length(Smi* length) {
  // Don't need a write barrier for a Smi.
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


bool JSArray::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  ASSERT(result == !HasExternalArrayElements());
  return result;
}


MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
  MaybeObject* maybe_result = EnsureCanContainElements(
      storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (maybe_result->IsFailure()) return maybe_result;
  ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(GetElementsKind())) ||
         ((storage->map() != GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(GetElementsKind()) ||
           (IsFastSmiElementsKind(GetElementsKind()) &&
            FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
  set_elements(storage);
  set_length(Smi::FromInt(storage->length()));
  return this;
}


MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}


MaybeObject* FixedDoubleArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedDoubleArray(this);
}


MaybeObject* ConstantPoolArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyConstantPoolArray(this);
}

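// TypeFeedbackCells stores (cell, ast id) pairs: the cell of entry i lives
// at index i * 2, and the corresponding AST id, as a Smi, at index
// i * 2 + 1.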
void TypeFeedbackCells::SetAstId(int index, TypeFeedbackId id) {
  set(1 + index * 2, Smi::FromInt(id.ToInt()));
}


TypeFeedbackId TypeFeedbackCells::AstId(int index) {
  return TypeFeedbackId(Smi::cast(get(1 + index * 2))->value());
}


void TypeFeedbackCells::SetCell(int index, Cell* cell) {
  set(index * 2, cell);
}


Cell* TypeFeedbackCells::GetCell(int index) {
  return Cell::cast(get(index * 2));
}


Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->the_hole_value();
}


Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->undefined_value();
}


Handle<Object> TypeFeedbackCells::MonomorphicArraySentinel(Isolate* isolate,
    ElementsKind elements_kind) {
  return Handle<Object>(Smi::FromInt(static_cast<int>(elements_kind)), isolate);
}


Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
  return heap->the_hole_value();
}


int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}


void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}


void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get a negative count here when the type-feedback info is
  // shared between two code objects. This can only happen when
  // the debugger made a shallow copy of a code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}


void TypeFeedbackInfo::initialize_storage() {
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
}


void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}


void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}


int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}


bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}


ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
          kTypeFeedbackCellsOffset)


SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)


Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}


Relocatable::~Relocatable() {
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Foreign::ForeignIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


template<typename StaticVisitor>
void Foreign::ForeignIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
}


void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, end_offset));
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(HeapObject::RawField(obj, start_offset),
                   HeapObject::RawField(obj, object_size));
}


#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef ACCESSORS
#undef ACCESSORS_TO_SMI
#undef SMI_ACCESSORS
#undef BOOL_GETTER
#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_INTPTR_FIELD
#undef WRITE_INTPTR_FIELD
#undef READ_UINT32_FIELD
#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD


} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_