deps/v8/src/builtins.cc @ f230a1cf


1
// Copyright 2012 the V8 project authors. All rights reserved.
2
// Redistribution and use in source and binary forms, with or without
3
// modification, are permitted provided that the following conditions are
4
// met:
5
//
6
//     * Redistributions of source code must retain the above copyright
7
//       notice, this list of conditions and the following disclaimer.
8
//     * Redistributions in binary form must reproduce the above
9
//       copyright notice, this list of conditions and the following
10
//       disclaimer in the documentation and/or other materials provided
11
//       with the distribution.
12
//     * Neither the name of Google Inc. nor the names of its
13
//       contributors may be used to endorse or promote products derived
14
//       from this software without specific prior written permission.
15
//
16
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27

    
28
#include "v8.h"
29

    
30
#include "api.h"
31
#include "arguments.h"
32
#include "bootstrapper.h"
33
#include "builtins.h"
34
#include "cpu-profiler.h"
35
#include "gdb-jit.h"
36
#include "ic-inl.h"
37
#include "heap-profiler.h"
38
#include "mark-compact.h"
39
#include "stub-cache.h"
40
#include "vm-state-inl.h"
41

    
42
namespace v8 {
43
namespace internal {
44

    
45
namespace {
46

    
47
// Arguments object passed to C++ builtins.
48
template <BuiltinExtraArguments extra_args>
49
class BuiltinArguments : public Arguments {
50
 public:
51
  BuiltinArguments(int length, Object** arguments)
52
      : Arguments(length, arguments) { }
53

    
54
  Object*& operator[] (int index) {
55
    ASSERT(index < length());
56
    return Arguments::operator[](index);
57
  }
58

    
59
  template <class S> Handle<S> at(int index) {
60
    ASSERT(index < length());
61
    return Arguments::at<S>(index);
62
  }
63

    
64
  Handle<Object> receiver() {
65
    return Arguments::at<Object>(0);
66
  }
67

    
68
  Handle<JSFunction> called_function() {
69
    STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
70
    return Arguments::at<JSFunction>(Arguments::length() - 1);
71
  }
72

    
73
  // Gets the total number of arguments including the receiver (but
74
  // excluding extra arguments).
75
  int length() const {
76
    STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
77
    return Arguments::length();
78
  }
79

    
80
#ifdef DEBUG
81
  void Verify() {
82
    // Check we have at least the receiver.
83
    ASSERT(Arguments::length() >= 1);
84
  }
85
#endif
86
};
87

    
88

    
89
// Specialize BuiltinArguments for the called function extra argument.
90

    
91
template <>
92
int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
93
  return Arguments::length() - 1;
94
}
95

    
96
#ifdef DEBUG
97
template <>
98
void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
99
  // Check we have at least the receiver and the called function.
100
  ASSERT(Arguments::length() >= 2);
101
  // Make sure cast to JSFunction succeeds.
102
  called_function();
103
}
104
#endif
105

    
106

    
107
#define DEF_ARG_TYPE(name, spec)                      \
108
  typedef BuiltinArguments<spec> name##ArgumentsType;
109
BUILTIN_LIST_C(DEF_ARG_TYPE)
110
#undef DEF_ARG_TYPE
111

    
112
}  // namespace
113

    
114
// ----------------------------------------------------------------------------
115
// Support macro for defining builtins in C++.
116
// ----------------------------------------------------------------------------
117
//
118
// A builtin function is defined by writing:
119
//
120
//   BUILTIN(name) {
121
//     ...
122
//   }
123
//
124
// In the body of the builtin function the arguments can be accessed
125
// through the BuiltinArguments object args.
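//
// For illustration only (this builtin is hypothetical and not part of
// BUILTIN_LIST_C), a builtin that returns its first explicit argument
// could be written as:
//
//   BUILTIN(IllustrativeFirstArg) {
//     if (args.length() < 2) return isolate->heap()->undefined_value();
//     return args[1];  // args[0] is the receiver.
//   }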
126

    
127
#ifdef DEBUG
128

    
129
#define BUILTIN(name)                                            \
130
  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name(       \
131
      name##ArgumentsType args, Isolate* isolate);               \
132
  MUST_USE_RESULT static MaybeObject* Builtin_##name(            \
133
      int args_length, Object** args_object, Isolate* isolate) { \
134
    name##ArgumentsType args(args_length, args_object);          \
135
    args.Verify();                                               \
136
    return Builtin_Impl_##name(args, isolate);                   \
137
  }                                                              \
138
  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name(       \
139
      name##ArgumentsType args, Isolate* isolate)
140

    
141
#else  // For release mode.
142

    
143
#define BUILTIN(name)                                            \
144
  static MaybeObject* Builtin_impl##name(                        \
145
      name##ArgumentsType args, Isolate* isolate);               \
146
  static MaybeObject* Builtin_##name(                            \
147
      int args_length, Object** args_object, Isolate* isolate) { \
148
    name##ArgumentsType args(args_length, args_object);          \
149
    return Builtin_impl##name(args, isolate);                    \
150
  }                                                              \
151
  static MaybeObject* Builtin_impl##name(                        \
152
      name##ArgumentsType args, Isolate* isolate)
153
#endif
154

    
155

    
156
static inline bool CalledAsConstructor(Isolate* isolate) {
157
#ifdef DEBUG
158
  // Calculate the result using a full stack frame iterator and check
159
  // that the state of the stack is as we assume it to be in the
160
  // code below.
161
  StackFrameIterator it(isolate);
162
  ASSERT(it.frame()->is_exit());
163
  it.Advance();
164
  StackFrame* frame = it.frame();
165
  bool reference_result = frame->is_construct();
166
#endif
167
  Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
168
  // Because we know fp points to an exit frame we can use the relevant
169
  // part of ExitFrame::ComputeCallerState directly.
170
  const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
171
  Address caller_fp = Memory::Address_at(fp + kCallerOffset);
172
  // This inlines the part of StackFrame::ComputeType that grabs the
173
  // type of the current frame.  Note that StackFrame::ComputeType
174
  // has been specialized for each architecture so if any one of them
175
  // changes this code has to be changed as well.
176
  const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
177
  const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
178
  Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
179
  bool result = (marker == kConstructMarker);
180
  ASSERT_EQ(result, reference_result);
181
  return result;
182
}
183

    
184

    
185
// ----------------------------------------------------------------------------
186

    
187
BUILTIN(Illegal) {
188
  UNREACHABLE();
189
  return isolate->heap()->undefined_value();  // Make compiler happy.
190
}
191

    
192

    
193
BUILTIN(EmptyFunction) {
194
  return isolate->heap()->undefined_value();
195
}
196

    
197

    
198
static void MoveDoubleElements(FixedDoubleArray* dst,
199
                               int dst_index,
200
                               FixedDoubleArray* src,
201
                               int src_index,
202
                               int len) {
203
  if (len == 0) return;
204
  OS::MemMove(dst->data_start() + dst_index,
205
              src->data_start() + src_index,
206
              len * kDoubleSize);
207
}
208

    
209

    
210
static void FillWithHoles(Heap* heap, FixedArray* dst, int from, int to) {
211
  ASSERT(dst->map() != heap->fixed_cow_array_map());
212
  MemsetPointer(dst->data_start() + from, heap->the_hole_value(), to - from);
213
}
214

    
215

    
216
static void FillWithHoles(FixedDoubleArray* dst, int from, int to) {
217
  for (int i = from; i < to; i++) {
218
    dst->set_the_hole(i);
219
  }
220
}
221

    
222

    
223
static FixedArrayBase* LeftTrimFixedArray(Heap* heap,
224
                                          FixedArrayBase* elms,
225
                                          int to_trim) {
226
  Map* map = elms->map();
227
  int entry_size;
228
  if (elms->IsFixedArray()) {
229
    entry_size = kPointerSize;
230
  } else {
231
    entry_size = kDoubleSize;
232
  }
233
  ASSERT(elms->map() != heap->fixed_cow_array_map());
234
  // For now this trick is only applied to fixed arrays in new and paged space.
235
  // In large object space the object's start must coincide with the chunk
236
  // and thus the trick is just not applicable.
237
  ASSERT(!heap->lo_space()->Contains(elms));
238

    
239
  STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
240
  STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
241
  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
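  // The trim works by rewriting the array header (map and length) to_trim
  // entries further into the object and turning the vacated prefix into a
  // filler object.  For example, trimming 2 entries of a FixedArray moves
  // the array's start forward by 2 * kPointerSize bytes.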
242

    
243
  Object** former_start = HeapObject::RawField(elms, 0);
244

    
245
  const int len = elms->length();
246

    
247
  if (to_trim * entry_size > FixedArrayBase::kHeaderSize &&
248
      elms->IsFixedArray() &&
249
      !heap->new_space()->Contains(elms)) {
250
    // If we are doing a big trim in old space then we zap the space that was
251
    // formerly part of the array so that the GC (aided by the card-based
252
    // remembered set) won't find pointers to new-space there.
253
    Object** zap = reinterpret_cast<Object**>(elms->address());
254
    zap++;  // Header of filler must be at least one word so skip that.
255
    for (int i = 1; i < to_trim; i++) {
256
      *zap++ = Smi::FromInt(0);
257
    }
258
  }
259
  // Technically in new space this write might be omitted (except for
260
  // debug mode, which iterates through the heap), but to be safe
261
  // we still do it.
262
  heap->CreateFillerObjectAt(elms->address(), to_trim * entry_size);
263

    
264
  int new_start_index = to_trim * (entry_size / kPointerSize);
265
  former_start[new_start_index] = map;
266
  former_start[new_start_index + 1] = Smi::FromInt(len - to_trim);
267

    
268
  // Maintain marking consistency for HeapObjectIterator and
269
  // IncrementalMarking.
270
  int size_delta = to_trim * entry_size;
271
  if (heap->marking()->TransferMark(elms->address(),
272
                                    elms->address() + size_delta)) {
273
    MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
274
  }
275

    
276
  FixedArrayBase* new_elms = FixedArrayBase::cast(HeapObject::FromAddress(
277
      elms->address() + size_delta));
278
  HeapProfiler* profiler = heap->isolate()->heap_profiler();
279
  if (profiler->is_profiling()) {
280
    profiler->ObjectMoveEvent(elms->address(),
281
                              new_elms->address(),
282
                              new_elms->Size());
283
    if (profiler->is_tracking_allocations()) {
284
      // Report filler object as a new allocation.
285
      // Otherwise it will become an untracked object.
286
      profiler->NewObjectEvent(elms->address(), elms->Size());
287
    }
288
  }
289
  return new_elms;
290
}
291

    
292

    
293
static bool ArrayPrototypeHasNoElements(Heap* heap,
294
                                        Context* native_context,
295
                                        JSObject* array_proto) {
296
  // This method depends on the non-writability of the Object and Array prototype
297
  // fields.
298
  if (array_proto->elements() != heap->empty_fixed_array()) return false;
299
  // Object.prototype
300
  Object* proto = array_proto->GetPrototype();
301
  if (proto == heap->null_value()) return false;
302
  array_proto = JSObject::cast(proto);
303
  if (array_proto != native_context->initial_object_prototype()) return false;
304
  if (array_proto->elements() != heap->empty_fixed_array()) return false;
305
  return array_proto->GetPrototype()->IsNull();
306
}
307

    
308

    
309
MUST_USE_RESULT
310
static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
311
    Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
312
  if (!receiver->IsJSArray()) return NULL;
313
  JSArray* array = JSArray::cast(receiver);
314
  HeapObject* elms = array->elements();
315
  Map* map = elms->map();
316
  if (map == heap->fixed_array_map()) {
317
    if (args == NULL || array->HasFastObjectElements()) return elms;
318
  } else if (map == heap->fixed_cow_array_map()) {
319
    MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
320
    if (args == NULL || array->HasFastObjectElements() ||
321
        !maybe_writable_result->To(&elms)) {
322
      return maybe_writable_result;
323
    }
324
  } else if (map == heap->fixed_double_array_map()) {
325
    if (args == NULL) return elms;
326
  } else {
327
    return NULL;
328
  }
329

    
330
  // Need to ensure that the arguments passed in args can be contained in
331
  // the array.
332
  int args_length = args->length();
333
  if (first_added_arg >= args_length) return array->elements();
334

    
335
  ElementsKind origin_kind = array->map()->elements_kind();
336
  ASSERT(!IsFastObjectElementsKind(origin_kind));
337
  ElementsKind target_kind = origin_kind;
338
  int arg_count = args->length() - first_added_arg;
339
  Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
340
  for (int i = 0; i < arg_count; i++) {
341
    Object* arg = arguments[i];
342
    if (arg->IsHeapObject()) {
343
      if (arg->IsHeapNumber()) {
344
        target_kind = FAST_DOUBLE_ELEMENTS;
345
      } else {
346
        target_kind = FAST_ELEMENTS;
347
        break;
348
      }
349
    }
350
  }
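  // For example, pushing a HeapNumber such as 1.5 onto a smi-only array
  // selects FAST_DOUBLE_ELEMENTS, while pushing a string (or any other
  // non-number heap object) selects FAST_ELEMENTS.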
351
  if (target_kind != origin_kind) {
352
    MaybeObject* maybe_failure = array->TransitionElementsKind(target_kind);
353
    if (maybe_failure->IsFailure()) return maybe_failure;
354
    return array->elements();
355
  }
356
  return elms;
357
}
358

    
359

    
360
static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
361
                                                     JSArray* receiver) {
362
  if (!FLAG_clever_optimizations) return false;
363
  Context* native_context = heap->isolate()->context()->native_context();
364
  JSObject* array_proto =
365
      JSObject::cast(native_context->array_function()->prototype());
366
  return receiver->GetPrototype() == array_proto &&
367
         ArrayPrototypeHasNoElements(heap, native_context, array_proto);
368
}
369

    
370

    
371
MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
372
    Isolate* isolate,
373
    const char* name,
374
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
375
  HandleScope handleScope(isolate);
376

    
377
  Handle<Object> js_builtin =
378
      GetProperty(Handle<JSObject>(isolate->native_context()->builtins()),
379
                  name);
380
  Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
381
  int argc = args.length() - 1;
382
  ScopedVector<Handle<Object> > argv(argc);
383
  for (int i = 0; i < argc; ++i) {
384
    argv[i] = args.at<Object>(i + 1);
385
  }
386
  bool pending_exception;
387
  Handle<Object> result = Execution::Call(isolate,
388
                                          function,
389
                                          args.receiver(),
390
                                          argc,
391
                                          argv.start(),
392
                                          &pending_exception);
393
  if (pending_exception) return Failure::Exception();
394
  return *result;
395
}
396

    
397

    
398
BUILTIN(ArrayPush) {
399
  Heap* heap = isolate->heap();
400
  Object* receiver = *args.receiver();
401
  FixedArrayBase* elms_obj;
402
  MaybeObject* maybe_elms_obj =
403
      EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 1);
404
  if (maybe_elms_obj == NULL) {
405
    return CallJsBuiltin(isolate, "ArrayPush", args);
406
  }
407
  if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;
408

    
409
  JSArray* array = JSArray::cast(receiver);
410
  ASSERT(!array->map()->is_observed());
411

    
412
  ElementsKind kind = array->GetElementsKind();
413

    
414
  if (IsFastSmiOrObjectElementsKind(kind)) {
415
    FixedArray* elms = FixedArray::cast(elms_obj);
416

    
417
    int len = Smi::cast(array->length())->value();
418
    int to_add = args.length() - 1;
419
    if (to_add == 0) {
420
      return Smi::FromInt(len);
421
    }
422
    // Currently fixed arrays cannot grow too big, so
423
    // we should never hit this case.
424
    ASSERT(to_add <= (Smi::kMaxValue - len));
425

    
426
    int new_length = len + to_add;
427

    
428
    if (new_length > elms->length()) {
429
      // New backing storage is needed.
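      // Grow by roughly 1.5x plus 16 spare slots to amortize repeated
      // pushes; e.g. new_length == 100 gives a capacity of 166.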
430
      int capacity = new_length + (new_length >> 1) + 16;
431
      FixedArray* new_elms;
432
      MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
433
      if (!maybe_obj->To(&new_elms)) return maybe_obj;
434

    
435
      ElementsAccessor* accessor = array->GetElementsAccessor();
436
      MaybeObject* maybe_failure = accessor->CopyElements(
437
           NULL, 0, kind, new_elms, 0,
438
           ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
439
      ASSERT(!maybe_failure->IsFailure());
440
      USE(maybe_failure);
441

    
442
      elms = new_elms;
443
    }
444

    
445
    // Add the provided values.
446
    DisallowHeapAllocation no_gc;
447
    WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
448
    for (int index = 0; index < to_add; index++) {
449
      elms->set(index + len, args[index + 1], mode);
450
    }
451

    
452
    if (elms != array->elements()) {
453
      array->set_elements(elms);
454
    }
455

    
456
    // Set the length.
457
    array->set_length(Smi::FromInt(new_length));
458
    return Smi::FromInt(new_length);
459
  } else {
460
    int len = Smi::cast(array->length())->value();
461
    int elms_len = elms_obj->length();
462

    
463
    int to_add = args.length() - 1;
464
    if (to_add == 0) {
465
      return Smi::FromInt(len);
466
    }
467
    // Currently fixed arrays cannot grow too big, so
468
    // we should never hit this case.
469
    ASSERT(to_add <= (Smi::kMaxValue - len));
470

    
471
    int new_length = len + to_add;
472

    
473
    FixedDoubleArray* new_elms;
474

    
475
    if (new_length > elms_len) {
476
      // New backing storage is needed.
477
      int capacity = new_length + (new_length >> 1) + 16;
478
      MaybeObject* maybe_obj =
479
          heap->AllocateUninitializedFixedDoubleArray(capacity);
480
      if (!maybe_obj->To(&new_elms)) return maybe_obj;
481

    
482
      ElementsAccessor* accessor = array->GetElementsAccessor();
483
      MaybeObject* maybe_failure = accessor->CopyElements(
484
              NULL, 0, kind, new_elms, 0,
485
              ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
486
      ASSERT(!maybe_failure->IsFailure());
487
      USE(maybe_failure);
488
    } else {
489
      // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
490
      // empty_fixed_array.
491
      new_elms = FixedDoubleArray::cast(elms_obj);
492
    }
493

    
494
    // Add the provided values.
495
    DisallowHeapAllocation no_gc;
496
    int index;
497
    for (index = 0; index < to_add; index++) {
498
      Object* arg = args[index + 1];
499
      new_elms->set(index + len, arg->Number());
500
    }
501

    
502
    if (new_elms != array->elements()) {
503
      array->set_elements(new_elms);
504
    }
505

    
506
    // Set the length.
507
    array->set_length(Smi::FromInt(new_length));
508
    return Smi::FromInt(new_length);
509
  }
510
}
511

    
512

    
513
BUILTIN(ArrayPop) {
514
  Heap* heap = isolate->heap();
515
  Object* receiver = *args.receiver();
516
  FixedArrayBase* elms_obj;
517
  MaybeObject* maybe_elms =
518
      EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
519
  if (maybe_elms == NULL) return CallJsBuiltin(isolate, "ArrayPop", args);
520
  if (!maybe_elms->To(&elms_obj)) return maybe_elms;
521

    
522
  JSArray* array = JSArray::cast(receiver);
523
  ASSERT(!array->map()->is_observed());
524

    
525
  int len = Smi::cast(array->length())->value();
526
  if (len == 0) return heap->undefined_value();
527

    
528
  ElementsAccessor* accessor = array->GetElementsAccessor();
529
  int new_length = len - 1;
530
  MaybeObject* maybe_result;
531
  if (accessor->HasElement(array, array, new_length, elms_obj)) {
532
    maybe_result = accessor->Get(array, array, new_length, elms_obj);
533
  } else {
534
    maybe_result = array->GetPrototype()->GetElement(isolate, len - 1);
535
  }
536
  if (maybe_result->IsFailure()) return maybe_result;
537
  MaybeObject* maybe_failure =
538
      accessor->SetLength(array, Smi::FromInt(new_length));
539
  if (maybe_failure->IsFailure()) return maybe_failure;
540
  return maybe_result;
541
}
542

    
543

    
544
BUILTIN(ArrayShift) {
545
  Heap* heap = isolate->heap();
546
  Object* receiver = *args.receiver();
547
  FixedArrayBase* elms_obj;
548
  MaybeObject* maybe_elms_obj =
549
      EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
550
  if (maybe_elms_obj == NULL)
551
      return CallJsBuiltin(isolate, "ArrayShift", args);
552
  if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;
553

    
554
  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
555
    return CallJsBuiltin(isolate, "ArrayShift", args);
556
  }
557
  JSArray* array = JSArray::cast(receiver);
558
  ASSERT(!array->map()->is_observed());
559

    
560
  int len = Smi::cast(array->length())->value();
561
  if (len == 0) return heap->undefined_value();
562

    
563
  // Get first element
564
  ElementsAccessor* accessor = array->GetElementsAccessor();
565
  Object* first;
566
  MaybeObject* maybe_first = accessor->Get(receiver, array, 0, elms_obj);
567
  if (!maybe_first->To(&first)) return maybe_first;
568
  if (first->IsTheHole()) {
569
    first = heap->undefined_value();
570
  }
571

    
572
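  // Outside large-object space the shift is done by left-trimming the
  // backing store in place; otherwise the elements are moved down by one
  // and the last slot is filled with the hole.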
  if (!heap->lo_space()->Contains(elms_obj)) {
573
    array->set_elements(LeftTrimFixedArray(heap, elms_obj, 1));
574
  } else {
575
    // Shift the elements.
576
    if (elms_obj->IsFixedArray()) {
577
      FixedArray* elms = FixedArray::cast(elms_obj);
578
      DisallowHeapAllocation no_gc;
579
      heap->MoveElements(elms, 0, 1, len - 1);
580
      elms->set(len - 1, heap->the_hole_value());
581
    } else {
582
      FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
583
      MoveDoubleElements(elms, 0, elms, 1, len - 1);
584
      elms->set_the_hole(len - 1);
585
    }
586
  }
587

    
588
  // Set the length.
589
  array->set_length(Smi::FromInt(len - 1));
590

    
591
  return first;
592
}
593

    
594

    
595
BUILTIN(ArrayUnshift) {
596
  Heap* heap = isolate->heap();
597
  Object* receiver = *args.receiver();
598
  FixedArrayBase* elms_obj;
599
  MaybeObject* maybe_elms_obj =
600
      EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
601
  if (maybe_elms_obj == NULL)
602
      return CallJsBuiltin(isolate, "ArrayUnshift", args);
603
  if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;
604

    
605
  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
606
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
607
  }
608
  JSArray* array = JSArray::cast(receiver);
609
  ASSERT(!array->map()->is_observed());
610
  if (!array->HasFastSmiOrObjectElements()) {
611
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
612
  }
613
  FixedArray* elms = FixedArray::cast(elms_obj);
614

    
615
  int len = Smi::cast(array->length())->value();
616
  int to_add = args.length() - 1;
617
  int new_length = len + to_add;
618
  // Currently fixed arrays cannot grow too big, so
619
  // we should never hit this case.
620
  ASSERT(to_add <= (Smi::kMaxValue - len));
621

    
622
  MaybeObject* maybe_object =
623
      array->EnsureCanContainElements(&args, 1, to_add,
624
                                      DONT_ALLOW_DOUBLE_ELEMENTS);
625
  if (maybe_object->IsFailure()) return maybe_object;
626

    
627
  if (new_length > elms->length()) {
628
    // New backing storage is needed.
629
    int capacity = new_length + (new_length >> 1) + 16;
630
    FixedArray* new_elms;
631
    MaybeObject* maybe_elms = heap->AllocateUninitializedFixedArray(capacity);
632
    if (!maybe_elms->To(&new_elms)) return maybe_elms;
633

    
634
    ElementsKind kind = array->GetElementsKind();
635
    ElementsAccessor* accessor = array->GetElementsAccessor();
636
    MaybeObject* maybe_failure = accessor->CopyElements(
637
            NULL, 0, kind, new_elms, to_add,
638
            ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
639
    ASSERT(!maybe_failure->IsFailure());
640
    USE(maybe_failure);
641

    
642
    elms = new_elms;
643
    array->set_elements(elms);
644
  } else {
645
    DisallowHeapAllocation no_gc;
646
    heap->MoveElements(elms, to_add, 0, len);
647
  }
648

    
649
  // Add the provided values.
650
  DisallowHeapAllocation no_gc;
651
  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
652
  for (int i = 0; i < to_add; i++) {
653
    elms->set(i, args[i + 1], mode);
654
  }
655

    
656
  // Set the length.
657
  array->set_length(Smi::FromInt(new_length));
658
  return Smi::FromInt(new_length);
659
}
660

    
661

    
662
BUILTIN(ArraySlice) {
663
  Heap* heap = isolate->heap();
664
  Object* receiver = *args.receiver();
665
  FixedArrayBase* elms;
666
  int len = -1;
667
  if (receiver->IsJSArray()) {
668
    JSArray* array = JSArray::cast(receiver);
669
    if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
670
      return CallJsBuiltin(isolate, "ArraySlice", args);
671
    }
672

    
673
    if (array->HasFastElements()) {
674
      elms = array->elements();
675
    } else {
676
      return CallJsBuiltin(isolate, "ArraySlice", args);
677
    }
678

    
679
    len = Smi::cast(array->length())->value();
680
  } else {
681
    // Array.slice(arguments, ...) is quite a common idiom (notably more
682
    // than 50% of invocations in Web apps).  Treat it in C++ as well.
683
    Map* arguments_map =
684
        isolate->context()->native_context()->arguments_boilerplate()->map();
685

    
686
    bool is_arguments_object_with_fast_elements =
687
        receiver->IsJSObject() &&
688
        JSObject::cast(receiver)->map() == arguments_map;
689
    if (!is_arguments_object_with_fast_elements) {
690
      return CallJsBuiltin(isolate, "ArraySlice", args);
691
    }
692
    JSObject* object = JSObject::cast(receiver);
693

    
694
    if (object->HasFastElements()) {
695
      elms = object->elements();
696
    } else {
697
      return CallJsBuiltin(isolate, "ArraySlice", args);
698
    }
699
    Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
700
    if (!len_obj->IsSmi()) {
701
      return CallJsBuiltin(isolate, "ArraySlice", args);
702
    }
703
    len = Smi::cast(len_obj)->value();
704
    if (len > elms->length()) {
705
      return CallJsBuiltin(isolate, "ArraySlice", args);
706
    }
707
  }
708

    
709
  JSObject* object = JSObject::cast(receiver);
710

    
711
  ASSERT(len >= 0);
712
  int n_arguments = args.length() - 1;
713

    
714
  // Note the carefully chosen defaults: if an argument is missing,
715
  // it's undefined which gets converted to 0 for relative_start
716
  // and to len for relative_end.
717
  int relative_start = 0;
718
  int relative_end = len;
719
  if (n_arguments > 0) {
720
    Object* arg1 = args[1];
721
    if (arg1->IsSmi()) {
722
      relative_start = Smi::cast(arg1)->value();
723
    } else if (arg1->IsHeapNumber()) {
724
      double start = HeapNumber::cast(arg1)->value();
725
      if (start < kMinInt || start > kMaxInt) {
726
        return CallJsBuiltin(isolate, "ArraySlice", args);
727
      }
728
      relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
729
    } else if (!arg1->IsUndefined()) {
730
      return CallJsBuiltin(isolate, "ArraySlice", args);
731
    }
732
    if (n_arguments > 1) {
733
      Object* arg2 = args[2];
734
      if (arg2->IsSmi()) {
735
        relative_end = Smi::cast(arg2)->value();
736
      } else if (arg2->IsHeapNumber()) {
737
        double end = HeapNumber::cast(arg2)->value();
738
        if (end < kMinInt || end > kMaxInt) {
739
          return CallJsBuiltin(isolate, "ArraySlice", args);
740
        }
741
        relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
742
      } else if (!arg2->IsUndefined()) {
743
        return CallJsBuiltin(isolate, "ArraySlice", args);
744
      }
745
    }
746
  }
747

    
748
  // ECMA-262, 3rd Edition, Section 15.4.4.10, step 6.
749
  int k = (relative_start < 0) ? Max(len + relative_start, 0)
750
                               : Min(relative_start, len);
751

    
752
  // ECMA-262, 3rd Edition, Section 15.4.4.10, step 8.
753
  int final = (relative_end < 0) ? Max(len + relative_end, 0)
754
                                 : Min(relative_end, len);
755

    
756
  // Calculate the length of result array.
757
  int result_len = Max(final - k, 0);
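  // For example, with len == 5 a call like slice(1, -1) yields k == 1,
  // final == 4 and result_len == 3.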
758

    
759
  ElementsKind kind = object->GetElementsKind();
760
  if (IsHoleyElementsKind(kind)) {
761
    bool packed = true;
762
    ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
763
    for (int i = k; i < final; i++) {
764
      if (!accessor->HasElement(object, object, i, elms)) {
765
        packed = false;
766
        break;
767
      }
768
    }
769
    if (packed) {
770
      kind = GetPackedElementsKind(kind);
771
    } else if (!receiver->IsJSArray()) {
772
      return CallJsBuiltin(isolate, "ArraySlice", args);
773
    }
774
  }
775

    
776
  JSArray* result_array;
777
  MaybeObject* maybe_array = heap->AllocateJSArrayAndStorage(kind,
778
                                                             result_len,
779
                                                             result_len);
780

    
781
  DisallowHeapAllocation no_gc;
782
  if (result_len == 0) return maybe_array;
783
  if (!maybe_array->To(&result_array)) return maybe_array;
784

    
785
  ElementsAccessor* accessor = object->GetElementsAccessor();
786
  MaybeObject* maybe_failure = accessor->CopyElements(
787
      NULL, k, kind, result_array->elements(), 0, result_len, elms);
788
  ASSERT(!maybe_failure->IsFailure());
789
  USE(maybe_failure);
790

    
791
  return result_array;
792
}
793

    
794

    
795
BUILTIN(ArraySplice) {
796
  Heap* heap = isolate->heap();
797
  Object* receiver = *args.receiver();
798
  FixedArrayBase* elms_obj;
799
  MaybeObject* maybe_elms =
800
      EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 3);
801
  if (maybe_elms == NULL) {
802
    return CallJsBuiltin(isolate, "ArraySplice", args);
803
  }
804
  if (!maybe_elms->To(&elms_obj)) return maybe_elms;
805

    
806
  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
807
    return CallJsBuiltin(isolate, "ArraySplice", args);
808
  }
809
  JSArray* array = JSArray::cast(receiver);
810
  ASSERT(!array->map()->is_observed());
811

    
812
  int len = Smi::cast(array->length())->value();
813

    
814
  int n_arguments = args.length() - 1;
815

    
816
  int relative_start = 0;
817
  if (n_arguments > 0) {
818
    Object* arg1 = args[1];
819
    if (arg1->IsSmi()) {
820
      relative_start = Smi::cast(arg1)->value();
821
    } else if (arg1->IsHeapNumber()) {
822
      double start = HeapNumber::cast(arg1)->value();
823
      if (start < kMinInt || start > kMaxInt) {
824
        return CallJsBuiltin(isolate, "ArraySplice", args);
825
      }
826
      relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
827
    } else if (!arg1->IsUndefined()) {
828
      return CallJsBuiltin(isolate, "ArraySplice", args);
829
    }
830
  }
831
  int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
832
                                          : Min(relative_start, len);
833

    
834
  // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
835
  // given as a request to delete all the elements from the start.
836
  // This differs from the case of an undefined delete count.
837
  // This does not follow ECMA-262, but we do the same for
838
  // compatibility.
839
  int actual_delete_count;
840
  if (n_arguments == 1) {
841
    ASSERT(len - actual_start >= 0);
842
    actual_delete_count = len - actual_start;
843
  } else {
844
    int value = 0;  // ToInteger(undefined) == 0
845
    if (n_arguments > 1) {
846
      Object* arg2 = args[2];
847
      if (arg2->IsSmi()) {
848
        value = Smi::cast(arg2)->value();
849
      } else {
850
        return CallJsBuiltin(isolate, "ArraySplice", args);
851
      }
852
    }
853
    actual_delete_count = Min(Max(value, 0), len - actual_start);
854
  }
855

    
856
  ElementsKind elements_kind = array->GetElementsKind();
857

    
858
  int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
859
  int new_length = len - actual_delete_count + item_count;
860

    
861
  // For double mode we do not support changing the length.
862
  if (new_length > len && IsFastDoubleElementsKind(elements_kind)) {
863
    return CallJsBuiltin(isolate, "ArraySplice", args);
864
  }
865

    
866
  if (new_length == 0) {
867
    MaybeObject* maybe_array = heap->AllocateJSArrayWithElements(
868
        elms_obj, elements_kind, actual_delete_count);
869
    if (maybe_array->IsFailure()) return maybe_array;
870
    array->set_elements(heap->empty_fixed_array());
871
    array->set_length(Smi::FromInt(0));
872
    return maybe_array;
873
  }
874

    
875
  JSArray* result_array = NULL;
876
  MaybeObject* maybe_array =
877
      heap->AllocateJSArrayAndStorage(elements_kind,
878
                                      actual_delete_count,
879
                                      actual_delete_count);
880
  if (!maybe_array->To(&result_array)) return maybe_array;
881

    
882
  if (actual_delete_count > 0) {
883
    DisallowHeapAllocation no_gc;
884
    ElementsAccessor* accessor = array->GetElementsAccessor();
885
    MaybeObject* maybe_failure = accessor->CopyElements(
886
        NULL, actual_start, elements_kind, result_array->elements(),
887
        0, actual_delete_count, elms_obj);
888
    // Cannot fail since the origin and target array are of the same elements
889
    // kind.
890
    ASSERT(!maybe_failure->IsFailure());
891
    USE(maybe_failure);
892
  }
893

    
894
  bool elms_changed = false;
895
  if (item_count < actual_delete_count) {
896
    // Shrink the array.
897
    const bool trim_array = !heap->lo_space()->Contains(elms_obj) &&
898
      ((actual_start + item_count) <
899
          (len - actual_delete_count - actual_start));
900
    if (trim_array) {
901
      const int delta = actual_delete_count - item_count;
902

    
903
      if (elms_obj->IsFixedDoubleArray()) {
904
        FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
905
        MoveDoubleElements(elms, delta, elms, 0, actual_start);
906
      } else {
907
        FixedArray* elms = FixedArray::cast(elms_obj);
908
        DisallowHeapAllocation no_gc;
909
        heap->MoveElements(elms, delta, 0, actual_start);
910
      }
911

    
912
      elms_obj = LeftTrimFixedArray(heap, elms_obj, delta);
913

    
914
      elms_changed = true;
915
    } else {
916
      if (elms_obj->IsFixedDoubleArray()) {
917
        FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
918
        MoveDoubleElements(elms, actual_start + item_count,
919
                           elms, actual_start + actual_delete_count,
920
                           (len - actual_delete_count - actual_start));
921
        FillWithHoles(elms, new_length, len);
922
      } else {
923
        FixedArray* elms = FixedArray::cast(elms_obj);
924
        DisallowHeapAllocation no_gc;
925
        heap->MoveElements(elms, actual_start + item_count,
926
                           actual_start + actual_delete_count,
927
                           (len - actual_delete_count - actual_start));
928
        FillWithHoles(heap, elms, new_length, len);
929
      }
930
    }
931
  } else if (item_count > actual_delete_count) {
932
    FixedArray* elms = FixedArray::cast(elms_obj);
933
    // Currently fixed arrays cannot grow too big, so
934
    // we should never hit this case.
935
    ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len));
936

    
937
    // Check if the array needs to grow.
938
    if (new_length > elms->length()) {
939
      // New backing storage is needed.
940
      int capacity = new_length + (new_length >> 1) + 16;
941
      FixedArray* new_elms;
942
      MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
943
      if (!maybe_obj->To(&new_elms)) return maybe_obj;
944

    
945
      DisallowHeapAllocation no_gc;
946

    
947
      ElementsKind kind = array->GetElementsKind();
948
      ElementsAccessor* accessor = array->GetElementsAccessor();
949
      if (actual_start > 0) {
950
        // Copy the part before actual_start as is.
951
        MaybeObject* maybe_failure = accessor->CopyElements(
952
            NULL, 0, kind, new_elms, 0, actual_start, elms);
953
        ASSERT(!maybe_failure->IsFailure());
954
        USE(maybe_failure);
955
      }
956
      MaybeObject* maybe_failure = accessor->CopyElements(
957
          NULL, actual_start + actual_delete_count, kind, new_elms,
958
          actual_start + item_count,
959
          ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
960
      ASSERT(!maybe_failure->IsFailure());
961
      USE(maybe_failure);
962

    
963
      elms_obj = new_elms;
964
      elms_changed = true;
965
    } else {
966
      DisallowHeapAllocation no_gc;
967
      heap->MoveElements(elms, actual_start + item_count,
968
                         actual_start + actual_delete_count,
969
                         (len - actual_delete_count - actual_start));
970
    }
971
  }
972

    
973
  if (IsFastDoubleElementsKind(elements_kind)) {
974
    FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
975
    for (int k = actual_start; k < actual_start + item_count; k++) {
976
      Object* arg = args[3 + k - actual_start];
977
      if (arg->IsSmi()) {
978
        elms->set(k, Smi::cast(arg)->value());
979
      } else {
980
        elms->set(k, HeapNumber::cast(arg)->value());
981
      }
982
    }
983
  } else {
984
    FixedArray* elms = FixedArray::cast(elms_obj);
985
    DisallowHeapAllocation no_gc;
986
    WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
987
    for (int k = actual_start; k < actual_start + item_count; k++) {
988
      elms->set(k, args[3 + k - actual_start], mode);
989
    }
990
  }
991

    
992
  if (elms_changed) {
993
    array->set_elements(elms_obj);
994
  }
995
  // Set the length.
996
  array->set_length(Smi::FromInt(new_length));
997

    
998
  return result_array;
999
}
1000

    
1001

    
1002
BUILTIN(ArrayConcat) {
1003
  Heap* heap = isolate->heap();
1004
  Context* native_context = isolate->context()->native_context();
1005
  JSObject* array_proto =
1006
      JSObject::cast(native_context->array_function()->prototype());
1007
  if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
1008
    return CallJsBuiltin(isolate, "ArrayConcat", args);
1009
  }
1010

    
1011
  // Iterate through all the arguments performing checks
1012
  // and calculating total length.
1013
  int n_arguments = args.length();
1014
  int result_len = 0;
1015
  ElementsKind elements_kind = GetInitialFastElementsKind();
1016
  bool has_double = false;
1017
  bool is_holey = false;
1018
  for (int i = 0; i < n_arguments; i++) {
1019
    Object* arg = args[i];
1020
    if (!arg->IsJSArray() ||
1021
        !JSArray::cast(arg)->HasFastElements() ||
1022
        JSArray::cast(arg)->GetPrototype() != array_proto) {
1023
      return CallJsBuiltin(isolate, "ArrayConcat", args);
1024
    }
1025
    int len = Smi::cast(JSArray::cast(arg)->length())->value();
1026

    
1027
    // We shouldn't overflow when adding another len.
1028
    const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
1029
    STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
1030
    USE(kHalfOfMaxInt);
1031
    result_len += len;
1032
    ASSERT(result_len >= 0);
1033

    
1034
    if (result_len > FixedDoubleArray::kMaxLength) {
1035
      return CallJsBuiltin(isolate, "ArrayConcat", args);
1036
    }
1037

    
1038
    ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
1039
    has_double = has_double || IsFastDoubleElementsKind(arg_kind);
1040
    is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
1041
    if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
1042
      elements_kind = arg_kind;
1043
    }
1044
  }
1045

    
1046
  if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);
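  // For example, concatenating a packed smi array with a holey smi array
  // produces a FAST_HOLEY_SMI_ELEMENTS result array.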
1047

    
1048
  // If a double array is concatenated into a fast elements array, the fast
1049
  // elements array needs to be initialized to contain proper holes, since
1050
  // boxing doubles may cause incremental marking.
1051
  ArrayStorageAllocationMode mode =
1052
      has_double && IsFastObjectElementsKind(elements_kind)
1053
      ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
1054
  JSArray* result_array;
1055
  // Allocate result.
1056
  MaybeObject* maybe_array =
1057
      heap->AllocateJSArrayAndStorage(elements_kind,
1058
                                      result_len,
1059
                                      result_len,
1060
                                      mode);
1061
  if (!maybe_array->To(&result_array)) return maybe_array;
1062
  if (result_len == 0) return result_array;
1063

    
1064
  int j = 0;
1065
  FixedArrayBase* storage = result_array->elements();
1066
  ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
1067
  for (int i = 0; i < n_arguments; i++) {
1068
    JSArray* array = JSArray::cast(args[i]);
1069
    int len = Smi::cast(array->length())->value();
1070
    ElementsKind from_kind = array->GetElementsKind();
1071
    if (len > 0) {
1072
      MaybeObject* maybe_failure =
1073
          accessor->CopyElements(array, 0, from_kind, storage, j, len);
1074
      if (maybe_failure->IsFailure()) return maybe_failure;
1075
      j += len;
1076
    }
1077
  }
1078

    
1079
  ASSERT(j == result_len);
1080

    
1081
  return result_array;
1082
}
1083

    
1084

    
1085
// -----------------------------------------------------------------------------
1086
// Strict mode poison pills
1087

    
1088

    
1089
BUILTIN(StrictModePoisonPill) {
1090
  HandleScope scope(isolate);
1091
  return isolate->Throw(*isolate->factory()->NewTypeError(
1092
      "strict_poison_pill", HandleVector<Object>(NULL, 0)));
1093
}
1094

    
1095

    
1096
// -----------------------------------------------------------------------------
1097
//
1098

    
1099

    
1100
// Searches the hidden prototype chain of the given object for the first
1101
// object that is an instance of the given type.  If no such object can
1102
// be found then Heap::null_value() is returned.
1103
static inline Object* FindHidden(Heap* heap,
1104
                                 Object* object,
1105
                                 FunctionTemplateInfo* type) {
1106
  if (object->IsInstanceOf(type)) return object;
1107
  Object* proto = object->GetPrototype(heap->isolate());
1108
  if (proto->IsJSObject() &&
1109
      JSObject::cast(proto)->map()->is_hidden_prototype()) {
1110
    return FindHidden(heap, proto, type);
1111
  }
1112
  return heap->null_value();
1113
}
1114

    
1115

    
1116
// Returns the holder JSObject if the function can legally be called
1117
// with this receiver.  Returns Heap::null_value() if the call is
1118
// illegal.  Any arguments that don't fit the expected type are
1119
// overwritten with undefined.  Note that holder and the arguments are
1120
// implicitly rewritten with the first object in the hidden prototype
1121
// chain that actually has the expected type.
1122
static inline Object* TypeCheck(Heap* heap,
1123
                                int argc,
1124
                                Object** argv,
1125
                                FunctionTemplateInfo* info) {
1126
  Object* recv = argv[0];
1127
  // API calls are only supported with JSObject receivers.
1128
  if (!recv->IsJSObject()) return heap->null_value();
1129
  Object* sig_obj = info->signature();
1130
  if (sig_obj->IsUndefined()) return recv;
1131
  SignatureInfo* sig = SignatureInfo::cast(sig_obj);
1132
  // If necessary, check the receiver
1133
  Object* recv_type = sig->receiver();
1134
  Object* holder = recv;
1135
  if (!recv_type->IsUndefined()) {
1136
    holder = FindHidden(heap, holder, FunctionTemplateInfo::cast(recv_type));
1137
    if (holder == heap->null_value()) return heap->null_value();
1138
  }
1139
  Object* args_obj = sig->args();
1140
  // If there is no argument signature we're done
1141
  if (args_obj->IsUndefined()) return holder;
1142
  FixedArray* args = FixedArray::cast(args_obj);
1143
  int length = args->length();
1144
  if (argc <= length) length = argc - 1;
1145
  for (int i = 0; i < length; i++) {
1146
    Object* argtype = args->get(i);
1147
    if (argtype->IsUndefined()) continue;
1148
    Object** arg = &argv[-1 - i];
1149
    Object* current = *arg;
1150
    current = FindHidden(heap, current, FunctionTemplateInfo::cast(argtype));
1151
    if (current == heap->null_value()) current = heap->undefined_value();
1152
    *arg = current;
1153
  }
1154
  return holder;
1155
}
1156

    
1157

    
1158
template <bool is_construct>
1159
MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
1160
    BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
1161
  ASSERT(is_construct == CalledAsConstructor(isolate));
1162
  Heap* heap = isolate->heap();
1163

    
1164
  HandleScope scope(isolate);
1165
  Handle<JSFunction> function = args.called_function();
1166
  ASSERT(function->shared()->IsApiFunction());
1167

    
1168
  FunctionTemplateInfo* fun_data = function->shared()->get_api_func_data();
1169
  if (is_construct) {
1170
    Handle<FunctionTemplateInfo> desc(fun_data, isolate);
1171
    bool pending_exception = false;
1172
    isolate->factory()->ConfigureInstance(
1173
        desc, Handle<JSObject>::cast(args.receiver()), &pending_exception);
1174
    ASSERT(isolate->has_pending_exception() == pending_exception);
1175
    if (pending_exception) return Failure::Exception();
1176
    fun_data = *desc;
1177
  }
1178

    
1179
  Object* raw_holder = TypeCheck(heap, args.length(), &args[0], fun_data);
1180

    
1181
  if (raw_holder->IsNull()) {
1182
    // This function cannot be called with the given receiver.  Abort!
1183
    Handle<Object> obj =
1184
        isolate->factory()->NewTypeError(
1185
            "illegal_invocation", HandleVector(&function, 1));
1186
    return isolate->Throw(*obj);
1187
  }
1188

    
1189
  Object* raw_call_data = fun_data->call_code();
1190
  if (!raw_call_data->IsUndefined()) {
1191
    CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
1192
    Object* callback_obj = call_data->callback();
1193
    v8::FunctionCallback callback =
1194
        v8::ToCData<v8::FunctionCallback>(callback_obj);
1195
    Object* data_obj = call_data->data();
1196
    Object* result;
1197

    
1198
    LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
1199
    ASSERT(raw_holder->IsJSObject());
1200

    
1201
    FunctionCallbackArguments custom(isolate,
1202
                                     data_obj,
1203
                                     *function,
1204
                                     raw_holder,
1205
                                     &args[0] - 1,
1206
                                     args.length() - 1,
1207
                                     is_construct);
1208

    
1209
    v8::Handle<v8::Value> value = custom.Call(callback);
1210
    if (value.IsEmpty()) {
1211
      result = heap->undefined_value();
1212
    } else {
1213
      result = *reinterpret_cast<Object**>(*value);
1214
      result->VerifyApiCallResultType();
1215
    }
1216

    
1217
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
1218
    if (!is_construct || result->IsJSObject()) return result;
1219
  }
1220

    
1221
  return *args.receiver();
1222
}
1223

    
1224

    
1225
BUILTIN(HandleApiCall) {
1226
  return HandleApiCallHelper<false>(args, isolate);
1227
}
1228

    
1229

    
1230
BUILTIN(HandleApiCallConstruct) {
1231
  return HandleApiCallHelper<true>(args, isolate);
1232
}
1233

    
1234

    
1235
// Helper function to handle calls to non-function objects created through the
1236
// API. The object can be called as either a constructor (using new) or just as
1237
// a function (without new).
1238
MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
1239
    Isolate* isolate,
1240
    bool is_construct_call,
1241
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
1242
  // Non-functions are never called as constructors. Even if this is an object
1243
  // called as a constructor the delegate call is not a construct call.
1244
  ASSERT(!CalledAsConstructor(isolate));
1245
  Heap* heap = isolate->heap();
1246

    
1247
  Handle<Object> receiver = args.receiver();
1248

    
1249
  // Get the object called.
1250
  JSObject* obj = JSObject::cast(*receiver);
1251

    
1252
  // Get the invocation callback from the function descriptor that was
1253
  // used to create the called object.
1254
  ASSERT(obj->map()->has_instance_call_handler());
1255
  JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
1256
  ASSERT(constructor->shared()->IsApiFunction());
1257
  Object* handler =
1258
      constructor->shared()->get_api_func_data()->instance_call_handler();
1259
  ASSERT(!handler->IsUndefined());
1260
  CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
1261
  Object* callback_obj = call_data->callback();
1262
  v8::FunctionCallback callback =
1263
      v8::ToCData<v8::FunctionCallback>(callback_obj);
1264

    
1265
  // Get the data for the call and perform the callback.
1266
  Object* result;
1267
  {
1268
    HandleScope scope(isolate);
1269
    LOG(isolate, ApiObjectAccess("call non-function", obj));
1270

    
1271
    FunctionCallbackArguments custom(isolate,
1272
                                     call_data->data(),
1273
                                     constructor,
1274
                                     obj,
1275
                                     &args[0] - 1,
1276
                                     args.length() - 1,
1277
                                     is_construct_call);
1278
    v8::Handle<v8::Value> value = custom.Call(callback);
1279
    if (value.IsEmpty()) {
1280
      result = heap->undefined_value();
1281
    } else {
1282
      result = *reinterpret_cast<Object**>(*value);
1283
      result->VerifyApiCallResultType();
1284
    }
1285
  }
1286
  // Check for exceptions and return result.
1287
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
1288
  return result;
1289
}
1290

    
1291

    
1292
// Handle calls to non-function objects created through the API. This delegate
1293
// function is used when the call is a normal function call.
1294
BUILTIN(HandleApiCallAsFunction) {
1295
  return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
1296
}
1297

    
1298

    
1299
// Handle calls to non-function objects created through the API. This delegate
1300
// function is used when the call is a construct call.
1301
BUILTIN(HandleApiCallAsConstructor) {
1302
  return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
1303
}
1304

    
1305

    
1306
static void Generate_LoadIC_Initialize(MacroAssembler* masm) {
1307
  LoadIC::GenerateInitialize(masm);
1308
}
1309

    
1310

    
1311
static void Generate_LoadIC_PreMonomorphic(MacroAssembler* masm) {
1312
  LoadIC::GeneratePreMonomorphic(masm);
1313
}
1314

    
1315

    
1316
static void Generate_LoadIC_Miss(MacroAssembler* masm) {
  LoadIC::GenerateMiss(masm);
}


static void Generate_LoadIC_Megamorphic(MacroAssembler* masm) {
  LoadIC::GenerateMegamorphic(masm);
}


static void Generate_LoadIC_Normal(MacroAssembler* masm) {
  LoadIC::GenerateNormal(masm);
}


static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
  LoadStubCompiler::GenerateLoadViaGetter(
      masm, LoadStubCompiler::registers()[0], Handle<JSFunction>());
}


static void Generate_LoadIC_Slow(MacroAssembler* masm) {
  LoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
  KeyedLoadIC::GenerateInitialize(masm);
}


static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
  KeyedLoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm, MISS);
}


static void Generate_KeyedLoadIC_MissForceGeneric(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm, MISS_FORCE_GENERIC);
}


static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
  KeyedLoadIC::GenerateGeneric(masm);
}


static void Generate_KeyedLoadIC_String(MacroAssembler* masm) {
  KeyedLoadIC::GenerateString(masm);
}


static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedLoadIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
  KeyedLoadIC::GenerateIndexedInterceptor(masm);
}


static void Generate_KeyedLoadIC_NonStrictArguments(MacroAssembler* masm) {
  KeyedLoadIC::GenerateNonStrictArguments(masm);
}


static void Generate_StoreIC_Slow(MacroAssembler* masm) {
  StoreIC::GenerateSlow(masm);
}


static void Generate_StoreIC_Slow_Strict(MacroAssembler* masm) {
  StoreIC::GenerateSlow(masm);
}


static void Generate_StoreIC_Initialize(MacroAssembler* masm) {
  StoreIC::GenerateInitialize(masm);
}


static void Generate_StoreIC_Initialize_Strict(MacroAssembler* masm) {
  StoreIC::GenerateInitialize(masm);
}


static void Generate_StoreIC_PreMonomorphic(MacroAssembler* masm) {
  StoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_StoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
  StoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_StoreIC_Miss(MacroAssembler* masm) {
  StoreIC::GenerateMiss(masm);
}


static void Generate_StoreIC_Normal(MacroAssembler* masm) {
  StoreIC::GenerateNormal(masm);
}


static void Generate_StoreIC_Normal_Strict(MacroAssembler* masm) {
  StoreIC::GenerateNormal(masm);
}


static void Generate_StoreIC_Megamorphic(MacroAssembler* masm) {
  StoreIC::GenerateMegamorphic(masm, kNonStrictMode);
}


static void Generate_StoreIC_Megamorphic_Strict(MacroAssembler* masm) {
  StoreIC::GenerateMegamorphic(masm, kStrictMode);
}


static void Generate_StoreIC_GlobalProxy(MacroAssembler* masm) {
  StoreIC::GenerateRuntimeSetProperty(masm, kNonStrictMode);
}


static void Generate_StoreIC_GlobalProxy_Strict(MacroAssembler* masm) {
  StoreIC::GenerateRuntimeSetProperty(masm, kStrictMode);
}


static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
  StoreStubCompiler::GenerateStoreViaSetter(masm, Handle<JSFunction>());
}


static void Generate_StoreIC_Generic(MacroAssembler* masm) {
  StoreIC::GenerateRuntimeSetProperty(masm, kNonStrictMode);
}


static void Generate_StoreIC_Generic_Strict(MacroAssembler* masm) {
  StoreIC::GenerateRuntimeSetProperty(masm, kStrictMode);
}


static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
  KeyedStoreIC::GenerateGeneric(masm, kNonStrictMode);
}


static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateGeneric(masm, kStrictMode);
}


static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMiss(masm, MISS);
}


static void Generate_KeyedStoreIC_MissForceGeneric(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMiss(masm, MISS_FORCE_GENERIC);
}


static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
  KeyedStoreIC::GenerateSlow(masm);
}


static void Generate_KeyedStoreIC_Slow_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateSlow(masm);
}


static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedStoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedStoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedStoreIC_NonStrictArguments(MacroAssembler* masm) {
  KeyedStoreIC::GenerateNonStrictArguments(masm);
}

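// Editor's note (illustrative sketch, not part of the original file): the
// trivial Generate_* forwarders above are never called directly.  Their names
// are fed through the BUILTIN_LIST_A / BUILTIN_LIST_H X-macros declared in
// builtins.h, which InitBuiltinFunctionTable() below expands with
// DEF_FUNCTION_PTR_A(aname, kind, state, extra) and
// DEF_FUNCTION_PTR_H(aname, kind, extra), both of which reference
// Generate_##aname.  Hypothetical entries consistent with those macro
// signatures could look like the following (the real names and argument
// values live in builtins.h; these are assumptions for illustration only):
//
//   V(LoadIC_Megamorphic, LOAD_IC, MEGAMORPHIC, Code::kNoExtraICState)
//   V(KeyedStoreIC_Slow, KEYED_STORE_IC, Code::kNoExtraICState)
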
#ifdef ENABLE_DEBUGGER_SUPPORT
static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateLoadICDebugBreak(masm);
}


static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateStoreICDebugBreak(masm);
}


static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateKeyedLoadICDebugBreak(masm);
}


static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateKeyedStoreICDebugBreak(masm);
}


static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateCompareNilICDebugBreak(masm);
}


static void Generate_Return_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateReturnDebugBreak(masm);
}


static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateCallFunctionStubDebugBreak(masm);
}


static void Generate_CallFunctionStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  Debug::GenerateCallFunctionStubRecordDebugBreak(masm);
}


static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateCallConstructStubDebugBreak(masm);
}


static void Generate_CallConstructStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  Debug::GenerateCallConstructStubRecordDebugBreak(masm);
}


static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateSlotDebugBreak(masm);
}


static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
  Debug::GeneratePlainReturnLiveEdit(masm);
}


static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
  Debug::GenerateFrameDropperLiveEdit(masm);
}
#endif


Builtins::Builtins() : initialized_(false) {
  memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
  memset(names_, 0, sizeof(names_[0]) * builtin_count);
}


Builtins::~Builtins() {
}


#define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
Address const Builtins::c_functions_[cfunction_count] = {
  BUILTIN_LIST_C(DEF_ENUM_C)
};
#undef DEF_ENUM_C

#define DEF_JS_NAME(name, ignore) #name,
#define DEF_JS_ARGC(ignore, argc) argc,
const char* const Builtins::javascript_names_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_NAME)
};

int const Builtins::javascript_argc_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_ARGC)
};
#undef DEF_JS_NAME
#undef DEF_JS_ARGC

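// Editor's note (illustrative sketch, not part of the original file):
// DEF_JS_NAME and DEF_JS_ARGC are an X-macro pair that expand the same list
// twice to build two parallel arrays.  For a hypothetical BUILTINS_LIST_JS
// entry V(EQUALS, 1), the two expansions above would contribute:
//
//   javascript_names_[...] : "EQUALS"
//   javascript_argc_[...]  : 1
//
// so javascript_names_[i] and javascript_argc_[i] always describe the same
// JavaScript builtin.  The real entries are defined in builtins.h; EQUALS is
// used here only as an example name.
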
struct BuiltinDesc {
  byte* generator;
  byte* c_code;
  const char* s_name;  // name is only used for generating log information.
  int name;
  Code::Flags flags;
  BuiltinExtraArguments extra_args;
};

#define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }

class BuiltinFunctionTable {
 public:
  BuiltinDesc* functions() {
    CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
    return functions_;
  }

  OnceType once_;
  BuiltinDesc functions_[Builtins::builtin_count + 1];

  friend class Builtins;
};

static BuiltinFunctionTable builtin_function_table =
    BUILTIN_FUNCTION_TABLE_INIT;

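// Editor's note (illustrative sketch, not part of the original file): the
// table above is a POD whose static initializer is just { V8_ONCE_INIT, {} },
// so it needs no dynamic constructor, and CallOnce() guarantees that
// InitBuiltinFunctionTable() has run exactly once before any caller of
// functions() sees the table.  A standalone C++11 analogue of the same
// "lazily initialized table" pattern, using std::call_once in place of V8's
// OnceType/CallOnce (the names Desc, Table and Init are invented for the
// example only):
//
//   #include <mutex>
//
//   struct Desc { const char* name; };
//
//   class Table {
//    public:
//     Desc* functions() {
//       std::call_once(once_, &Table::Init, this);  // first caller fills it in
//       return functions_;
//     }
//    private:
//     void Init() { functions_[0].name = "example"; }
//     std::once_flag once_;
//     Desc functions_[1];
//   };
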
// Define array of pointers to generators and C builtin functions.
// We do this in a sort of roundabout way so that we can do the initialization
// within the lexical scope of Builtins:: and within a context where
// Code::Flags names a non-abstract type.
void Builtins::InitBuiltinFunctionTable() {
  BuiltinDesc* functions = builtin_function_table.functions_;
  functions[builtin_count].generator = NULL;
  functions[builtin_count].c_code = NULL;
  functions[builtin_count].s_name = NULL;
  functions[builtin_count].name = builtin_count;
  functions[builtin_count].flags = static_cast<Code::Flags>(0);
  functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;

#define DEF_FUNCTION_PTR_C(aname, aextra_args)                         \
    functions->generator = FUNCTION_ADDR(Generate_Adaptor);            \
    functions->c_code = FUNCTION_ADDR(Builtin_##aname);                \
    functions->s_name = #aname;                                        \
    functions->name = c_##aname;                                       \
    functions->flags = Code::ComputeFlags(Code::BUILTIN);              \
    functions->extra_args = aextra_args;                               \
    ++functions;

#define DEF_FUNCTION_PTR_A(aname, kind, state, extra)                       \
    functions->generator = FUNCTION_ADDR(Generate_##aname);                 \
    functions->c_code = NULL;                                               \
    functions->s_name = #aname;                                             \
    functions->name = k##aname;                                             \
    functions->flags = Code::ComputeFlags(Code::kind,                       \
                                          state,                            \
                                          extra);                           \
    functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
    ++functions;

#define DEF_FUNCTION_PTR_H(aname, kind, extra)                              \
    functions->generator = FUNCTION_ADDR(Generate_##aname);                 \
    functions->c_code = NULL;                                               \
    functions->s_name = #aname;                                             \
    functions->name = k##aname;                                             \
    functions->flags = Code::ComputeFlags(                                  \
        Code::HANDLER, MONOMORPHIC, extra, Code::NORMAL, Code::kind);       \
    functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
    ++functions;

  BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
  BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
  BUILTIN_LIST_H(DEF_FUNCTION_PTR_H)
  BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)

#undef DEF_FUNCTION_PTR_C
#undef DEF_FUNCTION_PTR_A
}


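// Editor's note (illustrative sketch, not part of the original file): for a
// hypothetical BUILTIN_LIST_C entry V(Example, NO_EXTRA_ARGUMENTS), the
// DEF_FUNCTION_PTR_C macro above expands to one table slot:
//
//   functions->generator  = FUNCTION_ADDR(Generate_Adaptor);
//   functions->c_code     = FUNCTION_ADDR(Builtin_Example);
//   functions->s_name     = "Example";
//   functions->name       = c_Example;
//   functions->flags      = Code::ComputeFlags(Code::BUILTIN);
//   functions->extra_args = NO_EXTRA_ARGUMENTS;
//   ++functions;
//
// i.e. every C++ builtin shares the Generate_Adaptor trampoline and differs
// only in the C function it forwards to.  "Example" is an invented name; the
// real entries come from BUILTIN_LIST_C in builtins.h.
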
void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
  ASSERT(!initialized_);
  Heap* heap = isolate->heap();

  // Create a scope for the handles in the builtins.
  HandleScope scope(isolate);

  const BuiltinDesc* functions = builtin_function_table.functions();

  // For now we generate builtin adaptor code into a stack-allocated
  // buffer, before copying it into individual code objects. Be careful
  // with alignment, some platforms don't like unaligned code.
  union { int force_alignment; byte buffer[8*KB]; } u;

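  // Editor's note (illustrative, not part of the original file): the
  // anonymous union gives `buffer` at least the alignment of `int`; a plain
  // byte array could start at any address, and some targets penalize or
  // fault when code is emitted at unaligned addresses.  In C++11 the same
  // intent could, hypothetically, be written as:
  //
  //   alignas(int) byte buffer[8 * KB];
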
  // Traverse the list of builtins and generate an adaptor in a
  // separate code object for each one.
  for (int i = 0; i < builtin_count; i++) {
    if (create_heap_objects) {
      MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
      // Generate the code/adaptor.
      typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
      Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
      // We pass all arguments to the generator, but it may not use all of
      // them.  This works because the first arguments are on top of the
      // stack.
      ASSERT(!masm.has_frame());
      g(&masm, functions[i].name, functions[i].extra_args);
      // Move the code into the object heap.
      CodeDesc desc;
      masm.GetCode(&desc);
      Code::Flags flags = functions[i].flags;
      Object* code = NULL;
      {
        // During startup it's OK to always allocate and defer GC to later.
        // This simplifies things because we don't need to retry.
        AlwaysAllocateScope __scope__;
        { MaybeObject* maybe_code =
              heap->CreateCode(desc, flags, masm.CodeObject());
          if (!maybe_code->ToObject(&code)) {
            v8::internal::V8::FatalProcessOutOfMemory("CreateCode");
          }
        }
      }
      // Log the event and add the code to the builtins array.
      PROFILE(isolate,
              CodeCreateEvent(Logger::BUILTIN_TAG,
                              Code::cast(code),
                              functions[i].s_name));
      GDBJIT(AddCode(GDBJITInterface::BUILTIN,
                     functions[i].s_name,
                     Code::cast(code)));
      builtins_[i] = code;
#ifdef ENABLE_DISASSEMBLER
      if (FLAG_print_builtin_code) {
        PrintF("Builtin: %s\n", functions[i].s_name);
        Code::cast(code)->Disassemble(functions[i].s_name);
        PrintF("\n");
      }
#endif
    } else {
      // Deserializing. The values will be filled in during IterateBuiltins.
      builtins_[i] = NULL;
    }
    names_[i] = functions[i].s_name;
  }

  // Mark as initialized.
  initialized_ = true;
}

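// Editor's note (illustrative, not part of the original file): the Generator
// typedef in SetUp() treats every table entry uniformly as
// void(MacroAssembler*, int, BuiltinExtraArguments).  For C++ builtins the
// generator slot holds Generate_Adaptor, which uses the id and extra_args to
// reach the corresponding Builtin_<name> C function; for assembler builtins
// it holds Generate_<name>, which simply ignores the trailing arguments.
// That is why the loop can pass all three arguments to every generator, as
// the comment inside it notes.
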
void Builtins::TearDown() {
  initialized_ = false;
}


void Builtins::IterateBuiltins(ObjectVisitor* v) {
  v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
}


const char* Builtins::Lookup(byte* pc) {
  // may be called during initialization (disassembler!)
  if (initialized_) {
    for (int i = 0; i < builtin_count; i++) {
      Code* entry = Code::cast(builtins_[i]);
      if (entry->contains(pc)) {
        return names_[i];
      }
    }
  }
  return NULL;
}


void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
}


void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
}


#define DEFINE_BUILTIN_ACCESSOR_C(name, ignore)               \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
#define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
Handle<Code> Builtins::name() {                             \
  Code** code_address =                                     \
      reinterpret_cast<Code**>(builtin_address(k##name));   \
  return Handle<Code>(code_address);                        \
}
#define DEFINE_BUILTIN_ACCESSOR_H(name, kind, extra)        \
Handle<Code> Builtins::name() {                             \
  Code** code_address =                                     \
      reinterpret_cast<Code**>(builtin_address(k##name));   \
  return Handle<Code>(code_address);                        \
}
BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
BUILTIN_LIST_H(DEFINE_BUILTIN_ACCESSOR_H)
BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
#undef DEFINE_BUILTIN_ACCESSOR_C
#undef DEFINE_BUILTIN_ACCESSOR_A

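// Editor's note (illustrative sketch, not part of the original file): each
// accessor generated above returns a Handle<Code> that wraps the address of
// the corresponding builtins_ slot, so the handle keeps referring to the
// current code object even after the GC moves it (the slots are visited via
// IterateBuiltins()).  A typical, assumed call site elsewhere in V8 would
// look roughly like:
//
//   Handle<Code> ic = isolate->builtins()->LoadIC_Megamorphic();
//
// where `isolate` is an Isolate* and LoadIC_Megamorphic is one of the
// accessors expanded from the builtin lists above.
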
} }  // namespace v8::internal