// deps/v8/src/arm/stub-cache-arm.cc @ revision f230a1cf
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // Number of the cache entry, not scaled.
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register base_addr = scratch;
  scratch = no_reg;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ add(offset_scratch, offset, Operand(offset, LSL, 1));
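  // offset + (offset << 1) == offset * 3, so offset_scratch now counts
  // pointer-size fields rather than entries; the LSL by kPointerSizeLog2
  // below turns it into a byte offset from key_offset.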

  // Calculate the base address of the entry.
  __ mov(base_addr, Operand(key_offset));
  __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));

  // Check that the key in the entry matches the name.
  __ ldr(ip, MemOperand(base_addr, 0));
  __ cmp(name, ip);
  __ b(ne, &miss);

  // Check the map matches.
  __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ cmp(ip, scratch2);
  __ b(ne, &miss);

  // Get the code entry from the cache.
  Register code = scratch2;
  scratch2 = no_reg;
  __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.
  Register flags_reg = base_addr;
  base_addr = no_reg;
  __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
  // It's a nice optimization if this constant is encodable in the bic insn.
  uint32_t mask = Code::kFlagsNotUsedInLookup;
  ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
  __ bic(flags_reg, flags_reg, Operand(mask));
  __ cmp(flags_reg, Operand(flags));
  __ b(ne, &miss);

#ifdef DEBUG
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

  // Jump to the first instruction in the code stub.
  __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Miss: fall through.
  __ bind(&miss);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be unique and receiver must be a heap object.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             Handle<Name> name,
                                             Register scratch0,
                                             Register scratch1) {
  ASSERT(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that receiver is a JSObject.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ b(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register.
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));


  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   receiver,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 12.
  ASSERT(sizeof(Entry) == 12);

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check that the scratch, extra, extra2 and extra3 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));
  ASSERT(!extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  uint32_t mask = kPrimaryTableSize - 1;
  // We shift out the last two bits because they are not part of the hash and
  // they are always 01 for maps.
  __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize));
  // Mask down the eor argument to the minimum to keep the immediate
  // ARM-encodable.
  __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
  // Prefer and_ to ubfx here because ubfx takes 2 cycles.
  __ and_(scratch, scratch, Operand(mask));
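  // At this point scratch holds the primary table index, in C++ terms
  // roughly (this has to stay in sync with the equivalent hash on the
  // C++ side in stub-cache.h):
  //   (((hash_field + map) >> kHeapObjectTagSize)
  //        ^ ((flags >> kHeapObjectTagSize) & mask)) & mask
  //   with mask == kPrimaryTableSize - 1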

  // Probe the primary table.
  ProbeTable(isolate,
             masm,
             flags,
             kPrimary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Primary miss: Compute hash for secondary probe.
  __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
  __ and_(scratch, scratch, Operand(mask2));
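  // Secondary index, mirroring the primary computation above:
  //   ((primary - (name >> kHeapObjectTagSize))
  //        + ((flags >> kHeapObjectTagSize) & mask2)) & mask2
  //   with mask2 == kSecondaryTableSize - 1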

  // Probe the secondary table.
  ProbeTable(isolate,
             masm,
             flags,
             kSecondary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ ldr(prototype,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ Move(ip, isolate->global_object());
  __ cmp(prototype, ip);
  __ b(ne, miss);
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ ldr(dst, FieldMemOperand(src, offset));
}
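
// Example: with inobject == true and index == 2, the function emits a single
// ldr from byte offset 2 * kPointerSize into the object; with
// inobject == false it first loads the out-of-line properties array and then
// indexes past FixedArray::kHeaderSize into that array.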


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}


// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}


// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, &check_wrapper);

  // Load length directly from the string.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ cmp(scratch1, Operand(JS_VALUE_TYPE));
  __ b(ne, miss);

  // Unwrap the value and check if the wrapped value is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
  __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
  __ Ret();
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
                                      Handle<Name> name,
                                      Register scratch,
                                      Label* miss) {
  Handle<Cell> cell = GlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ mov(scratch, Operand(cell));
  __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, miss);
}
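
// The cell itself is created eagerly at compile time (EnsurePropertyCell
// above), so the generated code only needs to verify that its value is still
// the hole, i.e. that the global property has not appeared since the stub
// was compiled.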


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Generate StoreTransition code, value is passed in r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* miss_label,
                                                Label* slow) {
  // r0 : value
  Label exit;

  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ Move(scratch1, constant);
    __ cmp(value_reg, scratch1);
    __ b(ne, miss_label);
  } else if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
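    // The transitioned field is stored as a fresh HeapNumber (storage_reg):
    // a smi value is untagged and converted to double via VFP, while an
    // existing heap number has its bits copied over directly.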
    Label do_store, heap_number;
    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch1, value_reg);
    __ vmov(s0, scratch1);
    __ vcvt_f64_s32(d0, s0);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ mov(r2, Operand(transition));
    __ Push(r2, r0);
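    // The three values pushed above (receiver, transition map, value) are
    // the arguments of the ExtendStorage runtime entry, matching the 3
    // passed to TailCallExternalReference below.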
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ mov(scratch1, Operand(transition));
  __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(r0));
    __ Ret();
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ str(storage_reg, FieldMemOperand(receiver_reg, offset));
    } else {
      __ str(value_reg, FieldMemOperand(receiver_reg, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array
    __ ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ str(storage_reg, FieldMemOperand(scratch1, offset));
    } else {
      __ str(value_reg, FieldMemOperand(scratch1, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register r0).
  ASSERT(value_reg.is(r0));
  __ bind(&exit);
  __ Ret();
}


// Generate StoreField code, value is passed in r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // r0 : value
  Label exit;

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ ldr(scratch1, FieldMemOperand(receiver_reg, offset));
    } else {
      __ ldr(scratch1,
             FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ ldr(scratch1, FieldMemOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch2, value_reg);
    __ vmov(s0, scratch2);
    __ vcvt_f64_s32(d0, s0);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
    // Return the value (register r0).
    ASSERT(value_reg.is(r0));
    __ Ret();
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(value_reg, FieldMemOperand(receiver_reg, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array
    __ ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(value_reg, FieldMemOperand(scratch1, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register r0).
  ASSERT(value_reg.is(r0));
  __ bind(&exit);
  __ Ret();
}


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Operand(name));
  }
}


static void GenerateCallFunction(MacroAssembler* masm,
                                 Handle<Object> object,
                                 const ParameterCount& arguments,
                                 Label* miss,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r0: receiver
  //  -- r1: function to call
  // -----------------------------------

  // Check that the function really is a function.
  __ JumpIfSmi(r1, miss);
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(r1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(interceptor));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
}
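
// Note the pushes above happen in index order, so the name pushed first ends
// up deepest on the stack, i.e. at args[0] on the runtime side, matching the
// STATIC_ASSERTed indices.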


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  __ mov(r0, Operand(StubCache::kInterceptorArgsLength));
  __ mov(r1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}


static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;
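
// These are the implicit slots of a FunctionCallbackInfo; every FCA::k*Index
// used below (context save, callee, call data, isolate, return-value default,
// return value, holder) addresses one of them.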

// Reserves space for the extra arguments to API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  __ mov(scratch, Operand(Smi::FromInt(0)));
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(scratch);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}


static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
                                      int argc,
                                      bool restore_context) {
  // ----------- S t a t e -------------
  //  -- sp[0] - sp[24]     : FunctionCallbackInfo, incl.
  //                        :  holder (set by CheckPrototypes)
  //  -- sp[28]             : last JS argument
  //  -- ...
  //  -- sp[(argc + 6) * 4] : first JS argument
  //  -- sp[(argc + 7) * 4] : receiver
  // -----------------------------------
  typedef FunctionCallbackArguments FCA;
  // Save calling context.
  __ str(cp, MemOperand(sp, FCA::kContextSaveIndex * kPointerSize));
  // Get the function and setup the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ Move(r5, function);
  __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));
  __ str(r5, MemOperand(sp, FCA::kCalleeIndex * kPointerSize));

  // Construct the FunctionCallbackInfo.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data(), masm->isolate());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ Move(r0, api_call_info);
    __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
  } else {
    __ Move(r6, call_data);
  }
  // Store call data.
  __ str(r6, MemOperand(sp, FCA::kDataIndex * kPointerSize));
  // Store isolate.
  __ mov(r5, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ str(r5, MemOperand(sp, FCA::kIsolateIndex * kPointerSize));
  // Store ReturnValue default and ReturnValue.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  __ str(r5, MemOperand(sp, FCA::kReturnValueOffset * kPointerSize));
  __ str(r5, MemOperand(sp, FCA::kReturnValueDefaultValueIndex * kPointerSize));

  // Prepare arguments.
  __ mov(r2, sp);

  // Allocate the FunctionCallbackInfo structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // r0 = FunctionCallbackInfo&
  // Arguments is after the return address.
  __ add(r0, sp, Operand(1 * kPointerSize));
  // FunctionCallbackInfo::implicit_args_
  __ str(r2, MemOperand(r0, 0 * kPointerSize));
  // FunctionCallbackInfo::values_
  __ add(ip, r2, Operand((kFastApiCallArguments - 1 + argc) * kPointerSize));
  __ str(ip, MemOperand(r0, 1 * kPointerSize));
  // FunctionCallbackInfo::length_ = argc
  __ mov(ip, Operand(argc));
  __ str(ip, MemOperand(r0, 2 * kPointerSize));
  // FunctionCallbackInfo::is_construct_call = 0
  __ mov(ip, Operand::Zero());
  __ str(ip, MemOperand(r0, 3 * kPointerSize));
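
  // The unwind count covers the kFastApiCallArguments implicit slots, the
  // argc JS arguments, and the receiver (the trailing + 1).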
  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun,
                                            type,
                                            masm->isolate());
  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
  ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
  ApiFunction thunk_fun(thunk_address);
  ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
      masm->isolate());

  AllowExternalCallThatCantCauseGC scope(masm);
  MemOperand context_restore_operand(
      fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
  MemOperand return_value_operand(fp,
                                  (2 + FCA::kReturnValueOffset) * kPointerSize);

  __ CallApiFunctionAndReturn(ref,
                              function_address,
                              thunk_ref,
                              r1,
                              kStackUnwindSpace,
                              return_value_operand,
                              restore_context ?
                                  &context_restore_operand : NULL);
}


// Generate call to api function.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                Register receiver,
                                Register scratch,
                                int argc,
                                Register* values) {
  ASSERT(optimization.is_simple_api_call());
  ASSERT(!receiver.is(scratch));

  typedef FunctionCallbackArguments FCA;
  const int stack_space = kFastApiCallArguments + argc + 1;
  // Assign stack space for the call arguments.
  __ sub(sp, sp, Operand(stack_space * kPointerSize));
  // Write holder to stack frame (here the receiver doubles as the holder).
  __ str(receiver, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
  // Write receiver to stack frame.
  int index = stack_space - 1;
  __ str(receiver, MemOperand(sp, index * kPointerSize));
  // Write the arguments to stack frame, below the receiver slot.
  for (int i = 0; i < argc; i++) {
    ASSERT(!receiver.is(values[i]));
    ASSERT(!scratch.is(values[i]));
    __ str(values[i], MemOperand(sp, --index * kPointerSize));
  }

  GenerateFastApiDirectCall(masm, optimization, argc, true);
}


class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          Code::ExtraICState extra_ic_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_ic_state_(extra_ic_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               Handle<Name> name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);
    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
    } else {
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
                     name, holder, miss);
    }
  }

 private:
  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        Handle<Name> name,
                        const CallOptimization& optimization,
                        Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
    Counters* counters = masm->isolate()->counters();
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      }
      can_do_fast_api_call =
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    }

    __ IncrementCounter(counters->call_const_interceptor(), 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,
                                      name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      GenerateFastApiDirectCall(
          masm, optimization, arguments_.immediate(), false);
    } else {
      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
          ? CALL_AS_FUNCTION
          : CALL_AS_METHOD;
      Handle<JSFunction> function = optimization.constant_function();
      ParameterCount expected(function);
      __ InvokeFunction(function, expected, arguments_,
                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ b(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }
  }

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<Name> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, miss_label);

    // Call a runtime function to load the interceptor property.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);
    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        StubCache::kInterceptorArgsLength);
    // Restore the name_ register.
    __ pop(name_);
    // Leave the internal frame.
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(holder, name_);
      CompileCallLoadPropertyWithInterceptor(masm,
                                             receiver,
                                             holder,
                                             name_,
                                             holder_obj);
      __ pop(name_);  // Restore the name.
      __ pop(receiver);  // Restore the holder.
    }
    // If interceptor returns no-result sentinel, call the constant function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch);
    __ b(ne, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  Code::ExtraICState extra_ic_state_;
};


// Calls GenerateCheckPropertyCell for each global object in the prototype chain
// from object to (but not including) holder.
static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
                                       Handle<Name> name,
                                       Register scratch,
                                       Label* miss) {
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
                                name,
                                scratch,
                                miss);
    }
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
  }
}
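
// Map checks alone do not suffice for global objects: defining a property on
// a global object fills in its property cell without necessarily changing
// the map, hence the explicit hole check per cell.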


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())
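
// From here on the helpers are compiler members, so the assembler is reached
// through the masm() accessor rather than through a masm parameter.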
1188

    
1189

    
1190
Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
1191
                                       Register object_reg,
1192
                                       Handle<JSObject> holder,
1193
                                       Register holder_reg,
1194
                                       Register scratch1,
1195
                                       Register scratch2,
1196
                                       Handle<Name> name,
1197
                                       int save_at_depth,
1198
                                       Label* miss,
1199
                                       PrototypeCheckType check) {
1200
  // Make sure that the type feedback oracle harvests the receiver map.
1201
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
1202
  __ mov(scratch1, Operand(Handle<Map>(object->map())));
1203

    
  Handle<JSObject> first = object;
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  typedef FunctionCallbackArguments FCA;
  if (save_at_depth == depth) {
    __ str(reg, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      Register map_reg = scratch1;
      if (!current.is_identical_to(first) || check == CHECK_ALL_MAPS) {
        Handle<Map> current_map(current->map());
        // CheckMap implicitly loads the map of |reg| into |map_reg|.
        __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
      } else {
        __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ ldr(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ mov(reg, Operand(prototype));
      }
    }

    if (save_at_depth == depth) {
      __ str(reg, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!holder.is_identical_to(first) || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, scratch1, Handle<Map>(holder->map()), miss,
                DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify that
  // their maps haven't changed.  We also need to check that the property
  // cell for the property is still empty.
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);

  // Return the register containing the holder.
  return reg;
}
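// On fall-through of the code generated above, every map on the chain from
// |object| to |holder| has been checked, the returned register (object_reg
// or holder_reg) holds the holder, and the scratch registers may have been
// clobbered; any failed check branches to |miss| instead.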


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
                                             Label* success,
                                             Label* miss) {
  if (!miss->is_unused()) {
    __ b(success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
                                              Label* success,
                                              Label* miss) {
  if (!miss->is_unused()) {
    __ b(success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<JSObject> object,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Label* success,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(object, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary.
    Register dictionary = scratch4();
    __ ldr(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch3 contains the
    // pointer into the dictionary. Check that the value is the callback.
    Register pointer = scratch3();
    const int kElementsStartOffset = NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ ldr(scratch2(), FieldMemOperand(pointer, kValueOffset));
    __ cmp(scratch2(), Operand(callback));
    __ b(ne, &miss);
  }

  HandlerFrontendFooter(name, success, &miss);
  return reg;
}


void LoadStubCompiler::NonexistentHandlerFrontend(
    Handle<JSObject> object,
    Handle<JSObject> last,
    Handle<Name> name,
    Label* success,
    Handle<GlobalObject> global) {
  Label miss;

  HandlerFrontendHeader(object, receiver(), last, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (!global.is_null()) {
    GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
  }

  HandlerFrontendFooter(name, success, &miss);
}


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(r0, value);
  __ Ret();
}


void LoadStubCompiler::GenerateLoadCallback(
    const CallOptimization& call_optimization) {
  GenerateFastApiCall(
      masm(), call_optimization, receiver(), scratch3(), 0, NULL);
}


void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Build AccessorInfo::args_ list on the stack and push property name below
  // the exit frame to make GC aware of them and store pointers to them.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  ASSERT(!scratch2().is(reg));
  ASSERT(!scratch3().is(reg));
  ASSERT(!scratch4().is(reg));
  __ push(receiver());
  if (heap()->InNewSpace(callback->data())) {
    __ Move(scratch3(), callback);
    __ ldr(scratch3(), FieldMemOperand(scratch3(),
                                       ExecutableAccessorInfo::kDataOffset));
  } else {
    __ Move(scratch3(), Handle<Object>(callback->data(), isolate()));
  }
  __ push(scratch3());
  __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
  __ mov(scratch4(), scratch3());
  __ Push(scratch3(), scratch4());
  __ mov(scratch4(),
         Operand(ExternalReference::isolate_address(isolate())));
  __ Push(scratch4(), reg);
  __ mov(scratch2(), sp);  // scratch2 = PropertyAccessorInfo::args_
  __ push(name());
  __ mov(r0, sp);  // r0 = Handle<Name>
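  // At this point the stack holds, from sp upwards: the name (r0 points
  // here), then the args_ block that scratch2 points at, matching the
  // STATIC_ASSERTs above: holder, isolate, return value default (undefined),
  // return value (undefined), data, and finally the receiver.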

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm(), StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // Create PropertyAccessorInfo instance on the stack above the exit frame
  // with scratch2 (internal::Object** args_) as the data.
  __ str(scratch2(), MemOperand(sp, 1 * kPointerSize));
  __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = AccessorInfo&

  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
  Address getter_address = v8::ToCData<Address>(callback->getter());

  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());

  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
  ExternalReference::Type thunk_type =
      ExternalReference::PROFILING_GETTER_CALL;
  ApiFunction thunk_fun(thunk_address);
  ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
      isolate());
  __ CallApiFunctionAndReturn(ref,
                              getter_address,
                              thunk_ref,
                              r2,
                              kStackUnwindSpace,
                              MemOperand(fp, 6 * kPointerSize),
                              NULL);
}


void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<JSObject> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them; other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++ code,
    // the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
      if (must_preserve_receiver_reg) {
        __ Push(receiver(), holder_reg, this->name());
      } else {
        __ Push(holder_reg, this->name());
      }
      // Invoke an interceptor.  Note: map checks from receiver to
      // interceptor's holder have been compiled before (see a caller
      // of this method).
      CompileCallLoadPropertyWithInterceptor(masm(),
                                             receiver(),
                                             holder_reg,
                                             this->name(),
                                             interceptor_holder);
      // Check if the interceptor provided a value for the property.  If
      // so, return immediately.
      Label interceptor_failed;
      __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
      __ cmp(r0, scratch1());
      __ b(eq, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ Ret();

      __ bind(&interceptor_failed);
      __ pop(this->name());
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver());
      }
      // Leave the internal frame.
    }

    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
                          isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}


void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ cmp(r2, Operand(name));
    __ b(ne, miss);
  }
}


void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<Name> name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  // Check that the maps haven't changed.
  __ JumpIfSmi(r0, miss);
  CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
}


void CallStubCompiler::GenerateLoadFunctionFromCell(
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Label* miss) {
  // Get the value from the cell.
  __ mov(r3, Operand(cell));
  __ ldr(r1, FieldMemOperand(r3, Cell::kValueOffset));

  // Check that the cell contains the same function.
  if (heap()->InNewSpace(*function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ JumpIfSmi(r1, miss);
    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
    __ b(ne, miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ cmp(r4, r3);
  } else {
    __ cmp(r1, Operand(function));
  }
  __ b(ne, miss);
}


void CallStubCompiler::GenerateMissBranch() {
  Handle<Code> code =
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                               kind_,
                                               extra_state_);
  __ Jump(code, RelocInfo::CODE_TARGET);
}


Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
                                                Handle<JSObject> holder,
                                                PropertyIndex index,
                                                Handle<Name> name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();

  // Get the receiver of the function from the stack into r0.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(r0, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
  GenerateFastPropertyLoad(masm(), r1, reg, index.is_inobject(holder),
                           index.translate(holder), Representation::Tagged());

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::FIELD, name);
}


Handle<Code> CallStubCompiler::CompileArrayCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  Label miss;

  // Check that the function is still the Array function.
  const int argc = arguments().immediate();
  GenerateNameCheck(name, &miss);
  Register receiver = r1;

  if (cell.is_null()) {
    __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, &miss);

    // Check that the maps haven't changed.
    CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, r3, r0,
                    r4, name, &miss);
  } else {
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
  Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
  __ mov(r0, Operand(argc));
  __ mov(r2, Operand(site_feedback_cell));
  __ mov(r1, Operand(function));

  ArrayConstructorStub stub(isolate());
  __ TailCallStub(&stub);

  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileArrayPushCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss;
  GenerateNameCheck(name, &miss);

  Register receiver = r1;
  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, r3, r0, r4,
                  name, &miss);

  if (argc == 0) {
    // Nothing to do, just return the length.
    __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
    __ Drop(argc + 1);
    __ Ret();
  } else {
    Label call_builtin;

    if (argc == 1) {  // Otherwise fall through to call the builtin.
      Label attempt_to_grow_elements, with_write_barrier, check_double;

      Register elements = r6;
      Register end_elements = r5;
      // Get the elements array of the object.
      __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

      // Check that the elements are in fast mode and writable.
      __ CheckMap(elements,
                  r0,
                  Heap::kFixedArrayMapRootIndex,
                  &check_double,
                  DONT_DO_SMI_CHECK);

      // Get the array's length into r0 and calculate new length.
      __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      __ add(r0, r0, Operand(Smi::FromInt(argc)));

      // Get the elements' length.
      __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmp(r0, r4);
      __ b(gt, &attempt_to_grow_elements);

      // Check if value is a smi.
      __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
      __ JumpIfNotSmi(r4, &with_write_barrier);

      // Save new length.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

      // Store the value.
      // We may need a register containing the address end_elements below,
      // so write back the value in end_elements.
      __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0));
      const int kEndElementsOffset =
          FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
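      // end_elements points at elements + new_length * kPointerSize
      // (PointerOffsetFromSmiKey untags the smi in r0), so storing with a
      // PreIndex offset of kHeaderSize - kHeapObjectTag - argc * kPointerSize
      // addresses the slot of the newly pushed element and leaves that
      // address in end_elements for later use.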
      __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));

      // Done; the new length (a smi) is already in r0.
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&check_double);

      // Check that the elements are in fast mode and writable.
      __ CheckMap(elements,
                  r0,
                  Heap::kFixedDoubleArrayMapRootIndex,
                  &call_builtin,
                  DONT_DO_SMI_CHECK);

      // Get the array's length into r0 and calculate new length.
      __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      __ add(r0, r0, Operand(Smi::FromInt(argc)));

      // Get the elements' length.
      __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmp(r0, r4);
      __ b(gt, &call_builtin);

      __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
      __ StoreNumberToDoubleElements(r4, r0, elements, r5, d0,
                                     &call_builtin, argc * kDoubleSize);

      // Save new length.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

      // Done; the new length (a smi) is already in r0.
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&with_write_barrier);

      __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));

      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
        Label fast_object, not_fast_object;
        __ CheckFastObjectElements(r3, r9, &not_fast_object);
        __ jmp(&fast_object);
        // In case of fast smi-only, convert to fast object, otherwise bail out.
        __ bind(&not_fast_object);
        __ CheckFastSmiElements(r3, r9, &call_builtin);

        __ ldr(r9, FieldMemOperand(r4, HeapObject::kMapOffset));
        __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
        __ cmp(r9, ip);
        __ b(eq, &call_builtin);
        // r1: receiver
        // r3: map
        Label try_holey_map;
        __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                               FAST_ELEMENTS,
                                               r3,
                                               r9,
                                               &try_holey_map);
        __ mov(r2, receiver);
        ElementsTransitionGenerator::
            GenerateMapChangeElementsTransition(masm(),
                                                DONT_TRACK_ALLOCATION_SITE,
                                                NULL);
        __ jmp(&fast_object);

        __ bind(&try_holey_map);
        __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
                                               FAST_HOLEY_ELEMENTS,
                                               r3,
                                               r9,
                                               &call_builtin);
        __ mov(r2, receiver);
        ElementsTransitionGenerator::
            GenerateMapChangeElementsTransition(masm(),
                                                DONT_TRACK_ALLOCATION_SITE,
                                                NULL);
        __ bind(&fast_object);
      } else {
        __ CheckFastObjectElements(r3, r3, &call_builtin);
      }

      // Save new length.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

      // Store the value.
      // We may need a register containing the address end_elements below,
      // so write back the value in end_elements.
      __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0));
      __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));

      __ RecordWrite(elements,
                     end_elements,
                     r4,
                     kLRHasNotBeenSaved,
                     kDontSaveFPRegs,
                     EMIT_REMEMBERED_SET,
                     OMIT_SMI_CHECK);
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&attempt_to_grow_elements);
      // r0: array's length + 1.

      if (!FLAG_inline_new) {
        __ b(&call_builtin);
      }

      __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize));
      // Growing elements that are SMI-only requires special handling in case
      // the new element is non-Smi. For now, delegate to the builtin.
      Label no_fast_elements_check;
      __ JumpIfSmi(r2, &no_fast_elements_check);
      __ ldr(r9, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ CheckFastObjectElements(r9, r9, &call_builtin);
      __ bind(&no_fast_elements_check);

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(isolate());
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(isolate());

      const int kAllocationDelta = 4;
      // Load top and check if it is the end of elements.
      __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0));
      __ add(end_elements, end_elements, Operand(kEndElementsOffset));
      __ mov(r4, Operand(new_space_allocation_top));
      __ ldr(r3, MemOperand(r4));
      __ cmp(end_elements, r3);
      __ b(ne, &call_builtin);
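      // The end of the elements backing store coincides with the current
      // new-space allocation top, so (limit permitting, checked below) the
      // store can be grown in place simply by bumping the top by
      // kAllocationDelta words; no copying is needed.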

      __ mov(r9, Operand(new_space_allocation_limit));
      __ ldr(r9, MemOperand(r9));
      __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
      __ cmp(r3, r9);
      __ b(hi, &call_builtin);

      // We fit and could grow elements.
      // Update new_space_allocation_top.
      __ str(r3, MemOperand(r4));
      // Push the argument.
      __ str(r2, MemOperand(end_elements));
      // Fill the rest with holes.
      __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {
        __ str(r3, MemOperand(end_elements, i * kPointerSize));
      }

      // Update elements' and array's sizes.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
      __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
      __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Elements are in new space, so write barrier is not required.
      __ Drop(argc + 1);
      __ Ret();
    }
    __ bind(&call_builtin);
    __ TailCallExternalReference(
        ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1);
  }

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileArrayPopCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss, return_undefined, call_builtin;
  Register receiver = r1;
  Register elements = r3;
  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
                  r4, r0, name, &miss);

  // Get the elements array of the object.
  __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ CheckMap(elements,
              r0,
              Heap::kFixedArrayMapRootIndex,
              &call_builtin,
              DONT_DO_SMI_CHECK);

  // Get the array's length into r4 and calculate new length.
  __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
  __ b(lt, &return_undefined);

  // Get the last element.
  __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
  // We can't address the last element in one operation. Compute the more
  // expensive shift first, and use an offset later on.
  __ add(elements, elements, Operand::PointerOffsetFromSmiKey(r4));
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ cmp(r0, r6);
  __ b(eq, &call_builtin);
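  // The hole means the element is not stored on the array itself (it may
  // have to come from the prototype chain), so defer to the builtin rather
  // than returning the hole value.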

  // Set the array's length.
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));

  // Fill with the hole.
  __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&return_undefined);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();
  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }
  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(
      Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
      r0, holder, r1, r3, r4, name, &miss);

  Register receiver = r1;
  Register index = r4;
  Register result = r0;
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator generator(receiver,
                                      index,
                                      result,
                                      &miss,  // When not a string.
                                      &miss,  // When not a number.
                                      index_out_of_range_label,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(r0, Heap::kNanValueRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore function name in r2.
  __ Move(r2, name);
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileStringCharAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();
  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;
  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }
  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(
      Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
      r0, holder, r1, r3, r4, name, &miss);

  Register receiver = r0;
  Register index = r4;
  Register scratch = r3;
  Register result = r0;
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator generator(receiver,
                                  index,
                                  scratch,
                                  result,
                                  &miss,  // When not a string.
                                  &miss,  // When not a number.
                                  index_out_of_range_label,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(r0, Heap::kempty_stringRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore function name in r2.
  __ Move(r2, name);
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    __ JumpIfSmi(r1, &miss);

    CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
                    name, &miss);
  } else {
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = r1;
  __ ldr(code, MemOperand(sp, 0 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  __ JumpIfNotSmi(code, &slow);

  // Convert the smi code to uint16.
  __ and_(code, code, Operand(Smi::FromInt(0xffff)));
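  // Smi::FromInt(0xffff) is 0xffff in smi encoding, so the mask truncates
  // the untagged value to 16 bits while leaving the smi tag bit clear; the
  // result is still a valid smi in [0, 0xffff].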

  StringCharFromCodeGenerator generator(code, r0);
  generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);

  __ bind(&miss);
  // r2: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileMathFloorCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();
  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  Label miss, slow;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    __ JumpIfSmi(r1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
                    name, &miss);
  } else {
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into r0.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));

  // If the argument is a smi, just return.
  __ SmiTst(r0);
  __ Drop(argc + 1, eq);
  __ Ret(eq);
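  // SmiTst sets the Z flag iff r0 carries a smi tag, and Drop and Ret both
  // accept an ARM condition code, so a smi argument (already an integer) is
  // returned unchanged without a branch.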

  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);

  Label smi_check, just_return;

  // Load the HeapNumber value.
  // We will need access to the value in the core registers, so we load it
  // with ldrd and move it to the fpu. It also spares a sub instruction for
  // updating the HeapNumber value address, as vldr expects a multiple
  // of 4 offset.
  __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
  __ vmov(d1, r4, r5);

  // Check for NaN, Infinities and -0.
  // They are invariant through a Math.Floor call, so just
  // return the original argument.
  __ Sbfx(r3, r5, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
  __ cmp(r3, Operand(-1));
  __ b(eq, &just_return);
  __ eor(r3, r5, Operand(0x80000000u));
  __ orr(r3, r3, r4, SetCC);
  __ b(eq, &just_return);
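  // Sbfx sign-extends the extracted exponent field, so an all-ones exponent
  // (NaN or an infinity) compares equal to -1. For the -0 test, xoring the
  // high word with 0x80000000 clears the sign bit and orring in the low word
  // sets the flags: the result is zero exactly when the value is -0.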
  // Test for values that can be exactly represented as a
  // signed 32-bit integer.
  __ TryDoubleToInt32Exact(r0, d1, d2);
  // If exact, check that the result fits into a smi.
  __ b(eq, &smi_check);
  __ cmp(r5, Operand(0));

  // If the input is in (+0, +inf), the cmp has cleared overflow and negative
  // (V=0 and N=0), the two following instructions won't execute and
  // we fall through to smi_check to check if the result can fit into a smi.

  // If the input is in (-inf, -0), subtract one and go to slow if we have
  // an overflow. Else we fall through to the smi check.
  // Hint: if x is a negative, non-integer number,
  // floor(x) <=> round_to_zero(x) - 1.
  __ sub(r0, r0, Operand(1), SetCC, mi);
  __ b(vs, &slow);
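  // At this point r0 holds the round-to-zero conversion of the input and the
  // flags reflect cmp(r5, 0) on the sign word: mi means the input is
  // negative, and since the exact-conversion test failed it is not an
  // integer, so floor(x) == trunc(x) - 1. The conditional subtract applies
  // that correction; vs catches the overflow case for the most negative
  // result.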

  __ bind(&smi_check);
  // Check if the result can fit into a smi. If we had an overflow,
  // the result is either 0x80000000 or 0x7FFFFFFF and won't fit into a smi.
  // If the result doesn't fit into a smi, branch to slow.
  __ SmiTag(r0, SetCC);
  __ b(vs, &slow);

  __ bind(&just_return);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&slow);
  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);

  __ bind(&miss);
  // r2: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileMathAbsCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();
  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  Label miss;
  GenerateNameCheck(name, &miss);
  if (cell.is_null()) {
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    __ JumpIfSmi(r1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
                    name, &miss);
  } else {
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into r0.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  __ JumpIfNotSmi(r0, &not_smi);

  // Do bitwise not or do nothing depending on the sign of the
  // argument.
  __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));

  // Add 1 or do nothing depending on the sign of the argument.
  __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ b(mi, &slow);
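  // The eor/sub pair computes |x| branch-free: for x < 0 the arithmetic
  // shift yields all ones, so r1 = x ^ -1 = ~x and r0 = ~x - (-1) = -x;
  // for x >= 0 the shift yields zero and both operations are no-ops.
  // E.g. for x = -5: ~(-5) = 4 and 4 - (-1) = 5. Only the most negative
  // value stays negative, hence the mi branch above.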

  // Smi case done.
  __ Drop(argc + 1);
  __ Ret();

  // Check if the argument is a heap number and load its exponent and
  // sign.
  __ bind(&not_smi);
  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
  __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ tst(r1, Operand(HeapNumber::kSignMask));
  __ b(ne, &negative_sign);
  __ Drop(argc + 1);
  __ Ret();

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ eor(r1, r1, Operand(HeapNumber::kSignMask));
  __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
  __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
  __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ Drop(argc + 1);
  __ Ret();

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);

  __ bind(&miss);
  // r2: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  Counters* counters = isolate()->counters();

  ASSERT(optimization.is_simple_api_call());
  // Bail out if object is a global object as we don't want to
  // repatch it to global receiver.
  if (object->IsGlobalObject()) return Handle<Code>::null();
  if (!cell.is_null()) return Handle<Code>::null();
  if (!object->IsJSObject()) return Handle<Code>::null();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      Handle<JSObject>::cast(object), holder);
  if (depth == kInvalidProtoDepth) return Handle<Code>::null();
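  // A simple API call is only compiled when the expected receiver type pins
  // down the holder at a known prototype depth; kInvalidProtoDepth means it
  // does not, so fall back to the regular call path via a null handle.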
2535

    
2536
  Label miss, miss_before_stack_reserved;
2537
  GenerateNameCheck(name, &miss_before_stack_reserved);
2538

    
2539
  // Get the receiver from the stack.
2540
  const int argc = arguments().immediate();
2541
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
2542

    
2543
  // Check that the receiver isn't a smi.
2544
  __ JumpIfSmi(r1, &miss_before_stack_reserved);
2545

    
2546
  __ IncrementCounter(counters->call_const(), 1, r0, r3);
2547
  __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);
2548

    
2549
  ReserveSpaceForFastApiCall(masm(), r0);
2550

    
2551
  // Check that the maps haven't changed and find a Holder as a side effect.
2552
  CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4, name,
2553
                  depth, &miss);
2554

    
2555
  GenerateFastApiDirectCall(masm(), optimization, argc, false);
2556

    
2557
  __ bind(&miss);
2558
  FreeSpaceForFastApiCall(masm());
2559

    
2560
  __ bind(&miss_before_stack_reserved);
2561
  GenerateMissBranch();
2562

    
2563
  // Return the generated code.
2564
  return GetCode(function);
2565
}
2566

    
2567

    
2568
void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
2569
                                              Handle<JSObject> holder,
2570
                                              Handle<Name> name,
2571
                                              CheckType check,
2572
                                              Label* success) {
2573
  // ----------- S t a t e -------------
2574
  //  -- r2    : name
2575
  //  -- lr    : return address
2576
  // -----------------------------------
2577
  Label miss;
2578
  GenerateNameCheck(name, &miss);
2579

    
2580
  // Get the receiver from the stack
2581
  const int argc = arguments().immediate();
2582
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
2583

    
2584
  // Check that the receiver isn't a smi.
2585
  if (check != NUMBER_CHECK) {
2586
    __ JumpIfSmi(r1, &miss);
2587
  }
2588

    
2589
  // Make sure that it's okay not to patch the on stack receiver
2590
  // unless we're doing a receiver map check.
2591
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2592
  switch (check) {
2593
    case RECEIVER_MAP_CHECK:
2594
      __ IncrementCounter(isolate()->counters()->call_const(), 1, r0, r3);
2595

    
2596
      // Check that the maps haven't changed.
2597
      CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
2598
                      name, &miss);
2599

    
2600
      // Patch the receiver on the stack with the global proxy if
2601
      // necessary.
2602
      if (object->IsGlobalObject()) {
2603
        __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2604
        __ str(r3, MemOperand(sp, argc * kPointerSize));
2605
      }
2606
      break;
2607

    
2608
    case STRING_CHECK:
2609
      // Check that the object is a string.
2610
      __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
2611
      __ b(ge, &miss);
2612
      // Check that the maps starting from the prototype haven't changed.
2613
      GenerateDirectLoadGlobalFunctionPrototype(
2614
          masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
2615
      CheckPrototypes(
2616
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
2617
          r0, holder, r3, r1, r4, name, &miss);
2618
      break;
2619

    
2620
    case SYMBOL_CHECK:
2621
      // Check that the object is a symbol.
2622
      __ CompareObjectType(r1, r1, r3, SYMBOL_TYPE);
2623
      __ b(ne, &miss);
2624
      // Check that the maps starting from the prototype haven't changed.
2625
      GenerateDirectLoadGlobalFunctionPrototype(
2626
          masm(), Context::SYMBOL_FUNCTION_INDEX, r0, &miss);
2627
      CheckPrototypes(
2628
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
2629
          r0, holder, r3, r1, r4, name, &miss);
2630
      break;
2631

    
2632
    case NUMBER_CHECK: {
2633
      Label fast;
2634
      // Check that the object is a smi or a heap number.
2635
      __ JumpIfSmi(r1, &fast);
2636
      __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
2637
      __ b(ne, &miss);
2638
      __ bind(&fast);
2639
      // Check that the maps starting from the prototype haven't changed.
2640
      GenerateDirectLoadGlobalFunctionPrototype(
2641
          masm(), Context::NUMBER_FUNCTION_INDEX, r0, &miss);
2642
      CheckPrototypes(
2643
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
2644
          r0, holder, r3, r1, r4, name, &miss);
2645
      break;
2646
    }
2647
    case BOOLEAN_CHECK: {
2648
      Label fast;
2649
      // Check that the object is a boolean.
2650
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
2651
      __ cmp(r1, ip);
2652
      __ b(eq, &fast);
2653
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
2654
      __ cmp(r1, ip);
2655
      __ b(ne, &miss);
2656
      __ bind(&fast);
2657
      // Check that the maps starting from the prototype haven't changed.
2658
      GenerateDirectLoadGlobalFunctionPrototype(
2659
          masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
2660
      CheckPrototypes(
2661
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
2662
          r0, holder, r3, r1, r4, name, &miss);
2663
      break;
2664
    }
2665
  }
2666

    
2667
  __ b(success);
2668

    
2669
  // Handle call cache miss.
2670
  __ bind(&miss);
2671
  GenerateMissBranch();
2672
}
2673

    
2674

    
2675
void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), call_kind);
}


Handle<Code> CallStubCompiler::CompileCallConstant(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    CheckType check,
    Handle<JSFunction> function) {
  if (HasCustomCallGenerator(function)) {
    Handle<Code> code = CompileCustomCall(object, holder,
                                          Handle<Cell>::null(),
                                          function, Handle<String>::cast(name),
                                          Code::CONSTANT);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;
  }

  Label success;

  CompileHandlerFrontend(object, holder, name, check, &success);
  __ bind(&success);
  CompileHandlerBackend(function);

  // Return the generated code.
  return GetCode(function);
}


Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
                                                      Handle<JSObject> holder,
                                                      Handle<Name> name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

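  // Let the interceptor compiler emit the lookup through the interceptor;
  // on the fast path it leaves the function to call in r0.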
  CallInterceptorCompiler compiler(this, arguments(), r2, extra_state_);
  compiler.Compile(masm(), object, holder, name, &lookup, r1, r3, r4, r0,
                   &miss);

  // Move returned value, the function to call, to r1.
  __ mov(r1, r0);
  // Restore receiver.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::INTERCEPTOR, name);
}


Handle<Code> CallStubCompiler::CompileCallGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<PropertyCell> cell,
    Handle<JSFunction> function,
    Handle<Name> name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  if (HasCustomCallGenerator(function)) {
    Handle<Code> code = CompileCustomCall(
        object, holder, cell, function, Handle<String>::cast(name),
        Code::NORMAL);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();
  GenerateGlobalReceiverCheck(object, holder, name, &miss);
  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, argc * kPointerSize));
  }

  // Set up the context (function already in r1).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
  ParameterCount expected(function->shared()->formal_parameter_count());
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
                NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::NORMAL, name);
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Label success;
  HandlerFrontend(object, receiver(), holder, name, &success);
  __ bind(&success);

  // Stub never generated for non-global objects that require access checks.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

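  // Push the four arguments expected by the StoreCallbackProperty runtime
  // function: receiver, callback info, name and value.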
  __ push(receiver());  // receiver
  __ mov(ip, Operand(callback));  // callback info
  __ push(ip);
  __ mov(ip, Operand(name));
  __ Push(ip, value());

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Return the generated code.
  return GetCode(kind(), Code::CALLBACKS, name);
}

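// Variant of the callback store stub for setters that are API functions:
// instead of tail-calling into the runtime, the value is handed over
// through the fast API call machinery.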
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    const CallOptimization& call_optimization) {
  Label success;
  HandlerFrontend(object, receiver(), holder, name, &success);
  __ bind(&success);

  Register values[] = { value() };
  GenerateFastApiCall(
      masm(), call_optimization, receiver(), scratch3(), 1, values);

  // Return the generated code.
  return GetCode(kind(), Code::CALLBACKS, name);
}


#undef __
#define __ ACCESS_MASM(masm)

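// A store to a JavaScript accessor property ends up here, e.g.:
//
//   var o = {};
//   Object.defineProperty(o, 'p', { set: function(v) { /* ... */ } });
//   o.p = 1;  // routed through this stub once the IC has seen the setter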
void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(r0);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      __ Push(r1, r0);
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(r0);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  Label miss;

  // Check that the map of the object hasn't changed.
  __ CheckMap(receiver(), scratch1(), Handle<Map>(object->map()), &miss,
              DO_SMI_CHECK);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss);
  }

  // Stub is never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ Push(receiver(), this->name(), value());

  __ mov(scratch1(), Operand(Smi::FromInt(strict_mode())));
  __ push(scratch1());  // strict mode

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetCode(kind(), Code::INTERCEPTOR, name);
}

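// Compiled for loads of a property known to be absent from the receiver
// and its entire prototype chain; as long as none of the maps on that
// chain change, the stub can answer with undefined directly.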
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
    Handle<JSObject> object,
    Handle<JSObject> last,
    Handle<Name> name,
    Handle<GlobalObject> global) {
  Label success;

  NonexistentHandlerFrontend(object, last, name, &success, global);

  __ bind(&success);
  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  // Return the generated code.
  return GetCode(kind(), Code::NONEXISTENT, name);
}

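// Register conventions for the stub compilers on ARM: each table fixes
// which registers carry the receiver, name, value and scratches for the
// corresponding stub kind.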
Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { r0, r2, r3, r1, r4, r5 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { r1, r0, r2, r3, r4, r5 };
  return registers;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { r1, r2, r0, r3, r4, r5 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { r2, r1, r0, r3, r4, r5 };
  return registers;
}

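// Keyed stubs are compiled for one specific property name, but the name
// arrives in a register at runtime, so it must be compared against the
// expected handle explicitly.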
void KeyedLoadStubCompiler::GenerateNameCheck(Handle<Name> name,
                                              Register name_reg,
                                              Label* miss) {
  __ cmp(name_reg, Operand(name));
  __ b(ne, miss);
}


void KeyedStoreStubCompiler::GenerateNameCheck(Handle<Name> name,
                                               Register name_reg,
                                               Label* miss) {
  __ cmp(name_reg, Operand(name));
  __ b(ne, miss);
}


#undef __
#define __ ACCESS_MASM(masm)

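// A load of a JavaScript accessor property ends up here, e.g.:
//
//   var o = { get p() { return 42; } };
//   o.p;  // routed through this stub once the IC has seen the getter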
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label success, miss;

  __ CheckMap(
      receiver(), scratch1(), Handle<Map>(object->map()), &miss, DO_SMI_CHECK);
  HandlerFrontendHeader(
      object, receiver(), Handle<JSObject>::cast(global), name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(cell));
  __ ldr(r4, FieldMemOperand(r3, Cell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(r4, ip);
    __ b(eq, &miss);
  }

  HandlerFrontendFooter(name, &success, &miss);
  __ bind(&success);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
  __ mov(r0, r4);
  __ Ret();

  // Return the generated code.
  return GetICCode(kind(), Code::NORMAL, name);
}


Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    MapHandleList* receiver_maps,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY) {
    GenerateNameCheck(name, this->name(), &miss);
  }

  __ JumpIfSmi(receiver(), &miss);
  Register map_reg = scratch1();

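  // Dispatch on the receiver's map: compare it against each candidate map
  // and jump to the matching handler. Deprecated maps are skipped, since
  // objects holding them are expected to be migrated to an up-to-date map
  // before the IC is reached again.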
  int receiver_count = receiver_maps->length();
  int number_of_handled_maps = 0;
  __ ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Map> map = receiver_maps->at(current);
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      __ mov(ip, Operand(receiver_maps->at(current)));
      __ cmp(map_reg, ip);
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq);
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss);

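  // As above, dispatch on the receiver's map, but a keyed store may also
  // carry a map transition: in that case the target map is loaded into
  // transition_map() before jumping to the handler stub.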
  int receiver_count = receiver_maps->length();
  __ ldr(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_count; ++i) {
    __ mov(ip, Operand(receiver_maps->at(i)));
    __ cmp(scratch1(), ip);
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq);
    } else {
      Label next_map;
      __ b(ne, &next_map);
      __ mov(transition_map(), Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow, miss_force_generic;

  Register key = r0;
  Register receiver = r1;

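  // Untag the smi key into r2 and probe the receiver's number dictionary;
  // on success the element value is returned in r0.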
  __ UntagAndJumpIfNotSmi(r2, key, &miss_force_generic);
  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
  __ Ret();

  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),
      1, r2, r3);

  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Miss case, call the runtime.
  __ bind(&miss_force_generic);

  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_MissForceGeneric);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM