// deps/v8/src/mips/stub-cache-mips.cc @ f230a1cf
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_MIPS

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // Number of the cache entry, not scaled.
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register base_addr = scratch;
  scratch = no_reg;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ sll(offset_scratch, offset, 1);
  __ Addu(offset_scratch, offset_scratch, offset);

  // Calculate the base address of the entry.
  __ li(base_addr, Operand(key_offset));
  __ sll(at, offset_scratch, kPointerSizeLog2);
  __ Addu(base_addr, base_addr, at);
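  // base_addr now points at the probed entry's key slot, i.e.
  // key_offset + (offset * 3) * kPointerSize; the matching code and map
  // slots are read below at their fixed deltas from the key array.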

  // Check that the key in the entry matches the name.
  __ lw(at, MemOperand(base_addr, 0));
  __ Branch(&miss, ne, name, Operand(at));

  // Check the map matches.
  __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Branch(&miss, ne, at, Operand(scratch2));

  // Get the code entry from the cache.
  Register code = scratch2;
  scratch2 = no_reg;
  __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.
  Register flags_reg = base_addr;
  base_addr = no_reg;
  __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
  __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
  __ Branch(&miss, ne, flags_reg, Operand(flags));

#ifdef DEBUG
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

  // Jump to the first instruction in the code stub.
  __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);

  // Miss: fall through.
  __ bind(&miss);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be unique and receiver must be a heap object.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             Handle<Name> name,
                                             Register scratch0,
                                             Register scratch1) {
  ASSERT(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));


  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   receiver,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 12.
  ASSERT(sizeof(Entry) == 12);
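  // 12 bytes is three pointer-size fields per entry on MIPS32 (key, value,
  // map), matching the multiply-by-3 in ProbeTable above.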

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check register validity.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));
  ASSERT(!extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Addu(scratch, scratch, at);
  uint32_t mask = kPrimaryTableSize - 1;
  // We shift out the last two bits because they are not part of the hash and
  // they are always 01 for maps.
  __ srl(scratch, scratch, kHeapObjectTagSize);
  __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
  __ And(scratch, scratch, Operand(mask));
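  // scratch now holds the primary table index:
  //   (((hash_field + map) >> kHeapObjectTagSize)
  //    ^ ((flags >> kHeapObjectTagSize) & mask)) & mask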

  // Probe the primary table.
  ProbeTable(isolate,
             masm,
             flags,
             kPrimary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Primary miss: Compute hash for secondary probe.
  __ srl(at, name, kHeapObjectTagSize);
  __ Subu(scratch, scratch, at);
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
  __ And(scratch, scratch, Operand(mask2));
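  // scratch now holds the secondary table index:
  //   ((primary_index - (name >> kHeapObjectTagSize))
  //    + ((flags >> kHeapObjectTagSize) & mask2)) & mask2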

  // Probe the secondary table.
  ProbeTable(isolate,
             masm,
             flags,
             kSecondary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ lw(prototype,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ lw(prototype,
         FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ lw(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ lw(prototype,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  ASSERT(!prototype.is(at));
  __ li(at, isolate->global_object());
  __ Branch(miss, ne, prototype, Operand(at));
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ lw(dst, FieldMemOperand(src, offset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
  __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Load length directly from the JS array.
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
}


// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi, t0);

  // Check that the object is a string.
  __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ And(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ Branch(non_string_object,
            ne,
            scratch2,
            Operand(static_cast<int32_t>(kStringTag)));
}


// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, &check_wrapper);

  // Load length directly from the string.
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));

  // Unwrap the value and check if the wrapped value is a string.
  __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
                                      Handle<Name> name,
                                      Register scratch,
                                      Label* miss) {
  Handle<Cell> cell = GlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(cell));
  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Generate StoreTransition code; the value is passed in the a0 register.
// After executing generated code, the receiver_reg and name_reg
// may be clobbered.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* miss_label,
                                                Label* slow) {
  // a0 : value.
  Label exit;

  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ LoadObject(scratch1, constant);
    __ Branch(miss_label, ne, value_reg, Operand(scratch1));
  } else if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    Label do_store, heap_number;
    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch1, value_reg);
    __ mtc1(scratch1, f6);
    __ cvt_d_w(f4, f6);
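    // f4 now holds the untagged smi value converted to a double.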
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ li(a2, Operand(transition));
    __ Push(a2, a0);
    __ TailCallExternalReference(
           ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                             masm->isolate()),
           3, 1);
    return;
  }

  // Update the map of the object.
  __ li(scratch1, Operand(transition));
  __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kRAHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
    } else {
      __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array
    __ lw(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ sw(storage_reg, FieldMemOperand(scratch1, offset));
    } else {
      __ sw(value_reg, FieldMemOperand(scratch1, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register v0).
  ASSERT(value_reg.is(a0));
  __ bind(&exit);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
}


// Generate StoreField code; the value is passed in the a0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // a0 : value
  Label exit;

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ lw(scratch1, FieldMemOperand(receiver_reg, offset));
    } else {
      __ lw(scratch1,
            FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ lw(scratch1, FieldMemOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch2, value_reg);
    __ mtc1(scratch2, f6);
    __ cvt_d_w(f4, f6);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
    // Return the value (register v0).
    ASSERT(value_reg.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ sw(value_reg, FieldMemOperand(receiver_reg, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ sw(value_reg, FieldMemOperand(scratch1, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register v0).
  ASSERT(value_reg.is(a0));
  __ bind(&exit);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
}


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}


static void GenerateCallFunction(MacroAssembler* masm,
                                 Handle<Object> object,
                                 const ParameterCount& arguments,
                                 Label* miss,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- a0: receiver
  //  -- a1: function to call
  // -----------------------------------
  // Check that the function really is a function.
  __ JumpIfSmi(a1, miss);
  __ GetObjectType(a1, a3, a3);
  __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
    __ sw(a3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(a1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ li(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
          masm->isolate());
  __ PrepareCEntryArgs(StubCache::kInterceptorArgsLength);
  __ PrepareCEntryFunction(ref);

  CEntryStub stub(1);
  __ CallStub(&stub);
}


static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;

// Reserves space for the extra arguments to the API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  ASSERT(Smi::FromInt(0) == 0);
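  // Since Smi::FromInt(0) is the all-zero bit pattern, pushing zero_reg
  // below fills each reserved slot with smi zero.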
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(zero_reg);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}


static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
                                      int argc,
                                      bool restore_context) {
  // ----------- S t a t e -------------
  //  -- sp[0] - sp[24]     : FunctionCallbackInfo, incl.
  //                        :  holder (set by CheckPrototypes)
  //  -- sp[28]             : last JS argument
  //  -- ...
  //  -- sp[(argc + 6) * 4] : first JS argument
  //  -- sp[(argc + 7) * 4] : receiver
  // -----------------------------------
  typedef FunctionCallbackArguments FCA;
  // Save calling context.
  __ sw(cp, MemOperand(sp, FCA::kContextSaveIndex * kPointerSize));
  // Get the function and setup the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(t1, function);
  __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
  __ sw(t1, MemOperand(sp, FCA::kCalleeIndex * kPointerSize));

  // Construct the FunctionCallbackInfo.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data(), masm->isolate());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ li(a0, api_call_info);
    __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
  } else {
    __ li(t2, call_data);
  }
  // Store call data.
  __ sw(t2, MemOperand(sp, FCA::kDataIndex * kPointerSize));
  // Store isolate.
  __ li(t3, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ sw(t3, MemOperand(sp, FCA::kIsolateIndex * kPointerSize));
  // Store ReturnValue default and ReturnValue.
  __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
  __ sw(t1, MemOperand(sp, FCA::kReturnValueOffset * kPointerSize));
  __ sw(t1, MemOperand(sp, FCA::kReturnValueDefaultValueIndex * kPointerSize));

  // Prepare arguments.
  __ Move(a2, sp);

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;
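  // One word each for FunctionCallbackInfo's implicit_args_, values_,
  // length_ and is_construct_call fields, which are filled in below.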

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // a0 = FunctionCallbackInfo&
  // Arguments is built at sp + 1 (sp is a reserved spot for ra).
  __ Addu(a0, sp, kPointerSize);
  // FunctionCallbackInfo::implicit_args_
  __ sw(a2, MemOperand(a0, 0 * kPointerSize));
  // FunctionCallbackInfo::values_
  __ Addu(t0, a2, Operand((kFastApiCallArguments - 1 + argc) * kPointerSize));
  __ sw(t0, MemOperand(a0, 1 * kPointerSize));
  // FunctionCallbackInfo::length_ = argc
  __ li(t0, Operand(argc));
  __ sw(t0, MemOperand(a0, 2 * kPointerSize));
  // FunctionCallbackInfo::is_construct_call = 0
  __ sw(zero_reg, MemOperand(a0, 3 * kPointerSize));

  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
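  // Unwind the JS arguments, the FunctionCallbackInfo slots and the
  // receiver slot when the API call returns.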
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref =
      ExternalReference(&fun,
                        type,
                        masm->isolate());
  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
  ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
  ApiFunction thunk_fun(thunk_address);
  ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
      masm->isolate());

  AllowExternalCallThatCantCauseGC scope(masm);
  MemOperand context_restore_operand(
      fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
  MemOperand return_value_operand(
      fp, (2 + FCA::kReturnValueOffset) * kPointerSize);

  __ CallApiFunctionAndReturn(ref,
                              function_address,
                              thunk_ref,
                              a1,
                              kStackUnwindSpace,
                              return_value_operand,
                              restore_context ?
                                  &context_restore_operand : NULL);
}


// Generate call to api function.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                Register receiver,
                                Register scratch,
                                int argc,
                                Register* values) {
  ASSERT(optimization.is_simple_api_call());
  ASSERT(!receiver.is(scratch));

  typedef FunctionCallbackArguments FCA;
  const int stack_space = kFastApiCallArguments + argc + 1;
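  // kFastApiCallArguments implicit slots, argc JS arguments, plus one slot
  // for the receiver.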
  // Assign stack space for the call arguments.
  __ Subu(sp, sp, Operand(stack_space * kPointerSize));
  // Write holder to stack frame.
  __ sw(receiver, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
  // Write receiver to stack frame.
  int index = stack_space - 1;
  __ sw(receiver, MemOperand(sp, index * kPointerSize));
  // Write the arguments to stack frame.
  for (int i = 0; i < argc; i++) {
    ASSERT(!receiver.is(values[i]));
    ASSERT(!scratch.is(values[i]));
    // Store the argument value, not the receiver; pre-decrement so the
    // arguments fill the slots below the receiver.
    __ sw(values[i], MemOperand(sp, --index * kPointerSize));
  }
950

    
951
  GenerateFastApiDirectCall(masm, optimization, argc, true);
952
}
953

    
954

    
955
class CallInterceptorCompiler BASE_EMBEDDED {
956
 public:
957
  CallInterceptorCompiler(StubCompiler* stub_compiler,
958
                          const ParameterCount& arguments,
959
                          Register name,
960
                          Code::ExtraICState extra_ic_state)
961
      : stub_compiler_(stub_compiler),
962
        arguments_(arguments),
963
        name_(name),
964
        extra_ic_state_(extra_ic_state) {}
965

    
966
  void Compile(MacroAssembler* masm,
967
               Handle<JSObject> object,
968
               Handle<JSObject> holder,
969
               Handle<Name> name,
970
               LookupResult* lookup,
971
               Register receiver,
972
               Register scratch1,
973
               Register scratch2,
974
               Register scratch3,
975
               Label* miss) {
976
    ASSERT(holder->HasNamedInterceptor());
977
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
978

    
979
    // Check that the receiver isn't a smi.
980
    __ JumpIfSmi(receiver, miss);
981
    CallOptimization optimization(lookup);
982
    if (optimization.is_constant_call()) {
983
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
984
                       holder, lookup, name, optimization, miss);
985
    } else {
986
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
987
                     name, holder, miss);
988
    }
989
  }
990

    
991
 private:
992
  void CompileCacheable(MacroAssembler* masm,
993
                        Handle<JSObject> object,
994
                        Register receiver,
995
                        Register scratch1,
996
                        Register scratch2,
997
                        Register scratch3,
998
                        Handle<JSObject> interceptor_holder,
999
                        LookupResult* lookup,
1000
                        Handle<Name> name,
1001
                        const CallOptimization& optimization,
1002
                        Label* miss_label) {
1003
    ASSERT(optimization.is_constant_call());
1004
    ASSERT(!lookup->holder()->IsGlobalObject());
1005
    Counters* counters = masm->isolate()->counters();
1006
    int depth1 = kInvalidProtoDepth;
1007
    int depth2 = kInvalidProtoDepth;
1008
    bool can_do_fast_api_call = false;
1009
    if (optimization.is_simple_api_call() &&
1010
          !lookup->holder()->IsGlobalObject()) {
1011
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
1012
          object, interceptor_holder);
1013
      if (depth1 == kInvalidProtoDepth) {
1014
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
1015
            interceptor_holder, Handle<JSObject>(lookup->holder()));
1016
      }
1017
      can_do_fast_api_call =
1018
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
1019
    }
1020

    
1021
    __ IncrementCounter(counters->call_const_interceptor(), 1,
1022
                        scratch1, scratch2);
1023

    
1024
    if (can_do_fast_api_call) {
1025
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
1026
                          scratch1, scratch2);
1027
      ReserveSpaceForFastApiCall(masm, scratch1);
1028
    }
1029

    
1030
    // Check that the maps from receiver to interceptor's holder
1031
    // haven't changed and thus we can invoke interceptor.
1032
    Label miss_cleanup;
1033
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
1034
    Register holder =
1035
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
1036
                                        scratch1, scratch2, scratch3,
1037
                                        name, depth1, miss);
1038

    
1039
    // Invoke an interceptor and if it provides a value,
1040
    // branch to |regular_invoke|.
1041
    Label regular_invoke;
1042
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
1043
                        &regular_invoke);
1044

    
1045
    // Interceptor returned nothing for this property.  Try to use cached
1046
    // constant function.
1047

    
1048
    // Check that the maps from interceptor's holder to constant function's
1049
    // holder haven't changed and thus we can use cached constant function.
1050
    if (*interceptor_holder != lookup->holder()) {
1051
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
1052
                                      Handle<JSObject>(lookup->holder()),
1053
                                      scratch1, scratch2, scratch3,
1054
                                      name, depth2, miss);
1055
    } else {
1056
      // CheckPrototypes has a side effect of fetching a 'holder'
1057
      // for API (object which is instanceof for the signature).  It's
1058
      // safe to omit it here, as if present, it should be fetched
1059
      // by the previous CheckPrototypes.
1060
      ASSERT(depth2 == kInvalidProtoDepth);
1061
    }
1062

    
1063
    // Invoke function.
1064
    if (can_do_fast_api_call) {
1065
      GenerateFastApiDirectCall(
1066
          masm, optimization, arguments_.immediate(), false);
1067
    } else {
1068
      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
1069
          ? CALL_AS_FUNCTION
1070
          : CALL_AS_METHOD;
1071
      Handle<JSFunction> function = optimization.constant_function();
1072
      ParameterCount expected(function);
1073
      __ InvokeFunction(function, expected, arguments_,
1074
                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
1075
    }
1076

    
1077
    // Deferred code for fast API call case---clean preallocated space.
1078
    if (can_do_fast_api_call) {
1079
      __ bind(&miss_cleanup);
1080
      FreeSpaceForFastApiCall(masm);
1081
      __ Branch(miss_label);
1082
    }
1083

    
1084
    // Invoke a regular function.
1085
    __ bind(&regular_invoke);
1086
    if (can_do_fast_api_call) {
1087
      FreeSpaceForFastApiCall(masm);
1088
    }
1089
  }
1090

    
1091
  void CompileRegular(MacroAssembler* masm,
1092
                      Handle<JSObject> object,
1093
                      Register receiver,
1094
                      Register scratch1,
1095
                      Register scratch2,
1096
                      Register scratch3,
1097
                      Handle<Name> name,
1098
                      Handle<JSObject> interceptor_holder,
1099
                      Label* miss_label) {
1100
    Register holder =
1101
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
1102
                                        scratch1, scratch2, scratch3,
1103
                                        name, miss_label);
1104

    
1105
    // Call a runtime function to load the interceptor property.
1106
    FrameScope scope(masm, StackFrame::INTERNAL);
1107
    // Save the name_ register across the call.
1108
    __ push(name_);
1109

    
1110
    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
1111

    
1112
    __ CallExternalReference(
1113
          ExternalReference(
1114
              IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
1115
              masm->isolate()),
1116
          StubCache::kInterceptorArgsLength);
1117
    // Restore the name_ register.
1118
    __ pop(name_);
1119
    // Leave the internal frame.
1120
  }
1121

    
1122
  void LoadWithInterceptor(MacroAssembler* masm,
1123
                           Register receiver,
1124
                           Register holder,
1125
                           Handle<JSObject> holder_obj,
1126
                           Register scratch,
1127
                           Label* interceptor_succeeded) {
1128
    {
1129
      FrameScope scope(masm, StackFrame::INTERNAL);
1130

    
1131
      __ Push(holder, name_);
1132
      CompileCallLoadPropertyWithInterceptor(masm,
1133
                                             receiver,
1134
                                             holder,
1135
                                             name_,
1136
                                             holder_obj);
1137
      __ pop(name_);  // Restore the name.
1138
      __ pop(receiver);  // Restore the holder.
1139
    }
1140
    // If interceptor returns no-result sentinel, call the constant function.
1141
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
1142
    __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
1143
  }
1144

    
1145
  StubCompiler* stub_compiler_;
1146
  const ParameterCount& arguments_;
1147
  Register name_;
1148
  Code::ExtraICState extra_ic_state_;
1149
};
1150

    
1151

    
1152
// Calls GenerateCheckPropertyCell for each global object in the prototype chain
1153
// from object to (but not including) holder.
1154
static void GenerateCheckPropertyCells(MacroAssembler* masm,
1155
                                       Handle<JSObject> object,
1156
                                       Handle<JSObject> holder,
1157
                                       Handle<Name> name,
1158
                                       Register scratch,
1159
                                       Label* miss) {
1160
  Handle<JSObject> current = object;
1161
  while (!current.is_identical_to(holder)) {
1162
    if (current->IsGlobalObject()) {
1163
      GenerateCheckPropertyCell(masm,
1164
                                Handle<GlobalObject>::cast(current),
1165
                                name,
1166
                                scratch,
1167
                                miss);
1168
    }
1169
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
1170
  }
1171
}
1172

    
1173

    
1174
void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
1175
  __ Jump(code, RelocInfo::CODE_TARGET);
1176
}
1177

    
1178

    
1179
#undef __
1180
#define __ ACCESS_MASM(masm())
1181

    
1182

    
1183
Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
1184
                                       Register object_reg,
1185
                                       Handle<JSObject> holder,
1186
                                       Register holder_reg,
1187
                                       Register scratch1,
1188
                                       Register scratch2,
1189
                                       Handle<Name> name,
1190
                                       int save_at_depth,
1191
                                       Label* miss,
1192
                                       PrototypeCheckType check) {
1193
  // Make sure that the type feedback oracle harvests the receiver map.
1194
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
1195
  __ li(scratch1, Operand(Handle<Map>(object->map())));
1196

    
1197
  Handle<JSObject> first = object;
1198
  // Make sure there's no overlap between holder and object registers.
1199
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
1200
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
1201
         && !scratch2.is(scratch1));
  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  typedef FunctionCallbackArguments FCA;
  if (save_at_depth == depth) {
    __ sw(reg, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      Register map_reg = scratch1;
      if (!current.is_identical_to(first) || check == CHECK_ALL_MAPS) {
        Handle<Map> current_map(current->map());
        // CheckMap implicitly loads the map of |reg| into |map_reg|.
        __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
      } else {
        __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }
      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ li(reg, Operand(prototype));
      }
    }

    if (save_at_depth == depth) {
      __ sw(reg, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!holder.is_identical_to(first) || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, scratch1, Handle<Map>(holder->map()), miss,
                DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify that
  // their maps haven't changed.  We also need to check that the property
  // cell for the property is still empty.
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);

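  // Note: on the fall-through path |reg| is object_reg when the receiver and
  // the holder coincide and holder_reg otherwise; scratch1 and scratch2 have
  // been clobbered by the map and access checks above.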
  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
                                             Label* success,
                                             Label* miss) {
  if (!miss->is_unused()) {
    __ Branch(success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
                                              Label* success,
                                              Label* miss) {
  if (!miss->is_unused()) {
    __ b(success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<JSObject> object,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Label* success,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(object, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary.
    Register dictionary = scratch4();
    __ lw(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&probe_done);

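    // A NameDictionary stores each entry as a (key, value, details) triple in
    // its elements area, so the value lives one pointer past the entry start;
    // that is why kValueOffset below is kElementsStartOffset + kPointerSize.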
    // If probing finds an entry in the dictionary, scratch3 contains the
    // pointer into the dictionary. Check that the value is the callback.
    Register pointer = scratch3();
    const int kElementsStartOffset = NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ lw(scratch2(), FieldMemOperand(pointer, kValueOffset));
    __ Branch(&miss, ne, scratch2(), Operand(callback));
  }

  HandlerFrontendFooter(name, success, &miss);
  return reg;
}


void LoadStubCompiler::NonexistentHandlerFrontend(
    Handle<JSObject> object,
    Handle<JSObject> last,
    Handle<Name> name,
    Label* success,
    Handle<GlobalObject> global) {
  Label miss;

  HandlerFrontendHeader(object, receiver(), last, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (!global.is_null()) {
    GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
  }

  HandlerFrontendFooter(name, success, &miss);
}


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(v0, value);
  __ Ret();
}


void LoadStubCompiler::GenerateLoadCallback(
    const CallOptimization& call_optimization) {
  GenerateFastApiCall(
      masm(), call_optimization, receiver(), scratch3(), 0, NULL);
}


void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Build the AccessorInfo::args_ list on the stack and push the property
  // name below the exit frame, storing pointers to them so the GC is aware
  // of them.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  ASSERT(!scratch2().is(reg));
  ASSERT(!scratch3().is(reg));
  ASSERT(!scratch4().is(reg));
  __ push(receiver());
  if (heap()->InNewSpace(callback->data())) {
    __ li(scratch3(), callback);
    __ lw(scratch3(), FieldMemOperand(scratch3(),
                                      ExecutableAccessorInfo::kDataOffset));
  } else {
    __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
  }
  __ Subu(sp, sp, 6 * kPointerSize);
  __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize));
  __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
  __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
  __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
  __ li(scratch4(),
        Operand(ExternalReference::isolate_address(isolate())));
  __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize));
  __ sw(reg, MemOperand(sp, 1 * kPointerSize));
  __ sw(name(), MemOperand(sp, 0 * kPointerSize));
  __ Addu(scratch2(), sp, 1 * kPointerSize);

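  // Resulting stack layout (matching the indices asserted above; args_ starts
  // one slot above the name):
  //   sp[0]: property name
  //   sp[1]: holder (args_[kHolderIndex])
  //   sp[2]: isolate (args_[kIsolateIndex])
  //   sp[3]: ReturnValue default value (undefined)
  //   sp[4]: ReturnValue (undefined)
  //   sp[5]: callback data
  //   sp[6]: receiver, pushed first (args_[kThisIndex])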
  __ mov(a2, scratch2());  // Saved in case scratch2 == a1.
  __ mov(a0, sp);  // (first argument - a0) = Handle<Name>

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm(), StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // Create PropertyAccessorInfo instance on the stack above the exit frame with
  // scratch2 (internal::Object** args_) as the data.
  __ sw(a2, MemOperand(sp, kPointerSize));
  // (second argument - a1) = AccessorInfo&
  __ Addu(a1, sp, kPointerSize);

  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());

  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
  ExternalReference::Type thunk_type =
      ExternalReference::PROFILING_GETTER_CALL;
  ApiFunction thunk_fun(thunk_address);
  ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
      isolate());
  __ CallApiFunctionAndReturn(ref,
                              getter_address,
                              thunk_ref,
                              a2,
                              kStackUnwindSpace,
                              MemOperand(fp, 6 * kPointerSize),
                              NULL);
}


void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<JSObject> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++ code,
    // the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
      if (must_preserve_receiver_reg) {
        __ Push(receiver(), holder_reg, this->name());
      } else {
        __ Push(holder_reg, this->name());
      }
      // Invoke an interceptor.  Note: map checks from receiver to
      // interceptor's holder have been compiled before (see a caller
      // of this method).
      CompileCallLoadPropertyWithInterceptor(masm(),
                                             receiver(),
                                             holder_reg,
                                             this->name(),
                                             interceptor_holder);
      // Check if the interceptor provided a value for the property.  If it
      // did, return immediately.
      Label interceptor_failed;
      __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
      __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
      frame_scope.GenerateLeaveFrame();
      __ Ret();

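      // Calling convention sketch: the interceptor call above returns the
      // no-interceptor-result sentinel in v0 when the interceptor declined to
      // produce a value; any other v0 value is the actual property value and
      // is returned to the caller directly.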
      __ bind(&interceptor_failed);
      __ pop(this->name());
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver());
      }
      // Leave the internal frame.
    }
    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}


void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ Branch(miss, ne, a2, Operand(name));
  }
}


void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<Name> name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ lw(a0, MemOperand(sp, argc * kPointerSize));

  // Check that the maps haven't changed.
  __ JumpIfSmi(a0, miss);
  CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
}


void CallStubCompiler::GenerateLoadFunctionFromCell(
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Label* miss) {
  // Get the value from the cell.
  __ li(a3, Operand(cell));
  __ lw(a1, FieldMemOperand(a3, Cell::kValueOffset));

  // Check that the cell contains the same function.
  if (heap()->InNewSpace(*function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ JumpIfSmi(a1, miss);
    __ GetObjectType(a1, a3, a3);
    __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));

    // Check the shared function info. Make sure it hasn't changed.
    __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
    __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ Branch(miss, ne, t0, Operand(a3));
  } else {
    __ Branch(miss, ne, a1, Operand(function));
  }
}


void CallStubCompiler::GenerateMissBranch() {
  Handle<Code> code =
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                               kind_,
                                               extra_state_);
  __ Jump(code, RelocInfo::CODE_TARGET);
}


Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
                                                Handle<JSObject> holder,
                                                PropertyIndex index,
                                                Handle<Name> name) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();

  // Get the receiver of the function from the stack into a0.
  __ lw(a0, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(a0, &miss, t0);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
  GenerateFastPropertyLoad(masm(), a1, reg, index.is_inobject(holder),
                           index.translate(holder), Representation::Tagged());

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::FIELD, name);
}


Handle<Code> CallStubCompiler::CompileArrayCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  Label miss;

  // Check that the function is still the Array function.
  const int argc = arguments().immediate();
  GenerateNameCheck(name, &miss);
  Register receiver = a1;

  if (cell.is_null()) {
    __ lw(receiver, MemOperand(sp, argc * kPointerSize));

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, &miss);

    // Check that the maps haven't changed.
    CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, a3, a0,
                    t0, name, &miss);
  } else {
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
  Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
  __ li(a0, Operand(argc));
  __ li(a2, Operand(site_feedback_cell));
  __ li(a1, Operand(function));

  ArrayConstructorStub stub(isolate());
  __ TailCallStub(&stub);

  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileArrayPushCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss;

  GenerateNameCheck(name, &miss);

  Register receiver = a1;

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ lw(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, a3, v0, t0,
                  name, &miss);

  if (argc == 0) {
    // Nothing to do, just return the length.
    __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
    __ DropAndRet(argc + 1);
  } else {
    Label call_builtin;
    if (argc == 1) {  // Otherwise fall through to call the builtin.
      Label attempt_to_grow_elements, with_write_barrier, check_double;

      Register elements = t2;
      Register end_elements = t1;
      // Get the elements array of the object.
      __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

      // Check that the elements are in fast mode and writable.
      __ CheckMap(elements,
                  v0,
                  Heap::kFixedArrayMapRootIndex,
                  &check_double,
                  DONT_DO_SMI_CHECK);

      // Get the array's length into v0 and calculate new length.
      __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      STATIC_ASSERT(kSmiTagSize == 1);
      STATIC_ASSERT(kSmiTag == 0);
      __ Addu(v0, v0, Operand(Smi::FromInt(argc)));

      // Get the elements' length.
      __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));

      // Check if value is a smi.
      __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
      __ JumpIfNotSmi(t0, &with_write_barrier);

      // Save new length.
      __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));

      // Store the value.
      // We may need a register containing the address end_elements below,
      // so write back the value in end_elements.
      __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
      __ Addu(end_elements, elements, end_elements);
      const int kEndElementsOffset =
          FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
      __ Addu(end_elements, end_elements, kEndElementsOffset);
      __ sw(t0, MemOperand(end_elements));

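      // Address arithmetic sketch: the new length in v0 is a smi, i.e. the
      // untagged length n shifted left by kSmiTagSize, so shifting it left by
      // kPointerSizeLog2 - kSmiTagSize yields n * kPointerSize. Adding the
      // elements base plus (kHeaderSize - kHeapObjectTag - argc * 4) then
      // points end_elements exactly at the slot of the element just stored.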
      // The new element was a smi; return the new length.
      __ DropAndRet(argc + 1);

      __ bind(&check_double);

      // Check that the elements are in fast mode and writable.
      __ CheckMap(elements,
                  a0,
                  Heap::kFixedDoubleArrayMapRootIndex,
                  &call_builtin,
                  DONT_DO_SMI_CHECK);

      // Get the array's length into v0 and calculate new length.
      __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      STATIC_ASSERT(kSmiTagSize == 1);
      STATIC_ASSERT(kSmiTag == 0);
      __ Addu(v0, v0, Operand(Smi::FromInt(argc)));

      // Get the elements' length.
      __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ Branch(&call_builtin, gt, v0, Operand(t0));

      __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
      __ StoreNumberToDoubleElements(
          t0, v0, elements, a3, t1, a2,
          &call_builtin, argc * kDoubleSize);

      // Save new length.
      __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));

      // Return the new length.
      __ DropAndRet(argc + 1);

      __ bind(&with_write_barrier);

      __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));

      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
        Label fast_object, not_fast_object;
        __ CheckFastObjectElements(a3, t3, &not_fast_object);
        __ jmp(&fast_object);
        // In case of fast smi-only, convert to fast object, otherwise bail out.
        __ bind(&not_fast_object);
        __ CheckFastSmiElements(a3, t3, &call_builtin);

        __ lw(t3, FieldMemOperand(t0, HeapObject::kMapOffset));
        __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
        __ Branch(&call_builtin, eq, t3, Operand(at));
        // a1: receiver
        // a3: map
        Label try_holey_map;
        __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                               FAST_ELEMENTS,
                                               a3,
                                               t3,
                                               &try_holey_map);
        __ mov(a2, receiver);
        ElementsTransitionGenerator::
            GenerateMapChangeElementsTransition(masm(),
                                                DONT_TRACK_ALLOCATION_SITE,
                                                NULL);
        __ jmp(&fast_object);

        __ bind(&try_holey_map);
        __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
                                               FAST_HOLEY_ELEMENTS,
                                               a3,
                                               t3,
                                               &call_builtin);
        __ mov(a2, receiver);
        ElementsTransitionGenerator::
            GenerateMapChangeElementsTransition(masm(),
                                                DONT_TRACK_ALLOCATION_SITE,
                                                NULL);
        __ bind(&fast_object);
      } else {
        __ CheckFastObjectElements(a3, a3, &call_builtin);
      }

      // Save new length.
      __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));

      // Store the value.
      // We may need a register containing the address end_elements below,
      // so write back the value in end_elements.
      __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
      __ Addu(end_elements, elements, end_elements);
      __ Addu(end_elements, end_elements, kEndElementsOffset);
      __ sw(t0, MemOperand(end_elements));

      __ RecordWrite(elements,
                     end_elements,
                     t0,
                     kRAHasNotBeenSaved,
                     kDontSaveFPRegs,
                     EMIT_REMEMBERED_SET,
                     OMIT_SMI_CHECK);
      __ DropAndRet(argc + 1);

      __ bind(&attempt_to_grow_elements);
      // v0: array's length + 1.
      // t0: elements' length.

      if (!FLAG_inline_new) {
        __ Branch(&call_builtin);
      }

      __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize));
      // Growing elements that are SMI-only requires special handling in case
      // the new element is non-Smi. For now, delegate to the builtin.
      Label no_fast_elements_check;
      __ JumpIfSmi(a2, &no_fast_elements_check);
      __ lw(t3, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ CheckFastObjectElements(t3, t3, &call_builtin);
      __ bind(&no_fast_elements_check);

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(isolate());
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(isolate());

      const int kAllocationDelta = 4;
      // Load top and check if it is the end of elements.
      __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
      __ Addu(end_elements, elements, end_elements);
      __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
      __ li(t3, Operand(new_space_allocation_top));
      __ lw(a3, MemOperand(t3));
      __ Branch(&call_builtin, ne, end_elements, Operand(a3));

      __ li(t5, Operand(new_space_allocation_limit));
      __ lw(t5, MemOperand(t5));
      __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
      __ Branch(&call_builtin, hi, a3, Operand(t5));

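      // Growth sketch: the trailing elements can be extended in place only
      // when the FixedArray is the most recent new-space allocation, i.e. its
      // end coincides with the allocation top checked above. In that case the
      // top is simply bumped by kAllocationDelta words (within the limit) and
      // the fresh slots are filled with the-hole below.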
      // We fit and could grow elements.
      // Update new_space_allocation_top.
      __ sw(a3, MemOperand(t3));
      // Push the argument.
      __ sw(a2, MemOperand(end_elements));
      // Fill the rest with holes.
      __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {
        __ sw(a3, MemOperand(end_elements, i * kPointerSize));
      }

      // Update elements' and array's sizes.
      __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
      __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Elements are in new space, so write barrier is not required.
      __ DropAndRet(argc + 1);
    }
    __ bind(&call_builtin);
    __ TailCallExternalReference(
        ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1);
  }

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileArrayPopCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss, return_undefined, call_builtin;
  Register receiver = a1;
  Register elements = a3;
  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
                  t0, v0, name, &miss);

  // Get the elements array of the object.
  __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ CheckMap(elements,
              v0,
              Heap::kFixedArrayMapRootIndex,
              &call_builtin,
              DONT_DO_SMI_CHECK);

  // Get the array's length into t0 and calculate new length.
  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Subu(t0, t0, Operand(Smi::FromInt(1)));
  __ Branch(&return_undefined, lt, t0, Operand(zero_reg));

  // Get the last element.
  __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  // We can't address the last element in one operation. Compute the more
  // expensive shift first, and use an offset later on.
  __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(elements, elements, t1);
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&call_builtin, eq, v0, Operand(t2));

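  // Bailout rationale: if the last slot holds the-hole, the observable value
  // may have to come from the prototype chain, so the generic builtin handles
  // that case instead of this fast path.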
  // Set the array's length.
  __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));

  // Fill with the hole.
  __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ DropAndRet(argc + 1);

  __ bind(&return_undefined);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ DropAndRet(argc + 1);

  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- a2                     : function name
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();
  Label miss;
  Label name_miss;
  Label index_out_of_range;

  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            v0,
                                            &miss);
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(
      Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
      v0, holder, a1, a3, t0, name, &miss);

  Register receiver = a1;
  Register index = t1;
  Register result = v0;
  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator generator(receiver,
                                      index,
                                      result,
                                      &miss,  // When not a string.
                                      &miss,  // When not a number.
                                      index_out_of_range_label,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ DropAndRet(argc + 1);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(v0, Heap::kNanValueRootIndex);
    __ DropAndRet(argc + 1);
  }

  __ bind(&miss);
  // Restore function name in a2.
  __ li(a2, name);
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileStringCharAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- a2                     : function name
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();
  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;
  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }
  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            v0,
                                            &miss);
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(
      Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
      v0, holder, a1, a3, t0, name, &miss);

  Register receiver = v0;
  Register index = t1;
  Register scratch = a3;
  Register result = v0;
  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator generator(receiver,
                                  index,
                                  scratch,
                                  result,
                                  &miss,  // When not a string.
                                  &miss,  // When not a number.
                                  index_out_of_range_label,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ DropAndRet(argc + 1);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(argc + 1);
  }

  __ bind(&miss);
  // Restore function name in a2.
  __ li(a2, name);
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- a2                     : function name
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    __ lw(a1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(a1, &miss);

    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,
                    name, &miss);
  } else {
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = a1;
  __ lw(code, MemOperand(sp, 0 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(code, &slow);

  // Convert the smi code to uint16.
  __ And(code, code, Operand(Smi::FromInt(0xffff)));

2276
  StringCharFromCodeGenerator generator(code, v0);
2277
  generator.GenerateFast(masm());
2278
  __ DropAndRet(argc + 1);
2279

    
2280
  StubRuntimeCallHelper call_helper;
2281
  generator.GenerateSlow(masm(), call_helper);
2282

    
2283
  // Tail call the full function. We do not have to patch the receiver
2284
  // because the function makes no use of it.
2285
  __ bind(&slow);
2286
  ParameterCount expected(function);
2287
  __ InvokeFunction(function, expected, arguments(),
2288
                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2289

    
2290
  __ bind(&miss);
2291
  // a2: function name.
2292
  GenerateMissBranch();
2293

    
2294
  // Return the generated code.
2295
  return GetCode(type, name);
2296
}
2297

    
2298

    
2299
Handle<Code> CallStubCompiler::CompileMathFloorCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- a2                     : function name
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();
  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  Label miss, slow;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    __ lw(a1, MemOperand(sp, 1 * kPointerSize));
    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(a1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,
                    name, &miss);
  } else {
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into v0.
  __ lw(v0, MemOperand(sp, 0 * kPointerSize));

  // If the argument is a smi, just return.
  STATIC_ASSERT(kSmiTag == 0);
  __ And(t0, v0, Operand(kSmiTagMask));
  __ DropAndRet(argc + 1, eq, t0, Operand(zero_reg));

  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);

  Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;

  // If fpu is enabled, we use the floor instruction.

  // Load the HeapNumber value.
  __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));

  // Backup FCSR.
  __ cfc1(a3, FCSR);
  // Clearing FCSR clears the exception mask with no side-effects.
  __ ctc1(zero_reg, FCSR);
  // Convert the argument to an integer.
  __ floor_w_d(f0, f0);

  // Start checking for special cases.
  // Get the argument exponent and clear the sign bit.
  __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize));
  __ And(t2, t1, Operand(~HeapNumber::kSignMask));
  __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);

  // Retrieve FCSR and check for fpu errors.
  __ cfc1(t5, FCSR);
  __ And(t5, t5, Operand(kFCSRExceptionFlagMask));
  __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));

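  // FCSR sketch: writing zero to the control register above clears the sticky
  // exception flags, so any flag found set after floor_w_d must come from
  // that conversion (NaN, infinity, or an out-of-range result); those cases
  // are sorted out below before the original FCSR is restored.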
  // Check for NaN, Infinity, and -Infinity.
  // They are invariant through a Math.Floor call, so just
  // return the original argument.
  __ Subu(t3, t2, Operand(HeapNumber::kExponentMask
        >> HeapNumber::kMantissaBitsInTopWord));
  __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
  // We had an overflow or underflow in the conversion. Check if we
  // have a big exponent.
  // If greater or equal, the argument is already round and in v0.
  __ Branch(&restore_fcsr_and_return, ge, t3,
      Operand(HeapNumber::kMantissaBits));
  __ Branch(&wont_fit_smi);

  __ bind(&no_fpu_error);
  // Move the result back to v0.
  __ mfc1(v0, f0);
  // Check if the result fits into a smi.
  __ Addu(a1, v0, Operand(0x40000000));
  __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));
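  // Smi-range sketch: adding 0x40000000 overflows into the sign bit exactly
  // when the result lies outside [-2^30, 2^30 - 1], the range representable
  // as a 31-bit smi payload. E.g. 0x3FFFFFFF + 0x40000000 = 0x7FFFFFFF stays
  // non-negative, while 0x40000000 + 0x40000000 wraps to 0x80000000 < 0.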
  // Tag the result.
  STATIC_ASSERT(kSmiTag == 0);
  __ sll(v0, v0, kSmiTagSize);

  // Check for -0.
  __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
  // t1 already holds the HeapNumber exponent.
  __ And(t0, t1, Operand(HeapNumber::kSignMask));
  // If our HeapNumber is negative it was -0, so load its address and return.
  // Else v0 is loaded with 0, so we can also just return.
  __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));
  __ lw(v0, MemOperand(sp, 0 * kPointerSize));

  __ bind(&restore_fcsr_and_return);
  // Restore FCSR and return.
  __ ctc1(a3, FCSR);

  __ DropAndRet(argc + 1);

  __ bind(&wont_fit_smi);
  // Restore FCSR and fall to slow case.
  __ ctc1(a3, FCSR);

  __ bind(&slow);
  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);

  __ bind(&miss);
  // a2: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileMathAbsCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- a2                     : function name
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();
  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  Label miss;

  GenerateNameCheck(name, &miss);
  if (cell.is_null()) {
    __ lw(a1, MemOperand(sp, 1 * kPointerSize));
    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(a1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,
                    name, &miss);
  } else {
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into v0.
  __ lw(v0, MemOperand(sp, 0 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(v0, &not_smi);

  // Do bitwise not or do nothing depending on the sign of the
  // argument.
  __ sra(t0, v0, kBitsPerInt - 1);
  __ Xor(a1, v0, t0);

  // Add 1 or do nothing depending on the sign of the argument.
  __ Subu(v0, a1, t0);

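  // Branch-free abs sketch, operating directly on the tagged smi: t0 is the
  // sign replicated into every bit (0 or -1). For v0 = -10 (smi -5):
  // -10 ^ -1 = 9 and 9 - (-1) = 10, the smi 5; for a non-negative smi both
  // operations are identities. Only the most negative smi overflows, which
  // the check below sends to the slow case.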
  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ Branch(&slow, lt, v0, Operand(zero_reg));

  // Smi case done.
  __ DropAndRet(argc + 1);

  // Check if the argument is a heap number and load its exponent and
  // sign.
  __ bind(&not_smi);
  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
  __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ And(t0, a1, Operand(HeapNumber::kSignMask));
  __ Branch(&negative_sign, ne, t0, Operand(zero_reg));
  __ DropAndRet(argc + 1);

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ Xor(a1, a1, Operand(HeapNumber::kSignMask));
  __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(v0, t0, t1, t2, &slow);
  __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
  __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ DropAndRet(argc + 1);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);

  __ bind(&miss);
  // a2: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


Handle<Code> CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {

  Counters* counters = isolate()->counters();

  ASSERT(optimization.is_simple_api_call());
  // Bail out if object is a global object as we don't want to
  // repatch it to global receiver.
  if (object->IsGlobalObject()) return Handle<Code>::null();
  if (!cell.is_null()) return Handle<Code>::null();
  if (!object->IsJSObject()) return Handle<Code>::null();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      Handle<JSObject>::cast(object), holder);
  if (depth == kInvalidProtoDepth) return Handle<Code>::null();

  Label miss, miss_before_stack_reserved;

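  // Two miss labels are needed: once ReserveSpaceForFastApiCall below has
  // adjusted the stack, a miss must first free that reserved space, whereas a
  // miss taken before the reservation can jump straight to the generic miss.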
  GenerateNameCheck(name, &miss_before_stack_reserved);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ lw(a1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(a1, &miss_before_stack_reserved);

  __ IncrementCounter(counters->call_const(), 1, a0, a3);
  __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);

  ReserveSpaceForFastApiCall(masm(), a0);

  // Check that the maps haven't changed and find a Holder as a side effect.
  CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0, name,
                  depth, &miss);

  GenerateFastApiDirectCall(masm(), optimization, argc, false);

  __ bind(&miss);
  FreeSpaceForFastApiCall(masm());

  __ bind(&miss_before_stack_reserved);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}


void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
                                              Handle<JSObject> holder,
                                              Handle<Name> name,
                                              CheckType check,
                                              Label* success) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ lw(a1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  if (check != NUMBER_CHECK) {
    __ JumpIfSmi(a1, &miss);
  }

  // Make sure that it's okay not to patch the on-stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
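  // For the primitive receiver checks below, the called function lives on the
  // prototype chain of the primitive's wrapper, so each case loads the
  // corresponding global wrapper function's prototype and verifies the chain
  // from there instead of from the receiver itself.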
  switch (check) {
2607
    case RECEIVER_MAP_CHECK:
2608
      __ IncrementCounter(isolate()->counters()->call_const(), 1, a0, a3);
2609

    
2610
      // Check that the maps haven't changed.
2611
      CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,
2612
                      name, &miss);
2613

    
2614
      // Patch the receiver on the stack with the global proxy if
2615
      // necessary.
2616
      if (object->IsGlobalObject()) {
2617
        __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2618
        __ sw(a3, MemOperand(sp, argc * kPointerSize));
2619
      }
2620
      break;
2621

    
2622
    case STRING_CHECK:
2623
      // Check that the object is a string.
2624
      __ GetObjectType(a1, a3, a3);
2625
      __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
2626
      // Check that the maps starting from the prototype haven't changed.
2627
      GenerateDirectLoadGlobalFunctionPrototype(
2628
          masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
2629
      CheckPrototypes(
2630
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
2631
          a0, holder, a3, a1, t0, name, &miss);
2632
      break;
2633

    
2634
    case SYMBOL_CHECK:
2635
      // Check that the object is a symbol.
2636
      __ GetObjectType(a1, a1, a3);
2637
      __ Branch(&miss, ne, a3, Operand(SYMBOL_TYPE));
2638
      // Check that the maps starting from the prototype haven't changed.
2639
      GenerateDirectLoadGlobalFunctionPrototype(
2640
          masm(), Context::SYMBOL_FUNCTION_INDEX, a0, &miss);
2641
      CheckPrototypes(
2642
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
2643
          a0, holder, a3, a1, t0, name, &miss);
2644
      break;
2645

    
2646
    case NUMBER_CHECK: {
2647
      Label fast;
2648
      // Check that the object is a smi or a heap number.
2649
      __ JumpIfSmi(a1, &fast);
2650
      __ GetObjectType(a1, a0, a0);
2651
      __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
2652
      __ bind(&fast);
2653
      // Check that the maps starting from the prototype haven't changed.
2654
      GenerateDirectLoadGlobalFunctionPrototype(
2655
          masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
2656
      CheckPrototypes(
2657
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
2658
          a0, holder, a3, a1, t0, name, &miss);
2659
      break;
2660
    }
2661
    case BOOLEAN_CHECK: {
2662
      Label fast;
2663
      // Check that the object is a boolean.
2664
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
2665
      __ Branch(&fast, eq, a1, Operand(t0));
2666
      __ LoadRoot(t0, Heap::kFalseValueRootIndex);
2667
      __ Branch(&miss, ne, a1, Operand(t0));
2668
      __ bind(&fast);
2669
      // Check that the maps starting from the prototype haven't changed.
2670
      GenerateDirectLoadGlobalFunctionPrototype(
2671
          masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
2672
      CheckPrototypes(
2673
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
2674
          a0, holder, a3, a1, t0, name, &miss);
2675
      break;
2676
    }
2677
  }
2678

    
2679
  __ jmp(success);
2680

    
2681
  // Handle call cache miss.
2682
  __ bind(&miss);
2683

    
2684
  GenerateMissBranch();
2685
}


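// Generates the tail of a constant-function call stub: tail-calls the
// known target function, using CALL_AS_FUNCTION for contextual calls and
// CALL_AS_METHOD otherwise, as recorded in the IC's extra state.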
void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), call_kind);
}


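// Compiles a call stub for a call to a constant function. If the function
// has a custom call generator, that code is used; otherwise the frontend
// emits the receiver checks and the backend emits the actual invoke.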
Handle<Code> CallStubCompiler::CompileCallConstant(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    CheckType check,
    Handle<JSFunction> function) {
  if (HasCustomCallGenerator(function)) {
    Handle<Code> code = CompileCustomCall(object, holder,
                                          Handle<Cell>::null(),
                                          function, Handle<String>::cast(name),
                                          Code::CONSTANT);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;
  }

  Label success;

  CompileHandlerFrontend(object, holder, name, check, &success);
  __ bind(&success);
  CompileHandlerBackend(function);

  // Return the generated code.
  return GetCode(function);
}


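// Compiles a call stub for a property found through an interceptor. The
// interceptor machinery returns the function to call in v0; it is moved
// to a1 and the receiver is restored before the call is generated.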
Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
                                                      Handle<JSObject> holder,
                                                      Handle<Name> name) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ lw(a1, MemOperand(sp, argc * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), a2, extra_state_);
  compiler.Compile(masm(), object, holder, name, &lookup, a1, a3, t0, a0,
                   &miss);

  // Move the returned value (the function to call) to a1.
  __ mov(a1, v0);
  // Restore receiver.
  __ lw(a0, MemOperand(sp, argc * kPointerSize));

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::INTERCEPTOR, name);
}


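// Compiles a call stub for a function stored in a global property cell.
// The cell is checked to still hold the expected function before the
// cached code is tail-called through the function's code entry.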
Handle<Code> CallStubCompiler::CompileCallGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<PropertyCell> cell,
    Handle<JSFunction> function,
    Handle<Name> name) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  if (HasCustomCallGenerator(function)) {
    Handle<Code> code = CompileCustomCall(
        object, holder, cell, function, Handle<String>::cast(name),
        Code::NORMAL);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();
  GenerateGlobalReceiverCheck(object, holder, name, &miss);
  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
    __ sw(a3, MemOperand(sp, argc * kPointerSize));
  }

  // Set up the context (the function is already in a1).
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
  ParameterCount expected(function->shared()->formal_parameter_count());
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION,
                NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::NORMAL, name);
}


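// Compiles a store stub that hands the store off to a native
// ExecutableAccessorInfo callback: the receiver, callback info, name, and
// value are pushed as the four arguments of the tail-called runtime entry.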
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Label success;
  HandlerFrontend(object, receiver(), holder, name, &success);
  __ bind(&success);

  // The stub is never generated for non-global objects that require access
  // checks.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  __ push(receiver());  // Receiver.
  __ li(at, Operand(callback));  // Callback info.
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());

  // Do a tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Return the generated code.
  return GetCode(kind(), Code::CALLBACKS, name);
}


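// Compiles a store stub for a setter that qualifies for the fast API call
// mechanism, avoiding the round-trip through the runtime.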
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    const CallOptimization& call_optimization) {
  Label success;
  HandlerFrontend(object, receiver(), holder, name, &success);
  __ bind(&success);

  Register values[] = { value() };
  GenerateFastApiCall(
      masm(), call_optimization, receiver(), scratch3(), 1, values);

  // Return the generated code.
  return GetCode(kind(), Code::CALLBACKS, name);
}


#undef __
#define __ ACCESS_MASM(masm)


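// Generates code that calls a JavaScript setter, or, if no setter is
// given, records a continuation point for deoptimization. The stored
// value is preserved across the call because a store must produce the
// value, not the setter's return value.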
void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save the value register so we can restore it later.
    __ push(a0);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      __ push(a1);
      __ push(a0);
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(v0);

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


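// Compiles a store stub that defers to the runtime so that a store
// interceptor on the object can run; the strict mode flag is passed as
// the fourth argument.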
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  Label miss;

  // Check that the map of the object hasn't changed.
  __ CheckMap(receiver(), scratch1(), Handle<Map>(object->map()), &miss,
              DO_SMI_CHECK);

  // Perform a global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss);
  }

  // The stub is never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ Push(receiver(), this->name(), value());

  __ li(scratch1(), Operand(Smi::FromInt(strict_mode())));
  __ push(scratch1());  // Strict mode.

  // Do a tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetCode(kind(), Code::INTERCEPTOR, name);
}


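// Compiles a load stub that returns undefined once the frontend has
// proved that the property does not exist anywhere on the prototype
// chain.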
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
    Handle<JSObject> object,
    Handle<JSObject> last,
    Handle<Name> name,
    Handle<GlobalObject> global) {
  Label success;

  NonexistentHandlerFrontend(object, last, name, &success, global);

  __ bind(&success);
  // Return undefined if the maps of the full prototype chain are still the
  // same.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  // Return the generated code.
  return GetCode(kind(), Code::NONEXISTENT, name);
}


Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a0, a2, a3, a1, t0, t1 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a1, a0, a2, a3, t0, t1 };
  return registers;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { a1, a2, a0, a3, t0, t1 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { a2, a1, a0, a3, t0, t1 };
  return registers;
}


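// For keyed ICs the property name arrives in a register rather than
// being baked into the stub, so it is compared against the expected name
// explicitly.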
void KeyedLoadStubCompiler::GenerateNameCheck(Handle<Name> name,
                                              Register name_reg,
                                              Label* miss) {
  __ Branch(miss, ne, name_reg, Operand(name));
}


void KeyedStoreStubCompiler::GenerateNameCheck(Handle<Name> name,
                                               Register name_reg,
                                               Label* miss) {
  __ Branch(miss, ne, name_reg, Operand(name));
}


#undef __
#define __ ACCESS_MASM(masm)


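// Generates code that calls a JavaScript getter, or, if no getter is
// given, records a continuation point for deoptimization.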
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


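// Compiles a load stub for a global property stored in a property cell.
// Unless the property is known to be non-deletable, the cell's value is
// checked against the hole before it is returned.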
Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label success, miss;

  __ CheckMap(
      receiver(), scratch1(), Handle<Map>(object->map()), &miss, DO_SMI_CHECK);
  HandlerFrontendHeader(
      object, receiver(), Handle<JSObject>::cast(global), name, &miss);

  // Get the value from the cell.
  __ li(a3, Operand(cell));
  __ lw(t0, FieldMemOperand(a3, Cell::kValueOffset));

  // Check for a deleted property if the property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, t0, Operand(at));
  }

  HandlerFrontendFooter(name, &success, &miss);
  __ bind(&success);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t0);  // The move executes in the branch delay slot.

  // Return the generated code.
  return GetICCode(kind(), Code::NORMAL, name);
}


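// Compiles a polymorphic IC that dispatches on the receiver's map and
// tail-jumps to the matching handler. Deprecated maps get no case here,
// so receivers holding them fall through to the miss handler.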
Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    MapHandleList* receiver_maps,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY) {
    GenerateNameCheck(name, this->name(), &miss);
  }

  __ JumpIfSmi(receiver(), &miss);
  Register map_reg = scratch1();

  int receiver_count = receiver_maps->length();
  int number_of_handled_maps = 0;
  __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Map> map = receiver_maps->at(current);
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
          eq, map_reg, Operand(receiver_maps->at(current)));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


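// Compiles a polymorphic keyed store. For stores that also transition
// the receiver's map, the target map is loaded into transition_map()
// before jumping to the handler.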
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss);

  int receiver_count = receiver_maps->length();
  __ lw(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_count; ++i) {
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
          scratch1(), Operand(receiver_maps->at(i)));
    } else {
      Label next_map;
      __ Branch(&next_map, ne, scratch1(), Operand(receiver_maps->at(i)));
      __ li(transition_map(), Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


#undef __
#define __ ACCESS_MASM(masm)


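// Generates a keyed load from a dictionary-mode (slow) elements backing
// store: the smi key is untagged and looked up in the number dictionary,
// and on failure the generic slow or miss builtins are tail-called.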
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  Label slow, miss_force_generic;

  Register key = a0;
  Register receiver = a1;

  __ JumpIfNotSmi(key, &miss_force_generic);
  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ sra(a2, a0, kSmiTagSize);  // Untag the smi key.
  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
  __ Ret();

  // Slow case, key and receiver still in a0 and a1.
  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),
      1, a2, a3);
  // Entry registers are intact.
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Miss case, call the runtime.
  __ bind(&miss_force_generic);

  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_MissForceGeneric);
}


#undef __
} }  // namespace v8::internal
#endif  // V8_TARGET_ARCH_MIPS