// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen-inl.h"
#include "ic-inl.h"
#include "runtime.h"
#include "stub-cache.h"

namespace v8 { namespace internal {


// ----------------------------------------------------------------------------
// Static IC stub generators.
//

#define __ masm->


// Helper function used from LoadIC/CallIC GenerateNormal.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register t0,
                                   Register t1) {
  // Register use:
  //
  // t0 - used to hold the property dictionary.
  //
  // t1 - initially the receiver
  //    - used for the index into the property dictionary
  //    - holds the result on exit.
  //
  // r3 - used as temporary and to hold the capacity of the property
  //      dictionary.
  //
  // r2 - holds the name of the property and is unchanged.

  Label done;

  // Check for the absence of an interceptor.
  // Load the map into t0.
  __ ldr(t0, FieldMemOperand(t1, JSObject::kMapOffset));
  // Test the has_named_interceptor bit in the map.
  __ ldr(t0, FieldMemOperand(t0, Map::kInstanceAttributesOffset));
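  // The attributes are loaded as a full word, so the bit index below is
  // offset by 3 * 8 to reach the byte within that word which holds the
  // interceptor flag.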
  __ tst(t0, Operand(1 << (Map::kHasNamedInterceptor + (3 * 8))));
  // Jump to miss if the interceptor bit is set.
  __ b(ne, miss);


  // Check that the properties array is a dictionary.
  __ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset));
  __ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Factory::hash_table_map()));
  __ b(ne, miss);

  // Compute the capacity mask.
  const int kCapacityOffset =
      Array::kHeaderSize + Dictionary::kCapacityIndex * kPointerSize;
  __ ldr(r3, FieldMemOperand(t0, kCapacityOffset));
  __ mov(r3, Operand(r3, ASR, kSmiTagSize));  // convert smi to int
  __ sub(r3, r3, Operand(1));

  const int kElementsStartOffset =
      Array::kHeaderSize + Dictionary::kElementsStartIndex * kPointerSize;

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  static const int kProbes = 4;
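  // With the (hash + i + i * i) & mask sequence described below, the four
  // probes land 0, 2, 6 and 12 slots past the initial bucket (modulo the
  // capacity), assuming Dictionary::GetProbeOffset(i) evaluates to i + i * i.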
  for (int i = 0; i < kProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ ldr(t1, FieldMemOperand(r2, String::kLengthOffset));
    __ mov(t1, Operand(t1, LSR, String::kHashShift));
    if (i > 0) __ add(t1, t1, Operand(Dictionary::GetProbeOffset(i)));
    __ and_(t1, t1, Operand(r3));

    // Scale the index by multiplying by the element size.
    ASSERT(Dictionary::kElementSize == 3);
    __ add(t1, t1, Operand(t1, LSL, 1));  // t1 = t1 * 3
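    // Combined with the LSL #2 (kPointerSizeLog2) in the add below, this
    // scales the slot index by 3 * kPointerSize = 12 bytes, so the key,
    // value and details of an entry occupy three consecutive pointer slots
    // starting at kElementsStartOffset.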

    // Check if the key is identical to the name.
    __ add(t1, t0, Operand(t1, LSL, 2));
    __ ldr(ip, FieldMemOperand(t1, kElementsStartOffset));
    __ cmp(r2, Operand(ip));
    if (i != kProbes - 1) {
      __ b(eq, &done);
    } else {
      __ b(ne, miss);
    }
  }

  // Check that the value is a normal property.
  __ bind(&done);  // t1 == t0 + 4*index
  __ ldr(r3, FieldMemOperand(t1, kElementsStartOffset + 2 * kPointerSize));
  __ tst(r3, Operand(PropertyDetails::TypeField::mask() << kSmiTagSize));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  __ ldr(t1, FieldMemOperand(t1, kElementsStartOffset + 1 * kPointerSize));
}


// Helper function used to check that a value is either not a function
// or is loaded if it is a function.
static void GenerateCheckNonFunctionOrLoaded(MacroAssembler* masm,
                                             Label* miss,
                                             Register value,
                                             Register scratch) {
  Label done;
  // Check if the value is a Smi.
  __ tst(value, Operand(kSmiTagMask));
  __ b(eq, &done);
  // Check if the value is a function.
  __ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ cmp(scratch, Operand(JS_FUNCTION_TYPE));
  __ b(ne, &done);
  // Check if the function has been loaded.
  __ ldr(scratch,
         FieldMemOperand(value, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(scratch,
         FieldMemOperand(scratch, SharedFunctionInfo::kLazyLoadDataOffset));
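  // A fully loaded function has undefined lazy-load data; anything else
  // forces a miss below.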
  __ cmp(scratch, Operand(Factory::undefined_value()));
  __ b(ne, miss);
  __ bind(&done);
}


void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  Label miss;

  __ ldr(r0, MemOperand(sp, 0));

  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateStringLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  __ ldr(r0, MemOperand(sp, 0));

  StubCompiler::GenerateLoadStringLength2(masm, r0, r1, r3, &miss);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  // NOTE: Right now, this code always misses on ARM which is
  // sub-optimal. We should port the fast case code from IA-32.

  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);
}


// Defined in ic.cc.
Object* CallIC_Miss(Arguments args);

void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  // Get the name of the function from the stack; 1 ~ receiver.
  __ ldr(r2, MemOperand(sp, (argc + 1) * kPointerSize));

  // Probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::CALL_IC, MONOMORPHIC, NORMAL, argc);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, the cache is keyed on the map of the prototype
  // object of the corresponding JSValue, so that prototype is loaded
  // into r1 and used for the probe below.
  //
  // Check for number.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &number);
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset));
  __ cmp(r3, Operand(HEAP_NUMBER_TYPE));
  __ b(ne, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for string.
  __ bind(&non_number);
  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ cmp(r1, Operand(Factory::true_value()));
  __ b(eq, &boolean);
  __ cmp(r1, Operand(Factory::false_value()));
  __ b(ne, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
}


static void GenerateNormalHelper(MacroAssembler* masm,
                                 int argc,
                                 bool is_global_object,
                                 Label* miss) {
  // Search dictionary - put result in register r1.
  GenerateDictionaryLoad(masm, miss, r0, r1);

  // Check that the value isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the value is a JSFunction.
  __ ldr(r0, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
  __ cmp(r0, Operand(JS_FUNCTION_TYPE));
  __ b(ne, miss);

  // Check that the function has been loaded.
  __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kLazyLoadDataOffset));
  __ cmp(r0, Operand(Factory::undefined_value()));
  __ b(ne, miss);

  // Patch the receiver with the global proxy if necessary.
  if (is_global_object) {
    __ ldr(r2, MemOperand(sp, argc * kPointerSize));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
    __ str(r2, MemOperand(sp, argc * kPointerSize));
  }

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
}


void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------

  Label miss, global_object, non_global_object;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  // Get the name of the function from the stack; 1 ~ receiver.
  __ ldr(r2, MemOperand(sp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the receiver is a valid JS object.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r0, FieldMemOperand(r3, Map::kInstanceTypeOffset));
  __ cmp(r0, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &miss);

  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object.
  __ cmp(r0, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, &global_object);
  __ cmp(r0, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(ne, &non_global_object);

  // Accessing global object: Load and invoke.
  __ bind(&global_object);
  // Check that the global object does not require access checks.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);
  GenerateNormalHelper(masm, argc, true, &miss);

  // Accessing non-global object: Check for access to global proxy.
  Label global_proxy, invoke;
  __ bind(&non_global_object);
  __ cmp(r0, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, &global_proxy);
  // Check that the non-global, non-global-proxy object does not
  // require access checks.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);
  __ bind(&invoke);
  GenerateNormalHelper(masm, argc, false, &miss);

  // Global object access: Check access rights.
  __ bind(&global_proxy);
  __ CheckAccessGlobalProxy(r1, r0, &miss);
  __ b(&invoke);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
}


void CallIC::Generate(MacroAssembler* masm,
                      int argc,
                      const ExternalReference& f) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------

  // Get the receiver of the function from the stack.
  __ ldr(r2, MemOperand(sp, argc * kPointerSize));
  // Get the name of the function to call from the stack.
  __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));

  __ EnterInternalFrame();

  // Push the receiver and the name of the function.
  __ stm(db_w, sp, r1.bit() | r2.bit());

  // Call the entry.
  __ mov(r0, Operand(2));
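  // r0 holds the argument count (the name and receiver pushed above) and
  // r1 the runtime entry point expected by the C entry stub.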
  __ mov(r1, Operand(f));

  CEntryStub stub;
  __ CallStub(&stub);

  // Move result to r1 and leave the internal frame.
  __ mov(r1, Operand(r0));
  __ LeaveInternalFrame();

  // Check if the receiver is a global object of some sort.
  Label invoke, global;
  __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
  __ tst(r2, Operand(kSmiTagMask));
  __ b(eq, &invoke);
  __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset));
  __ cmp(r3, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, &global);
  __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(ne, &invoke);

  // Patch the receiver on the stack.
  __ bind(&global);
  __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
  __ str(r2, MemOperand(sp, argc * kPointerSize));

  // Invoke the function.
  ParameterCount actual(argc);
  __ bind(&invoke);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
}


// Defined in ic.cc.
Object* LoadIC_Miss(Arguments args);

void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r0, MemOperand(sp, 0));
  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, r0, r2, r3);

  // Cache miss: Jump to runtime.
  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}


void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  Label miss, probe, global;

  __ ldr(r0, MemOperand(sp, 0));
  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the receiver is a valid JS object.
  __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r1, FieldMemOperand(r3, Map::kInstanceTypeOffset));
  __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &miss);
  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object (unlikely).
  __ cmp(r1, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, &global);

  // Check for non-global object that requires access check.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);

  __ bind(&probe);
  GenerateDictionaryLoad(masm, &miss, r1, r0);
  GenerateCheckNonFunctionOrLoaded(masm, &miss, r0, r1);
  __ Ret();

  // Global object access: Check access rights.
  __ bind(&global);
  __ CheckAccessGlobalProxy(r0, r1, &miss);
  __ b(&probe);

  // Cache miss: Restore receiver from stack and jump to runtime.
  __ bind(&miss);
  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}


void LoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r3, MemOperand(sp, 0));
  __ stm(db_w, sp, r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(f, 2);
}


// TODO(181): Implement map patching once loop nesting is tracked on
// the ARM platform so we can generate inlined fast-case code for
// array indexing in loops.
void KeyedLoadIC::PatchInlinedMapCheck(Address address, Object* value) { }


Object* KeyedLoadIC_Miss(Arguments args);


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  Generate(masm, ExternalReference(IC_Utility(kKeyedLoadIC_Miss)));
}


void KeyedLoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r2.bit() | r3.bit());

  __ TailCallRuntime(f, 2);
}


void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  Label slow, fast;

  // Get the key and receiver object from the stack.
  __ ldm(ia, sp, r0.bit() | r1.bit());
  // Check that the key is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(ne, &slow);
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));
  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &slow);

  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to check this explicitly since this generic stub does not perform
  // map checks.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects works as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Get the elements array of the object.
  __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary).
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Factory::hash_table_map()));
  __ b(eq, &slow);
  // Check that the key (index) is within bounds.
  __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
  __ cmp(r0, Operand(r3));
  __ b(lo, &fast);

  // Slow case: Push extra copies of the arguments (2).
  __ bind(&slow);
  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, r0, r1);
  __ ldm(ia, sp, r0.bit() | r1.bit());
  __ stm(db_w, sp, r0.bit() | r1.bit());
  // Do tail-call to runtime routine.
  __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2);

  // Fast case: Do the load.
  __ bind(&fast);
  __ add(r3, r1, Operand(Array::kHeaderSize - kHeapObjectTag));
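  // r3 now points at the first element (the heap-object tag is stripped)
  // and r0 holds the untagged key, so the load below fetches the element
  // at r3 + key * kPointerSize.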
  __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
  __ cmp(r0, Operand(Factory::the_hole_value()));
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, &slow);

  __ Ret();
}


void KeyedStoreIC::Generate(MacroAssembler* masm,
                            const ExternalReference& f) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver

  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  __ TailCallRuntime(f, 3);
}


void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver
  Label slow, fast, array, extra, exit;
  // Get the key and the object from the stack.
  __ ldm(ia, sp, r1.bit() | r3.bit());  // r1 = key, r3 = receiver
  // Check that the key is a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(ne, &slow);
  // Check that the object isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &slow);
  // Get the map of the object.
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check if the object is a JS array or not.
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_ARRAY_TYPE));
  // r1 == key.
  __ b(eq, &array);
  // Check that the object is some kind of JS object.
  __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);


  // Object case: Check key against length in the elements array.
  __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary).
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ cmp(r2, Operand(Factory::hash_table_map()));
  __ b(eq, &slow);
  // Untag the key (for checking against untagged length in the fixed array).
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));
  // Compute address to store into and check array bounds.
  __ add(r2, r3, Operand(Array::kHeaderSize - kHeapObjectTag));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
  __ ldr(ip, FieldMemOperand(r3, Array::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(lo, &fast);


  // Slow case: Push extra copies of the arguments (3).
  __ bind(&slow);
  __ ldm(ia, sp, r1.bit() | r3.bit());  // r0 == value, r1 == key, r3 == object
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
  // Do tail-call to runtime routine.
  __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  // r0 == value, r1 == key, r2 == elements, r3 == object
  __ bind(&extra);
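  // The condition flags still hold the key vs. JSArray-length comparison
  // from the branch that reached &extra, so ne below means the key is past
  // the current length and the store would create a hole.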
  __ b(ne, &slow);  // do not leave holes in the array
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));  // untag
  __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(hs, &slow);
  __ mov(r1, Operand(r1, LSL, kSmiTagSize));  // restore tag
  __ add(r1, r1, Operand(1 << kSmiTagSize));  // and increment
  __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
  __ mov(r3, Operand(r2));
  // NOTE: Computing the address to store into must take the fact
  // that the key has been incremented into account.
  int displacement = Array::kHeaderSize - kHeapObjectTag -
      ((1 << kSmiTagSize) * 2);
  __ add(r2, r2, Operand(displacement));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
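  // r1 holds the new (already incremented) smi length, so the scaled add
  // above lands one element too far; the ((1 << kSmiTagSize) * 2) subtracted
  // from the displacement (one pointer on ARM) compensates, leaving r2
  // addressing the slot for the original key.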
  __ b(&fast);


  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode; if it is the
  // length is always a smi.
  // r0 == value, r3 == object
  __ bind(&array);
  __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
  __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ cmp(r1, Operand(Factory::hash_table_map()));
  __ b(eq, &slow);

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ ldr(r1, MemOperand(sp));  // restore key
  // r0 == value, r1 == key, r2 == elements, r3 == object.
  __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(hs, &extra);
  __ mov(r3, Operand(r2));
  __ add(r2, r2, Operand(Array::kHeaderSize - kHeapObjectTag));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));


  // Fast case: Do the store.
  // r0 == value, r2 == address to store into, r3 == elements
  __ bind(&fast);
  __ str(r0, MemOperand(r2));
  // Skip write barrier if the written value is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &exit);
  // Update write barrier for the elements array address.
  __ sub(r1, r2, Operand(r3));
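  // r1 is now the byte offset of the updated slot within the elements array
  // in r3; RecordWrite below records the write using that pair, with r2 as
  // a scratch register.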
  __ RecordWrite(r3, r1, r2);

  __ bind(&exit);
  __ Ret();
}


void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver
  // -----------------------------------

  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(
      ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
}


void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  // Get the receiver from the stack and probe the stub cache.
  __ ldr(r1, MemOperand(sp));
  Code::Flags flags = Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3);

  // Cache miss: Jump to runtime.
  Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
}


void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r3, MemOperand(sp));  // copy receiver
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(
      ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
}


void StoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r3, MemOperand(sp));  // copy receiver
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(f, 3);
}


#undef __


} }  // namespace v8::internal