The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.
Please select the desired protocol below to get the URL.
This URL has Read-Only access.
main_repo / deps / v8 / src / arm / ic-arm.cc @ f230a1cf
History | View | Annotate | Download (58 KB)
1 |
// Copyright 2012 the V8 project authors. All rights reserved.
|
---|---|
2 |
// Redistribution and use in source and binary forms, with or without
|
3 |
// modification, are permitted provided that the following conditions are
|
4 |
// met:
|
5 |
//
|
6 |
// * Redistributions of source code must retain the above copyright
|
7 |
// notice, this list of conditions and the following disclaimer.
|
8 |
// * Redistributions in binary form must reproduce the above
|
9 |
// copyright notice, this list of conditions and the following
|
10 |
// disclaimer in the documentation and/or other materials provided
|
11 |
// with the distribution.
|
12 |
// * Neither the name of Google Inc. nor the names of its
|
13 |
// contributors may be used to endorse or promote products derived
|
14 |
// from this software without specific prior written permission.
|
15 |
//
|
16 |
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17 |
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18 |
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19 |
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20 |
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21 |
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22 |
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23 |
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24 |
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25 |
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26 |
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27 |
|
28 |
#include "v8.h" |
29 |
|
30 |
#if V8_TARGET_ARCH_ARM
|
31 |
|
32 |
#include "assembler-arm.h" |
33 |
#include "code-stubs.h" |
34 |
#include "codegen.h" |
35 |
#include "disasm.h" |
36 |
#include "ic-inl.h" |
37 |
#include "runtime.h" |
38 |
#include "stub-cache.h" |
39 |
|
40 |
namespace v8 {
|
41 |
namespace internal {
|
42 |
|
43 |
|
44 |
// ----------------------------------------------------------------------------
|
45 |
// Static IC stub generators.
|
46 |
//
|
47 |
|
48 |
#define __ ACCESS_MASM(masm)
|
49 |
|
50 |
|
51 |
// Branches to |global_object| when |type| is one of the three instance types
// that identify a global object (JS global object, builtins object, or
// global proxy).  Falls through otherwise; |type| is not modified.
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, global_object);
}
63 |
|
64 |
|
65 |
// Generated code falls through if the receiver is a regular non-global
|
66 |
// JS object with slow properties and no interceptors.
|
67 |
static void GenerateNameDictionaryReceiverCheck(MacroAssembler* masm, |
68 |
Register receiver, |
69 |
Register elements, |
70 |
Register t0, |
71 |
Register t1, |
72 |
Label* miss) { |
73 |
// Register usage:
|
74 |
// receiver: holds the receiver on entry and is unchanged.
|
75 |
// elements: holds the property dictionary on fall through.
|
76 |
// Scratch registers:
|
77 |
// t0: used to holds the receiver map.
|
78 |
// t1: used to holds the receiver instance type, receiver bit mask and
|
79 |
// elements map.
|
80 |
|
81 |
// Check that the receiver isn't a smi.
|
82 |
__ JumpIfSmi(receiver, miss); |
83 |
|
84 |
// Check that the receiver is a valid JS object.
|
85 |
__ CompareObjectType(receiver, t0, t1, FIRST_SPEC_OBJECT_TYPE); |
86 |
__ b(lt, miss); |
87 |
|
88 |
// If this assert fails, we have to check upper bound too.
|
89 |
STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE); |
90 |
|
91 |
GenerateGlobalInstanceTypeCheck(masm, t1, miss); |
92 |
|
93 |
// Check that the global object does not require access checks.
|
94 |
__ ldrb(t1, FieldMemOperand(t0, Map::kBitFieldOffset)); |
95 |
__ tst(t1, Operand((1 << Map::kIsAccessCheckNeeded) |
|
96 |
(1 << Map::kHasNamedInterceptor)));
|
97 |
__ b(ne, miss); |
98 |
|
99 |
__ ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
100 |
__ ldr(t1, FieldMemOperand(elements, HeapObject::kMapOffset)); |
101 |
__ LoadRoot(ip, Heap::kHashTableMapRootIndex); |
102 |
__ cmp(t1, ip); |
103 |
__ b(ne, miss); |
104 |
} |
105 |
|
106 |
|
107 |
// Helper function used from LoadIC/CallIC GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label is
//           done.
// result:   Register for the result. It is only updated if a jump to the miss
//           label is not done. Can be the same as elements or name clobbering
//           one of these in the case of not jumping to the miss label.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register elements,
                                   Register name,
                                   Register result,
                                   Register scratch1,
                                   Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                   miss,
                                                   &done,
                                                   elements,
                                                   name,
                                                   scratch1,
                                                   scratch2);

  // If probing finds an entry check that the value is a normal
  // property.
  __ bind(&done);  // scratch2 == elements + 4 * index
  const int kElementsStartOffset = NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Details word is a smi; a non-zero type field means the property is not
  // a NORMAL field, so bail out to miss.
  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ tst(scratch1, Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  __ ldr(result,
         FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
}
156 |
|
157 |
|
158 |
// Helper function used from StoreIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label is
//           done.
// value:    The value to store.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch1,
                                    Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                   miss,
                                                   &done,
                                                   elements,
                                                   name,
                                                   scratch1,
                                                   scratch2);

  // If probing finds an entry in the dictionary check that the value
  // is a normal property that is not read only.
  __ bind(&done);  // scratch2 == elements + 4 * index
  const int kElementsStartOffset = NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Reject non-NORMAL properties and READ_ONLY ones with a single test.
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ tst(scratch1, Operand(kTypeAndReadOnlyMask));
  __ b(ne, miss);

  // Store the value at the masked, scaled index and return.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ add(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
  __ str(value, MemOperand(scratch2));

  // Update the write barrier. Make sure not to clobber the value.
  __ mov(scratch1, value);
  __ RecordWrite(
      elements, scratch2, scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs);
}
214 |
|
215 |
|
216 |
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           Register scratch,
                                           int interceptor_bit,
                                           Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver.
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check bit field: bail out to the slow path if the receiver needs an
  // access check or has the caller-specified interceptor bit set.
  __ ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch,
         Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ b(ne, slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects work as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch, Operand(JS_OBJECT_TYPE));
  __ b(lt, slow);
}
242 |
|
243 |
|
244 |
// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch1,
                                  Register scratch2,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  // scratch1 - used to hold elements map and elements length.
  //            Holds the elements map if not_fast_array branch is taken.
  //
  // scratch2 - used to hold the loaded value.

  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch1, ip);
    __ b(ne, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // Check that the key (index) is within bounds.
  __ ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(scratch1));
  __ b(hs, out_of_range);
  // Fast case: Do the load.
  __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(scratch2, MemOperand::PointerAddressFromSmiKey(scratch1, key));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, ip);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, out_of_range);
  __ mov(result, scratch2);
}
301 |
|
302 |
|
303 |
// Checks whether a key is an array index string or a unique name.
// Falls through if a key is a unique name.
static void GenerateKeyNameCheck(MacroAssembler* masm,
                                 Register key,
                                 Register map,
                                 Register hash,
                                 Label* index_string,
                                 Label* not_unique) {
  // The key is not a smi.
  Label unique;
  // Is it a name?
  __ CompareObjectType(key, map, hash, LAST_UNIQUE_NAME_TYPE);
  __ b(hi, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  // Exactly LAST_UNIQUE_NAME_TYPE means a non-string unique name (symbol).
  __ b(eq, &unique);

  // Is the string an array index, with cached numeric value?
  __ ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset));
  __ tst(hash, Operand(Name::kContainsCachedArrayIndexMask));
  __ b(eq, index_string);

  // Is the string internalized? We know it's a string, so a single
  // bit test is enough.
  // map: key map
  __ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0);
  __ tst(hash, Operand(kIsNotInternalizedMask));
  __ b(ne, not_unique);

  __ bind(&unique);
}
334 |
|
335 |
|
336 |
// Defined in ic.cc.
|
337 |
Object* CallIC_Miss(Arguments args); |
338 |
|
339 |
// The generated code does not accept smi keys.
// The generated code falls through if both probes miss.
void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                               int argc,
                                               Code::Kind kind,
                                               Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  -- r1    : receiver
  //  -- r2    : name
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(kind,
                                         MONOMORPHIC,
                                         extra_state,
                                         Code::NORMAL,
                                         argc);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ JumpIfSmi(r1, &number);
  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
  __ b(ne, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for string.  r3 still holds the instance type from the compare
  // above.
  __ bind(&non_number);
  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for boolean (compare against the true and false root values).
  __ bind(&non_string);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r1, ip);
  __ b(eq, &boolean);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  __ bind(&miss);
}
401 |
|
402 |
|
403 |
// Tail-calls the JSFunction held in r1 with |argc| arguments, jumping to
// |miss| if r1 does not actually hold a JSFunction.
static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss,
                                     Register scratch) {
  // r1: function

  // Check that the value isn't a smi.
  __ JumpIfSmi(r1, miss);

  // Check that the value is a JSFunction.
  __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
}
421 |
|
422 |
|
423 |
// Handles a call where the receiver is a dictionary-mode (slow properties)
// object: looks the function up in the receiver's property dictionary and
// tail-calls it.  Falls through on miss.
void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  GenerateNameDictionaryReceiverCheck(masm, r1, r0, r3, r4, &miss);

  // r0: elements
  // Search the dictionary - put result in register r1.
  GenerateDictionaryLoad(masm, &miss, r0, r2, r1, r3, r4);

  GenerateFunctionTailCall(masm, argc, &miss, r4);

  __ bind(&miss);
}
443 |
|
444 |
|
445 |
// Slow path for CallIC/KeyedCallIC: calls the |id| runtime entry to resolve
// the function, patches a global receiver on the stack if needed, then
// invokes the resolved function.
void CallICBase::GenerateMiss(MacroAssembler* masm,
                              int argc,
                              IC::UtilityId id,
                              Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(isolate->counters()->call_miss(), 1, r3, r4);
  } else {
    __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, r3, r4);
  }

  // Get the receiver of the function from the stack.
  __ ldr(r3, MemOperand(sp, argc * kPointerSize));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the receiver and the name of the function.
    __ Push(r3, r2);

    // Call the entry.  r0 holds the argument count for the C entry stub.
    __ mov(r0, Operand(2));
    __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));

    CEntryStub stub(1);
    __ CallStub(&stub);

    // Move result to r1 and leave the internal frame.
    __ mov(r1, Operand(r0));
  }

  // Check if the receiver is a global object of some sort.
  // This can happen only for regular CallIC but not KeyedCallIC.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
    __ JumpIfSmi(r2, &invoke);
    __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
    __ b(eq, &global);
    __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
    __ b(ne, &invoke);

    // Patch the receiver on the stack.
    __ bind(&global);
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
    __ str(r2, MemOperand(sp, argc * kPointerSize));
    __ bind(&invoke);
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(r1,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}
510 |
|
511 |
|
512 |
// Megamorphic CallIC entry: probes the monomorphic stub cache first and
// falls back to the generic miss handler.
void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                 int argc,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
  GenerateMiss(masm, argc, extra_ic_state);
}
525 |
|
526 |
|
527 |
// Megamorphic KeyedCallIC entry: dispatches on the key kind (smi index,
// cached array-index string, or unique name) and tries progressively
// slower lookup strategies before falling back to the miss handler.
void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  Label do_call, slow_call, slow_load, slow_reload_receiver;
  Label check_number_dictionary, check_name, lookup_monomorphic_cache;
  Label index_smi, index_name;

  // Check that the key is a smi.
  __ JumpIfNotSmi(r2, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, r0, r3);

  __ bind(&do_call);
  // receiver in r1 is not used after this point.
  // r2: key
  // r1: function
  GenerateFunctionTailCall(masm, argc, &slow_call, r0);

  __ bind(&check_number_dictionary);
  // r2: key
  // r3: elements map
  // r4: elements
  // Check whether the elements is a number dictionary.
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow_load);
  __ SmiUntag(r0, r2);
  // r0: untagged index
  __ LoadFromNumberDictionary(&slow_load, r4, r2, r1, r0, r3, r5);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r2);  // save the key
    __ Push(r1, r2);  // pass the receiver and the key
    __ CallRuntime(Runtime::kKeyedGetProperty, 2);
    __ pop(r2);  // restore the key
  }
  __ mov(r1, r0);
  __ jmp(&do_call);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, r2, r0, r3, &index_name, &slow_call);

  // The key is known to be a unique name.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ ldr(r0, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &lookup_monomorphic_cache);

  GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, r0, r3);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, r0, r3);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                Code::kNoExtraICState);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  // - the receiver requires boxing or access check,
  // - the key is neither smi nor a unique name,
  // - the value loaded is not a function,
  // - there is hope that the runtime will create a monomorphic call stub
  //   that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, r0, r3);
  GenerateMiss(masm, argc);

  __ bind(&index_name);
  __ IndexFromHash(r3, r2);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
631 |
|
632 |
|
633 |
// KeyedCallIC entry for dictionary-mode receivers: verifies the key is a
// name, then delegates to the shared CallICBase::GenerateNormal path.
void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Check if the name is really a name.
  Label miss;
  __ JumpIfSmi(r2, &miss);
  __ IsObjectNameType(r2, r0, &miss);

  CallICBase::GenerateNormal(masm, argc);
  __ bind(&miss);
  GenerateMiss(masm, argc);
}
648 |
|
649 |
|
650 |
// Megamorphic LoadIC entry: probes the stub cache for a handler; on miss
// tail-calls the runtime miss handler.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(
      Code::HANDLER, MONOMORPHIC, Code::kNoExtraICState,
      Code::NORMAL, Code::LOAD_IC);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, r0, r2, r3, r4, r5, r6);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
667 |
|
668 |
|
669 |
// LoadIC entry for dictionary-mode receivers: loads the property straight
// out of the receiver's property dictionary and returns it in r0.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  // -----------------------------------
  Label miss;

  GenerateNameDictionaryReceiverCheck(masm, r0, r1, r3, r4, &miss);

  // r1: elements
  GenerateDictionaryLoad(masm, &miss, r1, r2, r0, r3, r4);
  __ Ret();

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}
687 |
|
688 |
|
689 |
// LoadIC miss handler: bumps the miss counter and tail-calls the
// kLoadIC_Miss runtime entry with (receiver, name) on the stack.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4);

  __ mov(r3, r0);
  __ Push(r3, r2);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
}
707 |
|
708 |
|
709 |
// Tail-calls the generic Runtime::kGetProperty with (receiver, name)
// pushed on the stack.
void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  // -----------------------------------

  __ mov(r3, r0);
  __ Push(r3, r2);

  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}
721 |
|
722 |
|
723 |
// Computes the MemOperand of a mapped (aliased) non-strict arguments
// element.  Jumps to |unmapped_case| (with the parameter map in scratch1)
// when the key refers to an unmapped slot, and to |slow_case| on any
// receiver/key check failure.
static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                                Register object,
                                                Register key,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* unmapped_case,
                                                Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the map check
  // later, we do not need to check for interceptors or whether it
  // requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, slow_case);

  // Check that the key is a positive smi (sign bit and smi tag bit both
  // clear).
  __ tst(key, Operand(0x80000001));
  __ b(ne, slow_case);

  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
  __ ldr(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
  __ sub(scratch2, scratch2, Operand(Smi::FromInt(2)));
  __ cmp(key, Operand(scratch2));
  __ b(cs, unmapped_case);

  // Load element index and check whether it is the hole.
  const int kOffset =
      FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;

  // key is a smi, so scale by kPointerSize >> 1 to get a byte offset.
  __ mov(scratch3, Operand(kPointerSize >> 1));
  __ mul(scratch3, key, scratch3);
  __ add(scratch3, scratch3, Operand(kOffset));

  __ ldr(scratch2, MemOperand(scratch1, scratch3));
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, scratch3);
  __ b(eq, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  __ ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
  __ mov(scratch3, Operand(kPointerSize >> 1));
  __ mul(scratch3, scratch2, scratch3);
  __ add(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
  return MemOperand(scratch1, scratch3);
}
779 |
|
780 |
|
781 |
// Computes the MemOperand of an element in the arguments backing store.
static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                                  Register key,
                                                  Register parameter_map,
                                                  Register scratch,
                                                  Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
              DONT_DO_SMI_CHECK);
  // Bounds-check the (smi) key against the backing store length.
  __ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
  __ cmp(key, Operand(scratch));
  __ b(cs, slow_case);
  // key is a smi, so scale by kPointerSize >> 1 to get a byte offset.
  __ mov(scratch, Operand(kPointerSize >> 1));
  __ mul(scratch, key, scratch);
  __ add(scratch,
         scratch,
         Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  return MemOperand(backing_store, scratch);
}
806 |
|
807 |
|
808 |
// KeyedLoadIC for non-strict (sloppy) arguments objects: tries the mapped
// (aliased) lookup first, then the unmapped backing store; jumps to the
// miss handler on failure or when the slot holds the hole.
void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label slow, notin;
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r1, r0, r2, r3, r4, &notin, &slow);
  __ ldr(r0, mapped_location);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r2.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r0, r2, r3, &slow);
  __ ldr(r2, unmapped_location);
  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
  __ cmp(r2, r3);
  __ b(eq, &slow);
  __ mov(r0, r2);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, MISS);
}
832 |
|
833 |
|
834 |
// Keyed store stub for non-strict (sloppy) arguments objects: stores into the
// mapped parameter slot or the unmapped backing store, emitting a write
// barrier for each path; misses to the runtime otherwise.
void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, notin;
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r2, r1, r3, r4, r5, &notin, &slow);
  __ str(r0, mapped_location);
  // r3 + r5 recompute the absolute slot address (base + offset of the
  // mapped_location) for the write barrier below.
  __ add(r6, r3, r5);
  __ mov(r9, r0);  // Preserve r0; RecordWrite clobbers its value register.
  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r1, r3, r4, &slow);
  __ str(r0, unmapped_location);
  __ add(r6, r3, r4);  // Absolute address of the backing-store slot.
  __ mov(r9, r0);
  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, MISS);
}
861 |
|
862 |
|
863 |
// Keyed call stub for non-strict (sloppy) arguments objects: loads the callee
// from the mapped or unmapped arguments slot and tail-calls it; misses to the
// runtime on any failure.
void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
                                             int argc) {
  // ----------- S t a t e -------------
  //  -- r2     : name
  //  -- lr     : return address
  // -----------------------------------
  Label slow, notin;
  // Load receiver.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r1, r2, r3, r4, r5, &notin, &slow);
  __ ldr(r1, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow, r3);
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r2, r3, r4, &slow);
  __ ldr(r1, unmapped_location);
  // A hole means the element was deleted; let the runtime handle it.
  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
  __ cmp(r1, r3);
  __ b(eq, &slow);
  GenerateFunctionTailCall(masm, argc, &slow, r3);
  __ bind(&slow);
  GenerateMiss(masm, argc);
}
888 |
|
889 |
|
890 |
// Miss handler for keyed loads: bumps the miss counter, pushes receiver and
// key, and tail-calls the appropriate runtime miss entry (force-generic or
// regular, depending on |miss_mode|).
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r3, r4);

  __ Push(r1, r0);

  // Perform tail call to the entry.
  ExternalReference ref = miss_mode == MISS_FORCE_GENERIC
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);

  __ TailCallExternalReference(ref, 2, 1);
}
909 |
|
910 |
|
911 |
// Unconditionally delegates a keyed load to Runtime::kKeyedGetProperty with
// the (receiver, key) pair pushed as arguments.
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------

  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
922 |
|
923 |
|
924 |
// Generic (megamorphic) keyed load stub.  Dispatches on the key type:
// smi keys take the fast-elements path or a number-dictionary probe; name
// keys probe the keyed lookup cache and then the property dictionary.
// Numeric strings are converted to smis and re-dispatched.  All failures
// funnel into the runtime via GenerateRuntimeGetProperty.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register key = r0;
  Register receiver = r1;

  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(r2, r3, &check_number_dictionary);

  GenerateFastArrayLoad(
      masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r2, r3);
  __ Ret();

  __ bind(&check_number_dictionary);
  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset));

  // Check whether the elements is a number dictionary.
  // r0: key
  // r3: elements map
  // r4: elements
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow);
  __ SmiUntag(r2, r0);
  __ LoadFromNumberDictionary(&slow, r4, r0, r0, r2, r3, r5);
  __ Ret();

  // Slow case, key and receiver still in r0 and r1.
  __ bind(&slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
                      1, r2, r3);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, r2, r3, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary.
  __ ldr(r3, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r4, ip);
  __ b(eq, &probe_dictionary);

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the name hash.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
  __ ldr(r4, FieldMemOperand(r0, Name::kHashFieldOffset));
  __ eor(r3, r3, Operand(r4, ASR, Name::kHashShift));
  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
  __ And(r3, r3, Operand(mask));

  // Load the key (consisting of map and unique name) from the cache and
  // check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);

  __ mov(r4, Operand(cache_keys));
  // Each cache entry is a (map, name) pair, hence the extra shift by one.
  __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    // Load map and move r4 to next entry.
    __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
    __ cmp(r2, r5);
    __ b(ne, &try_next_entry);
    __ ldr(r5, MemOperand(r4, -kPointerSize));  // Load name
    __ cmp(r0, r5);
    __ b(eq, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  // Last entry: Load map and move r4 to name.
  __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));
  __ cmp(r2, r5);
  __ b(ne, &slow);
  __ ldr(r5, MemOperand(r4));
  __ cmp(r0, r5);
  __ b(ne, &slow);

  // Get field offset.
  // r0     : key
  // r1     : receiver
  // r2     : receiver's map
  // r3     : lookup cache index
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ mov(r4, Operand(cache_field_offsets));
    if (i != 0) {
      __ add(r3, r3, Operand(i));
    }
    __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
    __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
    // r5 < 0 after this subtraction means the property is in-object;
    // otherwise it lives in the out-of-line property array.
    __ sub(r5, r5, r6, SetCC);
    __ b(ge, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ add(r6, r6, r5);  // Index from start of object.
  __ sub(r1, r1, Operand(kHeapObjectTag));  // Remove the heap tag.
  __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Load property array property.
  __ bind(&property_array_property);
  __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // r1: receiver
  // r0: key
  // r3: elements
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
  // Load the property to r0.
  GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
  __ IncrementCounter(
      isolate->counters()->keyed_load_generic_symbol(), 1, r2, r3);
  __ Ret();

  __ bind(&index_name);
  __ IndexFromHash(r3, key);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
1093 |
|
1094 |
|
1095 |
// Keyed load specialized for string receivers: returns the one-character
// string at the given index via StringCharAtGenerator; any failure
// (non-string receiver, non-number key, index out of range) misses.
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key (index)
  //  -- r1     : receiver
  // -----------------------------------
  Label miss;

  Register receiver = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;  // Aliases the key register; key is consumed first.

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm, MISS);
}
1125 |
|
1126 |
|
1127 |
// Keyed load for receivers with an indexed interceptor: validates the
// receiver and key, then tail-calls the interceptor runtime entry.
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(r1, &slow);

  // Check that the key is an array index, that is Uint32.
  __ NonNegativeSmiTst(r0);
  __ b(ne, &slow);

  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));

  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
  // Exactly the interceptor bit must be set; the access-check bit must not.
  __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
  __ b(ne, &slow);

  // Everything is fine, call runtime.
  __ Push(r1, r0);  // Receiver, key.

  // Perform tail call to the entry.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate()),
      2,
      1);

  __ bind(&slow);
  GenerateMiss(masm, MISS);
}
1165 |
|
1166 |
|
1167 |
// Miss handler for keyed stores: pushes (receiver, key, value) and
// tail-calls the force-generic or regular runtime miss entry.
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  ExternalReference ref = miss_mode == MISS_FORCE_GENERIC
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1184 |
|
1185 |
|
1186 |
// Slow-path named store: completes the store in the runtime without
// recording an IC miss (so the IC state is not perturbed).
void StoreIC::GenerateSlow(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r2     : key
  //  -- r1     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r1, r2, r0);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1203 |
|
1204 |
|
1205 |
// Slow-path keyed store: completes the store in the runtime without
// recording an IC miss (so the IC state is not perturbed).
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1222 |
|
1223 |
|
1224 |
// Delegates a keyed store to Runtime::kSetProperty, passing the property
// attributes (NONE) and the strict-mode flag as extra smi arguments.
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));          // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));   // Strict mode.
  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1242 |
|
1243 |
|
1244 |
// Shared body of the generic keyed store stub.  Emits the fast object and
// fast double store paths (bound at |fast_object| / |fast_double|) plus the
// elements-kind transition paths between them.  |check_map| selects whether
// the elements map is verified first; |increment_length| selects the
// grow-by-one variant used when storing at array[array.length].
// Uses r4/r5 as scratch and expects the receiver map in r3 (asserted below).
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm,
    Label* fast_object,
    Label* fast_double,
    Label* slow,
    KeyedStoreCheckMap check_map,
    KeyedStoreIncrementLength increment_length,
    Register value,
    Register key,
    Register receiver,
    Register receiver_map,
    Register elements_map,
    Register elements) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;

  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  Register scratch_value = r4;
  Register address = r5;
  if (check_map == kCheckMap) {
    __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ cmp(elements_map,
           Operand(masm->isolate()->factory()->fixed_array_map()));
    __ b(ne, fast_double);
  }
  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);

  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(scratch_value, key, Operand(Smi::FromInt(1)));
    __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ str(value, MemOperand::PointerAddressFromSmiKey(address, key));
  __ Ret();

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ CheckFastObjectElements(receiver_map, scratch_value,
                             &transition_smi_elements);

  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(scratch_value, key, Operand(Smi::FromInt(1)));
    __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(address, address, Operand::PointerOffsetFromSmiKey(key));
  __ str(value, MemOperand(address));
  // Update write barrier for the elements array address.
  __ mov(scratch_value, value);  // Preserve the value which is returned.
  __ RecordWrite(elements,
                 address,
                 scratch_value,
                 kLRHasNotBeenSaved,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ Ret();

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    __ CompareRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex);
    __ b(ne, slow);
  }
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, key, elements, r3, d0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(scratch_value, key, Operand(Smi::FromInt(1)));
    __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }
  __ Ret();

  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
  __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset));
  __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
  __ b(ne, &non_double_value);

  // Value is a double. Transition FAST_SMI_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
  AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS,
                                                    FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow);
  // Transitioning can replace the backing store; reload it.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode,
                                                                   slow);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}
1377 |
|
1378 |
|
1379 |
// Generic (megamorphic) keyed store stub.  Validates receiver and smi key,
// then dispatches: in-bounds stores go to the fast object/double paths,
// stores at exactly array.length grow the array by one, and everything else
// falls through to Runtime::kSetProperty.  The two helper invocations at the
// bottom emit the bodies of the fast and grow paths respectively.
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;

  // Register usage.
  Register value = r0;
  Register key = r1;
  Register receiver = r2;
  Register receiver_map = r3;
  Register elements_map = r6;
  Register elements = r9;  // Elements array of the receiver.
  // r4 and r5 are used as general scratch registers.

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map of the object.
  __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks. We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check if the object is a JS array or not.
  __ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
  __ cmp(r4, Operand(JS_ARRAY_TYPE));
  __ b(eq, &array);
  // Check that the object is some kind of JSObject.
  __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Object case: Check key against length in the elements array.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(lo, &fast_object);

  // Slow case, handle jump to runtime.
  __ bind(&slow);
  // Entry registers are intact.
  // r0: value.
  // r1: key.
  // r2: receiver.
  GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // Condition code from comparing key and array length is still available.
  __ b(ne, &slow);  // Only support writing to writing to array[array.length].
  // Check for room in the elements backing store.
  // Both the key and the length of FixedArray are smis.
  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &slow);
  __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_array_map()));
  __ b(ne, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_double_array_map()));
  __ b(ne, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array.
  __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &extra);

  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
                                  &slow, kCheckMap, kDontIncrementLength,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
}
1477 |
|
1478 |
|
1479 |
// Megamorphic named store stub: probes the stub cache for a matching
// handler and falls back to the miss handler on a cache miss.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- r0     : value
  //  -- r1     : receiver
  //  -- r2     : name
  //  -- lr     : return address
  // -----------------------------------

  // Get the receiver from the stack and probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(
      Code::HANDLER, MONOMORPHIC, strict_mode,
      Code::NORMAL, Code::STORE_IC);

  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
1499 |
|
1500 |
|
1501 |
// Miss handler for named stores: pushes (receiver, name, value) and
// tail-calls the runtime miss entry.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : value
  //  -- r1     : receiver
  //  -- r2     : name
  //  -- lr     : return address
  // -----------------------------------

  __ Push(r1, r2, r0);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1516 |
|
1517 |
|
1518 |
// Named store for receivers with a name-dictionary (slow-properties)
// backing store: stores directly into the dictionary, counting hits and
// misses; misses to the runtime otherwise.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : value
  //  -- r1     : receiver
  //  -- r2     : name
  //  -- lr     : return address
  // -----------------------------------
  Label miss;

  GenerateNameDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);

  GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(),
                      1, r4, r5);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5);
  GenerateMiss(masm);
}
1539 |
|
1540 |
|
1541 |
// Delegates a named store to Runtime::kSetProperty, passing the property
// attributes (NONE) and the strict-mode flag as extra smi arguments.
void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                         StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- r0     : value
  //  -- r1     : receiver
  //  -- r2     : name
  //  -- lr     : return address
  // -----------------------------------

  __ Push(r1, r2, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));
  __ Push(r1, r0);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1559 |
|
1560 |
|
1561 |
#undef __
|
1562 |
|
1563 |
|
1564 |
// Translate a comparison token into the ARM condition code that implements
// it.  Strict and non-strict equality both map to eq; any other token is a
// caller bug and trips UNREACHABLE().
Condition CompareIC::ComputeCondition(Token::Value op) {
  if (op == Token::EQ_STRICT || op == Token::EQ) return eq;
  if (op == Token::LT) return lt;
  if (op == Token::GT) return gt;
  if (op == Token::LTE) return le;
  if (op == Token::GTE) return ge;
  UNREACHABLE();
  return kNoCondition;
}
1582 |
|
1583 |
|
1584 |
// Returns true if the call site at |address| is followed by a cmp-immediate
// instruction, which is the marker left by an inlined smi check.
bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  return Assembler::IsCmpImmediate(instr);
}
1594 |
|
1595 |
|
1596 |
// Enables or disables the inlined smi check at the call site |address| by
// patching two instructions in place: the cmp/tst at the recorded delta and
// the conditional branch that follows it.  The delta to the patch site is
// encoded in the cmp-immediate instruction after the call (immediate plus
// register code scaled by kOff12Mask); a missing cmp or a zero delta means
// nothing was inlined and the function returns without patching.
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  if (!Assembler::IsCmpImmediate(instr)) {
    return;
  }

  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
  delta +=
      Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
  // nothing was inlined.
  if (delta == 0) {
    return;
  }

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, cmp=%p, delta=%d\n",
           address, cmp_instruction_address, delta);
  }
#endif

  Address patch_address =
      cmp_instruction_address - delta * Instruction::kInstrSize;
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  // This is patching a conditional "jump if not smi/jump if smi" site.
  // Enabling by changing from
  //   cmp rx, rx
  //   b eq/ne, <target>
  // to
  //   tst rx, #kSmiTagMask
  //   b ne/eq, <target>
  // and vice-versa to be disabled again.
  CodePatcher patcher(patch_address, 2);
  Register reg = Assembler::GetRn(instr_at_patch);
  if (check == ENABLE_INLINED_SMI_CHECK) {
    ASSERT(Assembler::IsCmpRegister(instr_at_patch));
    ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
              Assembler::GetRm(instr_at_patch).code());
    patcher.masm()->tst(reg, Operand(kSmiTagMask));
  } else {
    ASSERT(check == DISABLE_INLINED_SMI_CHECK);
    ASSERT(Assembler::IsTstImmediate(instr_at_patch));
    patcher.masm()->cmp(reg, reg);
  }
  ASSERT(Assembler::IsBranch(branch_instr));
  // Flip the branch condition to match the rewritten test instruction.
  if (Assembler::GetCondition(branch_instr) == eq) {
    patcher.EmitCondition(ne);
  } else {
    ASSERT(Assembler::GetCondition(branch_instr) == ne);
    patcher.EmitCondition(eq);
  }
}
1658 |
|
1659 |
|
1660 |
} } // namespace v8::internal
|
1661 |
|
1662 |
#endif // V8_TARGET_ARCH_ARM |