The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.
Please select the desired protocol below to get the URL.
This URL has Read-Only access.
main_repo / deps / v8 / src / x64 / ic-x64.cc @ f230a1cf
History | View | Annotate | Download (58.9 KB)
1 |
// Copyright 2012 the V8 project authors. All rights reserved.
|
---|---|
2 |
// Redistribution and use in source and binary forms, with or without
|
3 |
// modification, are permitted provided that the following conditions are
|
4 |
// met:
|
5 |
//
|
6 |
// * Redistributions of source code must retain the above copyright
|
7 |
// notice, this list of conditions and the following disclaimer.
|
8 |
// * Redistributions in binary form must reproduce the above
|
9 |
// copyright notice, this list of conditions and the following
|
10 |
// disclaimer in the documentation and/or other materials provided
|
11 |
// with the distribution.
|
12 |
// * Neither the name of Google Inc. nor the names of its
|
13 |
// contributors may be used to endorse or promote products derived
|
14 |
// from this software without specific prior written permission.
|
15 |
//
|
16 |
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17 |
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18 |
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19 |
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20 |
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21 |
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22 |
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23 |
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24 |
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25 |
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26 |
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27 |
|
28 |
#include "v8.h" |
29 |
|
30 |
#if V8_TARGET_ARCH_X64
|
31 |
|
32 |
#include "codegen.h" |
33 |
#include "ic-inl.h" |
34 |
#include "runtime.h" |
35 |
#include "stub-cache.h" |
36 |
|
37 |
namespace v8 {
|
38 |
namespace internal {
|
39 |
|
40 |
// ----------------------------------------------------------------------------
|
41 |
// Static IC stub generators.
|
42 |
//
|
43 |
|
44 |
#define __ ACCESS_MASM(masm)
|
45 |
|
46 |
|
47 |
// Jumps to |global_object| if |type| holds one of the global-object
// instance types (global object, builtins object, or global proxy);
// falls through otherwise. Does not clobber |type|.
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
  __ j(equal, global_object);
  __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
  __ j(equal, global_object);
  __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
  __ j(equal, global_object);
}
59 |
|
60 |
|
61 |
// Generated code falls through if the receiver is a regular non-global
// JS object with slow properties and no interceptors.
// Jumps to |miss| for smis, non-spec-objects, global objects, objects
// needing access checks or with named interceptors, and receivers whose
// properties are not in dictionary (hash table) mode.
static void GenerateNameDictionaryReceiverCheck(MacroAssembler* masm,
                                                Register receiver,
                                                Register r0,
                                                Register r1,
                                                Label* miss) {
  // Register usage:
  //   receiver: holds the receiver on entry and is unchanged.
  //   r0: used to hold receiver instance type.
  //       Holds the property dictionary on fall through.
  //   r1: used to hold receivers map.

  __ JumpIfSmi(receiver, miss);

  // Check that the receiver is a valid JS object.
  __ movq(r1, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movb(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
  __ cmpb(r0, Immediate(FIRST_SPEC_OBJECT_TYPE));
  __ j(below, miss);

  // If this assert fails, we have to check upper bound too.
  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);

  GenerateGlobalInstanceTypeCheck(masm, r0, miss);

  // Check for non-global object that requires access check.
  __ testb(FieldOperand(r1, Map::kBitFieldOffset),
           Immediate((1 << Map::kIsAccessCheckNeeded) |
                     (1 << Map::kHasNamedInterceptor)));
  __ j(not_zero, miss);

  // Verify the properties backing store is a dictionary (hash table);
  // fast-properties receivers must take the miss path.
  __ movq(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss);
}
98 |
|
99 |
|
100 |
|
101 |
// Helper function used to load a property from a dictionary backing storage.
// This function may return false negatives, so miss_label
// must always call a backup property load that is complete.
// This function is safe to call if name is not an internalized string,
// and will jump to the miss_label in that case.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss_label,
                                   Register elements,
                                   Register name,
                                   Register r0,
                                   Register r1,
                                   Register result) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is unchanged.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // r0 - used to hold the capacity of the property dictionary.
  //
  // r1 - used to hold the index into the property dictionary.
  //
  // result - holds the result on exit if the load succeeded.

  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   elements,
                                                   name,
                                                   r0,
                                                   r1);

  // If probing finds an entry in the dictionary, r1 contains the
  // index into the dictionary. Check that the value is a normal
  // property.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  // Entry layout is (key, value, details); details live two words past
  // the entry's key slot.
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ Test(Operand(elements, r1, times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(PropertyDetails::TypeField::kMask));
  __ j(not_zero, miss_label);

  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ movq(result,
          Operand(elements, r1, times_pointer_size,
                  kValueOffset - kHeapObjectTag));
}
157 |
|
158 |
|
159 |
// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not an internalized string, and will jump to the miss_label
// in that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss_label,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch0,
                                    Register scratch1) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is clobbered.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // value - holds the value to store and is unchanged.
  //
  // scratch0 - used during the positive dictionary lookup and is clobbered.
  //
  // scratch1 - used for index into the property dictionary and is clobbered.
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   elements,
                                                   name,
                                                   scratch0,
                                                   scratch1);

  // If probing finds an entry in the dictionary, scratch1 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Reject both non-NORMAL property types and read-only properties in a
  // single test; the mask is shifted because details are stored as a smi.
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ Test(Operand(elements,
                  scratch1,
                  times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);

  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ lea(scratch1, Operand(elements,
                           scratch1,
                           times_pointer_size,
                           kValueOffset - kHeapObjectTag));
  __ movq(Operand(scratch1, 0), value);

  // Update write barrier. Make sure not to clobber the value.
  __ movq(scratch0, value);
  __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
}
225 |
|
226 |
|
227 |
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object; jumps to |slow| for smis, JSValue
// wrappers, and objects requiring access checks or with the given
// interceptor bit set.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           int interceptor_bit,
                                           Label* slow) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  // Scratch registers:
  //   map - used to hold the map of the receiver.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);

  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing
  // into string objects work as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
  __ j(below, slow);

  // Check bit field.
  __ testb(FieldOperand(map, Map::kBitFieldOffset),
           Immediate((1 << Map::kIsAccessCheckNeeded) |
                     (1 << interceptor_bit)));
  __ j(not_zero, slow);
}
256 |
|
257 |
|
258 |
// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key - holds the smi key on entry.
  //       Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result - holds the result on exit if the load succeeded.
  //          Allowed to be the same as 'receiver' or 'key'.
  //          Unchanged on bailout so 'receiver' and 'key' can be safely
  //          used by further computation.
  //
  // Scratch registers:
  //
  // scratch - used to hold elements of the receiver and the loaded value.

  __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                   Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // Check that the key (index) is within bounds.
  __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
  // Unsigned comparison rejects negative indices.
  __ j(above_equal, out_of_range);
  // Fast case: Do the load.
  SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
  __ movq(scratch, FieldOperand(elements,
                                index.reg,
                                index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ j(equal, out_of_range);
  if (!result.is(scratch)) {
    __ movq(result, scratch);
  }
}
314 |
|
315 |
|
316 |
// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name; jumps to |index_string| if
// the key is a string with a cached array index (left in |hash|), and to
// |not_unique| otherwise.
static void GenerateKeyNameCheck(MacroAssembler* masm,
                                 Register key,
                                 Register map,
                                 Register hash,
                                 Label* index_string,
                                 Label* not_unique) {
  // Register use:
  //   key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  //   map - used to hold the map of the key.
  //   hash - used to hold the hash of the key.
  Label unique;
  __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
  __ j(above, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  // Exactly LAST_UNIQUE_NAME_TYPE means a non-string unique name (symbol).
  __ j(equal, &unique);

  // Is the string an array index, with cached numeric value?
  __ movl(hash, FieldOperand(key, Name::kHashFieldOffset));
  __ testl(hash, Immediate(Name::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);  // The value in hash is used at jump target.

  // Is the string internalized? We already know it's a string so a single
  // bit test is enough.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
           Immediate(kIsNotInternalizedMask));
  __ j(not_zero, not_unique);

  __ bind(&unique);
}
349 |
|
350 |
|
351 |
|
352 |
// Generic keyed load: handles smi keys (fast elements and number
// dictionaries) and name keys (keyed lookup cache, then property
// dictionary), falling back to the runtime on any miss.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(rcx, &check_number_dictionary);

  GenerateFastArrayLoad(masm,
                        rdx,
                        rax,
                        rcx,
                        rbx,
                        rax,
                        NULL,
                        &slow);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ SmiToInteger32(rbx, rax);
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check whether the elements is a number dictionary.
  // rdx: receiver
  // rax: key
  // rbx: key as untagged int32
  // rcx: elements
  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow);
  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
  __ ret(0);

  __ bind(&slow);
  // Slow case: Jump to runtime.
  // rdx: receiver
  // rax: key
  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, rax, rcx, rbx, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rcx, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary leaving result in rcx.
  __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &probe_dictionary);

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ movl(rcx, rbx);
  __ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift));
  __ movl(rdi, FieldOperand(rax, String::kHashFieldOffset));
  __ shr(rdi, Immediate(String::kHashShift));
  __ xor_(rcx, rdi);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  __ and_(rcx, Immediate(mask));

  // Load the key (consisting of map and internalized string) from the cache
  // and check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys
      = ExternalReference::keyed_lookup_cache_keys(masm->isolate());

  // Probe all but the last entry in the bucket; each cache entry is a
  // (map, name) pair, i.e. two pointers wide.
  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ movq(rdi, rcx);
    __ shl(rdi, Immediate(kPointerSizeLog2 + 1));
    __ LoadAddress(kScratchRegister, cache_keys);
    int off = kPointerSize * i * 2;
    __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
    __ j(not_equal, &try_next_entry);
    __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
    __ j(equal, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  // Last entry: a mismatch here means a complete cache miss.
  int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
  __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
  __ j(not_equal, &slow);
  __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
  __ j(not_equal, &slow);

  // Get field offset, which is a 32-bit integer.
  ExternalReference cache_field_offsets
      = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    if (i != 0) {
      __ addl(rcx, Immediate(i));
    }
    __ LoadAddress(kScratchRegister, cache_field_offsets);
    __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
    __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
    __ subq(rdi, rcx);
    __ j(above_equal, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
  __ addq(rcx, rdi);
  __ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Load property array property.
  __ bind(&property_array_property);
  __ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
  __ movq(rax, FieldOperand(rax, rdi, times_pointer_size,
                            FixedArray::kHeaderSize));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // rdx: receiver
  // rax: key
  // rbx: elements

  __ movq(rcx, FieldOperand(rdx, JSObject::kMapOffset));
  __ movb(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);

  GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
  __ ret(0);

  // A string key with a cached array index: convert it to a smi key and
  // retry the smi path.
  __ bind(&index_name);
  __ IndexFromHash(rbx, rax);
  __ jmp(&index_smi);
}
513 |
|
514 |
|
515 |
// Keyed load specialized for string receivers: returns the one-character
// string at the given index, or misses for non-strings, non-number keys,
// and out-of-range indices.
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Register receiver = rdx;
  Register index = rax;
  Register scratch = rcx;
  Register result = rax;

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm, MISS);
}
545 |
|
546 |
|
547 |
// Keyed load for receivers with an indexed interceptor: validates the
// receiver and key, then tail-calls the interceptor runtime entry.
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &slow);

  // Check that the key is an array index, that is Uint32.
  STATIC_ASSERT(kSmiValueSize <= 32);
  __ JumpUnlessNonNegativeSmi(rax, &slow);

  // Get the map of the receiver.
  __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));

  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ movb(rcx, FieldOperand(rcx, Map::kBitFieldOffset));
  __ andb(rcx, Immediate(kSlowCaseBitFieldMask));
  __ cmpb(rcx, Immediate(1 << Map::kHasIndexedInterceptor));
  __ j(not_zero, &slow);

  // Everything is fine, call runtime.
  __ PopReturnAddressTo(rcx);
  __ push(rdx);  // receiver
  __ push(rax);  // key
  __ PushReturnAddressFrom(rcx);

  // Perform tail call to the entry.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate()),
      2,
      1);

  __ bind(&slow);
  GenerateMiss(masm, MISS);
}
588 |
|
589 |
|
590 |
// Emits the fast-path stores for KeyedStoreIC::GenerateGeneric: smi,
// object, and double element stores, plus the elements-kind transitions
// (FAST_SMI -> FAST_DOUBLE/FAST, FAST_DOUBLE -> FAST). |check_map|
// selects whether the elements map is verified here; |increment_length|
// selects the array-grow variant that bumps JSArray length by one.
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm,
    Label* fast_object,
    Label* fast_double,
    Label* slow,
    KeyedStoreCheckMap check_map,
    KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  // rax: value
  // rbx: receiver's elements array (a FixedArray)
  // rcx: index
  // rdx: receiver (a JSArray)
  // r9: map of receiver
  if (check_map == kCheckMap) {
    __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
    __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, fast_double);
  }
  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(rax, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(rcx, 1));
    __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ ret(0);

  __ bind(&non_smi_value);
  // Writing a non-smi, check whether array allows non-smi elements.
  // r9: receiver's map
  __ CheckFastObjectElements(r9, &transition_smi_elements);

  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(rcx, 1));
    __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
  }
  __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ movq(rdx, rax);  // Preserve the value which is returned.
  __ RecordWriteArray(
      rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ ret(0);

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    // rdi: elements array's map
    __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
    __ j(not_equal, slow);
  }
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(rcx, 1));
    __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
  }
  __ ret(0);

  __ bind(&transition_smi_elements);
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));

  // Transition the array appropriately depending on the value type.
  __ movq(r9, FieldOperand(rax, HeapObject::kMapOffset));
  __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &non_double_value);

  // Value is a double. Transition FAST_SMI_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS,
                                         rbx,
                                         rdi,
                                         slow);
  AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS,
                                                    FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_ELEMENTS,
                                         rbx,
                                         rdi,
                                         slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode,
                                                                  slow);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
                                         FAST_ELEMENTS,
                                         rbx,
                                         rdi,
                                         slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}
710 |
|
711 |
|
712 |
// Generic keyed store: handles in-bounds stores to fast-elements
// JSObjects/JSArrays (including growing a JSArray by exactly one
// element), delegating the actual stores and elements transitions to
// KeyedStoreGenerateGenericHelper and everything else to the runtime.
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow_with_tagged_index);
  // Get the map from the receiver.
  __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks. We need
  // to do this because this generic stub does not perform map checks.
  __ testb(FieldOperand(r9, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow_with_tagged_index);
  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &slow_with_tagged_index);
  // rcx holds the untagged index from here on; the slow path must re-tag
  // it before calling the runtime (see &slow below).
  __ SmiToInteger32(rcx, rcx);

  __ CmpInstanceType(r9, JS_ARRAY_TYPE);
  __ j(equal, &array);
  // Check that the object is some kind of JSObject.
  __ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE);
  __ j(below, &slow);

  // Object case: Check key against length in the elements array.
  // rax: value
  // rdx: JSObject
  // rcx: index
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  // Check array bounds.
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
  // rax: value
  // rbx: FixedArray
  // rcx: index
  __ j(above, &fast_object);

  // Slow case: call runtime.
  __ bind(&slow);
  __ Integer32ToSmi(rcx, rcx);
  __ bind(&slow_with_tagged_index);
  GenerateRuntimeSetProperty(masm, strict_mode);
  // Never returns to here.

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // rax: value
  // rdx: receiver (a JSArray)
  // rbx: receiver's elements array (a FixedArray)
  // rcx: index
  // flags: smicompare (rdx.length(), rbx)
  __ j(not_equal, &slow);  // do not leave holes in the array
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
  __ j(below_equal, &slow);
  // Increment index to get new length.
  __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  // rdi: elements array's map
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  // rax: value
  // rdx: receiver (a JSArray)
  // rcx: index
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
  __ j(below_equal, &extra);

  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
                                  &slow, kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength);
}
805 |
|
806 |
|
807 |
// Probes the monomorphic stub cache for a call IC, first with the receiver
// itself and, if the receiver is a value (number, string, boolean), a second
// time with the corresponding wrapper prototype's global function.
// The generated code does not accept smi keys.
// The generated code falls through if both probes miss.
void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                               int argc,
                                               Code::Kind kind,
                                               Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  rcx : function name
  //  rdx : receiver
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(kind,
                                         MONOMORPHIC,
                                         extra_state,
                                         Code::NORMAL,
                                         argc);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, rdx, rcx, rbx, rax);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ JumpIfSmi(rdx, &number);
  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx);
  __ j(not_equal, &non_number);
  __ bind(&number);
  // Replace rdx with the Number function's prototype before re-probing.
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, rdx);
  __ jmp(&probe);

  // Check for string.  rbx still holds the receiver's map from the
  // CmpObjectType above.
  __ bind(&non_number);
  __ CmpInstanceType(rbx, FIRST_NONSTRING_TYPE);
  __ j(above_equal, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, rdx);
  __ jmp(&probe);

  // Check for boolean (must be exactly the true or false root value).
  __ bind(&non_string);
  __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
  __ j(equal, &boolean);
  __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
  __ j(not_equal, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, rdx);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, rdx, rcx, rbx, no_reg);

  // Both probes missed: fall through to the caller's miss handling.
  __ bind(&miss);
}
867 |
|
868 |
|
869 |
// Tail-calls the value in rdi if it is a JSFunction; jumps to |miss|
// if it is a smi or any other kind of object.
static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss) {
  // ----------- S t a t e -------------
  //  rcx                 : function name
  //  rdi                 : function
  //  rsp[0]              : return address
  //  rsp[8]              : argument argc
  //  rsp[16]             : argument argc - 1
  //  ...
  //  rsp[argc * 8]       : argument 1
  //  rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  __ JumpIfSmi(rdi, miss);
  // Check that the value is a JavaScript function.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx);
  __ j(not_equal, miss);

  // Invoke the function (tail call — control does not return here).
  ParameterCount actual(argc);
  __ InvokeFunction(rdi, actual, JUMP_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
}
892 |
|
893 |
|
894 |
// Looks the function name up in the receiver's property dictionary and
// tail-calls the result.
// The generated code falls through if the call should be handled by runtime.
void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  rcx                 : function name
  //  rsp[0]              : return address
  //  rsp[8]              : argument argc
  //  rsp[16]             : argument argc - 1
  //  ...
  //  rsp[argc * 8]       : argument 1
  //  rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;

  StackArgumentsAccessor args(rsp, argc);
  __ movq(rdx, args.GetReceiverOperand());

  // Bails out to &miss unless the receiver is suitable for a dictionary
  // (normal) property lookup; leaves the properties array in rax.
  GenerateNameDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss);

  // rax: elements
  // Search the dictionary placing the result in rdi.
  GenerateDictionaryLoad(masm, &miss, rax, rcx, rbx, rdi, rdi);

  GenerateFunctionTailCall(masm, argc, &miss);

  // Fall through on miss; the caller emits the runtime handling.
  __ bind(&miss);
}
920 |
|
921 |
|
922 |
// Calls the IC miss utility |id| in the runtime to resolve the callee, then
// invokes the returned function.  For plain CallIC it also rewrites a global
// or builtins receiver on the stack to the corresponding global receiver.
void CallICBase::GenerateMiss(MacroAssembler* masm,
                              int argc,
                              IC::UtilityId id,
                              Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  rcx                 : function name
  //  rsp[0]              : return address
  //  rsp[8]              : argument argc
  //  rsp[16]             : argument argc - 1
  //  ...
  //  rsp[argc * 8]       : argument 1
  //  rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(counters->call_miss(), 1);
  } else {
    __ IncrementCounter(counters->keyed_call_miss(), 1);
  }

  StackArgumentsAccessor args(rsp, argc);
  __ movq(rdx, args.GetReceiverOperand());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the receiver and the name of the function.
    __ push(rdx);
    __ push(rcx);

    // Call the entry.  Two arguments (receiver, name) were pushed above.
    CEntryStub stub(1);
    __ Set(rax, 2);
    __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
    __ CallStub(&stub);

    // Move result to rdi and exit the internal frame.
    __ movq(rdi, rax);
  }

  // Check if the receiver is a global object of some sort.
  // This can happen only for regular CallIC but not KeyedCallIC.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ movq(rdx, args.GetReceiverOperand());
    __ JumpIfSmi(rdx, &invoke);
    __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx);
    __ j(equal, &global);
    __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE);
    __ j(not_equal, &invoke);

    // Patch the receiver on the stack with the global receiver object.
    __ bind(&global);
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(args.GetReceiverOperand(), rdx);
    __ bind(&invoke);
  }

  // Invoke the function resolved by the runtime (in rdi).
  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(rdi,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}
993 |
|
994 |
|
995 |
// Megamorphic call IC: probe the monomorphic stub cache and fall back to the
// generic miss handler if the probe does not hit.
void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                 int argc,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  rcx                 : function name
  //  rsp[0]              : return address
  //  rsp[8]              : argument argc
  //  rsp[16]             : argument argc - 1
  //  ...
  //  rsp[argc * 8]       : argument 1
  //  rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Get the receiver of the function from the stack; 1 ~ return address.
  StackArgumentsAccessor args(rsp, argc);
  __ movq(rdx, args.GetReceiverOperand());
  // Probe tail-calls on a hit; falling through means a cache miss.
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
  GenerateMiss(masm, argc, extra_ic_state);
}
1013 |
|
1014 |
|
1015 |
// Megamorphic keyed call IC: dispatches on the key type (smi index, numeric
// string hash, or unique name) and tries, in order, fast elements, number
// dictionary, property dictionary, and the monomorphic stub cache before
// giving up to the miss handler.
void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  rcx                 : function name
  //  rsp[0]              : return address
  //  rsp[8]              : argument argc
  //  rsp[16]             : argument argc - 1
  //  ...
  //  rsp[argc * 8]       : argument 1
  //  rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  StackArgumentsAccessor args(rsp, argc);
  __ movq(rdx, args.GetReceiverOperand());

  Label do_call, slow_call, slow_load;
  Label check_number_dictionary, check_name, lookup_monomorphic_cache;
  Label index_smi, index_name;

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &check_name);

  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rax, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);

  __ bind(&do_call);
  // receiver in rdx is not used after this point.
  // rcx: key
  // rdi: function
  GenerateFunctionTailCall(masm, argc, &slow_call);

  __ bind(&check_number_dictionary);
  // rax: elements
  // rcx: smi key
  // Check whether the elements is a number dictionary.
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow_load);
  __ SmiToInteger32(rbx, rcx);
  // rbx: untagged index
  __ LoadFromNumberDictionary(&slow_load, rax, rcx, rbx, r9, rdi, rdi);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rcx);  // save the key
    __ push(rdx);  // pass the receiver
    __ push(rcx);  // pass the key
    __ CallRuntime(Runtime::kKeyedGetProperty, 2);
    __ pop(rcx);  // restore the key
  }
  __ movq(rdi, rax);
  __ jmp(&do_call);

  __ bind(&check_name);
  // Jumps to &index_name for hashes that cache an array index, to
  // &slow_call for keys that are not unique names.
  GenerateKeyNameCheck(masm, rcx, rax, rbx, &index_name, &slow_call);

  // The key is known to be a unique name.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &lookup_monomorphic_cache);

  GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                Code::kNoExtraICState);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  // - the receiver requires boxing or access check,
  // - the key is neither smi nor a unique name,
  // - the value loaded is not a function,
  // - there is hope that the runtime will create a monomorphic call stub
  //   that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
  GenerateMiss(masm, argc);

  __ bind(&index_name);
  // The name's hash caches an array index; extract it into rcx.
  __ IndexFromHash(rbx, rcx);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
1124 |
|
1125 |
|
1126 |
// Keyed call IC in "normal" (dictionary) state: only handles keys that are
// actual names; everything else goes straight to the miss handler.
void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  rcx                 : function name
  //  rsp[0]              : return address
  //  rsp[8]              : argument argc
  //  rsp[16]             : argument argc - 1
  //  ...
  //  rsp[argc * 8]       : argument 1
  //  rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Check if the name is really a name.
  Label miss;
  __ JumpIfSmi(rcx, &miss);
  Condition cond = masm->IsObjectNameType(rcx, rax, rax);
  __ j(NegateCondition(cond), &miss);
  // Delegate to the shared dictionary-lookup path; it falls through on miss.
  CallICBase::GenerateNormal(masm, argc);
  __ bind(&miss);
  GenerateMiss(masm, argc);
}
1146 |
|
1147 |
|
1148 |
// Computes the operand for an element of a non-strict arguments object that
// is mapped to a context slot.  Jumps to |unmapped_case| when the index falls
// outside the mapped range or the slot is the hole, and to |slow_case| for
// receivers/keys this fast path cannot handle.  On the fall-through (mapped)
// path the returned operand addresses the context slot; scratch1-3 are
// clobbered.
static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                             Register object,
                                             Register key,
                                             Register scratch1,
                                             Register scratch2,
                                             Register scratch3,
                                             Label* unmapped_case,
                                             Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the elements
  // map check later, we do not need to check for interceptors or
  // whether it requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
  __ j(below, slow_case);

  // Check that the key is a positive smi.
  Condition check = masm->CheckNonNegativeSmi(key);
  __ j(NegateCondition(check), slow_case);

  // Load the elements into scratch1 and check its map. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
  __ movq(scratch1, FieldOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments.  The first two
  // parameter-map entries are not argument slots, hence the -2.
  __ movq(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
  __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
  __ cmpq(key, scratch2);
  __ j(greater_equal, unmapped_case);

  // Load element index and check whether it is the hole.  Mapped entries
  // start after the two header slots (context, backing store).
  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
  __ SmiToInteger64(scratch3, key);
  __ movq(scratch2, FieldOperand(scratch1,
                                 scratch3,
                                 times_pointer_size,
                                 kHeaderSize));
  __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
  __ j(equal, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  __ movq(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
  __ SmiToInteger64(scratch3, scratch2);
  return FieldOperand(scratch1,
                      scratch3,
                      times_pointer_size,
                      Context::kHeaderSize);
}
1202 |
|
1203 |
|
1204 |
// Computes the operand for an element in the arguments backing store, for
// keys that are not mapped to a context slot.  Jumps to |slow_case| if the
// backing store is not a plain FixedArray or the key is out of bounds.
static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                               Register key,
                                               Register parameter_map,
                                               Register scratch,
                                               Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ movq(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
  // Bounds check: key (a smi) against the backing store length (a smi).
  __ movq(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
  __ cmpq(key, scratch);
  __ j(greater_equal, slow_case);
  __ SmiToInteger64(scratch, key);
  return FieldOperand(backing_store,
                      scratch,
                      times_pointer_size,
                      FixedArray::kHeaderSize);
}
1227 |
|
1228 |
|
1229 |
// Keyed load from a non-strict arguments object: try the mapped (aliased)
// lookup first, then the unmapped backing store; fall back to the miss
// handler for anything else (including holes).
void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Operand mapped_location =
      GenerateMappedArgumentsLookup(
          masm, rdx, rax, rbx, rcx, rdi, &notin, &slow);
  __ movq(rax, mapped_location);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow);
  // A hole in the backing store means the element was deleted: go slow.
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rax, unmapped_location);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, MISS);
}
1252 |
|
1253 |
|
1254 |
// Keyed store into a non-strict arguments object: write through the mapped
// (context-aliased) location if the index is mapped, otherwise into the
// backing store; both paths emit a write barrier for the stored value.
void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, rdx, rcx, rbx, rdi, r8, &notin, &slow);
  __ movq(mapped_location, rax);
  // Record the write: r9 = slot address, r8 = value copy (RecordWrite
  // clobbers its registers, so rax is preserved via the copy).
  __ lea(r9, mapped_location);
  __ movq(r8, rax);
  __ RecordWrite(rbx,
                 r9,
                 r8,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 INLINE_SMI_CHECK);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow);
  __ movq(unmapped_location, rax);
  __ lea(r9, unmapped_location);
  __ movq(r8, rax);
  __ RecordWrite(rbx,
                 r9,
                 r8,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 INLINE_SMI_CHECK);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, MISS);
}
1291 |
|
1292 |
|
1293 |
// Keyed call where the receiver is a non-strict arguments object: load the
// callee via the mapped/unmapped lookup and tail-call it.
void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
                                             int argc) {
  // ----------- S t a t e -------------
  //  rcx                 : function name
  //  rsp[0]              : return address
  //  rsp[8]              : argument argc
  //  rsp[16]             : argument argc - 1
  //  ...
  //  rsp[argc * 8]       : argument 1
  //  rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label slow, notin;
  StackArgumentsAccessor args(rsp, argc);
  __ movq(rdx, args.GetReceiverOperand());
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, rdx, rcx, rbx, rax, r8, &notin, &slow);
  __ movq(rdi, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow);
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow);
  // A hole means the element was deleted; let the miss handler deal with it.
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rdi, unmapped_location);
  GenerateFunctionTailCall(masm, argc, &slow);
  __ bind(&slow);
  GenerateMiss(masm, argc);
}
1322 |
|
1323 |
|
1324 |
// Megamorphic named load: probe the stub cache for a handler and fall back
// to the miss handler if the probe does not hit.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(
      Code::HANDLER, MONOMORPHIC, Code::kNoExtraICState,
      Code::NORMAL, Code::LOAD_IC);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, rax, rcx, rbx, rdx);

  // Cache miss: jump to runtime.
  GenerateMiss(masm);
}
1340 |
|
1341 |
|
1342 |
// Named load from a receiver with dictionary (normal) properties: do an
// inline dictionary lookup and return the value.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Bails out to &miss unless the receiver supports a dictionary lookup;
  // leaves the properties array in rdx.
  GenerateNameDictionaryReceiverCheck(masm, rax, rdx, rbx, &miss);

  // rdx: elements
  // Search the dictionary placing the result in rax.
  GenerateDictionaryLoad(masm, &miss, rdx, rcx, rbx, rdi, rax);
  __ ret(0);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}
1361 |
|
1362 |
|
1363 |
// Load IC miss: bump the counter and tail-call the LoadIC_Miss runtime
// utility with (receiver, name) as arguments.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->load_miss(), 1);

  // Re-push the arguments below the return address.
  __ PopReturnAddressTo(rbx);
  __ push(rax);  // receiver
  __ push(rcx);  // name
  __ PushReturnAddressFrom(rbx);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
1383 |
|
1384 |
|
1385 |
// Tail-calls Runtime::kGetProperty with (receiver, name).
void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------

  __ PopReturnAddressTo(rbx);
  __ push(rax);  // receiver
  __ push(rcx);  // name
  __ PushReturnAddressFrom(rbx);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}
1400 |
|
1401 |
|
1402 |
// Keyed load IC miss: bump the counter and tail-call the appropriate miss
// utility (force-generic variant when |miss_mode| requests it).
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);

  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ PushReturnAddressFrom(rbx);

  // Perform tail call to the entry.
  ExternalReference ref = miss_mode == MISS_FORCE_GENERIC
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
1424 |
|
1425 |
|
1426 |
// Tail-calls Runtime::kKeyedGetProperty with (receiver, key).
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ PushReturnAddressFrom(rbx);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
1441 |
|
1442 |
|
1443 |
// Megamorphic named store: probe the stub cache for a store handler
// (strictness is part of the probe flags) and miss to runtime otherwise.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Get the receiver from the stack and probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(
      Code::HANDLER, MONOMORPHIC, strict_mode,
      Code::NORMAL, Code::STORE_IC);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, rdx, rcx, rbx, no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
1462 |
|
1463 |
|
1464 |
// Store IC miss: tail-call the StoreIC_Miss runtime utility with
// (receiver, name, value).
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ PushReturnAddressFrom(rbx);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1483 |
|
1484 |
|
1485 |
// Named store into a receiver with dictionary (normal) properties: do an
// inline dictionary store and return the value.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Label miss;

  // Bails out to &miss unless the receiver supports a dictionary store;
  // leaves the properties array in rbx.
  GenerateNameDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);

  GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}
1506 |
|
1507 |
|
1508 |
// Tail-calls Runtime::kSetProperty with
// (receiver, name, value, attributes=NONE, strict_mode).
void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                         StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  __ PopReturnAddressTo(rbx);
  __ push(rdx);
  __ push(rcx);
  __ push(rax);
  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
  __ Push(Smi::FromInt(strict_mode));
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1527 |
|
1528 |
|
1529 |
// Tail-calls Runtime::kSetProperty with
// (receiver, key, value, attributes=NONE, strict_mode).
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ Push(Smi::FromInt(NONE));         // PropertyAttributes
  __ Push(Smi::FromInt(strict_mode));  // Strict mode.
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1549 |
|
1550 |
|
1551 |
// Tail-calls the StoreIC_Slow runtime utility with (receiver, key, value).
void StoreIC::GenerateSlow(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1569 |
|
1570 |
|
1571 |
// Tail-calls the KeyedStoreIC_Slow runtime utility with
// (receiver, key, value).
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1589 |
|
1590 |
|
1591 |
// Keyed store IC miss: tail-call the appropriate miss utility
// (force-generic variant when |miss_mode| requests it).
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  ExternalReference ref = miss_mode == MISS_FORCE_GENERIC
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1612 |
|
1613 |
|
1614 |
#undef __
|
1615 |
|
1616 |
|
1617 |
// Maps a JavaScript comparison token to the x64 condition code that
// implements it.  Both strict and non-strict equality use the same
// condition; relational operators map one-to-one.  Any other token is a
// programming error.
Condition CompareIC::ComputeCondition(Token::Value op) {
  if (op == Token::EQ_STRICT || op == Token::EQ) return equal;
  if (op == Token::LT) return less;
  if (op == Token::GT) return greater;
  if (op == Token::LTE) return less_equal;
  if (op == Token::GTE) return greater_equal;
  UNREACHABLE();
  return no_condition;
}
1635 |
|
1636 |
|
1637 |
// Returns true if the compare IC call site at |address| was compiled with an
// inlined smi check, recognized by a "test al" byte immediately after the
// call instruction.
bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  return *test_instruction_address == Assembler::kTestAlByte;
}
1646 |
|
1647 |
|
1648 |
// Patches the inlined smi check at a compare IC call site.  The "test al"
// marker after the call stores (in its immediate byte) the backwards delta
// to the short conditional jump that guards the inlined code; this routine
// flips that jump's condition between the carry pair (check disabled) and
// the zero pair (check enabled).
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  if (*test_instruction_address != Assembler::kTestAlByte) {
    ASSERT(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, test=%p, delta=%d\n",
           address, test_instruction_address, delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  ASSERT((check == ENABLE_INLINED_SMI_CHECK)
         ? (*jmp_address == Assembler::kJncShortOpcode ||
            *jmp_address == Assembler::kJcShortOpcode)
         : (*jmp_address == Assembler::kJnzShortOpcode ||
            *jmp_address == Assembler::kJzShortOpcode));
  // Preserve the polarity (taken/not-taken) of the existing jump while
  // swapping which flag it tests.
  Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
      ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
      : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
1683 |
|
1684 |
|
1685 |
} } // namespace v8::internal
|
1686 |
|
1687 |
#endif // V8_TARGET_ARCH_X64 |