The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.
Please select the desired protocol below to get the URL.
This URL has Read-Only access.
main_repo / deps / v8 / src / x64 / stub-cache-x64.cc @ f230a1cf
History | View | Annotate | Download (112 KB)
1 |
// Copyright 2012 the V8 project authors. All rights reserved.
|
---|---|
2 |
// Redistribution and use in source and binary forms, with or without
|
3 |
// modification, are permitted provided that the following conditions are
|
4 |
// met:
|
5 |
//
|
6 |
// * Redistributions of source code must retain the above copyright
|
7 |
// notice, this list of conditions and the following disclaimer.
|
8 |
// * Redistributions in binary form must reproduce the above
|
9 |
// copyright notice, this list of conditions and the following
|
10 |
// disclaimer in the documentation and/or other materials provided
|
11 |
// with the distribution.
|
12 |
// * Neither the name of Google Inc. nor the names of its
|
13 |
// contributors may be used to endorse or promote products derived
|
14 |
// from this software without specific prior written permission.
|
15 |
//
|
16 |
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17 |
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18 |
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19 |
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20 |
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21 |
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22 |
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23 |
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24 |
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25 |
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26 |
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27 |
|
28 |
#include "v8.h" |
29 |
|
30 |
#if V8_TARGET_ARCH_X64
|
31 |
|
32 |
#include "arguments.h" |
33 |
#include "ic-inl.h" |
34 |
#include "codegen.h" |
35 |
#include "stub-cache.h" |
36 |
|
37 |
namespace v8 {
|
38 |
namespace internal {
|
39 |
|
40 |
#define __ ACCESS_MASM(masm)
|
41 |
|
42 |
|
43 |
// Probes a single table (primary or secondary) of the megamorphic stub
// cache.  On a hit, jumps directly into the cached code object; on a miss,
// falls through past the locally bound |miss| label so the caller's code
// continues.  Clobbers |offset| and kScratchRegister.
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // The offset is scaled by 4, based on
                       // kHeapObjectTagSize, which is two bits
                       Register offset) {
  // We need to scale up the pointer by 2 because the offset is scaled by less
  // than the pointer size.
  ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1);
  ScaleFactor scale_factor = times_2;

  ASSERT_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
  // The offset register holds the entry offset times four (due to masking
  // and shifting optimizations).
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ lea(offset, Operand(offset, offset, times_2, 0));

  __ LoadAddress(kScratchRegister, key_offset);

  // Check that the key in the entry matches the name.
  // Multiply entry offset by 16 to get the entry address. Since the
  // offset register already holds the entry offset times four, multiply
  // by a further four.
  __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
  __ j(not_equal, &miss);

  // Get the map entry from the cache.
  // Use key_offset + kPointerSize * 2, rather than loading map_offset.
  __ movq(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
  __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
  __ j(not_equal, &miss);

  // Get the code entry from the cache.
  __ LoadAddress(kScratchRegister, value_offset);
  __ movq(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, 0));

  // Check that the flags match what we're looking for.
  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
  __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
  __ cmpl(offset, Immediate(flags));
  __ j(not_equal, &miss);

#ifdef DEBUG
  // In debug builds these flags force a miss on one of the tables so the
  // other table's path can be exercised.
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
  __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(kScratchRegister);

  __ bind(&miss);
}
108 |
|
109 |
|
110 |
// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be unique and receiver must be a heap object.
// Clobbers r0 and r1; falls through on a successful (negative) lookup.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             Handle<Name> name,
                                             Register r0,
                                             Register r1) {
  ASSERT(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  // Pre-increment the miss counter; decremented again below if we succeed.
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ testb(FieldOperand(r0, Map::kBitFieldOffset),
           Immediate(kInterceptorOrAccessCheckNeededMask));
  __ j(not_zero, miss_label);

  // Check that receiver is a JSObject.
  __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load properties array.
  Register properties = r0;
  __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss_label);

  Label done;
  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   properties,
                                                   name,
                                                   r1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}
159 |
|
160 |
|
161 |
// Probes the megamorphic stub cache for a (receiver map, name, flags)
// triple: first the primary table, then the secondary table.  On a hit the
// probe jumps into the cached stub; on a double miss execution falls
// through so the caller can enter the runtime.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;
  USE(extra);   // The register extra is not used on the X64 platform.
  USE(extra2);  // The register extra2 is not used on the X64 platform.
  USE(extra3);  // The register extra3 is not used on the X64 platform.
  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 3 * kPointerSize.
  ASSERT(sizeof(Entry) == 3 * kPointerSize);

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check scratch register is valid, extra and extra2 are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));
  ASSERT(extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  // Use only the low 32 bits of the map pointer.
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, Immediate(flags));
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps. Also in the two 'and' instructions below.
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);

  // Primary miss: Compute hash for secondary probe.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, Immediate(flags));
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
  __ subl(scratch, name);
  __ addl(scratch, Immediate(flags));
  __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}
225 |
|
226 |
|
227 |
// Loads the prototype of the global function stored at |index| in the
// current native context into |prototype|.  Follows the chain:
// context -> global object -> native context -> function -> initial map
// -> prototype.  Clobbers only |prototype|.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ movq(prototype,
          Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ movq(prototype,
          FieldOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ movq(prototype,
          FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
244 |
|
245 |
|
246 |
// Like GenerateLoadGlobalFunctionPrototype, but bakes the function looked
// up at compile time directly into the code.  First verifies that the
// runtime context is still the compile-time context (jumping to |miss|
// otherwise), then loads the prototype via the embedded initial map.
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ Move(prototype, isolate->global_object());
  __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)),
          prototype);
  __ j(not_equal, miss);
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
265 |
|
266 |
|
267 |
// Loads the length of a JSArray receiver into rax and returns to the
// caller.  Jumps to |miss_label| if the receiver is a smi or not a JS
// array.  Clobbers |scratch|.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load length directly from the JS array.
  __ movq(rax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}
282 |
|
283 |
|
284 |
// Generate code to check if an object is a string. If the object is
// a string, the map's instance type is left in the scratch register.
// Jumps to |smi| if the object is a smi, and to |non_string_object| if it
// is a heap object but not a string; falls through on success.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  // String types have the kNotStringTag bit clear in the instance type.
  STATIC_ASSERT(kNotStringTag != 0);
  __ testl(scratch, Immediate(kNotStringTag));
  __ j(not_zero, non_string_object);
}
301 |
|
302 |
|
303 |
// Loads the length of a string receiver into rax and returns.  Also
// handles JSValue wrappers around strings by unwrapping and reading the
// wrapped string's length.  Jumps to |miss| for anything else.  Clobbers
// scratch1 and scratch2.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch register.
  GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);

  // Load length directly from the string.
  __ movq(rax, FieldOperand(receiver, String::kLengthOffset));
  __ ret(0);

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  // scratch1 still holds the receiver's instance type here.
  __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
  __ j(not_equal, miss);

  // Check if the wrapped value is a string and load the length
  // directly if it is.
  __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
  __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
  __ ret(0);
}
330 |
|
331 |
|
332 |
// Loads the prototype of a function receiver into rax and returns.
// Jumps to |miss_label| if the receiver has no usable prototype
// (delegated to MacroAssembler::TryGetFunctionPrototype).
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register result,
                                                 Register scratch,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, result, miss_label);
  // The return value convention requires the result in rax.
  if (!result.is(rax)) __ movq(rax, result);
  __ ret(0);
}
341 |
|
342 |
|
343 |
// Loads a fast-mode property into |dst|.  If |inobject| is true the field
// lives directly in the object at |index| words past the header; otherwise
// it lives at |index| in the out-of-object properties array.  |dst| may
// alias |src|.  Double representations are not handled here.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ movq(dst, FieldOperand(src, offset));
}
359 |
|
360 |
|
361 |
// Pushes the four arguments expected by the interceptor IC runtime
// entries, in the order fixed by the StubCache::kInterceptorArgs* indices:
// name, interceptor info, receiver, holder.  Clobbers kScratchRegister.
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  // The interceptor info must be in old space so it can be embedded
  // directly in generated code.
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  __ Move(kScratchRegister, interceptor);
  __ push(kScratchRegister);
  __ push(receiver);
  __ push(holder);
}
379 |
|
380 |
|
381 |
// Emits a call into the runtime that queries the named interceptor for the
// property only (no fallback lookup).  Arguments are pushed via
// PushInterceptorArguments; rax/rbx are set up for the CEntry stub.
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  // rax holds the argument count, rbx the runtime entry point.
  __ Set(rax, StubCache::kInterceptorArgsLength);
  __ LoadAddress(rbx, ref);

  CEntryStub stub(1);
  __ CallStub(&stub);
}
398 |
|
399 |
|
400 |
// Number of pointers to be reserved on stack for fast API call.
static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;


// Reserves space for the extra arguments to API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument in the internal frame of the caller
  // -----------------------------------
  // Shift the return address down past the newly reserved slots.
  __ movq(scratch, StackOperandForReturnAddress(0));
  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
  __ movq(StackOperandForReturnAddress(0), scratch);
  // Fill the reserved slots with a harmless smi zero so the GC never sees
  // uninitialized stack slots.
  __ Move(scratch, Smi::FromInt(0));
  StackArgumentsAccessor args(rsp, kFastApiCallArguments,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  for (int i = 0; i < kFastApiCallArguments; i++) {
     __ movq(args.GetArgumentOperand(i), scratch);
  }
}
423 |
|
424 |
|
425 |
// Undoes the effects of ReserveSpaceForFastApiCall: moves the return
// address back up and pops the reserved slots.
static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                             : return address.
  //  -- rsp[8]                             : last fast api call extra argument.
  //  -- ...
  //  -- rsp[kFastApiCallArguments * 8]     : first fast api call extra
  //                                          argument.
  //  -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
  //                                          frame.
  // -----------------------------------
  __ movq(scratch, StackOperandForReturnAddress(0));
  __ movq(StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize),
          scratch);
  __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
}
441 |
|
442 |
|
443 |
// Generates call to API function.
//
// Builds the FunctionCallbackInfo block in the stack slots previously
// reserved by ReserveSpaceForFastApiCall, then tail-calls through
// CallApiFunctionAndReturn.  If |restore_context| is true the saved
// context is restored after the call returns.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                int argc,
                                bool restore_context) {
  // ----------- S t a t e -------------
  //  -- rsp[0]              : return address
  //  -- rsp[8] - rsp[56]    : FunctionCallbackInfo, incl.
  //                         :  object passing the type check
  //                            (set by CheckPrototypes)
  //  -- rsp[64]             : last argument
  //  -- ...
  //  -- rsp[(argc + 7) * 8] : first argument
  //  -- rsp[(argc + 8) * 8] : receiver
  // -----------------------------------
  typedef FunctionCallbackArguments FCA;
  StackArgumentsAccessor args(rsp, argc + kFastApiCallArguments);

  // Save calling context.
  int offset = argc + kFastApiCallArguments;
  __ movq(args.GetArgumentOperand(offset - FCA::kContextSaveIndex), rsi);

  // Get the function and setup the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ Move(rdi, function);
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  // Construct the FunctionCallbackInfo on the stack.
  __ movq(args.GetArgumentOperand(offset - FCA::kCalleeIndex), rdi);
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data(), masm->isolate());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    // New-space objects may move, so load the data through the call info
    // handle instead of embedding it directly.
    __ Move(rcx, api_call_info);
    __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
    __ movq(args.GetArgumentOperand(offset - FCA::kDataIndex), rbx);
  } else {
    __ Move(args.GetArgumentOperand(offset - FCA::kDataIndex), call_data);
  }
  __ movq(kScratchRegister,
          ExternalReference::isolate_address(masm->isolate()));
  __ movq(args.GetArgumentOperand(offset - FCA::kIsolateIndex),
          kScratchRegister);
  // Both return-value slots start out as undefined.
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
  __ movq(args.GetArgumentOperand(offset - FCA::kReturnValueDefaultValueIndex),
          kScratchRegister);
  __ movq(args.GetArgumentOperand(offset - FCA::kReturnValueOffset),
          kScratchRegister);

  // Prepare arguments.
  STATIC_ASSERT(kFastApiCallArguments == 7);
  __ lea(rbx, Operand(rsp, 1 * kPointerSize));

  // Function address is a foreign pointer outside V8's heap.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiStackSpace);

  __ movq(StackSpaceOperand(0), rbx);  // FunctionCallbackInfo::implicit_args_.
  __ addq(rbx, Immediate((argc + kFastApiCallArguments - 1) * kPointerSize));
  __ movq(StackSpaceOperand(1), rbx);  // FunctionCallbackInfo::values_.
  __ Set(StackSpaceOperand(2), argc);  // FunctionCallbackInfo::length_.
  // FunctionCallbackInfo::is_construct_call_.
  __ Set(StackSpaceOperand(3), 0);

  // Pick the C calling convention registers for the current platform.
#if defined(__MINGW64__) || defined(_WIN64)
  Register arguments_arg = rcx;
  Register callback_arg = rdx;
#else
  Register arguments_arg = rdi;
  Register callback_arg = rsi;
#endif

  // v8::InvocationCallback's argument.
  __ lea(arguments_arg, StackSpaceOperand(0));

  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);

  StackArgumentsAccessor args_from_rbp(rbp, kFastApiCallArguments,
                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
      kFastApiCallArguments - 1 - FCA::kContextSaveIndex);
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      kFastApiCallArguments - 1 - FCA::kReturnValueOffset);
  __ CallApiFunctionAndReturn(
      function_address,
      thunk_address,
      callback_arg,
      argc + kFastApiCallArguments + 1,
      return_value_operand,
      restore_context ? &context_restore_operand : NULL);
}
537 |
|
538 |
|
539 |
// Generate call to api function.
//
// Overload that first materializes the call frame: reserves stack space,
// writes the holder (== receiver here), the receiver, and the |argc|
// values from |values|, then delegates to the four-argument
// GenerateFastApiCall above with context restoration enabled.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                Register receiver,
                                Register scratch,
                                int argc,
                                Register* values) {
  ASSERT(optimization.is_simple_api_call());
  ASSERT(!receiver.is(scratch));

  const int fast_api_call_argc = argc + kFastApiCallArguments;
  StackArgumentsAccessor args(rsp, fast_api_call_argc);
  // argc + 1 is the argument number before FastApiCall arguments, 1 ~ receiver
  const int kHolderIndex = argc + 1 +
      kFastApiCallArguments - 1 - FunctionCallbackArguments::kHolderIndex;
  // Move the return address down past the newly reserved slots.
  __ movq(scratch, StackOperandForReturnAddress(0));
  // Assign stack space for the call arguments and receiver.
  __ subq(rsp, Immediate((fast_api_call_argc + 1) * kPointerSize));
  __ movq(StackOperandForReturnAddress(0), scratch);
  // Write holder to stack frame.
  __ movq(args.GetArgumentOperand(kHolderIndex), receiver);
  __ movq(args.GetReceiverOperand(), receiver);
  // Write the arguments to stack frame.
  for (int i = 0; i < argc; i++) {
    ASSERT(!receiver.is(values[i]));
    ASSERT(!scratch.is(values[i]));
    __ movq(args.GetArgumentOperand(i + 1), values[i]);
  }

  GenerateFastApiCall(masm, optimization, argc, true);
}
570 |
|
571 |
|
572 |
// Compiles the call-IC path for properties guarded by a named interceptor.
// The interceptor is queried first; if it yields no value the lookup falls
// back either to a cached constant function (CompileCacheable) or to the
// generic runtime call path (CompileRegular).
class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          Code::ExtraICState extra_ic_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_ic_state_(extra_ic_state) {}

  // Entry point: dispatches to the cacheable (constant-function) or
  // regular compilation strategy depending on the lookup result.
  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               Handle<Name> name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
    } else {
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
                     name, holder, miss);
    }
  }

 private:
  // Fast path for when the target of the call is a known constant function:
  // the interceptor is queried and, if it returns nothing, the cached
  // function is invoked directly (possibly via the fast API call path).
  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        Handle<Name> name,
                        const CallOptimization& optimization,
                        Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    // Determine whether the fast API call path is usable: the expected
    // receiver type must be found at a valid depth on one of the two
    // prototype chain segments checked below.
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      }
      can_do_fast_api_call =
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    }

    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->call_const_interceptor(), 1);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,
                                      name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      GenerateFastApiCall(masm, optimization, arguments_.immediate(), false);
    } else {
      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
          ? CALL_AS_FUNCTION
          : CALL_AS_METHOD;
      Handle<JSFunction> fun = optimization.constant_function();
      ParameterCount expected(fun);
      __ InvokeFunction(fun, expected, arguments_,
                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm, scratch1);
      __ jmp(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }
  }

  // Slow path: always calls into the runtime, which performs the
  // interceptor query plus the full property lookup and call.
  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<Name> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, miss_label);

    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);

    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        StubCache::kInterceptorArgsLength);

    // Restore the name_ register.
    __ pop(name_);

    // Leave the internal frame.
  }

  // Queries the interceptor inside an internal frame.  If the interceptor
  // produced a value (anything but the no-result sentinel in rax), jumps
  // to |interceptor_succeeded|; otherwise falls through.
  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(holder);  // Save the holder.
      __ push(name_);  // Save the name.

      CompileCallLoadPropertyWithInterceptor(masm,
                                             receiver,
                                             holder,
                                             name_,
                                             holder_obj);

      __ pop(name_);  // Restore the name.
      __ pop(receiver);  // Restore the holder (into the receiver register).
      // Leave the internal frame.
    }

    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(not_equal, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  Code::ExtraICState extra_ic_state_;
};
768 |
|
769 |
|
770 |
// If |label| was ever jumped to, bind it here and reload |name| into the
// name register, since a transition-store path may have clobbered it
// before missing.  A never-used label needs no restore code at all.
void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (label->is_unused()) return;
  __ bind(label);
  __ Move(this->name(), name);
}
778 |
|
779 |
|
780 |
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
// Jumps to |miss| if the cell's value is no longer the hole (i.e. the
// property has since been defined on the global object). |scratch| is
// clobbered with the cell pointer.
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
                                      Handle<Name> name,
                                      Register scratch,
                                      Label* miss) {
  Handle<PropertyCell> cell =
      GlobalObject::EnsurePropertyCell(global, name);
  // At compile time the cell must hold the hole; the runtime check below
  // guards against the property being added later.
  ASSERT(cell->value()->IsTheHole());
  __ Move(scratch, cell);
  __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, miss);
}
796 |
|
797 |
|
798 |
// Emits the check that |name| is absent from |holder|: for a global
// object this checks the (hole-valued) property cell; for a
// dictionary-mode, non-proxy holder it performs a negative dictionary
// lookup. Jumps to |miss| if the property might exist.
void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}
812 |
|
813 |
|
814 |
// Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
// store is successful.
// Emits a store that also transitions the receiver to |transition|'s map:
// first validates value_reg against the representation recorded for the
// newly added descriptor (constant / smi / heap object / double), then
// either tail-calls the runtime to extend the properties backing store,
// or installs the new map (with write barrier) and writes the value into
// the in-object slot or the properties array. Returns the value in rax.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register unused,
                                                Label* miss_label,
                                                Label* slow) {
  // The transitioned-to property is always the last descriptor added.
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    // Constant properties store no value; only the expected constant may
    // be "stored", anything else is a miss.
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ Cmp(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Box the incoming value (smi or heap number) into a fresh heap
    // number held in storage_reg; allocation failure goes to |slow|.
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch1, value_reg);
    __ Cvtlsi2sd(xmm0, scratch1);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ PopReturnAddressTo(scratch1);
    __ push(receiver_reg);
    __ Push(transition);
    __ push(value_reg);
    __ PushReturnAddressFrom(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ Move(scratch1, transition);
  __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    // Nothing to write for constants: the map change is the whole store.
    ASSERT(value_reg.is(rax));
    __ ret(0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Negative index means an in-object property.
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ movq(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ movq(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ movq(storage_reg, value_reg);
      }
      __ RecordWriteField(
          receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ movq(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ movq(FieldOperand(scratch1, offset), value_reg);
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ movq(storage_reg, value_reg);
      }
      __ RecordWriteField(
          scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  }

  // Return the value (register rax).
  ASSERT(value_reg.is(rax));
  __ ret(0);
}
957 |
|
958 |
|
959 |
// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
// Emits a store into an existing field (no map transition): validates
// value_reg against the field's representation, then writes into either
// the in-object slot or the properties array, unboxing into an existing
// heap-number storage for double fields. Returns the value in rax.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    // (Double fields keep their value boxed in a mutable heap number; we
    // overwrite that box in place rather than writing the slot.)
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ movq(scratch1, FieldOperand(receiver_reg, offset));
    } else {
      __ movq(scratch1,
              FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ movq(scratch1, FieldOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch2, value_reg);
    __ Cvtlsi2sd(xmm0, scratch2);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    __ bind(&do_store);
    __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
    // Return the value (register rax).
    ASSERT(value_reg.is(rax));
    __ ret(0);
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Negative index means an in-object property.
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ movq(FieldOperand(receiver_reg, offset), value_reg);

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ movq(name_reg, value_reg);
      __ RecordWriteField(
          receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ movq(FieldOperand(scratch1, offset), value_reg);

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ movq(name_reg, value_reg);
      __ RecordWriteField(
          scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  }

  // Return the value (register rax).
  ASSERT(value_reg.is(rax));
  __ ret(0);
}
1055 |
|
1056 |
|
1057 |
// Calls GenerateCheckPropertyCell for each global object in the prototype chain
// from object to (but not including) holder.
// Jumps to |miss| if any such cell has been filled in since compilation;
// |scratch| is clobbered.
static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
                                       Handle<Name> name,
                                       Register scratch,
                                       Label* miss) {
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
                                name,
                                scratch,
                                miss);
    }
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
  }
}
1077 |
|
1078 |
|
1079 |
void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
|
1080 |
__ jmp(code, RelocInfo::CODE_TARGET); |
1081 |
} |
1082 |
|
1083 |
|
1084 |
#undef __
|
1085 |
#define __ ACCESS_MASM((masm()))
|
1086 |
|
1087 |
|
1088 |
// Walks the prototype chain from |object| to |holder|, emitting map (or
// negative dictionary lookup) checks for every object on the way, access
// checks for global proxies, and hole checks for any global objects'
// property cells. On success the register holding the holder is
// returned; any failed check jumps to |miss|. If |save_at_depth| matches
// the current depth, the object at that depth is stored into the fast
// API call's holder argument slot.
Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       int save_at_depth,
                                       Label* miss,
                                       PrototypeCheckType check) {
  // Make sure that the type feedback oracle harvests the receiver map.
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
  __ Move(scratch1, Handle<Map>(object->map()));

  Handle<JSObject> first = object;
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.  On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  StackArgumentsAccessor args(rsp, kFastApiCallArguments,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  const int kHolderIndex = kFastApiCallArguments - 1 -
      FunctionCallbackArguments::kHolderIndex;

  if (save_at_depth == depth) {
    __ movq(args.GetArgumentOperand(kHolderIndex), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      // Dictionary-mode object: prove the name is absent instead of
      // relying on a stable map.
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      Handle<Map> current_map(current->map());
      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      if (!current.is_identical_to(first) || check == CHECK_ALL_MAPS) {
        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ Move(reg, prototype);
      }
    }

    if (save_at_depth == depth) {
      __ movq(args.GetArgumentOperand(kHolderIndex), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }
  ASSERT(current.is_identical_to(holder));

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!holder.is_identical_to(first) || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, Handle<Map>(holder->map()), miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
  if (current->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify that
  // their maps haven't changed.  We also need to check that the property
  // cell for the property is still empty.
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);

  // Return the register containing the holder.
  return reg;
}
1210 |
|
1211 |
|
1212 |
// Closes a load handler frontend: jumps to |success| on the fall-through
// path and binds |miss| (if used) to a tail call into the load-IC miss
// builtin.
void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
                                             Label* success,
                                             Label* miss) {
  if (!miss->is_unused()) {
    __ jmp(success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
  }
}
1221 |
|
1222 |
|
1223 |
// Closes a store handler frontend: jumps to |success| on the
// fall-through path; the miss path first restores the name register
// (GenerateRestoreName binds |miss|) before tail-calling the store-IC
// miss builtin.
void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
                                              Label* success,
                                              Label* miss) {
  if (!miss->is_unused()) {
    __ jmp(success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
  }
}
1232 |
|
1233 |
|
1234 |
// Handler frontend for callback (accessor) loads: performs the standard
// prototype-chain checks, and for a dictionary-mode, non-global holder
// additionally probes the property dictionary at runtime to verify the
// stored value is still exactly |callback|. Returns the holder register.
Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<JSObject> object,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Label* success,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(object, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary.
    Register dictionary = scratch4();
    __ movq(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch3 contains the
    // index into the dictionary. Check that the value is the callback.
    Register index = scratch3();
    const int kElementsStartOffset =
        NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ movq(scratch2(),
            Operand(dictionary, index, times_pointer_size,
                    kValueOffset - kHeapObjectTag));
    __ movq(scratch3(), callback, RelocInfo::EMBEDDED_OBJECT);
    __ cmpq(scratch2(), scratch3());
    __ j(not_equal, &miss);
  }

  HandlerFrontendFooter(name, success, &miss);
  return reg;
}
1283 |
|
1284 |
|
1285 |
// Handler frontend for loads of properties known not to exist: checks
// the prototype chain up to |last| and, if the chain ends in a global
// object, verifies its property cell for |name| is still the hole.
void LoadStubCompiler::NonexistentHandlerFrontend(
    Handle<JSObject> object,
    Handle<JSObject> last,
    Handle<Name> name,
    Label* success,
    Handle<GlobalObject> global) {
  Label miss;

  HandlerFrontendHeader(object, receiver(), last, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (!global.is_null()) {
    GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
  }

  HandlerFrontendFooter(name, success, &miss);
}
1303 |
|
1304 |
|
1305 |
// Loads a field property by moving the holder into the receiver register
// (if necessary) and tail-calling the appropriate (keyed or non-keyed)
// field-load stub, which does the actual slot read.
void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ movq(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}
1322 |
|
1323 |
|
1324 |
// Loads a property through a simple (fast) API getter: delegates
// entirely to the shared fast API call generator with zero extra
// arguments.
void LoadStubCompiler::GenerateLoadCallback(
    const CallOptimization& call_optimization) {
  GenerateFastApiCall(
      masm(), call_optimization, receiver(), scratch3(), 0, NULL);
}
1329 |
|
1330 |
|
1331 |
// Loads a property through an ExecutableAccessorInfo getter: builds the
// PropertyCallbackArguments array on the stack (receiver, data, return
// values, isolate, holder, name), sets up the v8::AccessorInfo in
// non-GCed stack space, and calls the C++ getter through the API thunk,
// returning its result.
void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch4().is(reg));
  __ PopReturnAddressTo(scratch4());

  // The pushes below must build the layout these indices describe.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  __ push(receiver());  // receiver
  if (heap()->InNewSpace(callback->data())) {
    // New-space data cannot be embedded directly; load it from the
    // callback object at runtime.
    ASSERT(!scratch2().is(reg));
    __ Move(scratch2(), callback);
    __ push(FieldOperand(scratch2(),
                         ExecutableAccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback->data(), isolate()));
  }
  ASSERT(!kScratchRegister.is(reg));
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
  __ push(kScratchRegister);  // return value
  __ push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(isolate()));
  __ push(reg);  // holder
  __ push(name());  // name
  // Save a pointer to where we pushed the arguments pointer.  This will be
  // passed as the const PropertyAccessorInfo& to the C++ callback.

  Address getter_address = v8::ToCData<Address>(callback->getter());

  // Argument registers differ between the Windows x64 ABI and the
  // System V AMD64 ABI.
#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif

  ASSERT(!name_arg.is(scratch4()));
  __ movq(name_arg, rsp);
  __ PushReturnAddressFrom(scratch4());

  // v8::Arguments::values_ and handler for name.
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ PrepareCallApiFunction(kArgStackSpace);
  __ lea(rax, Operand(name_arg, 1 * kPointerSize));

  // v8::PropertyAccessorInfo::args_.
  __ movq(StackSpaceOperand(0), rax);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ lea(accessor_info_arg, StackSpaceOperand(0));

  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);

  // The name handler is counted as an argument.
  StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength);
  Operand return_value_operand = args.GetArgumentOperand(
      PropertyCallbackArguments::kArgsLength - 1 -
      PropertyCallbackArguments::kReturnValueOffset);
  __ CallApiFunctionAndReturn(getter_address,
                              thunk_address,
                              getter_arg,
                              kStackSpace,
                              return_value_operand,
                              NULL);
}
1410 |
|
1411 |
|
1412 |
// Loads a constant property: materializes the compile-time-known value
// into rax and returns.
void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(rax, value);
  __ ret(0);
}
1417 |
|
1418 |
|
1419 |
// Loads a property guarded by a named interceptor. If the post-
// interceptor lookup result is a field or a compatible accessor, the
// interceptor call is inlined (inside an internal frame) and, on the
// no-result sentinel, control falls through to inline code that loads
// the property from further up the chain. Otherwise the whole load is
// delegated to the runtime.
//
// Fix vs. original: corrected the misspelled local identifier
// "must_perfrom_prototype_check" -> "must_perform_prototype_check";
// no behavioral change.
void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<JSObject> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++ code,
    // the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);

      if (must_preserve_receiver_reg) {
        __ push(receiver());
      }
      __ push(holder_reg);
      __ push(this->name());

      // Invoke an interceptor.  Note: map checks from receiver to
      // interceptor's holder has been compiled before (see a caller
      // of this method.)
      CompileCallLoadPropertyWithInterceptor(masm(),
                                             receiver(),
                                             holder_reg,
                                             this->name(),
                                             interceptor_holder);

      // Check if interceptor provided a value for property.  If it's
      // the case, return immediately.
      Label interceptor_failed;
      __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
      __ j(equal, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ ret(0);

      __ bind(&interceptor_failed);
      __ pop(this->name());
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver());
      }

      // Leave the internal frame.
    }

    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    __ PopReturnAddressTo(scratch2());
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);
    __ PushReturnAddressFrom(scratch2());

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}
1510 |
|
1511 |
|
1512 |
// For keyed call ICs the key is dynamic: verify that the name in rcx is
// the expected property name, jumping to |miss| otherwise. Non-keyed
// call ICs encode the name in the stub itself, so no check is needed.
void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ Cmp(rcx, name);
    __ j(not_equal, miss);
  }
}
1518 |
|
1519 |
|
1520 |
// Loads the call's receiver from the stack into rdx and emits the
// prototype-chain checks from |object| to the (global) |holder|, jumping
// to |miss| on smi receivers or failed checks.
void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<Name> name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  StackArgumentsAccessor args(rsp, arguments());
  __ movq(rdx, args.GetReceiverOperand());


  // Check that the maps haven't changed.
  __ JumpIfSmi(rdx, miss);
  CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
}
1534 |
|
1535 |
|
1536 |
// Loads the function from the global property cell into rdi and checks
// it is still (a closure of) |function|, jumping to |miss| otherwise.
// For new-space functions the comparison is done on the shared function
// info, since the function pointer itself cannot be embedded in code.
void CallStubCompiler::GenerateLoadFunctionFromCell(
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Label* miss) {
  // Get the value from the cell.
  __ Move(rdi, cell);
  __ movq(rdi, FieldOperand(rdi, Cell::kValueOffset));

  // Check that the cell contains the same function.
  if (heap()->InNewSpace(*function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ JumpIfSmi(rdi, miss);
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
    __ j(not_equal, miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
    __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
  } else {
    __ Cmp(rdi, function);
  }
  __ j(not_equal, miss);
}
1563 |
|
1564 |
|
1565 |
// Tail-calls the call-IC miss stub matching this compiler's argument
// count, IC kind and extra state; emitted at the bound miss label.
void CallStubCompiler::GenerateMissBranch() {
  Handle<Code> code =
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                               kind_,
                                               extra_state_);
  __ Jump(code, RelocInfo::CODE_TARGET);
}
1572 |
|
1573 |
|
1574 |
// Compiles a call IC stub for a function stored in a field property:
// checks the receiver and prototype chain, loads the field into rdi,
// verifies it is a JS function, patches the receiver for global objects,
// and invokes the function; misses fall through to the generic miss
// stub. Returns the generated FIELD-type code object.
Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
                                                Handle<JSObject> holder,
                                                PropertyIndex index,
                                                Handle<Name> name) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  StackArgumentsAccessor args(rsp, arguments());
  __ movq(rdx, args.GetReceiverOperand());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi,
                                 name, &miss);

  GenerateFastPropertyLoad(masm(), rdi, reg, index.is_inobject(holder),
                           index.translate(holder), Representation::Tagged());

  // Check that the function really is a function.
  __ JumpIfSmi(rdi, &miss);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(args.GetReceiverOperand(), rdx);
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
                    NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::FIELD, name);
}
1630 |
|
1631 |
|
1632 |
// Compiles a specialized stub for calling the Array constructor function:
// after the receiver/cell checks it tail-calls ArrayConstructorStub with a
// freshly allocated AllocationSite feedback cell.
Handle<Code> CallStubCompiler::CompileArrayCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  Label miss;

  // Check that function is still array
  const int argc = arguments().immediate();
  StackArgumentsAccessor args(rsp, argc);
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    // Monomorphic receiver: guard via its prototype chain.
    __ movq(rdx, args.GetReceiverOperand());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(rdx, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                    name, &miss);
  } else {
    // Global call: verify the property cell still holds |function|.
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Allocate an AllocationSite seeded with the initial fast elements kind
  // and wrap it in a cell to serve as type feedback for the constructor.
  Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
  Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
  __ movq(rax, Immediate(argc));       // Argument count for the stub.
  __ Move(rbx, site_feedback_cell);    // Feedback cell.
  __ Move(rdi, function);              // Constructor function.

  ArrayConstructorStub stub(isolate());
  __ TailCallStub(&stub);

  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
1676 |
|
1677 |
|
1678 |
// Compiles an inlined fast path for Array.prototype.push on a JSArray
// receiver. Handles: argc == 0 (return length), argc == 1 with fast smi /
// object / double elements, in-place growth of new-space elements by a
// small delta, and falls back to the c_ArrayPush builtin otherwise.
Handle<Code> CallStubCompiler::CompileArrayPushCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- rcx                 : name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss;
  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();
  StackArgumentsAccessor args(rsp, argc);
  __ movq(rdx, args.GetReceiverOperand());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                  name, &miss);

  if (argc == 0) {
    // Noop, return the length.
    __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
    __ ret((argc + 1) * kPointerSize);
  } else {
    Label call_builtin;

    if (argc == 1) {  // Otherwise fall through to call builtin.
      Label attempt_to_grow_elements, with_write_barrier, check_double;

      // Get the elements array of the object.
      __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));

      // Check that the elements are in fast mode and writable.
      __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
             factory()->fixed_array_map());
      __ j(not_equal, &check_double);

      // Get the array's length into rax and calculate new length.
      __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
      STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
      __ addl(rax, Immediate(argc));

      // Get the elements' length into rcx.
      __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmpl(rax, rcx);
      __ j(greater, &attempt_to_grow_elements);

      // Check if value is a smi.
      __ movq(rcx, args.GetArgumentOperand(1));
      __ JumpIfNotSmi(rcx, &with_write_barrier);

      // Save new length.
      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      // Store the value. rax already holds the new (untagged) length, so
      // the slot for the pushed element is one past the old last element.
      __ movq(FieldOperand(rdi,
                           rax,
                           times_pointer_size,
                           FixedArray::kHeaderSize - argc * kPointerSize),
              rcx);

      __ Integer32ToSmi(rax, rax);  // Return new length as smi.
      __ ret((argc + 1) * kPointerSize);

      __ bind(&check_double);

      // Check that the elements are in double mode.
      __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
             factory()->fixed_double_array_map());
      __ j(not_equal, &call_builtin);

      // Get the array's length into rax and calculate new length.
      __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
      STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
      __ addl(rax, Immediate(argc));

      // Get the elements' length into rcx.
      __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmpl(rax, rcx);
      __ j(greater, &call_builtin);

      // Store the argument as a double; bails out to the builtin if the
      // value is not a number.
      __ movq(rcx, args.GetArgumentOperand(1));
      __ StoreNumberToDoubleElements(
          rcx, rdi, rax, xmm0, &call_builtin, argc * kDoubleSize);

      // Save new length.
      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
      __ Integer32ToSmi(rax, rax);  // Return new length as smi.
      __ ret((argc + 1) * kPointerSize);

      __ bind(&with_write_barrier);

      __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));

      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
        Label fast_object, not_fast_object;
        __ CheckFastObjectElements(rbx, &not_fast_object, Label::kNear);
        __ jmp(&fast_object);
        // In case of fast smi-only, convert to fast object, otherwise bail out.
        __ bind(&not_fast_object);
        __ CheckFastSmiElements(rbx, &call_builtin);
        // Heap numbers would need a transition to double elements instead;
        // delegate that case to the builtin.
        __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
               factory()->heap_number_map());
        __ j(equal, &call_builtin);
        // rdx: receiver
        // rbx: map

        Label try_holey_map;
        __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                               FAST_ELEMENTS,
                                               rbx,
                                               rdi,
                                               &try_holey_map);

        ElementsTransitionGenerator::
            GenerateMapChangeElementsTransition(masm(),
                                                DONT_TRACK_ALLOCATION_SITE,
                                                NULL);
        // Restore edi.
        __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
        __ jmp(&fast_object);

        __ bind(&try_holey_map);
        __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
                                               FAST_HOLEY_ELEMENTS,
                                               rbx,
                                               rdi,
                                               &call_builtin);
        ElementsTransitionGenerator::
            GenerateMapChangeElementsTransition(masm(),
                                                DONT_TRACK_ALLOCATION_SITE,
                                                NULL);
        __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
        __ bind(&fast_object);
      } else {
        __ CheckFastObjectElements(rbx, &call_builtin);
      }

      // Save new length.
      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      // Store the value. rdx (receiver) is dead after this point and is
      // reused as the slot address for the write barrier.
      __ lea(rdx, FieldOperand(rdi,
                               rax, times_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ movq(Operand(rdx, 0), rcx);

      __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                     OMIT_SMI_CHECK);

      __ Integer32ToSmi(rax, rax);  // Return new length as smi.
      __ ret((argc + 1) * kPointerSize);

      __ bind(&attempt_to_grow_elements);
      if (!FLAG_inline_new) {
        __ jmp(&call_builtin);
      }

      __ movq(rbx, args.GetArgumentOperand(1));
      // Growing elements that are SMI-only requires special handling in case
      // the new element is non-Smi. For now, delegate to the builtin.
      Label no_fast_elements_check;
      __ JumpIfSmi(rbx, &no_fast_elements_check);
      __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
      __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
      __ bind(&no_fast_elements_check);

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(isolate());
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(isolate());

      const int kAllocationDelta = 4;
      // Load top.
      __ Load(rcx, new_space_allocation_top);

      // Check if it's the end of elements. The in-place grow only works when
      // the elements array ends exactly at the allocation top.
      __ lea(rdx, FieldOperand(rdi,
                               rax, times_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ cmpq(rdx, rcx);
      __ j(not_equal, &call_builtin);
      __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
      Operand limit_operand =
          masm()->ExternalOperand(new_space_allocation_limit);
      __ cmpq(rcx, limit_operand);
      __ j(above, &call_builtin);

      // We fit and could grow elements.
      __ Store(new_space_allocation_top, rcx);

      // Push the argument...
      __ movq(Operand(rdx, 0), rbx);
      // ... and fill the rest with holes.
      __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {
        __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
      }

      // We know the elements array is in new space so we don't need the
      // remembered set, but we just pushed a value onto it so we may have to
      // tell the incremental marker to rescan the object that we just grew. We
      // don't need to worry about the holes because they are in old space and
      // already marked black.
      __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);

      // Restore receiver to rdx as finish sequence assumes it's here.
      __ movq(rdx, args.GetReceiverOperand());

      // Increment element's and array's sizes.
      __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
                        Smi::FromInt(kAllocationDelta));

      // Make new length a smi before returning it.
      __ Integer32ToSmi(rax, rax);
      __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      __ ret((argc + 1) * kPointerSize);
    }

    __ bind(&call_builtin);
    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
                                                   isolate()),
                                 argc + 1,
                                 1);
  }

  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
1927 |
|
1928 |
|
1929 |
// Compiles an inlined fast path for Array.prototype.pop on a JSArray
// receiver with fast (FixedArray) elements: decrements the length, returns
// the last element and writes the hole in its place. Falls back to the
// c_ArrayPop builtin for non-fast elements or when the last slot is a hole
// (prototype chain may need to be consulted).
Handle<Code> CallStubCompiler::CompileArrayPopCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- rcx                 : name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss, return_undefined, call_builtin;
  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();
  StackArgumentsAccessor args(rsp, argc);
  __ movq(rdx, args.GetReceiverOperand());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                  name, &miss);

  // Get the elements array of the object.
  __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &call_builtin);

  // Get the array's length into rcx and calculate new length.
  __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
  __ subl(rcx, Immediate(1));
  // Empty array: pop() returns undefined.
  __ j(negative, &return_undefined);

  // Get the last element.
  __ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
  __ movq(rax, FieldOperand(rbx,
                            rcx, times_pointer_size,
                            FixedArray::kHeaderSize));
  // Check if element is already the hole.
  __ cmpq(rax, r9);
  // If so, call slow-case to also check prototypes for value.
  __ j(equal, &call_builtin);

  // Set the array's length.
  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);

  // Fill with the hole and return original value.
  __ movq(FieldOperand(rbx,
                       rcx, times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ ret((argc + 1) * kPointerSize);

  __ bind(&return_undefined);
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret((argc + 1) * kPointerSize);

  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPop, isolate()),
      argc + 1,
      1);

  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
2009 |
|
2010 |
|
2011 |
// Compiles an inlined fast path for String.prototype.charCodeAt on a
// string receiver, using StringCharCodeAtGenerator for the character load.
// Out-of-range indices return NaN (or miss for the default string stub).
Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();
  StackArgumentsAccessor args(rsp, argc);

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;
  // The default string stub treats out-of-range as a plain miss instead of
  // producing NaN inline.
  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }
  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            rax,
                                            &miss);
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(
      Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
      rax, holder, rbx, rdx, rdi, name, &miss);

  Register receiver = rbx;
  Register index = rdi;
  Register result = rax;
  __ movq(receiver, args.GetReceiverOperand());
  if (argc > 0) {
    __ movq(index, args.GetArgumentOperand(1));
  } else {
    // charCodeAt() with no argument defaults the index to undefined.
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator generator(receiver,
                                      index,
                                      result,
                                      &miss,  // When not a string.
                                      &miss,  // When not a number.
                                      index_out_of_range_label,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(rax, Heap::kNanValueRootIndex);
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in rcx.
  __ Move(rcx, name);
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
2091 |
|
2092 |
|
2093 |
// Compiles an inlined fast path for String.prototype.charAt on a string
// receiver, using StringCharAtGenerator for the character load. Out-of-range
// indices return the empty string (or miss for the default string stub).
Handle<Code> CallStubCompiler::CompileStringCharAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();
  StackArgumentsAccessor args(rsp, argc);

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;
  // The default string stub treats out-of-range as a plain miss instead of
  // producing the empty string inline.
  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }
  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            rax,
                                            &miss);
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(
      Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
      rax, holder, rbx, rdx, rdi, name, &miss);

  Register receiver = rax;
  Register index = rdi;
  Register scratch = rdx;
  Register result = rax;
  __ movq(receiver, args.GetReceiverOperand());
  if (argc > 0) {
    __ movq(index, args.GetArgumentOperand(1));
  } else {
    // charAt() with no argument defaults the index to undefined.
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator generator(receiver,
                                  index,
                                  scratch,
                                  result,
                                  &miss,  // When not a string.
                                  &miss,  // When not a number.
                                  index_out_of_range_label,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(rax, Heap::kempty_stringRootIndex);
    __ ret((argc + 1) * kPointerSize);
  }
  __ bind(&miss);
  // Restore function name in rcx.
  __ Move(rcx, name);
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
2174 |
|
2175 |
|
2176 |
// Compiles an inlined fast path for String.fromCharCode with exactly one
// smi argument; non-smi arguments tail-call the full function.
Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  const int argc = arguments().immediate();
  StackArgumentsAccessor args(rsp, argc);
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    // Monomorphic receiver: guard via its prototype chain.
    __ movq(rdx, args.GetReceiverOperand());
    __ JumpIfSmi(rdx, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                    name, &miss);
  } else {
    // Global call: verify the property cell still holds |function|.
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = rbx;
  __ movq(code, args.GetArgumentOperand(1));

  // Check the code is a smi.
  Label slow;
  __ JumpIfNotSmi(code, &slow);

  // Convert the smi code to uint16.
  __ SmiAndConstant(code, code, Smi::FromInt(0xffff));

  StringCharFromCodeGenerator generator(code, rax);
  generator.GenerateFast(masm());
  // argc == 1, so pop argument + receiver (2 slots).
  __ ret(2 * kPointerSize);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), call_kind);

  __ bind(&miss);
  // rcx: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
2247 |
|
2248 |
|
2249 |
// Compiles an inlined fast path for Math.floor with exactly one argument.
// Smis are returned unchanged; positive heap numbers are floored via a
// truncating conversion, with a bit-trick fallback ("add and subtract
// 2^52") for values that do not fit an int32. Negative/NaN inputs and
// allocation failures tail-call the full function.
Handle<Code> CallStubCompiler::CompileMathFloorCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- rcx                 : name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  const int argc = arguments().immediate();
  StackArgumentsAccessor args(rsp, argc);

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) {
    return Handle<Code>::null();
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    // Monomorphic receiver: guard via its prototype chain.
    __ movq(rdx, args.GetReceiverOperand());

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(rdx, &miss);

    CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                    name, &miss);
  } else {
    // Global call: verify the property cell still holds |function|.
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into rax.
  __ movq(rax, args.GetArgumentOperand(1));

  // Check if the argument is a smi.
  Label smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(rax, &smi);

  // Check if the argument is a heap number and load its value into xmm0.
  Label slow;
  __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
  __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));

  // Check if the argument is strictly positive. Note this also discards NaN.
  __ xorpd(xmm1, xmm1);
  __ ucomisd(xmm0, xmm1);
  __ j(below_equal, &slow);

  // Do a truncating conversion.
  __ cvttsd2si(rax, xmm0);

  // Checks for 0x80000000 which signals a failed conversion.
  Label conversion_failure;
  __ cmpl(rax, Immediate(0x80000000));
  __ j(equal, &conversion_failure);

  // Smi tag and return.
  __ Integer32ToSmi(rax, rax);
  __ bind(&smi);
  // argc == 1, so pop argument + receiver (2 slots).
  __ ret(2 * kPointerSize);

  // Check if the argument is < 2^kMantissaBits.
  Label already_round;
  __ bind(&conversion_failure);
  int64_t kTwoMantissaBits = V8_INT64_C(0x4330000000000000);  // 2^52.
  __ movq(rbx, kTwoMantissaBits, RelocInfo::NONE64);
  __ movq(xmm1, rbx);
  __ ucomisd(xmm0, xmm1);
  __ j(above_equal, &already_round);

  // Save a copy of the argument.
  __ movaps(xmm2, xmm0);

  // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits.
  __ addsd(xmm0, xmm1);
  __ subsd(xmm0, xmm1);

  // Compare the argument and the tentative result to get the right mask:
  //   if xmm2 < xmm0:
  //     xmm2 = 1...1
  //   else:
  //     xmm2 = 0...0
  __ cmpltsd(xmm2, xmm0);

  // Subtract 1 if the argument was less than the tentative result.
  int64_t kOne = V8_INT64_C(0x3ff0000000000000);  // 1.0 as a double.
  __ movq(rbx, kOne, RelocInfo::NONE64);
  __ movq(xmm1, rbx);
  __ andpd(xmm1, xmm2);
  __ subsd(xmm0, xmm1);

  // Return a new heap number.
  __ AllocateHeapNumber(rax, rbx, &slow);
  __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
  __ ret(2 * kPointerSize);

  // Return the argument (when it's an already round heap number).
  __ bind(&already_round);
  __ movq(rax, args.GetArgumentOperand(1));
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);

  __ bind(&miss);
  // rcx: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
2375 |
|
2376 |
|
2377 |
// Compiles an inlined fast path for Math.abs with exactly one argument.
// Smis use a branchless bit-trick abs; heap numbers clear the sign bit and
// allocate a fresh heap number when negative. The most-negative smi and
// non-number inputs tail-call the full function.
Handle<Code> CallStubCompiler::CompileMathAbsCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  const int argc = arguments().immediate();
  StackArgumentsAccessor args(rsp, argc);
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    // Monomorphic receiver: guard via its prototype chain.
    __ movq(rdx, args.GetReceiverOperand());
    __ JumpIfSmi(rdx, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                    name, &miss);
  } else {
    // Global call: verify the property cell still holds |function|.
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }
  // Load the (only) argument into rax.
  __ movq(rax, args.GetArgumentOperand(1));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(rax, &not_smi);

  // Branchless abs implementation, refer to below:
  // http://graphics.stanford.edu/~seander/bithacks.html#IntegerAbs
  // Set rbx to 1...1 (== -1) if the argument is negative, or to 0...0
  // otherwise.
  __ movq(rbx, rax);
  __ sar(rbx, Immediate(kBitsPerPointer - 1));

  // Do bitwise not or do nothing depending on rbx.
  __ xor_(rax, rbx);

  // Add 1 or do nothing depending on rbx.
  __ subq(rax, rbx);

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ j(negative, &slow);

  // argc == 1, so pop argument + receiver (2 slots).
  __ ret(2 * kPointerSize);

  // Check if the argument is a heap number and load its value.
  __ bind(&not_smi);
  __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
  __ MoveDouble(rbx, FieldOperand(rax, HeapNumber::kValueOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  const int sign_mask_shift =
      (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
  __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
          RelocInfo::NONE64);
  __ testq(rbx, rdi);
  __ j(not_zero, &negative_sign);
  __ ret(2 * kPointerSize);

  // If the argument is negative, clear the sign, and return a new
  // number. We still have the sign mask in rdi.
  __ bind(&negative_sign);
  __ xor_(rbx, rdi);
  __ AllocateHeapNumber(rax, rdx, &slow);
  __ MoveDouble(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), call_kind);

  __ bind(&miss);
  // rcx: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
2481 |
|
2482 |
|
2483 |
// Compiles a monomorphic call stub that invokes a simple API callback
// directly, bypassing the general call IC path. Returns a null handle to
// signal "bail out to the regular compiler" whenever the fast path does not
// apply (global receiver, cell-backed function, non-JSObject receiver, or
// unknown prototype depth).
Handle<Code> CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  ASSERT(optimization.is_simple_api_call());
  // Bail out if object is a global object as we don't want to
  // repatch it to global receiver.
  if (object->IsGlobalObject()) return Handle<Code>::null();
  if (!cell.is_null()) return Handle<Code>::null();
  if (!object->IsJSObject()) return Handle<Code>::null();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      Handle<JSObject>::cast(object), holder);
  if (depth == kInvalidProtoDepth) return Handle<Code>::null();

  // Two miss labels: before the stack reservation below, the miss handler
  // must not pop the (not yet reserved) v8::Arguments slots.
  Label miss, miss_before_stack_reserved;
  GenerateNameCheck(name, &miss_before_stack_reserved);

  const int argc = arguments().immediate();
  StackArgumentsAccessor args(rsp, argc);
  __ movq(rdx, args.GetReceiverOperand());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss_before_stack_reserved);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_const(), 1);
  __ IncrementCounter(counters->call_const_fast_api(), 1);

  // Allocate space for v8::Arguments implicit values. Must be initialized
  // before calling any runtime function.
  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));

  // Check that the maps haven't changed and find a Holder as a side effect.
  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                  name, depth, &miss);

  // Move the return address on top of the stack, above the newly reserved
  // argument slots.
  __ movq(rax,
          StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize));
  __ movq(StackOperandForReturnAddress(0), rax);

  GenerateFastApiCall(masm(), optimization, argc, false);

  // Miss after the reservation: undo the stack adjustment first.
  __ bind(&miss);
  __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));

  __ bind(&miss_before_stack_reserved);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}
2538 |
|
2539 |
|
2540 |
// Emits the receiver checks for a constant-function call stub. Depending on
// |check|, verifies the receiver's map/prototype chain directly
// (RECEIVER_MAP_CHECK) or verifies the receiver is a string/symbol/number/
// boolean primitive and then checks the corresponding wrapper prototype
// chain. Jumps to |success| when all checks pass; a failed check falls
// through to the generated miss branch.
void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
                                              Handle<JSObject> holder,
                                              Handle<Name> name,
                                              CheckType check,
                                              Label* success) {
  // ----------- S t a t e -------------
  // rcx : function name
  // rsp[0] : return address
  // rsp[8] : argument argc
  // rsp[16] : argument argc - 1
  // ...
  // rsp[argc * 8] : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;
  GenerateNameCheck(name, &miss);

  StackArgumentsAccessor args(rsp, arguments());
  __ movq(rdx, args.GetReceiverOperand());

  // Check that the receiver isn't a smi.
  // NUMBER_CHECK is exempt because a smi receiver is a valid number there.
  if (check != NUMBER_CHECK) {
    __ JumpIfSmi(rdx, &miss);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  Counters* counters = isolate()->counters();
  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(counters->call_const(), 1);

      // Check that the maps haven't changed.
      CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax,
                      rdi, name, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
        __ movq(args.GetReceiverOperand(), rdx);
      }
      break;

    case STRING_CHECK:
      // Check that the object is a string.
      __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
      __ j(above_equal, &miss);
      // Check that the maps starting from the prototype haven't changed.
      GenerateDirectLoadGlobalFunctionPrototype(
          masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
      CheckPrototypes(
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
          rax, holder, rbx, rdx, rdi, name, &miss);
      break;

    case SYMBOL_CHECK:
      // Check that the object is a symbol.
      __ CmpObjectType(rdx, SYMBOL_TYPE, rax);
      __ j(not_equal, &miss);
      // Check that the maps starting from the prototype haven't changed.
      GenerateDirectLoadGlobalFunctionPrototype(
          masm(), Context::SYMBOL_FUNCTION_INDEX, rax, &miss);
      CheckPrototypes(
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
          rax, holder, rbx, rdx, rdi, name, &miss);
      break;

    case NUMBER_CHECK: {
      Label fast;
      // Check that the object is a smi or a heap number.
      __ JumpIfSmi(rdx, &fast);
      __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
      __ j(not_equal, &miss);
      __ bind(&fast);
      // Check that the maps starting from the prototype haven't changed.
      GenerateDirectLoadGlobalFunctionPrototype(
          masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
      CheckPrototypes(
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
          rax, holder, rbx, rdx, rdi, name, &miss);
      break;
    }
    case BOOLEAN_CHECK: {
      Label fast;
      // Check that the object is a boolean.
      __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
      __ j(equal, &fast);
      __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
      __ j(not_equal, &miss);
      __ bind(&fast);
      // Check that the maps starting from the prototype haven't changed.
      GenerateDirectLoadGlobalFunctionPrototype(
          masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
      CheckPrototypes(
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
          rax, holder, rbx, rdx, rdi, name, &miss);
      break;
    }
  }

  __ jmp(success);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();
}
2649 |
|
2650 |
|
2651 |
// Emits the tail of a constant-function call stub: a direct tail call into
// |function| with the call kind (function vs. method) decoded from the
// compiler's extra IC state.
void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), call_kind);
}
2659 |
|
2660 |
|
2661 |
// Compiles a call stub for a constant (known) target function. First tries
// a custom (inlined) call generator for recognized builtins; otherwise emits
// the generic frontend checks followed by a direct invoke of |function|.
Handle<Code> CallStubCompiler::CompileCallConstant(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    CheckType check,
    Handle<JSFunction> function) {
  if (HasCustomCallGenerator(function)) {
    Handle<Code> code = CompileCustomCall(object, holder,
                                          Handle<PropertyCell>::null(),
                                          function, Handle<String>::cast(name),
                                          Code::CONSTANT);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;
  }

  Label success;

  CompileHandlerFrontend(object, holder, name, check, &success);
  __ bind(&success);
  CompileHandlerBackend(function);

  // Return the generated code.
  return GetCode(function);
}
2685 |
|
2686 |
|
2687 |
// Compiles a call stub for a property backed by an interceptor. Runs the
// interceptor machinery (CallInterceptorCompiler) to produce the callee in
// rax, verifies it is actually a JSFunction, then invokes it.
Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
                                                      Handle<JSObject> holder,
                                                      Handle<Name> name) {
  // ----------- S t a t e -------------
  // rcx : function name
  // rsp[0] : return address
  // rsp[8] : argument argc
  // rsp[16] : argument argc - 1
  // ...
  // rsp[argc * 8] : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;
  GenerateNameCheck(name, &miss);

  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  StackArgumentsAccessor args(rsp, arguments());
  __ movq(rdx, args.GetReceiverOperand());

  CallInterceptorCompiler compiler(this, arguments(), rcx, extra_state_);
  compiler.Compile(masm(), object, holder, name, &lookup, rdx, rbx, rdi, rax,
                   &miss);

  // Restore receiver.
  __ movq(rdx, args.GetReceiverOperand());

  // Check that the function really is a function.
  __ JumpIfSmi(rax, &miss);
  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(args.GetReceiverOperand(), rdx);
  }

  // Invoke the function.
  __ movq(rdi, rax);
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
                    NullCallWrapper(), call_kind);

  // Handle load cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::INTERCEPTOR, name);
}
2744 |
|
2745 |
|
2746 |
// Compiles a call stub for a function stored in a global property cell.
// Tries a custom call generator first; otherwise checks the global receiver,
// loads the function from |cell|, and tail-calls through the function's code
// entry so recompilation takes effect without patching call sites.
Handle<Code> CallStubCompiler::CompileCallGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<PropertyCell> cell,
    Handle<JSFunction> function,
    Handle<Name> name) {
  // ----------- S t a t e -------------
  // rcx : function name
  // rsp[0] : return address
  // rsp[8] : argument argc
  // rsp[16] : argument argc - 1
  // ...
  // rsp[argc * 8] : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  if (HasCustomCallGenerator(function)) {
    Handle<Code> code = CompileCustomCall(
        object, holder, cell, function, Handle<String>::cast(name),
        Code::NORMAL);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  StackArgumentsAccessor args(rsp, arguments());
  // NOTE(review): rdx is assumed to hold the receiver after this check and
  // rdi the function after the cell load — confirm against the helpers'
  // register contracts; no explicit loads are emitted here.
  GenerateGlobalReceiverCheck(object, holder, name, &miss);
  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy.
  if (object->IsGlobalObject()) {
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(args.GetReceiverOperand(), rdx);
  }

  // Set up the context (function already in rdi).
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1);
  ParameterCount expected(function->shared()->formal_parameter_count());
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION,
                NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::NORMAL, name);
}
2808 |
|
2809 |
|
2810 |
// Compiles a store stub for a property with an ExecutableAccessorInfo
// callback: after the handler frontend checks, pushes
// (receiver, callback info, name, value) and tail-calls the
// kStoreCallbackProperty runtime entry.
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Label success;
  HandlerFrontend(object, receiver(), holder, name, &success);
  __ bind(&success);

  // Rebuild the stack as the runtime call expects it, keeping the return
  // address on top.
  __ PopReturnAddressTo(scratch1());
  __ push(receiver());
  __ Push(callback);  // callback info
  __ Push(name);
  __ push(value());
  __ PushReturnAddressFrom(scratch1());

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Return the generated code.
  return GetCode(kind(), Code::CALLBACKS, name);
}
2834 |
|
2835 |
|
2836 |
// Compiles a store stub that dispatches to a simple API setter via the fast
// API call path, passing the stored value as the single callback argument.
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    const CallOptimization& call_optimization) {
  Label success;
  HandlerFrontend(object, receiver(), holder, name, &success);
  __ bind(&success);

  Register values[] = { value() };
  GenerateFastApiCall(
      masm(), call_optimization, receiver(), scratch3(), 1, values);

  // Return the generated code.
  return GetCode(kind(), Code::CALLBACKS, name);
}
2852 |
|
2853 |
|
2854 |
#undef __
|
2855 |
#define __ ACCESS_MASM(masm)
|
2856 |
|
2857 |
|
2858 |
// Emits code that stores a property by calling a JavaScript setter function.
// If |setter| is null, only records the deopt continuation PC instead of
// making a call. Always returns the original stored value (in rax), not the
// setter's return value.
void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(rax);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      __ push(rdx);
      __ push(rax);
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(rax);

    // Restore context register.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}
2895 |
|
2896 |
|
2897 |
#undef __
|
2898 |
#define __ ACCESS_MASM(masm())
|
2899 |
|
2900 |
|
2901 |
// Compiles a store stub for an interceptor-backed property: pushes
// (receiver, name, value, strict-mode flag) and tail-calls the
// kStoreInterceptorProperty runtime entry.
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  __ PopReturnAddressTo(scratch1());
  __ push(receiver());
  __ push(this->name());
  __ push(value());
  __ Push(Smi::FromInt(strict_mode()));
  __ PushReturnAddressFrom(scratch1());

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Return the generated code.
  return GetCode(kind(), Code::INTERCEPTOR, name);
}
2919 |
|
2920 |
|
2921 |
// Compiles a polymorphic keyed-store IC: compares the receiver's map against
// each entry in |receiver_maps| and tail-calls the matching handler. When a
// transition map is supplied for an entry, it is loaded into the transition
// register before jumping. Falls through to the miss builtin otherwise.
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss, Label::kNear);

  __ movq(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  for (int i = 0; i < receiver_count; ++i) {
    // Check map and tail call if there's a match
    __ Cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
    } else {
      // Elements-kind transition: hand the target map to the handler.
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
      __ movq(transition_map(),
              transitioned_maps->at(i),
              RelocInfo::EMBEDDED_OBJECT);
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);

  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}
2954 |
|
2955 |
|
2956 |
// Compiles a load stub for a property known not to exist: after the
// nonexistence checks along the prototype chain, simply returns undefined.
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
    Handle<JSObject> object,
    Handle<JSObject> last,
    Handle<Name> name,
    Handle<GlobalObject> global) {
  Label success;

  NonexistentHandlerFrontend(object, last, name, &success, global);

  __ bind(&success);
  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::NONEXISTENT, name);
}
2974 |
|
2975 |
|
2976 |
// Fixed register assignment for load stubs; order is part of the stub
// calling convention.
Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { rax, rcx, rdx, rbx, rdi, r8 };
  return registers;
}
2981 |
|
2982 |
|
2983 |
// Fixed register assignment for keyed-load stubs; order is part of the stub
// calling convention.
Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { rdx, rax, rbx, rcx, rdi, r8 };
  return registers;
}
2988 |
|
2989 |
|
2990 |
// Fixed register assignment for store stubs; order is part of the stub
// calling convention.
Register* StoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { rdx, rcx, rax, rbx, rdi, r8 };
  return registers;
}
2995 |
|
2996 |
|
2997 |
// Fixed register assignment for keyed-store stubs; identical to the plain
// store convention above.
Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { rdx, rcx, rax, rbx, rdi, r8 };
  return registers;
}
3002 |
|
3003 |
|
3004 |
// Emits a guard that the key register holds exactly |name|; jumps to |miss|
// otherwise.
void KeyedLoadStubCompiler::GenerateNameCheck(Handle<Name> name,
                                              Register name_reg,
                                              Label* miss) {
  __ Cmp(name_reg, name);
  __ j(not_equal, miss);
}
3010 |
|
3011 |
|
3012 |
// Emits a guard that the key register holds exactly |name|; jumps to |miss|
// otherwise.
void KeyedStoreStubCompiler::GenerateNameCheck(Handle<Name> name,
                                               Register name_reg,
                                               Label* miss) {
  __ Cmp(name_reg, name);
  __ j(not_equal, miss);
}
3018 |
|
3019 |
|
3020 |
#undef __
|
3021 |
#define __ ACCESS_MASM(masm)
|
3022 |
|
3023 |
|
3024 |
// Emits code that loads a property by calling a JavaScript getter function
// with |receiver| as the only stack argument. If |getter| is null, only
// records the deopt continuation PC instead of making a call.
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}
3053 |
|
3054 |
|
3055 |
#undef __
|
3056 |
#define __ ACCESS_MASM(masm())
|
3057 |
|
3058 |
|
3059 |
// Compiles a load IC for a global property: checks the receiver's map and
// prototype chain, reads the value out of the property cell, and (unless the
// property is DontDelete) misses on the hole value that marks a deleted
// property.
Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label success, miss;
  // TODO(verwaest): Directly store to rax. Currently we cannot do this, since
  // rax is used as receiver(), which we would otherwise clobber before a
  // potential miss.

  __ CheckMap(receiver(), Handle<Map>(object->map()), &miss, DO_SMI_CHECK);
  HandlerFrontendHeader(
      object, receiver(), Handle<JSObject>::cast(global), name, &miss);

  // Get the value from the cell.
  __ Move(rbx, cell);
  __ movq(rbx, FieldOperand(rbx, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    // DontDelete cells can never contain the hole; assert that in debug code.
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  HandlerFrontendFooter(name, &success, &miss);
  __ bind(&success);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  __ movq(rax, rbx);
  __ ret(0);

  // Return the generated code.
  return GetICCode(kind(), Code::NORMAL, name);
}
3098 |
|
3099 |
|
3100 |
// Compiles a polymorphic load/store IC that dispatches on the receiver's
// map: deprecated maps are skipped, each remaining map gets a compare +
// tail-jump to its handler. The resulting IC state is MONOMORPHIC when only
// one map survives, POLYMORPHIC otherwise.
Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    MapHandleList* receiver_maps,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY) {
    GenerateNameCheck(name, this->name(), &miss);
  }

  __ JumpIfSmi(receiver(), &miss);
  Register map_reg = scratch1();
  __ movq(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Map> map = receiver_maps->at(current);
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Check map and tail call if there's a match
      __ Cmp(map_reg, receiver_maps->at(current));
      __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
    }
  }
  ASSERT(number_of_handled_maps > 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}
3136 |
|
3137 |
|
3138 |
#undef __
|
3139 |
#define __ ACCESS_MASM(masm)
|
3140 |
|
3141 |
|
3142 |
// Emits the keyed-load path for dictionary-mode (slow) elements: requires a
// smi key, probes the receiver's NumberDictionary backing store, and falls
// back to the slow or force-generic builtins on failure.
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, miss_force_generic;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Non-smi keys are handled by the generic IC instead.
  __ JumpIfNotSmi(rax, &miss_force_generic);
  __ SmiToInteger32(rbx, rax);
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check whether the elements is a number dictionary.
  // rdx: receiver
  // rax: key
  // rbx: key as untagged int32
  // rcx: elements
  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
  __ ret(0);

  __ bind(&slow);
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  __ bind(&miss_force_generic);
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_MissForceGeneric);
}
3182 |
|
3183 |
|
3184 |
#undef __
|
3185 |
|
3186 |
} } // namespace v8::internal
|
3187 |
|
3188 |
#endif // V8_TARGET_ARCH_X64 |