The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.

Please select the desired protocol below to get the URL.

This URL has Read-Only access.

Statistics
| Branch: | Revision:

main_repo / deps / v8 / src / ia32 / stub-cache-ia32.cc @ f230a1cf

History | View | Annotate | Download (113 KB)

1
// Copyright 2012 the V8 project authors. All rights reserved.
2
// Redistribution and use in source and binary forms, with or without
3
// modification, are permitted provided that the following conditions are
4
// met:
5
//
6
//     * Redistributions of source code must retain the above copyright
7
//       notice, this list of conditions and the following disclaimer.
8
//     * Redistributions in binary form must reproduce the above
9
//       copyright notice, this list of conditions and the following
10
//       disclaimer in the documentation and/or other materials provided
11
//       with the distribution.
12
//     * Neither the name of Google Inc. nor the names of its
13
//       contributors may be used to endorse or promote products derived
14
//       from this software without specific prior written permission.
15
//
16
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27

    
28
#include "v8.h"
29

    
30
#if V8_TARGET_ARCH_IA32
31

    
32
#include "ic-inl.h"
33
#include "codegen.h"
34
#include "stub-cache.h"
35

    
36
namespace v8 {
37
namespace internal {
38

    
39
#define __ ACCESS_MASM(masm)
40

    
41

    
42
// Probes a single table (primary or secondary) of the megamorphic stub
// cache.  On a hit, control jumps directly into the cached code object;
// on a miss, execution falls through past the emitted code.
// |offset| holds the pointer-size-scaled index of the cache entry and is
// clobbered.  If |extra| is a valid register it is used to hold the code
// entry; otherwise the offset is spilled to the stack around the checks.
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register receiver,
                       // Number of the cache entry pointer-size scaled.
                       Register offset,
                       Register extra) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ lea(offset, Operand(offset, offset, times_2, 0));

  if (extra.is_valid()) {
    // Get the code entry from the cache.
    __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    __ j(not_equal, &miss);

    // Check the map matches.
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    __ j(not_equal, &miss);

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

#ifdef DEBUG
    // Testing hooks: force a miss in one of the tables so the other one
    // gets exercised.
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

    // Jump to the first instruction in the code stub.
    __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(extra);

    __ bind(&miss);
  } else {
    // No spare register: save the offset on the stack so it can be
    // re-materialized after it is clobbered by the checks below.
    __ push(offset);

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    __ j(not_equal, &miss);

    // Check the map matches.
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    __ j(not_equal, &miss);

    // Restore offset register.
    __ mov(offset, Operand(esp, 0));

    // Get the code entry from the cache.
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

#ifdef DEBUG
    // Testing hooks: force a miss in one of the tables so the other one
    // gets exercised.
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

    // Restore offset and re-load code entry from cache.
    __ pop(offset);
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

    // Jump to the first instruction in the code stub.
    __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(offset);

    // Pop at miss (the saved offset is still on the stack here).
    __ bind(&miss);
    __ pop(offset);
  }
}
138

    
139

    
140
// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be unique and receiver must be a heap object.
// |r0| and |r1| are clobbered as scratch registers.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             Handle<Name> name,
                                             Register r0,
                                             Register r1) {
  ASSERT(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  // Speculatively count a miss; decremented again on the success path below.
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ mov(r0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ test_b(FieldOperand(r0, Map::kBitFieldOffset),
            kInterceptorOrAccessCheckNeededMask);
  __ j(not_zero, miss_label);

  // Check that receiver is a JSObject.
  __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load properties array.
  Register properties = r0;
  __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->hash_table_map()));
  __ j(not_equal, miss_label);

  Label done;
  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   properties,
                                                   name,
                                                   r1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}
189

    
190

    
191
// Emits the megamorphic stub cache probe sequence: hashes (name, map,
// flags), probes the primary table, rehashes, probes the secondary table,
// and falls through to the caller's miss handling if both probes fail.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Label miss;

  // Assert that code is valid.  The multiplying code relies on the entry size
  // being 12.
  ASSERT(sizeof(Entry) == 12);

  // Assert the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Assert that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));

  // Assert scratch and extra registers are valid, and extra2/3 are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));
  ASSERT(extra3.is(no_reg));

  Register offset = scratch;
  scratch = no_reg;

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
  __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(offset, flags);
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps.  Also in the two 'and' instructions below.
  __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
  // ProbeTable expects the offset to be pointer scaled, which it is, because
  // the heap object tag size is 2 and the pointer size log 2 is also 2.
  ASSERT(kHeapObjectTagSize == kPointerSizeLog2);

  // Probe the primary table.
  ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);

  // Primary miss: Compute hash for secondary probe.  The hash is recomputed
  // because ProbeTable clobbered |offset|.
  __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
  __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(offset, flags);
  __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
  __ sub(offset, name);
  __ add(offset, Immediate(flags));
  __ and_(offset, (kSecondaryTableSize - 1) << kHeapObjectTagSize);

  // Probe the secondary table.
  ProbeTable(
      isolate(), masm, flags, kSecondary, name, receiver, offset, extra);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}
261

    
262

    
263
// Loads the prototype of the global function with the given context
// |index| into |prototype|, going through the function's initial map.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  __ LoadGlobalFunction(index, prototype);
  __ LoadGlobalFunctionInitialMap(prototype, prototype);
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
271

    
272

    
273
// Like GenerateLoadGlobalFunctionPrototype, but bakes the function's
// initial map into the code as a constant.  That is only valid while the
// compiling context is current, so the context is checked first and the
// code jumps to |miss| if it has changed.
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  // Check we're still in the same context.
  __ cmp(Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)),
         masm->isolate()->global_object());
  __ j(not_equal, miss);
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(masm->isolate()->native_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
290

    
291

    
292
// Loads the length of a JSArray receiver into eax and returns.  Jumps to
// |miss_label| if the receiver is a smi or not a JS array.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load length directly from the JS array.
  __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}
307

    
308

    
309
// Generate code to check if an object is a string.  If the object is
// a string, the map's instance type is left in the scratch register.
// Jumps to |smi| if the object is a smi and to |non_string_object| if it
// is a non-string heap object.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ test(scratch, Immediate(kNotStringTag));
  __ j(not_zero, non_string_object);
}
326

    
327

    
328
// Loads the length of a string receiver into eax and returns.  Also
// handles a JSValue wrapper around a string by unwrapping it and loading
// the wrapped string's length.  Jumps to |miss| otherwise.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch register.
  GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);

  // Load length from the string and convert to a smi.
  __ mov(eax, FieldOperand(receiver, String::kLengthOffset));
  __ ret(0);

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ cmp(scratch1, JS_VALUE_TYPE);
  __ j(not_equal, miss);

  // Check if the wrapped value is a string and load the length
  // directly if it is.
  __ mov(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
  __ mov(eax, FieldOperand(scratch2, String::kLengthOffset));
  __ ret(0);
}
355

    
356

    
357
// Loads the prototype of a function receiver into eax and returns.
// Jumps to |miss_label| if the receiver is not a function with a
// retrievable prototype.
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(eax, scratch1);
  __ ret(0);
}
366

    
367

    
368
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
369
                                            Register dst,
370
                                            Register src,
371
                                            bool inobject,
372
                                            int index,
373
                                            Representation representation) {
374
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
375
  int offset = index * kPointerSize;
376
  if (!inobject) {
377
    // Calculate the offset into the properties array.
378
    offset = offset + FixedArray::kHeaderSize;
379
    __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
380
    src = dst;
381
  }
382
  __ mov(dst, FieldOperand(src, offset));
383
}
384

    
385

    
386
// Pushes the argument block expected by the interceptor IC runtime
// entries: name, interceptor info, receiver, holder (in that order).
// Note that |name| is clobbered: it is reused as a scratch register for
// the interceptor-info immediate after being pushed.
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  // The push order below must match the index constants used by the
  // runtime side.
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  // The interceptor must be in old space so it can be embedded as an
  // immediate without a write barrier.
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Immediate(interceptor));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
}
405

    
406

    
407
// Pushes the interceptor arguments and calls the runtime entry that only
// consults the interceptor (no fallback lookup).
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate()),
      StubCache::kInterceptorArgsLength);
}
419

    
420

    
421
// Number of pointers to be reserved on stack for fast API call.
// Mirrors the FunctionCallbackArguments layout used by the API callback
// machinery.
static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;
423

    
424

    
425
// Reserves space for the extra arguments to API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiCall.
// The reserved slots are filled with Smi zero placeholders, and the
// return address is re-pushed on top so the stub can still return.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- esp[0] : return address
  //  -- esp[4] : last argument in the internal frame of the caller
  // -----------------------------------
  __ pop(scratch);
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(Immediate(Smi::FromInt(0)));
  }
  __ push(scratch);
}
440

    
441

    
442
// Undoes the effects of ReserveSpaceForFastApiCall: drops the reserved
// slots while preserving the return address on top of the stack.
static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- esp[0]  : return address.
  //  -- esp[4]  : last fast api call extra argument.
  //  -- ...
  //  -- esp[kFastApiCallArguments * 4] : first fast api call extra argument.
  //  -- esp[kFastApiCallArguments * 4 + 4] : last argument in the internal
  //                                          frame.
  // -----------------------------------
  __ pop(scratch);
  __ add(esp, Immediate(kPointerSize * kFastApiCallArguments));
  __ push(scratch);
}
456

    
457

    
458
// Generates call to API function.  Fills in the FunctionCallbackInfo
// block previously reserved on the stack (see ReserveSpaceForFastApiCall)
// and tail-calls the C++ callback through CallApiFunctionAndReturn.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                int argc,
                                bool restore_context) {
  // ----------- S t a t e -------------
  //  -- esp[0]              : return address
  //  -- esp[4] - esp[28]    : FunctionCallbackInfo, incl.
  //                         :  object passing the type check
  //                            (set by CheckPrototypes)
  //  -- esp[32]             : last argument
  //  -- ...
  //  -- esp[(argc + 7) * 4] : first argument
  //  -- esp[(argc + 8) * 4] : receiver
  // -----------------------------------

  typedef FunctionCallbackArguments FCA;
  // Save calling context.
  __ mov(Operand(esp, (1 + FCA::kContextSaveIndex) * kPointerSize), esi);

  // Get the function and setup the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(edi, function);
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Construct the FunctionCallbackInfo.
  __ mov(Operand(esp, (1 + FCA::kCalleeIndex) * kPointerSize), edi);
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data(), masm->isolate());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    // New-space objects may move, so load the data indirectly through the
    // call-handler-info object instead of embedding it as an immediate.
    __ mov(ecx, api_call_info);
    __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kDataOffset));
    __ mov(Operand(esp, (1 + FCA::kDataIndex) * kPointerSize), ebx);
  } else {
    __ mov(Operand(esp, (1 + FCA::kDataIndex) * kPointerSize),
           Immediate(call_data));
  }
  __ mov(Operand(esp, (1 + FCA::kIsolateIndex) * kPointerSize),
         Immediate(reinterpret_cast<int>(masm->isolate())));
  __ mov(Operand(esp, (1 + FCA::kReturnValueOffset) * kPointerSize),
         masm->isolate()->factory()->undefined_value());
  __ mov(Operand(esp, (1 + FCA::kReturnValueDefaultValueIndex) * kPointerSize),
         masm->isolate()->factory()->undefined_value());

  // Prepare arguments.
  STATIC_ASSERT(kFastApiCallArguments == 7);
  __ lea(eax, Operand(esp, 1 * kPointerSize));

  // API function gets reference to the v8::Arguments. If CPU profiler
  // is enabled wrapper function will be called and we need to pass
  // address of the callback as additional parameter, always allocate
  // space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  // Function address is a foreign pointer outside V8's heap.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), eax);
  __ add(eax, Immediate((argc + kFastApiCallArguments - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ mov(ApiParameterOperand(3), eax);
  // FunctionCallbackInfo::length_.
  __ Set(ApiParameterOperand(4), Immediate(argc));
  // FunctionCallbackInfo::is_construct_call_.
  __ Set(ApiParameterOperand(5), Immediate(0));

  // v8::InvocationCallback's argument.
  __ lea(eax, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), eax);

  // Profiler thunk that forwards to the real callback.
  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);

  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  Operand return_value_operand(ebp,
                               (2 + FCA::kReturnValueOffset) * kPointerSize);
  __ CallApiFunctionAndReturn(function_address,
                              thunk_address,
                              ApiParameterOperand(1),
                              argc + kFastApiCallArguments + 1,
                              return_value_operand,
                              restore_context ?
                                  &context_restore_operand : NULL);
}
548

    
549

    
550
// Generate call to api function.  This overload builds the stack frame
// itself: it carves out space below the return address, writes the
// holder, receiver and the |argc| argument registers into the reserved
// slots, and then delegates to the argc-based GenerateFastApiCall above.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                Register receiver,
                                Register scratch,
                                int argc,
                                Register* values) {
  ASSERT(optimization.is_simple_api_call());
  ASSERT(!receiver.is(scratch));

  const int stack_space = kFastApiCallArguments + argc + 1;
  const int kHolderIndex = FunctionCallbackArguments::kHolderIndex + 1;
  // Copy return value.
  __ mov(scratch, Operand(esp, 0));
  // Assign stack space for the call arguments.
  __ sub(esp, Immediate(stack_space * kPointerSize));
  // Move the return address on top of the stack.
  __ mov(Operand(esp, 0), scratch);
  // Write holder to stack frame.
  __ mov(Operand(esp, kHolderIndex * kPointerSize), receiver);
  // Write receiver to stack frame.
  int index = stack_space;
  __ mov(Operand(esp, index-- * kPointerSize), receiver);
  // Write the arguments to stack frame.
  for (int i = 0; i < argc; i++) {
    // The value registers must not alias receiver/scratch, which are
    // still live at this point.
    ASSERT(!receiver.is(values[i]));
    ASSERT(!scratch.is(values[i]));
    __ mov(Operand(esp, index-- * kPointerSize), values[i]);
  }

  GenerateFastApiCall(masm, optimization, argc, true);
}
582

    
583

    
584
// Compiles the code sequence for calling a property that is backed by a
// named interceptor.  Dispatches between a cacheable constant-function
// path (CompileCacheable) and a generic path that always consults the
// interceptor at runtime (CompileRegular).
class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          Code::ExtraICState extra_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_state_(extra_state) {}

  // Entry point.  |lookup| describes what a full property lookup behind
  // the interceptor found; it decides which compilation strategy is used.
  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               Handle<Name> name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
    } else {
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
                     name, holder, miss);
    }
  }

 private:
  // Fast path: the lookup behind the interceptor resolved to a constant
  // function.  Calls the interceptor first and, if it yields nothing,
  // invokes the cached constant function (possibly via the fast API call
  // machinery).
  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        Handle<Name> name,
                        const CallOptimization& optimization,
                        Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      }
      can_do_fast_api_call =
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    }

    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->call_const_interceptor(), 1);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,
                                      name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      GenerateFastApiCall(masm, optimization, arguments_.immediate(), false);
    } else {
      CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
          ? CALL_AS_FUNCTION
          : CALL_AS_METHOD;
      Handle<JSFunction> function = optimization.constant_function();
      ParameterCount expected(function);
      __ InvokeFunction(function, expected, arguments_,
                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm, scratch1);
      __ jmp(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }
  }

  // Generic path: always calls into the runtime to let the interceptor
  // (and a full lookup) handle the property call.
  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<Name> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, miss_label);

    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);

    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        StubCache::kInterceptorArgsLength);

    // Restore the name_ register.
    __ pop(name_);

    // Leave the internal frame.
  }

  // Calls the interceptor-only runtime entry; jumps to
  // |interceptor_succeeded| iff the interceptor produced a value
  // (i.e. eax is not the no-interceptor-result sentinel).
  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(holder);  // Save the holder.
      __ push(name_);  // Save the name.

      CompileCallLoadPropertyWithInterceptor(masm,
                                             receiver,
                                             holder,
                                             name_,
                                             holder_obj);

      __ pop(name_);  // Restore the name.
      __ pop(receiver);  // Restore the holder (into |receiver|).
      // Leave the internal frame.
    }

    __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel());
    __ j(not_equal, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  Code::ExtraICState extra_state_;
};
780

    
781

    
782
void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
783
                                            Label* label,
784
                                            Handle<Name> name) {
785
  if (!label->is_unused()) {
786
    __ bind(label);
787
    __ mov(this->name(), Immediate(name));
788
  }
789
}
790

    
791

    
792
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
                                      Handle<Name> name,
                                      Register scratch,
                                      Label* miss) {
  Handle<PropertyCell> cell =
      GlobalObject::EnsurePropertyCell(global, name);
  // The cell was just created (or is known unused), so it must hold the hole.
  ASSERT(cell->value()->IsTheHole());
  Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
  if (Serializer::enabled()) {
    // When serializing we cannot embed the cell's address directly in an
    // operand, so load the cell handle into |scratch| first.
    __ mov(scratch, Immediate(cell));
    __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
           Immediate(the_hole));
  } else {
    __ cmp(Operand::ForCell(cell), Immediate(the_hole));
  }
  // A non-hole value means the property now exists: go to the miss path.
  __ j(not_equal, miss);
}
813

    
814

    
815
// Emits a negative lookup for |name| on |holder|: jumps to |miss| if the
// property exists.  Global objects are checked via their property cell;
// dictionary-mode objects via a dictionary negative lookup.  Fast-properties
// non-global holders need no check here.
void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}
829

    
830

    
831
// Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
// store is successful.
// Emits a store that also transitions the receiver's map to |transition|.
// Validates the value against the target field's representation, performs
// the map change (or tail-calls the runtime if the backing store must
// grow), writes the value, and returns it in eax.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register unused,
                                                Label* miss_label,
                                                Label* slow) {
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  // Type-check the incoming value against the target representation; a
  // mismatch goes to the miss path so the IC can pick another handler.
  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ CmpObject(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (FLAG_track_fields && representation.IsSmi()) {
      __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Double fields are stored boxed: allocate a fresh HeapNumber in
    // |storage_reg| and fill it with the value converted to a double.
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    // Smi case: untag, convert to double, re-tag to leave value_reg intact.
    __ SmiUntag(value_reg);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ Cvtsi2sd(xmm0, value_reg);
    } else {
      // No SSE2: go through the x87 FPU via the stack.
      __ push(value_reg);
      __ fild_s(Operand(esp, 0));
      __ pop(value_reg);
    }
    __ SmiTag(value_reg);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    } else {
      __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
    }

    // Store the converted double into the freshly allocated box.
    __ bind(&do_store);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(storage_reg, HeapNumber::kValueOffset));
    }
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch1);  // Return address.
    __ push(receiver_reg);
    __ push(Immediate(transition));
    __ push(value_reg);
    __ push(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ mov(scratch1, Immediate(transition))._
  __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Constant properties store nothing beyond the map change.
  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(eax));
    __ ret(0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  // TODO(verwaest): Share this code as a code stub.
  if (index < 0) {
    // Negative index: the field lives inside the object itself.
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ mov(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ mov(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ mov(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ mov(FieldOperand(scratch1, offset), value_reg);
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register eax).
  ASSERT(value_reg.is(eax));
  __ ret(0);
}
1000

    
1001

    
1002
// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
// Emits an in-place store to an existing field (no map transition).
// Validates the value against the field's representation, writes it either
// into the object body or the out-of-object properties array, emits the
// write barrier when needed, and returns the value in eax.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    // (Negative index means the field is in-object; otherwise it is in the
    // out-of-object properties array.)
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ mov(scratch1, FieldOperand(receiver_reg, offset));
    } else {
      __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ mov(scratch1, FieldOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    // Smi case: untag, convert to double, re-tag to leave value_reg intact.
    __ SmiUntag(value_reg);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ Cvtsi2sd(xmm0, value_reg);
    } else {
      // No SSE2: go through the x87 FPU via the stack.
      __ push(value_reg);
      __ fild_s(Operand(esp, 0));
      __ pop(value_reg);
    }
    __ SmiTag(value_reg);
    __ jmp(&do_store);
    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    } else {
      __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
    }
    // Write the double into the existing HeapNumber box (no barrier needed:
    // only the unboxed payload changes, not a tagged pointer).
    __ bind(&do_store);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(scratch1, HeapNumber::kValueOffset));
    }
    // Return the value (register eax).
    ASSERT(value_reg.is(eax));
    __ ret(0);
    return;
  }

  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ mov(FieldOperand(receiver_reg, offset), value_reg);

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ mov(FieldOperand(scratch1, offset), value_reg);

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register eax).
  ASSERT(value_reg.is(eax));
  __ ret(0);
}
1123

    
1124

    
1125
// Calls GenerateCheckPropertyCell for each global object in the prototype chain
1126
// from object to (but not including) holder.
1127
static void GenerateCheckPropertyCells(MacroAssembler* masm,
1128
                                       Handle<JSObject> object,
1129
                                       Handle<JSObject> holder,
1130
                                       Handle<Name> name,
1131
                                       Register scratch,
1132
                                       Label* miss) {
1133
  Handle<JSObject> current = object;
1134
  while (!current.is_identical_to(holder)) {
1135
    if (current->IsGlobalObject()) {
1136
      GenerateCheckPropertyCell(masm,
1137
                                Handle<GlobalObject>::cast(current),
1138
                                name,
1139
                                scratch,
1140
                                miss);
1141
    }
1142
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
1143
  }
1144
}
1145

    
1146

    
1147
// Emits an unconditional tail jump to |code|, relocated as a code target.
void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ jmp(code, RelocInfo::CODE_TARGET);
}
1150

    
1151

    
1152
#undef __
1153
#define __ ACCESS_MASM(masm())
1154

    
1155

    
1156
// Walks the prototype chain from |object| to |holder|, emitting map checks
// for fast/global objects and dictionary negative lookups for normal
// objects; jumps to |miss| on any mismatch.  Optionally spills the object
// at |save_at_depth| into the stub's holder stack slot.  Returns the
// register that holds the holder when the emitted checks pass.
Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       int save_at_depth,
                                       Label* miss,
                                       PrototypeCheckType check) {
  const int kHolderIndex = FunctionCallbackArguments::kHolderIndex + 1;
  // Make sure that the type feedback oracle harvests the receiver map.
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
  __ mov(scratch1, Handle<Map>(object->map()));

  Handle<JSObject> first = object;
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  Handle<JSObject> current = object;
  int depth = 0;

  if (save_at_depth == depth) {
    __ mov(Operand(esp, kHolderIndex * kPointerSize), reg);
  }

  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current.is_identical_to(holder)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      // Dictionary-mode object: prove at compile time that the property is
      // absent, then emit a runtime negative lookup guard.
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      // Advance to the prototype via the map (no map check needed here).
      __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      Handle<Map> current_map(current->map());
      // The first object's map check may be skipped unless CHECK_ALL_MAPS.
      if (!current.is_identical_to(first) || check == CHECK_ALL_MAPS) {
        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
      }

      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ mov(reg, prototype);
      }
    }

    if (save_at_depth == depth) {
      __ mov(Operand(esp, kHolderIndex * kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }
  ASSERT(current.is_identical_to(holder));

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!holder.is_identical_to(first) || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, Handle<Map>(holder->map()), miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
  }

  // If we've skipped any global objects, it's not enough to verify that
  // their maps haven't changed.  We also need to check that the property
  // cell for the property is still empty.
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);

  // Return the register containing the holder.
  return reg;
}
1274

    
1275

    
1276
void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
1277
                                             Label* success,
1278
                                             Label* miss) {
1279
  if (!miss->is_unused()) {
1280
    __ jmp(success);
1281
    __ bind(miss);
1282
    TailCallBuiltin(masm(), MissBuiltin(kind()));
1283
  }
1284
}
1285

    
1286

    
1287
void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
1288
                                              Label* success,
1289
                                              Label* miss) {
1290
  if (!miss->is_unused()) {
1291
    __ jmp(success);
1292
    GenerateRestoreName(masm(), miss, name);
1293
    TailCallBuiltin(masm(), MissBuiltin(kind()));
1294
  }
1295
}
1296

    
1297

    
1298
// Frontend for callback loads: performs the prototype-chain checks and, for
// dictionary-mode non-global holders, additionally probes the holder's
// property dictionary to verify the stored value is still |callback|.
// Returns the register holding the holder on the success path.
Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<JSObject> object,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Label* success,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(object, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    Register dictionary = scratch1();
    // If the holder register doubles as the dictionary register we must
    // save and restore it around the probe.
    bool must_preserve_dictionary_reg = reg.is(dictionary);

    // Load the properties dictionary.
    if (must_preserve_dictionary_reg) {
      __ push(dictionary);
    }
    __ mov(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done, pop_and_miss;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &pop_and_miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    // Probe failure: restore the saved register before jumping to miss.
    __ bind(&pop_and_miss);
    if (must_preserve_dictionary_reg) {
      __ pop(dictionary);
    }
    __ jmp(&miss);
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch2 contains the
    // index into the dictionary. Check that the value is the callback.
    Register index = scratch2();
    const int kElementsStartOffset =
        NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ mov(scratch3(),
           Operand(dictionary, index, times_4, kValueOffset - kHeapObjectTag));
    if (must_preserve_dictionary_reg) {
      __ pop(dictionary);
    }
    __ cmp(scratch3(), callback);
    __ j(not_equal, &miss);
  }

  HandlerFrontendFooter(name, success, &miss);
  return reg;
}
1356

    
1357

    
1358
// Frontend for "property does not exist" loads: checks the chain from
// |object| to |last| and, when |global| is given, verifies its property
// cell is still empty so the property has not appeared since compilation.
void LoadStubCompiler::NonexistentHandlerFrontend(
    Handle<JSObject> object,
    Handle<JSObject> last,
    Handle<Name> name,
    Label* success,
    Handle<GlobalObject> global) {
  Label miss;

  HandlerFrontendHeader(object, receiver(), last, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (!global.is_null()) {
    GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
  }

  HandlerFrontendFooter(name, success, &miss);
}
1376

    
1377

    
1378
// Loads a field by tail-calling the shared (Keyed)LoadFieldStub; the
// receiver register is first made to hold the object containing the field.
void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    // Keyed load: same field parameters, keyed stub flavor.
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}
1395

    
1396

    
1397
// Loads a property through an optimized API getter by emitting a fast
// API call with no extra arguments.
void LoadStubCompiler::GenerateLoadCallback(
    const CallOptimization& call_optimization) {
  GenerateFastApiCall(
      masm(), call_optimization, receiver(), scratch3(), 0, NULL);
}
1402

    
1403

    
1404
// Loads a property by calling the C++ accessor getter of |callback|.
// Builds the PropertyCallbackArguments block on the stack (layout pinned
// by the STATIC_ASSERTs below), then performs the API call and returns.
void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch3().is(reg));
  __ pop(scratch3());  // Get return address to place it below.

  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  __ push(receiver());  // receiver
  // Push data from ExecutableAccessorInfo.
  if (isolate()->heap()->InNewSpace(callback->data())) {
    // New-space data cannot be embedded directly; load it via the callback
    // object instead.
    ASSERT(!scratch2().is(reg));
    __ mov(scratch2(), Immediate(callback));
    __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset));
  } else {
    __ push(Immediate(Handle<Object>(callback->data(), isolate())));
  }
  __ push(Immediate(isolate()->factory()->undefined_value()));  // ReturnValue
  // ReturnValue default value
  __ push(Immediate(isolate()->factory()->undefined_value()));
  __ push(Immediate(reinterpret_cast<int>(isolate())));
  __ push(reg);  // holder

  // Save a pointer to where we pushed the arguments. This will be
  // passed as the const PropertyAccessorInfo& to the C++ callback.
  __ push(esp);

  __ push(name());  // name
  __ mov(ebx, esp);  // esp points to reference to name (handler).

  __ push(scratch3());  // Restore return address.

  // array for v8::Arguments::values_, handler for name and pointer
  // to the values (it is considered a smi in GC).
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
  // Allocate space for optional callback address parameter in case
  // CPU profiler is active.
  const int kApiArgc = 2 + 1;

  Address getter_address = v8::ToCData<Address>(callback->getter());
  __ PrepareCallApiFunction(kApiArgc);
  __ mov(ApiParameterOperand(0), ebx);  // name.
  __ add(ebx, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(1), ebx);  // arguments pointer.

  // Emitting a stub call may try to allocate (if the code is not
  // already generated).  Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.

  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);

  __ CallApiFunctionAndReturn(getter_address,
                              thunk_address,
                              ApiParameterOperand(2),
                              kStackSpace,
                              Operand(ebp, 7 * kPointerSize),
                              NULL);
}
1468

    
1469

    
1470
// Loads a constant property: materialize |value| in eax and return.
void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(eax, value);
  __ ret(0);
}
1475

    
1476

    
1477
void LoadStubCompiler::GenerateLoadInterceptor(
1478
    Register holder_reg,
1479
    Handle<JSObject> object,
1480
    Handle<JSObject> interceptor_holder,
1481
    LookupResult* lookup,
1482
    Handle<Name> name) {
1483
  ASSERT(interceptor_holder->HasNamedInterceptor());
1484
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1485

    
1486
  // So far the most popular follow ups for interceptor loads are FIELD
1487
  // and CALLBACKS, so inline only them, other cases may be added
1488
  // later.
1489
  bool compile_followup_inline = false;
1490
  if (lookup->IsFound() && lookup->IsCacheable()) {
1491
    if (lookup->IsField()) {
1492
      compile_followup_inline = true;
1493
    } else if (lookup->type() == CALLBACKS &&
1494
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
1495
      ExecutableAccessorInfo* callback =
1496
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
1497
      compile_followup_inline = callback->getter() != NULL &&
1498
          callback->IsCompatibleReceiver(*object);
1499
    }
1500
  }
1501

    
1502
  if (compile_followup_inline) {
1503
    // Compile the interceptor call, followed by inline code to load the
1504
    // property from further up the prototype chain if the call fails.
1505
    // Check that the maps haven't changed.
1506
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
1507

    
1508
    // Preserve the receiver register explicitly whenever it is different from
1509
    // the holder and it is needed should the interceptor return without any
1510
    // result. The CALLBACKS case needs the receiver to be passed into C++ code,
1511
    // the FIELD case might cause a miss during the prototype check.
1512
    bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
1513
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
1514
        (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
1515

    
1516
    // Save necessary data before invoking an interceptor.
1517
    // Requires a frame to make GC aware of pushed pointers.
1518
    {
1519
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1520

    
1521
      if (must_preserve_receiver_reg) {
1522
        __ push(receiver());
1523
      }
1524
      __ push(holder_reg);
1525
      __ push(this->name());
1526

    
1527
      // Invoke an interceptor.  Note: map checks from receiver to
1528
      // interceptor's holder has been compiled before (see a caller
1529
      // of this method.)
1530
      CompileCallLoadPropertyWithInterceptor(masm(),
1531
                                             receiver(),
1532
                                             holder_reg,
1533
                                             this->name(),
1534
                                             interceptor_holder);
1535

    
1536
      // Check if interceptor provided a value for property.  If it's
1537
      // the case, return immediately.
1538
      Label interceptor_failed;
1539
      __ cmp(eax, factory()->no_interceptor_result_sentinel());
1540
      __ j(equal, &interceptor_failed);
1541
      frame_scope.GenerateLeaveFrame();
1542
      __ ret(0);
1543

    
1544
      // Clobber registers when generating debug-code to provoke errors.
1545
      __ bind(&interceptor_failed);
1546
      if (FLAG_debug_code) {
1547
        __ mov(receiver(), Immediate(BitCast<int32_t>(kZapValue)));
1548
        __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
1549
        __ mov(this->name(), Immediate(BitCast<int32_t>(kZapValue)));
1550
      }
1551

    
1552
      __ pop(this->name());
1553
      __ pop(holder_reg);
1554
      if (must_preserve_receiver_reg) {
1555
        __ pop(receiver());
1556
      }
1557

    
1558
      // Leave the internal frame.
1559
    }
1560

    
1561
    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
1562
  } else {  // !compile_followup_inline
1563
    // Call the runtime system to load the interceptor.
1564
    // Check that the maps haven't changed.
1565
    __ pop(scratch2());  // save old return address
1566
    PushInterceptorArguments(masm(), receiver(), holder_reg,
1567
                             this->name(), interceptor_holder);
1568
    __ push(scratch2());  // restore old return address
1569

    
1570
    ExternalReference ref =
1571
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1572
                          isolate());
1573
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
1574
  }
1575
}


// For keyed call ICs the property name is dynamic (in ecx), so emit a
// check that it matches the name this stub was compiled for; jump to
// |miss| otherwise.  Monomorphic call ICs need no check.
void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ cmp(ecx, Immediate(name));
    __ j(not_equal, miss);
  }
}


void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
1587
                                                   Handle<JSObject> holder,
1588
                                                   Handle<Name> name,
1589
                                                   Label* miss) {
1590
  ASSERT(holder->IsGlobalObject());
1591

    
1592
  // Get the number of arguments.
1593
  const int argc = arguments().immediate();
1594

    
1595
  // Get the receiver from the stack.
1596
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1597

    
1598

    
1599
  // Check that the maps haven't changed.
1600
  __ JumpIfSmi(edx, miss);
1601
  CheckPrototypes(object, edx, holder, ebx, eax, edi, name, miss);
1602
}


void CallStubCompiler::GenerateLoadFunctionFromCell(
1606
    Handle<Cell> cell,
1607
    Handle<JSFunction> function,
1608
    Label* miss) {
1609
  // Get the value from the cell.
1610
  if (Serializer::enabled()) {
1611
    __ mov(edi, Immediate(cell));
1612
    __ mov(edi, FieldOperand(edi, Cell::kValueOffset));
1613
  } else {
1614
    __ mov(edi, Operand::ForCell(cell));
1615
  }
1616

    
1617
  // Check that the cell contains the same function.
1618
  if (isolate()->heap()->InNewSpace(*function)) {
1619
    // We can't embed a pointer to a function in new space so we have
1620
    // to verify that the shared function info is unchanged. This has
1621
    // the nice side effect that multiple closures based on the same
1622
    // function can all use this call IC. Before we load through the
1623
    // function, we have to verify that it still is a function.
1624
    __ JumpIfSmi(edi, miss);
1625
    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
1626
    __ j(not_equal, miss);
1627

    
1628
    // Check the shared function info. Make sure it hasn't changed.
1629
    __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
1630
           Immediate(Handle<SharedFunctionInfo>(function->shared())));
1631
  } else {
1632
    __ cmp(edi, Immediate(function));
1633
  }
1634
  __ j(not_equal, miss);
1635
}


void CallStubCompiler::GenerateMissBranch() {
1639
  Handle<Code> code =
1640
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1641
                                               kind_,
1642
                                               extra_state_);
1643
  __ jmp(code, RelocInfo::CODE_TARGET);
1644
}


// Compiles a call IC stub for calling a function stored in a fast
// property field of |object| (looked up through |holder|).  Returns the
// generated code object.
Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
                                                Handle<JSObject> holder,
                                                PropertyIndex index,
                                                Handle<Name> name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(edx, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, edx, holder, ebx, eax, edi,
                                 name, &miss);

  GenerateFastPropertyLoad(
      masm(), edi, reg, index.is_inobject(holder),
      index.translate(holder), Representation::Tagged());

  // Check that the function really is a function.
  __ JumpIfSmi(edi, &miss);
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(edi, arguments(), JUMP_FUNCTION,
                    NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::FIELD, name);
}


// Compiles a call IC stub for a direct call to the Array function.
// Verifies the receiver (or global cell) still refers to |function|,
// then tail-calls the array constructor stub with a fresh allocation
// site for elements-kind feedback.
Handle<Code> CallStubCompiler::CompileArrayCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  Label miss;

  // Check that the function is still the Array function.
  const int argc = arguments().immediate();
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    // Get the receiver from the stack.
    __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(edx, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
                    name, &miss);
  } else {
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
  site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
  Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
  __ mov(eax, Immediate(argc));
  __ mov(ebx, site_feedback_cell);
  __ mov(edi, function);

  ArrayConstructorStub stub(isolate());
  __ TailCallStub(&stub);

  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


// Compiles a specialized stub for Array.prototype.push on a JSArray
// receiver.  Handles the fast cases inline (argc == 0 returns the
// length; argc == 1 stores into fast smi/object/double elements,
// transitioning or growing in place when possible) and falls back to
// the C++ builtin otherwise.  Returns a null handle when the receiver
// is not an array (caller compiles a regular call instead).
Handle<Code> CallStubCompiler::CompileArrayPushCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) {
    return Handle<Code>::null();
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(edx, &miss);

  CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
                  name, &miss);

  if (argc == 0) {
    // Noop, return the length.
    __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
    __ ret((argc + 1) * kPointerSize);
  } else {
    Label call_builtin;

    if (argc == 1) {  // Otherwise fall through to call builtin.
      Label attempt_to_grow_elements, with_write_barrier, check_double;

      // Get the elements array of the object.
      __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));

      // Check that the elements are in fast mode and writable.
      __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
             Immediate(factory()->fixed_array_map()));
      __ j(not_equal, &check_double);

      // Get the array's length into eax and calculate new length.
      __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
      STATIC_ASSERT(kSmiTagSize == 1);
      STATIC_ASSERT(kSmiTag == 0);
      __ add(eax, Immediate(Smi::FromInt(argc)));

      // Get the elements' length into ecx.
      __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmp(eax, ecx);
      __ j(greater, &attempt_to_grow_elements);

      // Check if value is a smi.
      __ mov(ecx, Operand(esp, argc * kPointerSize));
      __ JumpIfNotSmi(ecx, &with_write_barrier);

      // Save new length.
      __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);

      // Store the value.
      __ mov(FieldOperand(edi,
                          eax,
                          times_half_pointer_size,
                          FixedArray::kHeaderSize - argc * kPointerSize),
             ecx);

      __ ret((argc + 1) * kPointerSize);

      __ bind(&check_double);

      // Check that the elements are in double mode.
      __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
             Immediate(factory()->fixed_double_array_map()));
      __ j(not_equal, &call_builtin);

      // Get the array's length into eax and calculate new length.
      __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
      STATIC_ASSERT(kSmiTagSize == 1);
      STATIC_ASSERT(kSmiTag == 0);
      __ add(eax, Immediate(Smi::FromInt(argc)));

      // Get the elements' length into ecx.
      __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmp(eax, ecx);
      __ j(greater, &call_builtin);

      __ mov(ecx, Operand(esp, argc * kPointerSize));
      __ StoreNumberToDoubleElements(
          ecx, edi, eax, ecx, xmm0, &call_builtin, true, argc * kDoubleSize);

      // Save new length.
      __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
      __ ret((argc + 1) * kPointerSize);

      __ bind(&with_write_barrier);

      __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));

      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
        Label fast_object, not_fast_object;
        __ CheckFastObjectElements(ebx, &not_fast_object, Label::kNear);
        __ jmp(&fast_object);
        // In case of fast smi-only, convert to fast object, otherwise bail out.
        __ bind(&not_fast_object);
        __ CheckFastSmiElements(ebx, &call_builtin);
        __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
               Immediate(factory()->heap_number_map()));
        __ j(equal, &call_builtin);
        // edi: elements array
        // edx: receiver
        // ebx: map
        Label try_holey_map;
        __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                               FAST_ELEMENTS,
                                               ebx,
                                               edi,
                                               &try_holey_map);

        ElementsTransitionGenerator::
            GenerateMapChangeElementsTransition(masm(),
                                                DONT_TRACK_ALLOCATION_SITE,
                                                NULL);
        // Restore edi.
        __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
        __ jmp(&fast_object);

        __ bind(&try_holey_map);
        __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
                                               FAST_HOLEY_ELEMENTS,
                                               ebx,
                                               edi,
                                               &call_builtin);
        ElementsTransitionGenerator::
            GenerateMapChangeElementsTransition(masm(),
                                                DONT_TRACK_ALLOCATION_SITE,
                                                NULL);
        // Restore edi.
        __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
        __ bind(&fast_object);
      } else {
        __ CheckFastObjectElements(ebx, &call_builtin);
      }

      // Save new length.
      __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);

      // Store the value.
      __ lea(edx, FieldOperand(edi,
                               eax, times_half_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ mov(Operand(edx, 0), ecx);

      __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                     OMIT_SMI_CHECK);

      __ ret((argc + 1) * kPointerSize);

      __ bind(&attempt_to_grow_elements);
      if (!FLAG_inline_new) {
        __ jmp(&call_builtin);
      }

      __ mov(ebx, Operand(esp, argc * kPointerSize));
      // Growing elements that are SMI-only requires special handling in case
      // the new element is non-Smi. For now, delegate to the builtin.
      Label no_fast_elements_check;
      __ JumpIfSmi(ebx, &no_fast_elements_check);
      __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
      __ CheckFastObjectElements(ecx, &call_builtin, Label::kFar);
      __ bind(&no_fast_elements_check);

      // We could be lucky and the elements array could be at the top of
      // new-space.  In this case we can just grow it in place by moving the
      // allocation pointer up.

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(isolate());
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(isolate());

      const int kAllocationDelta = 4;
      // Load top.
      __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));

      // Check if it's the end of elements.
      __ lea(edx, FieldOperand(edi,
                               eax, times_half_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ cmp(edx, ecx);
      __ j(not_equal, &call_builtin);
      __ add(ecx, Immediate(kAllocationDelta * kPointerSize));
      __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
      __ j(above, &call_builtin);

      // We fit and could grow elements.
      __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);

      // Push the argument...
      __ mov(Operand(edx, 0), ebx);
      // ... and fill the rest with holes.
      for (int i = 1; i < kAllocationDelta; i++) {
        __ mov(Operand(edx, i * kPointerSize),
               Immediate(factory()->the_hole_value()));
      }

      // We know the elements array is in new space so we don't need the
      // remembered set, but we just pushed a value onto it so we may have to
      // tell the incremental marker to rescan the object that we just grew.  We
      // don't need to worry about the holes because they are in old space and
      // already marked black.
      __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);

      // Restore receiver to edx as finish sequence assumes it's here.
      __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

      // Increment element's and array's sizes.
      __ add(FieldOperand(edi, FixedArray::kLengthOffset),
             Immediate(Smi::FromInt(kAllocationDelta)));

      // NOTE: This only happens in new-space, where we don't
      // care about the black-byte-count on pages. Otherwise we should
      // update that too if the object is black.

      __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);

      __ ret((argc + 1) * kPointerSize);
    }

    __ bind(&call_builtin);
    __ TailCallExternalReference(
        ExternalReference(Builtins::c_ArrayPush, isolate()),
        argc + 1,
        1);
  }

  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


// Compiles a specialized stub for Array.prototype.pop on a JSArray
// receiver with fast (FixedArray) elements: loads the last element,
// shrinks the length, and fills the vacated slot with the hole.
// Delegates to the C++ builtin for holes/non-fast elements, and returns
// a null handle when the receiver is not an array.
Handle<Code> CallStubCompiler::CompileArrayPopCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) {
    return Handle<Code>::null();
  }

  Label miss, return_undefined, call_builtin;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(edx, &miss);
  CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
                  name, &miss);

  // Get the elements array of the object.
  __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
         Immediate(factory()->fixed_array_map()));
  __ j(not_equal, &call_builtin);

  // Get the array's length into ecx and calculate new length.
  __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
  __ sub(ecx, Immediate(Smi::FromInt(1)));
  __ j(negative, &return_undefined);

  // Get the last element.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(eax, FieldOperand(ebx,
                           ecx, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  __ cmp(eax, Immediate(factory()->the_hole_value()));
  __ j(equal, &call_builtin);

  // Set the array's length.
  __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx);

  // Fill with the hole.
  __ mov(FieldOperand(ebx,
                      ecx, times_half_pointer_size,
                      FixedArray::kHeaderSize),
         Immediate(factory()->the_hole_value()));
  __ ret((argc + 1) * kPointerSize);

  __ bind(&return_undefined);
  __ mov(eax, Immediate(factory()->undefined_value()));
  __ ret((argc + 1) * kPointerSize);

  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPop, isolate()),
      argc + 1,
      1);

  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


// Compiles a specialized stub for String.prototype.charCodeAt on a
// string receiver.  Inlines the fast path via StringCharCodeAtGenerator;
// out-of-range indices return NaN (or miss for the default string stub).
// Returns a null handle when the receiver is not a string.
Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- ecx                 : function name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) {
    return Handle<Code>::null();
  }

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            eax,
                                            &miss);
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(
      Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
      eax, holder, ebx, edx, edi, name, &miss);

  Register receiver = ebx;
  Register index = edi;
  Register result = eax;
  __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
  if (argc > 0) {
    __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
  } else {
    __ Set(index, Immediate(factory()->undefined_value()));
  }

  StringCharCodeAtGenerator generator(receiver,
                                      index,
                                      result,
                                      &miss,  // When not a string.
                                      &miss,  // When not a number.
                                      index_out_of_range_label,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ Set(eax, Immediate(factory()->nan_value()));
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in ecx.
  __ Set(ecx, Immediate(name));
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


// Compiles a specialized stub for String.prototype.charAt on a string
// receiver.  Inlines the fast path via StringCharAtGenerator; out-of-
// range indices return the empty string (or miss for the default string
// stub).  Returns a null handle when the receiver is not a string.
Handle<Code> CallStubCompiler::CompileStringCharAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- ecx                 : function name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) {
    return Handle<Code>::null();
  }

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            eax,
                                            &miss);
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(
      Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
      eax, holder, ebx, edx, edi, name, &miss);

  Register receiver = eax;
  Register index = edi;
  Register scratch = edx;
  Register result = eax;
  __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
  if (argc > 0) {
    __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
  } else {
    __ Set(index, Immediate(factory()->undefined_value()));
  }

  StringCharAtGenerator generator(receiver,
                                  index,
                                  scratch,
                                  result,
                                  &miss,  // When not a string.
                                  &miss,  // When not a number.
                                  index_out_of_range_label,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ Set(eax, Immediate(factory()->empty_string()));
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in ecx.
  __ Set(ecx, Immediate(name));
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}


// Compiles a specialized call stub for String.fromCharCode(code) with
// exactly one argument. Falls back (returns a null handle) when the
// receiver is not a JSObject or the arity is unexpected; at runtime,
// non-smi char codes tail-call the real JS function instead.
Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- ecx                 : function name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) {
    return Handle<Code>::null();
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    // Non-global case: load the receiver from the stack (argc == 1, so it
    // sits at esp[2 * kPointerSize]) and verify its prototype chain.
    __ mov(edx, Operand(esp, 2 * kPointerSize));
    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(edx, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
                    name, &miss);
  } else {
    // Global case: the function lives in a property cell; re-check it.
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = ebx;
  __ mov(code, Operand(esp, 1 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(code, &slow);

  // Convert the smi code to uint16.
  __ and_(code, Immediate(Smi::FromInt(0xffff)));

  // Fast path: map the char code to a (possibly cached) one-char string
  // in eax and return, popping receiver + argument.
  StringCharFromCodeGenerator generator(code, eax);
  generator.GenerateFast(masm());
  __ ret(2 * kPointerSize);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), call_kind);

  __ bind(&miss);
  // ecx: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
2340

    
2341

    
2342
// Compiles a specialized call stub for Math.floor(x) with one argument,
// using SSE2. Smis are returned unchanged; positive heap numbers are
// floored either to a smi or to a freshly allocated heap number. All
// other inputs (negative, NaN, non-number) tail-call the real function.
// Returns a null handle when SSE2 is unavailable or the call shape
// doesn't match.
Handle<Code> CallStubCompiler::CompileMathFloorCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  if (!CpuFeatures::IsSupported(SSE2)) {
    return Handle<Code>::null();
  }

  CpuFeatureScope use_sse2(masm(), SSE2);

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) {
    return Handle<Code>::null();
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    // Non-global case: check the receiver's prototype chain.
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(edx, &miss);

    CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
                    name, &miss);
  } else {
    // Global case: the function lives in a property cell; re-check it.
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into eax.
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // Check if the argument is a smi.
  Label smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &smi);

  // Check if the argument is a heap number and load its value into xmm0.
  Label slow;
  __ CheckMap(eax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
  __ movsd(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));

  // Check if the argument is strictly positive. Note this also
  // discards NaN.
  __ xorpd(xmm1, xmm1);
  __ ucomisd(xmm0, xmm1);
  __ j(below_equal, &slow);

  // Do a truncating conversion.
  __ cvttsd2si(eax, Operand(xmm0));

  // Check if the result fits into a smi. Note this also checks for
  // 0x80000000 which signals a failed conversion.
  Label wont_fit_into_smi;
  __ test(eax, Immediate(0xc0000000));
  __ j(not_zero, &wont_fit_into_smi);

  // Smi tag and return.
  __ SmiTag(eax);
  __ bind(&smi);
  __ ret(2 * kPointerSize);

  // Check if the argument is < 2^kMantissaBits.
  Label already_round;
  __ bind(&wont_fit_into_smi);
  __ LoadPowerOf2(xmm1, ebx, HeapNumber::kMantissaBits);
  __ ucomisd(xmm0, xmm1);
  __ j(above_equal, &already_round);

  // Save a copy of the argument.
  __ movaps(xmm2, xmm0);

  // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits.
  // Adding 2^52 forces the fraction bits out of the mantissa, so the
  // round trip rounds to an integer (in the current rounding mode).
  __ addsd(xmm0, xmm1);
  __ subsd(xmm0, xmm1);

  // Compare the argument and the tentative result to get the right mask:
  //   if xmm2 < xmm0:
  //     xmm2 = 1...1
  //   else:
  //     xmm2 = 0...0
  __ cmpltsd(xmm2, xmm0);

  // Subtract 1 if the argument was less than the tentative result.
  __ LoadPowerOf2(xmm1, ebx, 0);
  __ andpd(xmm1, xmm2);
  __ subsd(xmm0, xmm1);

  // Return a new heap number.
  __ AllocateHeapNumber(eax, ebx, edx, &slow);
  __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
  __ ret(2 * kPointerSize);

  // Return the argument (when it's an already round heap number).
  __ bind(&already_round);
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);

  __ bind(&miss);
  // ecx: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
2472

    
2473

    
2474
// Compiles a specialized call stub for Math.abs(x) with one argument.
// Smis use a branchless negate; heap numbers get their sign bit cleared
// into a new heap number. The most-negative smi and non-number inputs
// tail-call the real JS function. Returns a null handle when the call
// shape doesn't match.
Handle<Code> CallStubCompiler::CompileMathAbsCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name,
    Code::StubType type) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) {
    return Handle<Code>::null();
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    // Non-global case: check the receiver's prototype chain.
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(edx, &miss);

    CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
                    name, &miss);
  } else {
    // Global case: the function lives in a property cell; re-check it.
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into eax.
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(eax, &not_smi);

  // Branchless abs implementation, refer to below:
  // http://graphics.stanford.edu/~seander/bithacks.html#IntegerAbs
  // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
  // otherwise.
  __ mov(ebx, eax);
  __ sar(ebx, kBitsPerInt - 1);

  // Do bitwise not or do nothing depending on ebx.
  __ xor_(eax, ebx);

  // Add 1 or do nothing depending on ebx.
  __ sub(eax, ebx);

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ j(negative, &slow);

  // Smi case done.
  __ ret(2 * kPointerSize);

  // Check if the argument is a heap number and load its exponent and
  // sign into ebx.
  __ bind(&not_smi);
  __ CheckMap(eax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
  __ mov(ebx, FieldOperand(eax, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ test(ebx, Immediate(HeapNumber::kSignMask));
  __ j(not_zero, &negative_sign);
  __ ret(2 * kPointerSize);

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ and_(ebx, ~HeapNumber::kSignMask);
  __ mov(ecx, FieldOperand(eax, HeapNumber::kMantissaOffset));
  __ AllocateHeapNumber(eax, edi, edx, &slow);
  __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ebx);
  __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  ParameterCount expected(function);
  __ InvokeFunction(function, expected, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);

  __ bind(&miss);
  // ecx: function name.
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(type, name);
}
2581

    
2582

    
2583
// Compiles a stub that invokes a simple API callback directly, skipping
// the generic call machinery. Bails out (null handle) for global
// receivers, cell-based (global) calls, non-JSObject receivers, or when
// the expected-receiver prototype depth can't be determined. The stub
// reserves stack slots for the v8::Arguments implicit values before the
// map checks, hence the two miss labels.
Handle<Code> CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Cell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  ASSERT(optimization.is_simple_api_call());
  // Bail out if object is a global object as we don't want to
  // repatch it to global receiver.
  if (object->IsGlobalObject()) return Handle<Code>::null();
  if (!cell.is_null()) return Handle<Code>::null();
  if (!object->IsJSObject()) return Handle<Code>::null();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      Handle<JSObject>::cast(object), holder);
  if (depth == kInvalidProtoDepth) return Handle<Code>::null();

  Label miss, miss_before_stack_reserved;

  GenerateNameCheck(name, &miss_before_stack_reserved);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(edx, &miss_before_stack_reserved);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_const(), 1);
  __ IncrementCounter(counters->call_const_fast_api(), 1);

  // Allocate space for v8::Arguments implicit values. Must be initialized
  // before calling any runtime function.
  __ sub(esp, Immediate(kFastApiCallArguments * kPointerSize));

  // Check that the maps haven't changed and find a Holder as a side effect.
  CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax, edi,
                  name, depth, &miss);

  // Move the return address on top of the stack.
  __ mov(eax, Operand(esp, kFastApiCallArguments * kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), eax);

  // esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains
  // duplicate of return address and will be overwritten.
  GenerateFastApiCall(masm(), optimization, argc, false);

  // Miss after the stack reservation: undo the reservation first.
  __ bind(&miss);
  __ add(esp, Immediate(kFastApiCallArguments * kPointerSize));

  __ bind(&miss_before_stack_reserved);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}
2640

    
2641

    
2642
// Emits the receiver checks for a constant-function call stub. Depending
// on |check|, verifies the receiver's map/prototype chain directly, or
// verifies it is a string/symbol/number/boolean and then checks the
// prototype chain starting from the corresponding wrapper function's
// prototype. Jumps to |success| when all checks pass; emits the miss
// branch otherwise.
void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
                                              Handle<JSObject> holder,
                                              Handle<Name> name,
                                              CheckType check,
                                              Label* success) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  // (NUMBER_CHECK handles smis itself below.)
  if (check != NUMBER_CHECK) {
    __ JumpIfSmi(edx, &miss);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(isolate()->counters()->call_const(), 1);

      // Check that the maps haven't changed.
      CheckPrototypes(Handle<JSObject>::cast(object), edx, holder, ebx, eax,
                      edi, name, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
        __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
      }
      break;

    case STRING_CHECK:
      // Check that the object is a string.
      __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, eax);
      __ j(above_equal, &miss);
      // Check that the maps starting from the prototype haven't changed.
      GenerateDirectLoadGlobalFunctionPrototype(
          masm(), Context::STRING_FUNCTION_INDEX, eax, &miss);
      CheckPrototypes(
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
          eax, holder, ebx, edx, edi, name, &miss);
      break;

    case SYMBOL_CHECK:
      // Check that the object is a symbol.
      __ CmpObjectType(edx, SYMBOL_TYPE, eax);
      __ j(not_equal, &miss);
      // Check that the maps starting from the prototype haven't changed.
      GenerateDirectLoadGlobalFunctionPrototype(
          masm(), Context::SYMBOL_FUNCTION_INDEX, eax, &miss);
      CheckPrototypes(
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
          eax, holder, ebx, edx, edi, name, &miss);
      break;

    case NUMBER_CHECK: {
      Label fast;
      // Check that the object is a smi or a heap number.
      __ JumpIfSmi(edx, &fast);
      __ CmpObjectType(edx, HEAP_NUMBER_TYPE, eax);
      __ j(not_equal, &miss);
      __ bind(&fast);
      // Check that the maps starting from the prototype haven't changed.
      GenerateDirectLoadGlobalFunctionPrototype(
          masm(), Context::NUMBER_FUNCTION_INDEX, eax, &miss);
      CheckPrototypes(
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
          eax, holder, ebx, edx, edi, name, &miss);
      break;
    }
    case BOOLEAN_CHECK: {
      Label fast;
      // Check that the object is a boolean.
      __ cmp(edx, factory()->true_value());
      __ j(equal, &fast);
      __ cmp(edx, factory()->false_value());
      __ j(not_equal, &miss);
      __ bind(&fast);
      // Check that the maps starting from the prototype haven't changed.
      GenerateDirectLoadGlobalFunctionPrototype(
          masm(), Context::BOOLEAN_FUNCTION_INDEX, eax, &miss);
      CheckPrototypes(
          Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
          eax, holder, ebx, edx, edi, name, &miss);
      break;
    }
  }

  __ jmp(success);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();
}
2748

    
2749

    
2750
// Emits the tail call into the resolved constant function. The call kind
// depends on whether the IC site was a contextual (function-style) call.
void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
  const bool is_contextual = CallICBase::Contextual::decode(extra_state_);
  CallKind invoke_kind = is_contextual ? CALL_AS_FUNCTION : CALL_AS_METHOD;
  ParameterCount expected_count(function);
  __ InvokeFunction(function, expected_count, arguments(),
                    JUMP_FUNCTION, NullCallWrapper(), invoke_kind);
}
2758

    
2759

    
2760
// Compiles a call stub for a constant function: prefer a specialized
// custom-call stub when one exists, otherwise emit the generic receiver
// checks (frontend) followed by the direct invoke (backend).
Handle<Code> CallStubCompiler::CompileCallConstant(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    CheckType check,
    Handle<JSFunction> function) {

  if (HasCustomCallGenerator(function)) {
    Handle<Code> custom_stub = CompileCustomCall(
        object, holder, Handle<Cell>::null(), function,
        Handle<String>::cast(name), Code::CONSTANT);
    // A null handle means bail out to the regular compiler code below.
    if (!custom_stub.is_null()) return custom_stub;
  }

  Label frontend_done;
  CompileHandlerFrontend(object, holder, name, check, &frontend_done);
  __ bind(&frontend_done);
  CompileHandlerBackend(function);

  // Return the generated code.
  return GetCode(function);
}
2785

    
2786

    
2787
// Compiles a call stub for a property reached through an interceptor.
// The interceptor compiler loads the callee into eax; the stub then
// verifies it is a function, patches a global receiver if needed, and
// jumps into the function.
Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
                                                      Handle<JSObject> holder,
                                                      Handle<Name> name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), ecx, extra_state_);
  compiler.Compile(masm(), object, holder, name, &lookup, edx, ebx, edi, eax,
                   &miss);

  // Restore receiver.
  // (The interceptor code may have clobbered edx.)
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the function really is a function.
  __ JumpIfSmi(eax, &miss);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Invoke the function.
  __ mov(edi, eax);
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(edi, arguments(), JUMP_FUNCTION,
                    NullCallWrapper(), call_kind);

  // Handle load cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::INTERCEPTOR, name);
}
2844

    
2845

    
2846
// Compiles a call stub for a function stored in a global property cell.
// Tries a custom-call stub first; otherwise checks the global receiver,
// loads and validates the function from the cell (into edi — see
// GenerateLoadFunctionFromCell), then tail-calls through the function's
// code entry so recompilation takes effect without patching call sites.
Handle<Code> CallStubCompiler::CompileCallGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<PropertyCell> cell,
    Handle<JSFunction> function,
    Handle<Name> name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  if (HasCustomCallGenerator(function)) {
    Handle<Code> code = CompileCustomCall(
        object, holder, cell, function, Handle<String>::cast(name),
        Code::NORMAL);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();
  GenerateGlobalReceiverCheck(object, holder, name, &miss);
  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Set up the context (function already in edi).
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1);
  ParameterCount expected(function->shared()->formal_parameter_count());
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
                expected, arguments(), JUMP_FUNCTION,
                NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::NORMAL, name);
}
2907

    
2908

    
2909
// Compiles a store stub for a property backed by an API accessor
// (ExecutableAccessorInfo). After the map/prototype checks pass, the
// stub pushes (receiver, callback, name, value) and tail-calls the
// kStoreCallbackProperty runtime entry, which invokes the setter.
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Label success;
  HandlerFrontend(object, receiver(), holder, name, &success);
  __ bind(&success);

  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ Push(callback);
  __ Push(name);
  __ push(value());
  __ push(scratch1());  // restore return address

  // Do tail-call to the runtime system.
  // 4 arguments pushed above; 1 return value expected.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Return the generated code.
  return GetCode(kind(), Code::CALLBACKS, name);
}
2933

    
2934

    
2935
// Compiles a store stub for a property whose setter is a simple API
// call: after the handler frontend checks, invoke the API function
// directly with the value as the single argument.
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    const CallOptimization& call_optimization) {
  Label success;
  HandlerFrontend(object, receiver(), holder, name, &success);
  __ bind(&success);

  Register values[] = { value() };
  GenerateFastApiCall(
      masm(), call_optimization, receiver(), scratch1(), 1, values);

  // Return the generated code.
  return GetCode(kind(), Code::CALLBACKS, name);
}
2951

    
2952

    
2953
#undef __
2954
#define __ ACCESS_MASM(masm)
2955

    
2956

    
2957
// Emits a shared code snippet that stores a property by calling a
// JavaScript setter. A null |setter| produces the deopt-continuation
// variant used when this snippet is reached after deoptimization.
// The stub returns the stored value, not the setter's return value.
void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(eax);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      __ push(edx);
      __ push(eax);
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(eax);

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}
2994

    
2995

    
2996
#undef __
2997
#define __ ACCESS_MASM(masm())
2998

    
2999

    
3000
// Compiles a store stub for a property behind an interceptor: push
// (receiver, name, value, strict-mode flag) and tail-call the
// kStoreInterceptorProperty runtime entry.
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ push(this->name());
  __ push(value());
  __ push(Immediate(Smi::FromInt(strict_mode())));
  __ push(scratch1());  // restore return address

  // Do tail-call to the runtime system.
  // 4 arguments pushed above; 1 return value expected.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Return the generated code.
  return GetCode(kind(), Code::INTERCEPTOR, name);
}
3018

    
3019

    
3020
// Compiles a polymorphic keyed-store dispatcher: compare the receiver's
// map against each candidate and jump to the matching handler stub. For
// entries with a non-null transitioned map, the target map is loaded
// into transition_map() before jumping, so the handler can perform the
// elements-kind transition. Unmatched maps fall through to the miss
// builtin.
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss, Label::kNear);
  __ mov(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_maps->length(); ++i) {
    __ cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i));
    } else {
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
      __ mov(transition_map(), Immediate(transitioned_maps->at(i)));
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}
3046

    
3047

    
3048
// Compiles a load stub for a property known not to exist anywhere on
// the receiver's prototype chain: after the chain checks pass, the stub
// returns undefined.
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(
    Handle<JSObject> object,
    Handle<JSObject> last,
    Handle<Name> name,
    Handle<GlobalObject> global) {
  Label success;

  NonexistentHandlerFrontend(object, last, name, &success, global);

  __ bind(&success);
  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ mov(eax, isolate()->factory()->undefined_value());
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::NONEXISTENT, name);
}
3066

    
3067

    
3068
// Register allocation for load stubs:
// receiver, name, scratch1, scratch2, scratch3, scratch4.
Register* LoadStubCompiler::registers() {
  static Register load_stub_registers[] = { edx, ecx, ebx, eax, edi, no_reg };
  return load_stub_registers;
}
3073

    
3074

    
3075
// Register allocation for keyed-load stubs:
// receiver, name, scratch1, scratch2, scratch3, scratch4.
Register* KeyedLoadStubCompiler::registers() {
  static Register keyed_load_registers[] = { edx, ecx, ebx, eax, edi, no_reg };
  return keyed_load_registers;
}
3080

    
3081

    
3082
// Register allocation for store stubs:
// receiver, name, value, scratch1, scratch2, scratch3.
Register* StoreStubCompiler::registers() {
  static Register store_stub_registers[] = { edx, ecx, eax, ebx, edi, no_reg };
  return store_stub_registers;
}
3087

    
3088

    
3089
// Register allocation for keyed-store stubs:
// receiver, name, value, scratch1, scratch2, scratch3.
Register* KeyedStoreStubCompiler::registers() {
  static Register keyed_store_registers[] = { edx, ecx, eax, ebx, edi, no_reg };
  return keyed_store_registers;
}
3094

    
3095

    
3096
// Jumps to |miss| unless |name_reg| holds exactly the expected |name|.
// Keyed ICs carry the property key in a register, so a direct constant
// comparison suffices.
void KeyedLoadStubCompiler::GenerateNameCheck(Handle<Name> name,
                                              Register name_reg,
                                              Label* miss) {
  __ cmp(name_reg, Immediate(name));
  __ j(not_equal, miss);
}
3102

    
3103

    
3104
// Jumps to |miss| unless |name_reg| holds exactly the expected |name|.
// Mirrors KeyedLoadStubCompiler::GenerateNameCheck for the store side.
void KeyedStoreStubCompiler::GenerateNameCheck(Handle<Name> name,
                                               Register name_reg,
                                               Label* miss) {
  __ cmp(name_reg, Immediate(name));
  __ j(not_equal, miss);
}
3110

    
3111

    
3112
#undef __
#define __ ACCESS_MASM(masm)


// Emits code that invokes the JavaScript |getter| with |receiver| as the
// single stack argument and returns its result. When |getter| is null, no
// call is emitted; only the PC offset for resuming after deoptimization is
// recorded on the heap.
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  {
    // Enter an internal frame for the duration of the call.
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


// Compiles a load IC handler that reads the global property |name| from its
// PropertyCell. The value lands in eax (the return register). Properties
// marked |is_dont_delete| skip the deleted-property (hole) check; debug
// builds assert the invariant instead.
Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label success, miss;

  // Receiver must be a non-smi whose map is exactly |object|'s map.
  __ CheckMap(receiver(), Handle<Map>(object->map()), &miss, DO_SMI_CHECK);
  HandlerFrontendHeader(
      object, receiver(), Handle<JSObject>::cast(global), name, &miss);
  // Get the value from the cell.
  if (Serializer::enabled()) {
    // When serializing, go through the cell handle and load its value field
    // (presumably the cell's raw address cannot be embedded — see Serializer).
    __ mov(eax, Immediate(cell));
    __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset));
  } else {
    __ mov(eax, Operand::ForCell(cell));
  }

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ cmp(eax, factory()->the_hole_value());
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ cmp(eax, factory()->the_hole_value());
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  HandlerFrontendFooter(name, &success, &miss);
  __ bind(&success);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  // The code above already loads the result into the return register.
  __ ret(0);

  // Return the generated code.
  return GetICCode(kind(), Code::NORMAL, name);
}


// Compiles a polymorphic IC stub: dispatches on the receiver's map to the
// matching entry of |handlers| (parallel to |receiver_maps|), tail-calling
// the miss builtin when no map matches. Deprecated maps are skipped, and the
// resulting IC state is MONOMORPHIC or POLYMORPHIC depending on how many
// maps were actually handled.
Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    MapHandleList* receiver_maps,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY) {
    // Property ICs additionally verify that the name register holds |name|.
    GenerateNameCheck(name, this->name(), &miss);
  }

  // Smis have no map to dispatch on; send them to the miss handler.
  __ JumpIfSmi(receiver(), &miss);
  Register map_reg = scratch1();
  __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Map> map = receiver_maps->at(current);
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Jump straight to the handler whose map matches the receiver's map.
      __ cmp(map_reg, map);
      __ j(equal, handlers->at(current));
    }
  }
  // At least one map must be live, otherwise the stub handles nothing.
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


#undef __
#define __ ACCESS_MASM(masm)


// Emits the dictionary-mode elements load: probes the receiver's number
// dictionary backing store with the smi key in ecx and returns the value.
// Non-smi keys force the generic IC; a failed probe falls back to the slow
// builtin.
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, miss_force_generic;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.
  __ JumpIfNotSmi(ecx, &miss_force_generic);
  __ mov(ebx, ecx);
  __ SmiUntag(ebx);
  __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));

  // Push receiver on the stack to free up a register for the dictionary
  // probing.
  __ push(edx);
  __ LoadFromNumberDictionary(&slow, eax, ecx, ebx, edx, edi, eax);
  // Pop receiver before returning.
  __ pop(edx);
  __ ret(0);

  // Probe failed: restore the receiver and defer to the slow builtin.
  __ bind(&slow);
  __ pop(edx);

  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Non-smi key: receiver was never pushed, so no stack fixup is needed.
  __ bind(&miss_force_generic);
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_MissForceGeneric);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32