main_repo / deps / v8 / src / ia32 / builtins-ia32.cc @ f230a1cf

// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_IA32

#include "codegen.h"
#include "deoptimizer.h"
#include "full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- eax                : number of arguments excluding receiver
  //  -- edi                : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- esi                : context
  //  -- esp[0]             : return address
  //  -- esp[4]             : last argument
  //  -- ...
  //  -- esp[4 * argc]      : first argument (argc == eax)
  //  -- esp[4 * (argc +1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    Register scratch = ebx;
    __ pop(scratch);  // Save return address.
    __ push(edi);
    __ push(scratch);  // Restore return address.
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects eax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(eax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function.
  __ push(edi);
  // Push call kind information.
  __ push(ecx);
  // Function is also the parameter to the runtime call.
  __ push(edi);

  __ CallRuntime(function_id, 1);
  // Restore call kind information.
  __ pop(ecx);
  // Restore receiver.
  __ pop(edi);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kCodeOffset));
  __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
  __ jmp(eax);
}


void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(masm->isolate());
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, Label::kNear);

  CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
  // Tail call to returned code.
  __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
  __ jmp(eax);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments
  //  -- edi: constructor function
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Store a smi-tagged arguments count on the stack.
    __ SmiTag(eax);
    __ push(eax);

    // Push the function to invoke on the stack.
    __ push(edi);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0));
      __ j(not_equal, &rt_call);
#endif

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      // edi: constructor
      __ mov(eax, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
      // Will both indicate a NULL and a Smi
      __ JumpIfSmi(eax, &rt_call);
      // edi: constructor
      // eax: initial map (if proven valid below)
      __ CmpObjectType(eax, MAP_TYPE, ebx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // edi: constructor
      // eax: initial map
      __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
        __ dec_b(FieldOperand(ecx,
                              SharedFunctionInfo::kConstructionCountOffset));
        __ j(not_zero, &allocate);

        __ push(eax);
        __ push(edi);

        __ push(edi);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(edi);
        __ pop(eax);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // edi: constructor
      // eax: initial map
      __ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
      __ shl(edi, kPointerSizeLog2);
      __ Allocate(edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
      // Allocated the JSObject, now initialize the fields.
      // eax: initial map
      // ebx: JSObject
      // edi: start of next object
      __ mov(Operand(ebx, JSObject::kMapOffset), eax);
      Factory* factory = masm->isolate()->factory();
      __ mov(ecx, factory->empty_fixed_array());
      __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
      __ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
      // Set extra fields in the newly allocated object.
      // eax: initial map
      // ebx: JSObject
      // edi: start of next object
      __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
      __ mov(edx, factory->undefined_value());
      if (count_constructions) {
        __ movzx_b(esi,
                   FieldOperand(eax, Map::kPreAllocatedPropertyFieldsOffset));
        __ lea(esi,
               Operand(ebx, esi, times_pointer_size, JSObject::kHeaderSize));
        // esi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmp(esi, edi);
          __ Assert(less_equal,
                    kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(ecx, esi, edx);
        __ mov(edx, factory->one_pointer_filler_map());
      }
      __ InitializeFieldsWithFiller(ecx, edi, edx);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      // eax: initial map
      // ebx: JSObject
      // edi: start of next object
      __ or_(ebx, Immediate(kHeapObjectTag));

      // Check if a non-empty properties array is needed.
      // Allocate and initialize a FixedArray if it is.
      // eax: initial map
      // ebx: JSObject
      // edi: start of next object
      // Calculate the total number of properties described by the map.
      __ movzx_b(edx, FieldOperand(eax, Map::kUnusedPropertyFieldsOffset));
      __ movzx_b(ecx,
                 FieldOperand(eax, Map::kPreAllocatedPropertyFieldsOffset));
      __ add(edx, ecx);
      // Calculate unused properties past the end of the in-object properties.
      __ movzx_b(ecx, FieldOperand(eax, Map::kInObjectPropertiesOffset));
      __ sub(edx, ecx);
      // Done if no extra properties are to be allocated.
      __ j(zero, &allocated);
      __ Assert(positive, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // ebx: JSObject
      // edi: start of next object (will be start of FixedArray)
      // edx: number of elements in properties array
      __ Allocate(FixedArray::kHeaderSize,
                  times_pointer_size,
                  edx,
                  REGISTER_VALUE_IS_INT32,
                  edi,
                  ecx,
                  no_reg,
                  &undo_allocation,
                  RESULT_CONTAINS_TOP);

      // Initialize the FixedArray.
      // ebx: JSObject
      // edi: FixedArray
      // edx: number of elements
      // ecx: start of next object
      __ mov(eax, factory->fixed_array_map());
      __ mov(Operand(edi, FixedArray::kMapOffset), eax);  // setup the map
      __ SmiTag(edx);
      __ mov(Operand(edi, FixedArray::kLengthOffset), edx);  // and length

      // Initialize the fields to undefined.
      // ebx: JSObject
      // edi: FixedArray
      // ecx: start of next object
      { Label loop, entry;
        __ mov(edx, factory->undefined_value());
        __ lea(eax, Operand(edi, FixedArray::kHeaderSize));
        __ jmp(&entry);
        __ bind(&loop);
        __ mov(Operand(eax, 0), edx);
        __ add(eax, Immediate(kPointerSize));
        __ bind(&entry);
        __ cmp(eax, ecx);
        __ j(below, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject
      // ebx: JSObject
      // edi: FixedArray
      __ or_(edi, Immediate(kHeapObjectTag));  // add the heap tag
      __ mov(FieldOperand(ebx, JSObject::kPropertiesOffset), edi);


      // Continue with JSObject being successfully allocated
      // ebx: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // ebx: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(ebx);
    }

    // Allocate the new receiver object using the runtime call.
    __ bind(&rt_call);
    // Must restore edi (constructor) before calling runtime.
    __ mov(edi, Operand(esp, 0));
    // edi: function (constructor)
    __ push(edi);
    __ CallRuntime(Runtime::kNewObject, 1);
    __ mov(ebx, eax);  // store result in ebx

    // New object allocated.
    // ebx: newly allocated object
    __ bind(&allocated);
    // Retrieve the function from the stack.
    __ pop(edi);

    // Retrieve smi-tagged arguments count from the stack.
    __ mov(eax, Operand(esp, 0));
    __ SmiUntag(eax);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ push(ebx);
    __ push(ebx);

    // Set up pointer to last argument.
    __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ mov(ecx, eax);
    __ jmp(&entry);
    __ bind(&loop);
    __ push(Operand(ebx, ecx, times_4, 0));
    __ bind(&entry);
    __ dec(ecx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
                    CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      ParameterCount actual(eax);
      __ InvokeFunction(edi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(eax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ mov(eax, Operand(esp, 0));

    // Restore the arguments count and leave the construct frame.
    __ bind(&exit);
    __ mov(ebx, Operand(esp, kPointerSize));  // Get arguments count.

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ push(ecx);
  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
  __ ret(0);
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ Set(esi, Immediate(0));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the previous frame pointer (ebx) to access C arguments
    __ mov(ebx, Operand(ebp, 0));

    // Get the function from the frame and setup the context.
    __ mov(ecx, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
    __ mov(esi, FieldOperand(ecx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ push(ecx);
    __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
    __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));

    // Copy arguments to the stack in a loop.
    Label loop, entry;
    __ Set(ecx, Immediate(0));
    __ jmp(&entry);
    __ bind(&loop);
    __ mov(edx, Operand(ebx, ecx, times_4, 0));  // push parameter from argv
    __ push(Operand(edx, 0));  // dereference handle
    __ inc(ecx);
    __ bind(&entry);
    __ cmp(ecx, eax);
    __ j(not_equal, &loop);

    // Get the function from the stack and call it.
    // kPointerSize for the receiver.
    __ mov(edi, Operand(esp, eax, times_4, kPointerSize));

    // Invoke the code.
    if (is_construct) {
      // No type feedback cell is available
      Handle<Object> undefined_sentinel(
          masm->isolate()->heap()->undefined_value(), masm->isolate());
      __ mov(ebx, Immediate(undefined_sentinel));
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(eax);
      __ InvokeFunction(edi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }
  __ ret(kPointerSize);  // Remove receiver.
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
  // Do a tail-call of the compiled function.
  __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
  __ jmp(eax);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
  // Do a tail-call of the compiled function.
  __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
  __ jmp(eax);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ sub(Operand(esp, 0), Immediate(5));
  __ pushad();
  __ mov(eax, Operand(esp, 8 * kPointerSize));
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2, ebx);
    __ mov(Operand(esp, 1 * kPointerSize),
           Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 0), eax);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ popad();
  __ ret(0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ pushad();
  __ mov(eax, Operand(esp, 8 * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2, ebx);
    __ mov(Operand(esp, 1 * kPointerSize),
           Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 0), eax);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ popad();

  // Perform prologue operations usually performed by the young code stub.
  __ pop(eax);   // Pop return address into scratch register.
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.
  __ push(eax);  // Push return address after frame prologue.

  // Jump to point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, 0);
    __ popad();
    // Tear down internal frame.
  }

  __ pop(MemOperand(esp, 0));  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass deoptimization type to the runtime system.
    __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);

    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ SmiUntag(ecx);

  // Switch on the state.
  Label not_no_registers, not_tos_eax;
  __ cmp(ecx, FullCodeGenerator::NO_REGISTERS);
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ cmp(ecx, FullCodeGenerator::TOS_REG);
  __ j(not_equal, &not_tos_eax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, eax.

  __ bind(&not_tos_eax);
  __ Abort(kNoCasesLeft);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  Factory* factory = masm->isolate()->factory();

  // 1. Make sure we have at least one argument.
  { Label done;
    __ test(eax, eax);
    __ j(not_zero, &done);
    __ pop(ebx);
    __ push(Immediate(factory->undefined_value()));
    __ push(ebx);
    __ inc(eax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  // 1 ~ return address.
  __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
  __ JumpIfSmi(edi, &non_function);
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &slow);


  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Set(edx, Immediate(0));  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ test_b(FieldOperand(ebx, SharedFunctionInfo::kStrictModeByteOffset),
              1 << SharedFunctionInfo::kStrictModeBitWithinByte);
    __ j(not_equal, &shift_arguments);

    // Do not transform the receiver for natives (shared already in ebx).
    __ test_b(FieldOperand(ebx, SharedFunctionInfo::kNativeByteOffset),
              1 << SharedFunctionInfo::kNativeBitWithinByte);
    __ j(not_equal, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ mov(ebx, Operand(esp, eax, times_4, 0));  // First argument.

    // Call ToObject on the receiver if it is not an object, or use the
    // global object if it is null or undefined.
    __ JumpIfSmi(ebx, &convert_to_object);
    __ cmp(ebx, factory->null_value());
    __ j(equal, &use_global_receiver);
    __ cmp(ebx, factory->undefined_value());
    __ j(equal, &use_global_receiver);
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(above_equal, &shift_arguments);

    __ bind(&convert_to_object);

    { // In order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(eax);
      __ push(eax);

      __ push(ebx);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(ebx, eax);
      __ Set(edx, Immediate(0));  // restore

      __ pop(eax);
      __ SmiUntag(eax);
    }

    // Restore the function to edi.
    __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ mov(ebx, FieldOperand(esi, kGlobalIndex));
    __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
    __ mov(ebx, FieldOperand(ebx, kGlobalIndex));
    __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ mov(Operand(esp, eax, times_4, 0), ebx);

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ Set(edx, Immediate(1));  // indicate function proxy
  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
  __ j(equal, &shift_arguments);
  __ bind(&non_function);
  __ Set(edx, Immediate(2));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ mov(Operand(esp, eax, times_4, 0), edi);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ mov(ecx, eax);
    __ bind(&loop);
    __ mov(ebx, Operand(esp, ecx, times_4, 0));
    __ mov(Operand(esp, ecx, times_4, kPointerSize), ebx);
    __ dec(ecx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(ebx);  // Discard copy of return address.
    __ dec(eax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  { Label function, non_proxy;
    __ test(edx, edx);
    __ j(zero, &function);
    __ Set(ebx, Immediate(0));
    __ cmp(edx, Immediate(1));
    __ j(not_equal, &non_proxy);

    __ pop(edx);   // return address
    __ push(edi);  // re-add proxy object as additional argument
    __ push(edx);
    __ inc(eax);
    __ SetCallKind(ecx, CALL_AS_FUNCTION);
    __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ SetCallKind(ecx, CALL_AS_METHOD);
    __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register edx without checking arguments.
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
  __ SmiUntag(ebx);
  __ SetCallKind(ecx, CALL_AS_METHOD);
  __ cmp(eax, ebx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline());

  ParameterCount expected(0);
  __ InvokeCode(edx, expected, expected, JUMP_FUNCTION, NullCallWrapper(),
                CALL_AS_METHOD);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  static const int kArgumentsOffset = 2 * kPointerSize;
  static const int kReceiverOffset = 3 * kPointerSize;
  static const int kFunctionOffset = 4 * kPointerSize;
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    __ push(Operand(ebp, kFunctionOffset));  // push this
    __ push(Operand(ebp, kArgumentsOffset));  // push arguments
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    ExternalReference real_stack_limit =
        ExternalReference::address_of_real_stack_limit(masm->isolate());
    __ mov(edi, Operand::StaticVariable(real_stack_limit));
    // Make ecx the space we have left. The stack might already be overflowed
    // here which will cause ecx to become negative.
    __ mov(ecx, esp);
    __ sub(ecx, edi);
    // Make edx the space we need for the array when it is unrolled onto the
    // stack.
    __ mov(edx, eax);
    __ shl(edx, kPointerSizeLog2 - kSmiTagSize);
    // Check if the arguments will overflow the stack.
    __ cmp(ecx, edx);
    __ j(greater, &okay);  // Signed comparison.

    // Out of stack space.
    __ push(Operand(ebp, 4 * kPointerSize));  // push this
    __ push(eax);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    __ bind(&okay);
    // End of stack check.

    // Push current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ push(eax);  // limit
    __ push(Immediate(0));  // index

    // Get the receiver.
    __ mov(ebx, Operand(ebp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ mov(edi, Operand(ebp, kFunctionOffset));
    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
    __ j(not_equal, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
              1 << SharedFunctionInfo::kStrictModeBitWithinByte);
    __ j(not_equal, &push_receiver);

    Factory* factory = masm->isolate()->factory();

    // Do not transform the receiver for natives (shared already in ecx).
    __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
              1 << SharedFunctionInfo::kNativeBitWithinByte);
    __ j(not_equal, &push_receiver);

    // Compute the receiver in non-strict mode.
    // Call ToObject on the receiver if it is not an object, or use the
    // global object if it is null or undefined.
    __ JumpIfSmi(ebx, &call_to_object);
    __ cmp(ebx, factory->null_value());
    __ j(equal, &use_global_receiver);
    __ cmp(ebx, factory->undefined_value());
    __ j(equal, &use_global_receiver);
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(above_equal, &push_receiver);

    __ bind(&call_to_object);
    __ push(ebx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(ebx, eax);
    __ jmp(&push_receiver);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ mov(ebx, FieldOperand(esi, kGlobalOffset));
    __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
    __ mov(ebx, FieldOperand(ebx, kGlobalOffset));
    __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    __ bind(&push_receiver);
    __ push(ebx);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ mov(ecx, Operand(ebp, kIndexOffset));
    __ jmp(&entry);
    __ bind(&loop);
    __ mov(edx, Operand(ebp, kArgumentsOffset));  // load arguments

    // Use inline caching to speed up access to arguments.
    Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Initialize();
    __ call(ic, RelocInfo::CODE_TARGET);
    // It is important that we do not have a test instruction after the
    // call.  A test instruction after the call is used to indicate that
    // we have generated an inline version of the keyed load.  In this
    // case, we know that we are not generating a test instruction next.

    // Push the nth argument.
    __ push(eax);

    // Update the index on the stack and in register eax.
    __ mov(ecx, Operand(ebp, kIndexOffset));
    __ add(ecx, Immediate(1 << kSmiTagSize));
    __ mov(Operand(ebp, kIndexOffset), ecx);

    __ bind(&entry);
    __ cmp(ecx, Operand(ebp, kLimitOffset));
    __ j(not_equal, &loop);

    // Invoke the function.
    Label call_proxy;
    __ mov(eax, ecx);
    ParameterCount actual(eax);
    __ SmiUntag(eax);
    __ mov(edi, Operand(ebp, kFunctionOffset));
    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(edi, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ ret(3 * kPointerSize);  // remove this, receiver, and arguments

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(edi);  // add function proxy as last argument
    __ inc(eax);
    __ Set(ebx, Immediate(0));
    __ SetCallKind(ecx, CALL_AS_METHOD);
    __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray function should be a map.
    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ test(ebx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(ebx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ test(ebx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ebx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());
  __ mov(ebx, Immediate(undefined_sentinel));
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
    __ cmp(edi, ecx);
    __ Assert(equal, kUnexpectedStringFunction);
  }

  // Load the first argument into eax and get rid of the rest
  // (including the receiver).
  Label no_arguments;
  __ test(eax, eax);
  __ j(zero, &no_arguments);
  __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
  __ pop(ecx);
  __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
  __ push(ecx);
  __ mov(eax, ebx);

  // Lookup the argument in the number to string cache.
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(eax,  // Input.
                             ebx,  // Result.
                             ecx,  // Scratch 1.
                             edx,  // Scratch 2.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);
  // ----------- S t a t e -------------
  //  -- ebx    : argument converted to string
  //  -- edi    : constructor function
  //  -- esp[0] : return address
  // -----------------------------------

  // Allocate a JSValue and put the tagged pointer into eax.
  Label gc_required;
  __ Allocate(JSValue::kSize,
              eax,  // Result.
              ecx,  // New allocation top (we ignore it).
              no_reg,
              &gc_required,
              TAG_OBJECT);

  // Set the map.
  __ LoadGlobalFunctionInitialMap(edi, ecx);
  if (FLAG_debug_code) {
    __ cmpb(FieldOperand(ecx, Map::kInstanceSizeOffset),
            JSValue::kSize >> kPointerSizeLog2);
    __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
    __ cmpb(FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset), 0);
    __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ecx);

  // Set properties and elements.
  Factory* factory = masm->isolate()->factory();
  __ Set(ecx, Immediate(factory->empty_fixed_array()));
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ecx);

  // Set the value.
  __ mov(FieldOperand(eax, JSValue::kValueOffset), ebx);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  // We're done. Return.
  __ ret(0);

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &convert_argument);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ecx);
  __ j(NegateCondition(is_string), &convert_argument);
  __ mov(ebx, eax);
  __ IncrementCounter(counters->string_ctor_string_value(), 1);
  __ jmp(&argument_is_string);

  // Invoke the conversion builtin and put the result into ebx.
  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(edi);  // Preserve the function.
    __ push(eax);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
    __ pop(edi);
  }
  __ mov(ebx, eax);
  __ jmp(&argument_is_string);

  // Load the empty string into ebx, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ Set(ebx, Immediate(factory->empty_string()));
  __ pop(ecx);
  __ lea(esp, Operand(esp, kPointerSize));
  __ push(ecx);
  __ jmp(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(ebx);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ ret(0);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(ebp);
  __ mov(ebp, esp);

  // Store the arguments adaptor context sentinel.
  __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ push(edi);

  // Preserve the number of arguments on the stack. Must preserve eax,
  // ebx and ecx because these registers are used when copying the
  // arguments and the receiver.
  STATIC_ASSERT(kSmiTagSize == 1);
  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
  __ push(edi);
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack.
  __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ leave();

  // Remove caller arguments from the stack.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ push(ecx);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : actual number of arguments
  //  -- ebx : expected number of arguments
  //  -- ecx : call kind information
  //  -- edx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmp(eax, ebx);
  __ j(less, &too_few);
  __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(eax, Operand(ebp, eax, times_4, offset));
    __ mov(edi, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ inc(edi);
    __ push(Operand(eax, 0));
    __ sub(eax, Immediate(kPointerSize));
    __ cmp(edi, ebx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_4, offset));
    // ebx = expected - actual.
    __ sub(ebx, eax);
    // eax = -actual - 1
    __ neg(eax);
    __ sub(eax, Immediate(1));

    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kPointerSize));
    __ test(eax, eax);
    __ j(not_zero, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ bind(&fill);
    __ inc(eax);
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    __ cmp(eax, ebx);
    __ j(less, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  // Restore function pointer.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ call(edx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(edx);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Lookup and calculate pc offset.
    __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
    __ mov(ebx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
    __ sub(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ sub(edx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset));
    __ SmiTag(edx);

    // Pass both function and pc offset as arguments.
    __ push(eax);
    __ push(edx);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmp(eax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ mov(ebx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(ebx);

  // Compute the target address = code_obj + header_size + osr_offset
  __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ mov(Operand(esp, 0), eax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(masm->isolate());
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, Label::kNear);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ ret(0);
}

#undef __
}
}  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32