// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_IA32

#include "bootstrapper.h"
#include "code-stubs.h"
#include "isolate.h"
#include "jsregexp.h"
#include "regexp-macro-assembler.h"
#include "runtime.h"
#include "stub-cache.h"
#include "codegen.h"

namespace v8 {
namespace internal {


void FastNewClosureStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ebx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry;
}
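
// Editor's note: the InitializeInterfaceDescriptor overrides in this file all
// follow the pattern above: name the registers the stub receives its
// parameters in, and record the runtime entry (or IC miss handler) used when
// the stub's code must fall back. E.g. FastNewClosureStub takes a single
// parameter in ebx and falls back to Runtime::kNewClosureFromStubFailure.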


void ToNumberStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void NumberToStringStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kNumberToString)->entry;
}


void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
}


void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx, edx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry;
}


void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ebx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void LoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
}


void TransitionElementsKindStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
}


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- function
  // ebx -- type info cell with elements kind
  static Register registers[] = { edi, ebx };
  descriptor->register_param_count_ = 2;

  if (constant_stack_parameter_count != 0) {
    // stack param count needs (constructor pointer, and single argument)
    descriptor->stack_parameter_count_ = eax;
  }
  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->register_params_ = registers;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kArrayConstructor)->entry;
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- constructor function
  static Register registers[] = { edi };
  descriptor->register_param_count_ = 1;

  if (constant_stack_parameter_count != 0) {
    // stack param count needs (constructor pointer, and single argument)
    descriptor->stack_parameter_count_ = eax;
  }
  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->register_params_ = registers;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kInternalArrayConstructor)->entry;
}


void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
}


void CompareNilICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(CompareNilIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
}

void ToBooleanStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
}


void StoreGlobalStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(StoreIC_MissFromStubFailure);
}


void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx, edx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
}


void BinaryOpStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, eax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
}


#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  // Update the static counter each time a new code stub is generated.
  Isolate* isolate = masm->isolate();
  isolate->counters()->code_stubs()->Increment();

  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
  int param_count = descriptor->register_param_count_;
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    ASSERT(descriptor->register_param_count_ == 0 ||
           eax.is(descriptor->register_params_[param_count - 1]));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor->register_params_[i]);
    }
    ExternalReference miss = descriptor->miss_handler();
    __ CallExternalReference(miss, descriptor->register_param_count_);
  }

  __ ret(0);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
              eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Set up the object header.
  Factory* factory = masm->isolate()->factory();
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         factory->function_context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // Set up the fixed slots.
  __ Set(ebx, Immediate(0));  // Set to NULL.
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), esi);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the previous context.
  __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  __ mov(ebx, factory->undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter.
  __ mov(esi, eax);
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
}


void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + (1 * kPointerSize)]: function
  // [esp + (2 * kPointerSize)]: serialized scope info

  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ Allocate(FixedArray::SizeFor(length), eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function or sentinel from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Get the serialized scope info from the stack.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));

  // Set up the object header.
  Factory* factory = masm->isolate()->factory();
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         factory->block_context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // If this block context is nested in the native context we get a smi
  // sentinel instead of a function. The block context should get the
  // canonical empty function of the native context as its closure which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
  if (FLAG_debug_code) {
    __ cmp(ecx, 0);
    __ Assert(equal, kExpected0AsASmiSentinel);
  }
  __ mov(ecx, GlobalObjectOperand());
  __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
  __ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

  // Set up the fixed slots.
  __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx);
  __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi);
  __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);

  // Copy the global object from the previous context.
  __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
  __ mov(ContextOperand(eax, Context::GLOBAL_OBJECT_INDEX), ebx);

  // Initialize the rest of the slots to the hole value.
  if (slots_ == 1) {
    __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS),
           factory->the_hole_value());
  } else {
    __ mov(ebx, factory->the_hole_value());
    for (int i = 0; i < slots_; i++) {
      __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ mov(esi, eax);
  __ ret(2 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles_ == kSaveFPRegs) {
    CpuFeatureScope scope(masm, SSE2);
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
      argument_count);
  if (save_doubles_ == kSaveFPRegs) {
    CpuFeatureScope scope(masm, SSE2);
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
  }
  __ popad();
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are.  Jump to label not_numbers if
  // either operand is not a number.  Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  ASSERT(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;
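  // Editor's note: the three words are the return address plus the two
  // registers pushed below (scratch1 and save_reg), inferred from the pushes
  // that precede the reads of mantissa_operand/exponent_operand.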

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead for
  // the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
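  // Editor's note: result_reg now holds the unbiased exponent, i.e. the
  // biased exponent field minus kExponentBias.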
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd(result_reg, scratch1);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    if (CpuFeatures::IsSupported(CMOV)) {
      CpuFeatureScope use_cmov(masm, CMOV);
      __ cmov(not_equal, scratch1, result_reg);
    } else {
      Label skip_mov;
      __ j(equal, &skip_mov, Label::kNear);
      __ mov(scratch1, result_reg);
      __ bind(&skip_mov);
    }
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  if (CpuFeatures::IsSupported(CMOV)) {
    CpuFeatureScope use_cmov(masm, CMOV);
    __ cmov(greater, result_reg, scratch1);
  } else {
    Label skip_mov;
    __ j(less_equal, &skip_mov, Label::kNear);
    __ mov(result_reg, scratch1);
    __ bind(&skip_mov);
  }

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    ASSERT(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}


void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // TAGGED case:
  //   Input:
  //     esp[4]: tagged number input argument (should be number).
  //     esp[0]: return address.
  //   Output:
  //     eax: tagged double result.
  // UNTAGGED case:
  //   Input:
  //     esp[0]: return address.
  //     xmm1: untagged double input argument
  //   Output:
  //     xmm1: untagged double result.

  Label runtime_call;
  Label runtime_call_clear_stack;
  Label skip_cache;
  const bool tagged = (argument_type_ == TAGGED);
  if (tagged) {
    // Test that eax is a number.
    Label input_not_smi;
    Label loaded;
    __ mov(eax, Operand(esp, kPointerSize));
    __ JumpIfNotSmi(eax, &input_not_smi, Label::kNear);
    // Input is a smi. Untag and load it onto the FPU stack.
    // Then load the low and high words of the double into ebx, edx.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ sar(eax, 1);
    __ sub(esp, Immediate(2 * kPointerSize));
    __ mov(Operand(esp, 0), eax);
    __ fild_s(Operand(esp, 0));
    __ fst_d(Operand(esp, 0));
    __ pop(edx);
    __ pop(ebx);
    __ jmp(&loaded, Label::kNear);
    __ bind(&input_not_smi);
    // Check if input is a HeapNumber.
    __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
    Factory* factory = masm->isolate()->factory();
    __ cmp(ebx, Immediate(factory->heap_number_map()));
    __ j(not_equal, &runtime_call);
    // Input is a HeapNumber. Push it on the FPU stack and load its
    // low and high words into ebx, edx.
    __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
    __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
    __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));

    __ bind(&loaded);
  } else {  // UNTAGGED.
    CpuFeatureScope scope(masm, SSE2);
    if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatureScope sse4_scope(masm, SSE4_1);
      __ pextrd(edx, xmm1, 0x1);  // copy xmm1[63..32] to edx.
    } else {
      __ pshufd(xmm0, xmm1, 0x1);
      __ movd(edx, xmm0);
    }
    __ movd(ebx, xmm1);
  }

  // ST[0] or xmm1  == double value
  // ebx = low 32 bits of double value
  // edx = high 32 bits of double value
  // Compute hash (the shifts are arithmetic):
  //   h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
  __ mov(ecx, ebx);
  __ xor_(ecx, edx);
  __ mov(eax, ecx);
  __ sar(eax, 16);
  __ xor_(ecx, eax);
  __ mov(eax, ecx);
  __ sar(eax, 8);
  __ xor_(ecx, eax);
  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
  __ and_(ecx,
          Immediate(TranscendentalCache::SubCache::kCacheSize - 1));
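  // Editor's note: the instructions above implement the hash from the
  // comment; a plain C++ sketch (kCacheSize is a power of two, as asserted):
  //   int32_t h = low ^ high;  // ecx = ebx ^ edx
  //   h ^= h >> 16;            // arithmetic shifts, hence sar
  //   h ^= h >> 8;
  //   h &= TranscendentalCache::SubCache::kCacheSize - 1;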

  // ST[0] or xmm1 == double value.
  // ebx = low 32 bits of double value.
  // edx = high 32 bits of double value.
  // ecx = TranscendentalCache::hash(double value).
  ExternalReference cache_array =
      ExternalReference::transcendental_cache_array_address(masm->isolate());
  __ mov(eax, Immediate(cache_array));
  int cache_array_index =
      type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
  __ mov(eax, Operand(eax, cache_array_index));
  // Eax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ test(eax, eax);
  __ j(zero, &runtime_call_clear_stack);
#ifdef DEBUG
  // Check that the layout of cache elements matches expectations.
  { TranscendentalCache::SubCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0  = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1  = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    CHECK_EQ(12, elem2_start - elem_start);  // Two uint_32's and a pointer.
    CHECK_EQ(0, elem_in0 - elem_start);
    CHECK_EQ(kIntSize, elem_in1 - elem_start);
    CHECK_EQ(2 * kIntSize, elem_out - elem_start);
  }
#endif
  // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
  __ lea(ecx, Operand(ecx, ecx, times_2, 0));
  __ lea(ecx, Operand(eax, ecx, times_4, 0));
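  // Editor's note: there is no *12 addressing mode, so the multiply is split
  // as ecx = ecx + ecx*2 (= 3*hash) followed by eax + ecx*4 (= eax + 12*hash).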
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmp(ebx, Operand(ecx, 0));
  __ j(not_equal, &cache_miss, Label::kNear);
  __ cmp(edx, Operand(ecx, kIntSize));
  __ j(not_equal, &cache_miss, Label::kNear);
  // Cache hit!
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->transcendental_cache_hit(), 1);
  __ mov(eax, Operand(ecx, 2 * kIntSize));
  if (tagged) {
    __ fstp(0);
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    CpuFeatureScope scope(masm, SSE2);
    __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();
  }

  __ bind(&cache_miss);
  __ IncrementCounter(counters->transcendental_cache_miss(), 1);
  // Update cache with new value.
  // We are short on registers, so use no_reg as scratch.
  // This gives slightly larger code.
  if (tagged) {
    __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
  } else {  // UNTAGGED.
    CpuFeatureScope scope(masm, SSE2);
    __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), xmm1);
    __ fld_d(Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
  }
  GenerateOperation(masm, type_);
  __ mov(Operand(ecx, 0), ebx);
  __ mov(Operand(ecx, kIntSize), edx);
  __ mov(Operand(ecx, 2 * kIntSize), eax);
  __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
  if (tagged) {
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    CpuFeatureScope scope(masm, SSE2);
    __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();

    // Skip cache and return answer directly, only in untagged case.
    __ bind(&skip_cache);
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), xmm1);
    __ fld_d(Operand(esp, 0));
    GenerateOperation(masm, type_);
    __ fstp_d(Operand(esp, 0));
    __ movsd(xmm1, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    // We return the value in xmm1 without adding it to the cache, but
    // we cause a scavenging GC so that future allocations will succeed.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Allocate an unused object bigger than a HeapNumber.
      __ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
      __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
    }
    __ Ret();
  }

  // Call runtime, doing whatever allocation and cleanup is necessary.
  if (tagged) {
    __ bind(&runtime_call_clear_stack);
    __ fstp(0);
    __ bind(&runtime_call);
    ExternalReference runtime =
        ExternalReference(RuntimeFunction(), masm->isolate());
    __ TailCallExternalReference(runtime, 1, 1);
  } else {  // UNTAGGED.
    CpuFeatureScope scope(masm, SSE2);
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), xmm1);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(eax);
      __ CallRuntime(RuntimeFunction(), 1);
    }
    __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();
  }
}


Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    case TranscendentalCache::TAN: return Runtime::kMath_tan;
    case TranscendentalCache::LOG: return Runtime::kMath_log;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}


void TranscendentalCacheStub::GenerateOperation(
    MacroAssembler* masm, TranscendentalCache::Type type) {
  // Only free register is edi.
  // Input value is on FP stack, and also in ebx/edx.
  // Input value is possibly in xmm1.
  // Address of result (a newly allocated HeapNumber) may be in eax.
  if (type == TranscendentalCache::SIN ||
      type == TranscendentalCache::COS ||
      type == TranscendentalCache::TAN) {
    // Both fsin and fcos require arguments in the range +/-2^63 and
    // return NaN for infinities and NaN. They can share all code except
    // the actual fsin/fcos operation.
    Label in_range, done;
    // If argument is outside the range -2^63..2^63, fsin/cos doesn't
    // work. We must reduce it to the appropriate range.
    __ mov(edi, edx);
    __ and_(edi, Immediate(0x7ff00000));  // Exponent only.
    int supported_exponent_limit =
        (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
    __ cmp(edi, Immediate(supported_exponent_limit));
    __ j(below, &in_range, Label::kNear);
    // Check for infinity and NaN. Both return NaN for sin.
    __ cmp(edi, Immediate(0x7ff00000));
    Label non_nan_result;
    __ j(not_equal, &non_nan_result, Label::kNear);
    // Input is +/-Infinity or NaN. Result is NaN.
    __ fstp(0);
    // NaN is represented by 0x7ff8000000000000.
    __ push(Immediate(0x7ff80000));
    __ push(Immediate(0));
    __ fld_d(Operand(esp, 0));
    __ add(esp, Immediate(2 * kPointerSize));
    __ jmp(&done, Label::kNear);

    __ bind(&non_nan_result);

    // Use fpmod to restrict argument to the range +/-2*PI.
    __ mov(edi, eax);  // Save eax before using fnstsw_ax.
    __ fldpi();
    __ fadd(0);
    __ fld(1);
    // FPU Stack: input, 2*pi, input.
    {
      Label no_exceptions;
      __ fwait();
      __ fnstsw_ax();
      // Clear if Illegal Operand or Zero Division exceptions are set.
      __ test(eax, Immediate(5));
      __ j(zero, &no_exceptions, Label::kNear);
      __ fnclex();
      __ bind(&no_exceptions);
    }

    // Compute st(0) % st(1)
    {
      Label partial_remainder_loop;
      __ bind(&partial_remainder_loop);
      __ fprem1();
      __ fwait();
      __ fnstsw_ax();
      __ test(eax, Immediate(0x400 /* C2 */));
      // If C2 is set, computation only has partial result. Loop to
      // continue computation.
      __ j(not_zero, &partial_remainder_loop);
    }
    // FPU Stack: input, 2*pi, input % 2*pi
    __ fstp(2);
    __ fstp(0);
    __ mov(eax, edi);  // Restore eax (allocated HeapNumber pointer).

    // FPU Stack: input % 2*pi
    __ bind(&in_range);
    switch (type) {
      case TranscendentalCache::SIN:
        __ fsin();
        break;
      case TranscendentalCache::COS:
        __ fcos();
        break;
      case TranscendentalCache::TAN:
        // FPTAN calculates tangent onto st(0) and pushes 1.0 onto the
        // FP register stack.
        __ fptan();
        __ fstp(0);  // Pop FP register stack.
        break;
      default:
        UNREACHABLE();
    }
    __ bind(&done);
  } else {
    ASSERT(type == TranscendentalCache::LOG);
    __ fldln2();
    __ fxch();
    __ fyl2x();
  }
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  CpuFeatureScope use_sse2(masm, SSE2);
  Factory* factory = masm->isolate()->factory();
  const Register exponent = eax;
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type_ == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ cmp(FieldOperand(base, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiUntag(base);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
              FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
              FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type_ != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x80000000u));
    __ j(equal, &call_runtime);

    if (exponent_type_ == ON_STACK) {
      // Detect square root case.  Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead.  We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ mov(scratch, Immediate(0x3F000000u));
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base.  Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_exponent with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base.  Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
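    // Editor's note: the sequence below evaluates B^E = 2^X by splitting
    // X into rnd(X) + (X - rnd(X)), because F2XM1 only accepts arguments in
    // (-1, 1):
    //   2^X = 2^(X - rnd(X)) * 2^rnd(X),
    // where F2XM1 produces 2^(X - rnd(X)) - 1 and FSCALE applies 2^rnd(X).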
1190
    __ fyl2x();    // X
1191
    __ fld(0);     // X, X
1192
    __ frndint();  // rnd(X), X
1193
    __ fsub(1);    // rnd(X), X-rnd(X)
1194
    __ fxch(1);    // X - rnd(X), rnd(X)
1195
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
1196
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
1197
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
1198
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
1199
    // FSCALE calculates st(0) * 2^st(1)
1200
    __ fscale();   // 2^X, rnd(X)
1201
    __ fstp(1);    // 2^X
1202
    // Bail out to runtime in case of exceptions in the status word.
1203
    __ fnstsw_ax();
1204
    __ test_b(eax, 0x5F);  // We check for all but precision exception.
1205
    __ j(not_zero, &fast_power_failed, Label::kNear);
1206
    __ fstp_d(Operand(esp, 0));
1207
    __ movsd(double_result, Operand(esp, 0));
1208
    __ add(esp, Immediate(kDoubleSize));
1209
    __ jmp(&done);
1210

    
1211
    __ bind(&fast_power_failed);
1212
    __ fninit();
1213
    __ add(esp, Immediate(kDoubleSize));
1214
    __ jmp(&call_runtime);
1215
  }
1216

    
1217
  // Calculate power with integer exponent.
1218
  __ bind(&int_exponent);
1219
  const XMMRegister double_scratch2 = double_exponent;
1220
  __ mov(scratch, exponent);  // Back up exponent.
1221
  __ movsd(double_scratch, double_base);  // Back up base.
1222
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.
1223

    
1224
  // Get absolute value of exponent.
1225
  Label no_neg, while_true, while_false;
1226
  __ test(scratch, scratch);
1227
  __ j(positive, &no_neg, Label::kNear);
1228
  __ neg(scratch);
1229
  __ bind(&no_neg);
1230

    
1231
  __ j(zero, &while_false, Label::kNear);
1232
  __ shr(scratch, 1);
1233
  // Above condition means CF==0 && ZF==0.  This means that the
1234
  // bit that has been shifted out is 0 and the result is not 0.
1235
  __ j(above, &while_true, Label::kNear);
1236
  __ movsd(double_result, double_scratch);
1237
  __ j(zero, &while_false, Label::kNear);
1238

    
1239
  __ bind(&while_true);
1240
  __ shr(scratch, 1);
1241
  __ mulsd(double_scratch, double_scratch);
1242
  __ j(above, &while_true, Label::kNear);
1243
  __ mulsd(double_result, double_scratch);
1244
  __ j(not_zero, &while_true);
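  // Editor's note: this loop is square-and-multiply (binary) exponentiation.
  // A plain C++ sketch of the algorithm it implements, with n = |exponent|:
  //   double result = 1.0, b = base;
  //   while (n != 0) {
  //     if (n & 1) result *= b;  // multiply in the squares for set bits
  //     b *= b;
  //     n >>= 1;
  //   }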

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = masm->isolate()->counters();
  if (exponent_type_ == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(4, scratch);
      __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
      __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(masm->isolate()), 4);
    }
    // Return value is in st(0) on ia32.
    // Store it into the (fixed) result register.
    __ sub(esp, Immediate(kDoubleSize));
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  if (kind() == Code::KEYED_LOAD_IC) {
    __ cmp(ecx, Immediate(masm->isolate()->factory()->prototype_string()));
    __ j(not_equal, &miss);
  }

  StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}


void StringLengthStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  if (kind() == Code::KEYED_LOAD_IC) {
    __ cmp(ecx, Immediate(masm->isolate()->factory()->length_string()));
    __ j(not_equal, &miss);
  }

  StubCompiler::GenerateLoadStringLength(masm, edx, eax, ebx, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}


void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except for external arrays which means anything with
  // elements of FixedArray type).  Value must be a number, but only smis are
  // accepted as the most common case.

  Label miss;

  Register receiver = edx;
  Register value = eax;
  Register scratch = ebx;

  if (kind() == Code::KEYED_STORE_IC) {
    __ cmp(ecx, Immediate(masm->isolate()->factory()->length_string()));
    __ j(not_equal, &miss);
  }

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ mov(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ mov(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ pop(scratch);
  __ push(receiver);
  __ push(value);
  __ push(scratch);  // return address

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}
1404

    
1405

    
1406
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
1407
  // The key is in edx and the parameter count is in eax.
1408

    
1409
  // The displacement is used for skipping the frame pointer on the
1410
  // stack. It is the offset of the last parameter (if any) relative
1411
  // to the frame pointer.
1412
  static const int kDisplacement = 1 * kPointerSize;
1413

    
1414
  // Check that the key is a smi.
1415
  Label slow;
1416
  __ JumpIfNotSmi(edx, &slow, Label::kNear);
1417

    
1418
  // Check if the calling frame is an arguments adaptor frame.
1419
  Label adaptor;
1420
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
1421
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
1422
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1423
  __ j(equal, &adaptor, Label::kNear);
1424

    
1425
  // Check index against formal parameters count limit passed in
1426
  // through register eax. Use unsigned comparison to get negative
1427
  // check for free.
1428
  __ cmp(edx, eax);
1429
  __ j(above_equal, &slow, Label::kNear);
1430

    
1431
  // Read the argument from the stack and return it.
1432
  STATIC_ASSERT(kSmiTagSize == 1);
1433
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
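  // Both the key and the parameter count are smis, i.e. the untagged values
  // already shifted left by one, so scaling them with times_2 multiplies the
  // untagged value by 4 (kPointerSize). The lea/neg/mov sequence below thus
  // computes
  //   ebp + (parameter_count - key) * kPointerSize + kDisplacement
  // without untagging either register first.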
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, ecx);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
              StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}


void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();

  // esp[0] : return address
  // esp[4] : number of parameters (tagged)
  // esp[8] : receiver displacement
  // esp[12] : function

  // ebx = parameter count (tagged)
  __ mov(ebx, Operand(esp, 1 * kPointerSize));

  // Check if the calling frame is an arguments adaptor frame.
  // TODO(rossberg): Factor out some of the bits that are shared with the other
  // Generate* functions.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor, parameter count = argument count.
  __ mov(ecx, ebx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // ebx = parameter count (tagged)
  // ecx = argument count (tagged)
  // esp[4] = parameter count (tagged)
  // esp[8] = address of receiver argument
  // Compute the mapped parameter count = min(ebx, ecx) in ebx.
  __ cmp(ebx, ecx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ mov(ebx, ecx);

  __ bind(&try_allocate);

  // Save mapped parameter count.
  __ push(ebx);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ add(ebx, Immediate(Heap::kArgumentsObjectSize));
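  // ebx now holds the total allocation size in bytes:
  //   (mapped_count == 0 ? 0
  //                      : kParameterMapHeaderSize + mapped_count * kPointerSize)
  //     + FixedArray::kHeaderSize + argument_count * kPointerSize
  //     + Heap::kArgumentsObjectSize,
  // where the kPointerSize factors come from scaling the smi-tagged counts
  // (already multiplied by 2) with times_2.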

  // Do the allocation of all three objects in one go.
  __ Allocate(ebx, eax, edx, edi, &runtime, TAG_OBJECT);

  // eax = address of new object(s) (tagged)
  // ecx = argument count (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Get the arguments boilerplate from the current native context into edi.
  Label has_mapped_parameters, copy;
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ test(ebx, ebx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ mov(edi, Operand(edi,
         Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX)));
  __ jmp(&copy, Label::kNear);

  __ bind(&has_mapped_parameters);
  __ mov(edi, Operand(edi,
            Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX)));
  __ bind(&copy);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edi = address of boilerplate object (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(edx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), edx);
  }

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsCalleeIndex * kPointerSize),
         edx);

  // Use the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, edi will point there, otherwise to the
  // backing store.
  __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edi = address of parameter map or backing store (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Free a register.
  __ push(eax);

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &skip_parameter_map);

  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate->factory()->non_strict_arguments_elements_map()));
  __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
  __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameters thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
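  // For example, with parameter_count == 3 and mapped_parameter_count == 2,
  // slot 1 of the parameter map receives context index MIN_CONTEXT_SLOTS + 1
  // and slot 0 receives MIN_CONTEXT_SLOTS + 2, while the matching backing
  // store slots are filled with the hole.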
  Label parameters_loop, parameters_test;
  __ push(ecx);
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ add(ebx, Operand(esp, 4 * kPointerSize));
  __ sub(ebx, eax);
  __ mov(ecx, isolate->factory()->the_hole_value());
  __ mov(edx, edi);
  __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
  // eax = loop variable (tagged)
  // ebx = mapping index (tagged)
  // ecx = the hole value
  // edx = address of parameter map (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = argument count (tagged)
  // esp[4] = address of new object (tagged)
  // esp[8] = mapped parameter count (tagged)
  // esp[16] = parameter count (tagged)
  // esp[20] = address of receiver argument
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ sub(eax, Immediate(Smi::FromInt(1)));
  __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
  __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
  __ add(ebx, Immediate(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ test(eax, eax);
  __ j(not_zero, &parameters_loop, Label::kNear);
  __ pop(ecx);

  __ bind(&skip_parameter_map);

  // ecx = argument count (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = address of new object (tagged)
  // esp[4] = mapped parameter count (tagged)
  // esp[12] = parameter count (tagged)
  // esp[16] = address of receiver argument
  // Copy arguments header and remaining slots (if there are any).
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
  __ sub(edx, ebx);
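  // ebx is a smi, i.e. the count already multiplied by 2, so subtracting it
  // twice subtracts count * kPointerSize from the parameter address; this
  // stands in for the negative scale factor that lea does not offer.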
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ mov(eax, Operand(edx, 0));
  __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
  __ add(ebx, Immediate(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ cmp(ebx, ecx);
  __ j(less, &arguments_loop, Label::kNear);

  // Restore.
  __ pop(eax);  // Address of arguments object.
  __ pop(ebx);  // Parameter count.

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);  // Remove saved parameter count.
  __ mov(Operand(esp, 1 * kPointerSize), ecx);  // Patch argument count.
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}


void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();

  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // Get the length from the frame.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, ecx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(ecx, Immediate(Heap::kArgumentsObjectSizeStrict));

  // Do the allocation of both objects in one go.
  __ Allocate(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current native context.
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  const int offset =
      Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
  __ mov(edi, Operand(edi, offset));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(ebx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), ebx);
  }

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, ecx);
  __ j(zero, &done, Label::kNear);

  // Get the parameters pointer from the stack.
  __ mov(edx, Operand(esp, 2 * kPointerSize));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate->factory()->fixed_array_map()));

  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time or if the regexp entry in generated code has been turned off at
  // runtime or during compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = masm->isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(
          masm->isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or          number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);
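  // edx still holds the smi-tagged capture count, i.e. number_of_captures * 2,
  // so this single unsigned compare implements the inequality directly: any
  // regexp with more than (kJSRegexpStaticOffsetsVectorSize - 2) / 2 captures
  // takes the runtime path instead.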

  // Reset offset for possibly sliced string.
  __ Set(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // eax: subject string
  // edx: subject string
  // ebx: subject string instance type
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (6).
  // (3) Anything but sequential or cons?  If yes, go to (7).
  // (4) Cons string.  If the string is flat, replace subject with first string.
  //     Otherwise bailout.
  // (5a) Is subject sequential two byte?  If yes, go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  // (6) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (7) Not a long external string?  If yes, go to (10).
  // (8) External string.  Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte?  If yes, go to (6).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent. Go to (5a).

  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
        external_string /* 8 */, check_underlying /* 5a */,
        not_seq_nor_cons /* 7 */, check_code /* E */,
        not_long_external /* 10 */;

  // (1) Sequential two byte?  If yes, go to (9).
  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (6).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).

  // (3) Anything but sequential or cons?  If yes, go to (7).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ bind(&check_underlying);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // (5a) Is subject sequential two byte?  If yes, go to (9).
  __ test_b(ebx, kStringRepresentationMask | kStringEncodingMask);
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  __ test_b(ebx, kStringRepresentationMask);
  // The underlying external string is never a short external string.
  STATIC_CHECK(ExternalString::kMaxShortLength < ConsString::kMinLength);
  STATIC_CHECK(ExternalString::kMaxShortLength < SlicedString::kMinLength);
  __ j(not_zero, &external_string);  // Go to (8).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (6) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
  __ Set(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object otherwise it contains
  // a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if ASCII, 0 if two_byte);
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  // Argument 9: Pass current isolate address.
  __ mov(Operand(esp, 8 * kPointerSize),
      Immediate(ExternalReference::isolate_address(masm->isolate())));

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global.  This does not affect non-global regexps.
  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             masm->isolate())));

  // Argument 2: Previous index.
  __ SmiUntag(ebx);
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if ASCII 0 if two_byte);
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, edi);  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, edi);  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, ecx);
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is a smi, i.e. already doubled.
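  // esi is still smi-tagged here, so scaling it by times_1 already yields
  // index * 2, the byte offset for two-byte characters; ebx was untagged
  // above and therefore needs the explicit times_2 scale.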
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(edx);

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  __ cmp(eax, 1);
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If not exception, it can only be retry. Handle that in the runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      masm->isolate());
  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, eax);
  __ j(equal, &runtime);
  // For exception, throw the exception again.

  // Clear the pending exception variable.
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Special handling of termination exceptions which are uncatchable
  // by javascript code.
  __ cmp(eax, factory->termination_exception());
  Label throw_termination_exception;
  __ j(equal, &throw_termination_exception, Label::kNear);

  // Handle normal exception by following handler chain.
  __ Throw(eax);

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(eax);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(edx, Immediate(2));  // edx was a smi.

  // edx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, eax);
  __ j(greater, &runtime);

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ecx, eax);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
  __ RecordWriteField(ebx,
                      RegExpImpl::kLastSubjectOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs);
  __ mov(eax, ecx);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
  __ RecordWriteField(ebx,
                      RegExpImpl::kLastInputOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(masm->isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx,
                      edx,
                      times_pointer_size,
                      RegExpImpl::kFirstCaptureOffset),
                      edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);

  // Deferred code for string handling.
  // (7) Not a long external string?  If yes, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (8) External string.  Short external strings have been ruled out.
  __ bind(&external_string);
  // Reload instance type.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test_b(ebx, kIsIndirectStringMask);
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8a) Is the external string one byte?  If yes, go to (6).
  __ test_b(ebx, kStringEncodingMask);
  __ j(not_zero, &seq_one_byte_string);  // Go to (6).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Set(ecx, Immediate(0));  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string?  If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
  __ j(not_zero, &runtime);

  // (11) Sliced string.  Replace subject with parent.  Go to (5a).
  // Load offset into edi and replace subject string with parent.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (5a).
#endif  // V8_INTERPRETED_REGEXP
}


void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  Label slowcase;
  Label done;
  __ mov(ebx, Operand(esp, kPointerSize * 3));
  __ JumpIfNotSmi(ebx, &slowcase);
  __ cmp(ebx, Immediate(Smi::FromInt(kMaxInlineLength)));
  __ j(above, &slowcase);
  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  // Allocate RegExpResult followed by FixedArray with size in ebx.
  // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
  // Elements:  [Map][Length][..elements..]
  __ Allocate(JSRegExpResult::kSize + FixedArray::kHeaderSize,
              times_pointer_size,
              ebx,  // In: Number of elements as a smi
              REGISTER_VALUE_IS_SMI,
              eax,  // Out: Start of allocation (tagged).
              ecx,  // Out: End of allocation.
              edx,  // Scratch register
              &slowcase,
              TAG_OBJECT);
  // eax: Start of allocated area, object-tagged.

  // Set JSArray map to global.regexp_result_map().
  // Set empty properties FixedArray.
  // Set elements to point to FixedArray allocated right after the JSArray.
  // Interleave operations for better latency.
  __ mov(edx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
  Factory* factory = masm->isolate()->factory();
  __ mov(ecx, Immediate(factory->empty_fixed_array()));
  __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
  __ mov(edx, FieldOperand(edx, GlobalObject::kNativeContextOffset));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
  __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);

  // Set input, index and length fields from arguments.
  __ mov(ecx, Operand(esp, kPointerSize * 1));
  __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx);
  __ mov(ecx, Operand(esp, kPointerSize * 2));
  __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx);
  __ mov(ecx, Operand(esp, kPointerSize * 3));
  __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);

  // Fill out the elements FixedArray.
  // eax: JSArray.
  // ebx: FixedArray.
  // ecx: Number of elements in array, as smi.

  // Set map.
  __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
         Immediate(factory->fixed_array_map()));
  // Set length.
  __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
  // Fill contents of fixed-array with undefined.
  __ SmiUntag(ecx);
  __ mov(edx, Immediate(factory->undefined_value()));
  __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
  // Fill fixed array elements with undefined.
  // eax: JSArray.
  // ecx: Number of elements to fill.
  // ebx: Start of elements in FixedArray.
  // edx: undefined.
  Label loop;
  __ test(ecx, ecx);
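  // The flags set here cover the first loop iteration; afterwards the sub
  // inside the loop refreshes them, so j(less_equal) sees an up-to-date
  // count each time around.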
  __ bind(&loop);
  __ j(less_equal, &done, Label::kNear);  // Jump if ecx is negative or zero.
  __ sub(ecx, Immediate(1));
  __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
  __ jmp(&loop);

  __ bind(&done);
  __ ret(3 * kPointerSize);

  __ bind(&slowcase);
  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
}


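// Returns the comparison result value that makes the given condition come out
// false, used when an operand (e.g. undefined or NaN) must compare unordered:
// for less/less_equal the answer is GREATER, for greater/greater_equal LESS.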
static int NegativeComparisonResult(Condition cc) {
  ASSERT(cc != equal);
  ASSERT((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm,
                           Register input,
                           CompareIC::State expected,
                           Label* fail) {
  Label ok;
  if (expected == CompareIC::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareIC::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}


void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
  Label check_unequal_objects;
  Condition cc = GetCondition();

  Label miss;
  CheckInputType(masm, edx, left_, &miss);
  CheckInputType(masm, eax, right_, &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  __ sub(edx, eax);  // Return on the result of the subtraction.
  __ j(no_overflow, &smi_done, Label::kNear);
  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
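  // not_(edx) computes -edx - 1, which has the sign opposite to the wrapped
  // (incorrect) difference and can never be zero here, so the comparison
  // outcome encoded in edx stays valid.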
  __ bind(&smi_done);
  __ mov(eax, edx);
  __ ret(0);
  __ bind(&non_smi);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Identical objects can be compared fast, but there are some tricky cases
  // for NaN and undefined.
  Label generic_heap_number_comparison;
  {
    Label not_identical;
    __ cmp(eax, edx);
    __ j(not_equal, &not_identical);

    if (cc != equal) {
      // Check for undefined.  undefined OP undefined is false even though
      // undefined == undefined.
      Label check_for_nan;
      __ cmp(edx, masm->isolate()->factory()->undefined_value());
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Compare heap numbers in a general way,
    // to handle NaNs correctly.
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(equal, &generic_heap_number_comparison, Label::kNear);
    if (cc != equal) {
      // Call runtime on identical JSObjects.  Otherwise return equal.
      __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
      __ j(above_equal, &not_identical);
    }
    __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
    __ ret(0);


    __ bind(&not_identical);
  }

  // Strict equality can quickly decide whether objects are equal.
  // Non-strict object equality is slower, so it is handled later in the stub.
  if (cc == equal && strict()) {
    Label slow;  // Fallthrough label.
    Label not_smis;
    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    // If either is a Smi (we know that not both are), then they can only
    // be equal if the other is a HeapNumber. If so, use the slow case.
    STATIC_ASSERT(kSmiTag == 0);
    ASSERT_EQ(0, Smi::FromInt(0));
    __ mov(ecx, Immediate(kSmiTagMask));
    __ and_(ecx, eax);
    __ test(ecx, edx);
    __ j(not_zero, &not_smis, Label::kNear);
    // One operand is a smi.

    // Check whether the non-smi is a heap number.
    STATIC_ASSERT(kSmiTagMask == 1);
    // ecx still holds eax & kSmiTag, which is either zero or one.
    __ sub(ecx, Immediate(0x01));
    __ mov(ebx, edx);
    __ xor_(ebx, eax);
    __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
    __ xor_(ebx, eax);
    // if eax was smi, ebx is now edx, else eax.
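    // ecx is -1 if eax was the smi and 0 otherwise, so the xor/and/xor
    // sequence is a branchless select: ebx = eax ^ ((eax ^ edx) & ecx),
    // which picks edx when ecx is all ones and eax when ecx is zero.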

    // Check if the non-smi operand is a heap number.
    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    // If heap number, handle it in the slow case.
    __ j(equal, &slow, Label::kNear);
    // Return non-equal (ebx is not zero)
    __ mov(eax, ebx);
    __ ret(0);

    __ bind(&not_smis);
    // If either operand is a JSObject or an oddball value, then they are not
    // equal since their pointers are different.
    // There is no test for undetectability in strict equality.

    // Get the type of the first operand.
    // If the first object is a JS object, we have done pointer comparison.
    Label first_non_object;
    STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
    __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(below, &first_non_object, Label::kNear);

    // Return non-zero (eax is not zero)
    Label return_not_equal;
    STATIC_ASSERT(kHeapObjectTag != 0);
    __ bind(&return_not_equal);
    __ ret(0);

    __ bind(&first_non_object);
    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(above_equal, &return_not_equal);

    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    // Fall through to the general case.
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  __ bind(&generic_heap_number_comparison);
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatureScope use_sse2(masm, SSE2);
    CpuFeatureScope use_cmov(masm, CMOV);

    FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
    __ ucomisd(xmm0, xmm1);

    // Don't base result on EFLAGS when a NaN is involved.
    __ j(parity_even, &unordered, Label::kNear);
    // Return a result of -1, 0, or 1, based on EFLAGS.
    __ mov(eax, 0);  // equal
    __ mov(ecx, Immediate(Smi::FromInt(1)));
    __ cmov(above, eax, ecx);
    __ mov(ecx, Immediate(Smi::FromInt(-1)));
    __ cmov(below, eax, ecx);
    __ ret(0);
  } else {
    FloatingPointHelper::CheckFloatOperands(
        masm, &non_number_comparison, ebx);
    FloatingPointHelper::LoadFloatOperand(masm, eax);
    FloatingPointHelper::LoadFloatOperand(masm, edx);
    __ FCmp();

    // Don't base result on EFLAGS when a NaN is involved.
    __ j(parity_even, &unordered, Label::kNear);

    Label below_label, above_label;
    // Return a result of -1, 0, or 1, based on EFLAGS.
    __ j(below, &below_label, Label::kNear);
    __ j(above, &above_label, Label::kNear);

    __ Set(eax, Immediate(0));
    __ ret(0);

    __ bind(&below_label);
    __ mov(eax, Immediate(Smi::FromInt(-1)));
    __ ret(0);

    __ bind(&above_label);
    __ mov(eax, Immediate(Smi::FromInt(1)));
    __ ret(0);
  }

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  ASSERT(cc != not_equal);
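  // Pick the result that makes the condition fail (GREATER == 1 for
  // less/less_equal, LESS == -1 otherwise), matching what
  // NegativeComparisonResult produces for the ordered conditions.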
  if (cc == less || cc == less_equal) {
    __ mov(eax, Immediate(Smi::FromInt(1)));
  } else {
    __ mov(eax, Immediate(Smi::FromInt(-1)));
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
    BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);

    // We've already checked for object identity, so if both operands
    // are internalized they aren't equal. Register eax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
                                         &check_unequal_objects);

  // Inline comparison of ASCII strings.
  if (cc == equal) {
    StringCompareStub::GenerateFlatAsciiStringEquals(masm,
                                                     edx,
                                                     eax,
                                                     ecx,
                                                     ebx);
  } else {
    StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
                                                       edx,
                                                       eax,
                                                       ecx,
                                                       ebx,
                                                       edi);
  }
#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Non-strict equality.  Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label not_both_objects;
    Label return_unequal;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
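    // With kSmiTag == 0 and kHeapObjectTag == 1, lea adds the two words and
    // no carry can reach bit 0: smi + heap object gives an odd sum, while
    // heap object + heap object gives 1 + 1 = 2 with the low bit clear.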
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ lea(ecx, Operand(eax, edx, times_1, 0));
    __ test(ecx, Immediate(kSmiTagMask));
    __ j(not_zero, &not_both_objects, Label::kNear);
    __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(below, &not_both_objects, Label::kNear);
    __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ebx);
    __ j(below, &not_both_objects, Label::kNear);
    // We do not bail out after this point.  Both are JSObjects, and
    // they are equal if and only if both are undetectable.
    // The and of the undetectable flags is 1 if and only if they are equal.
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    __ j(zero, &return_unequal, Label::kNear);
    __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    __ j(zero, &return_unequal, Label::kNear);
    // The objects are both undetectable, so they both compare as the value
    // undefined, and are equal.
    __ Set(eax, Immediate(EQUAL));
    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in eax,
    // or return equal if we fell through to here.
    __ ret(0);
    __ bind(&not_both_objects);
  }

  // Push arguments below the return address.
  __ pop(ecx);
  __ push(edx);
  __ push(eax);

  // Figure out which native to call and set up the arguments.
  Builtins::JavaScript builtin;
  if (cc == equal) {
    builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
    builtin = Builtins::COMPARE;
    __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
  }

  // Restore return address on the stack.
  __ push(ecx);

  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ InvokeBuiltin(builtin, JUMP_FUNCTION);

  __ bind(&miss);
  GenerateMiss(masm);
}


static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a global property cell.  Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // eax : number of arguments to the construct function
  // ebx : cache cell for call target
  // edi : the function to call
2685
  Isolate* isolate = masm->isolate();
2686
  Label initialize, done, miss, megamorphic, not_array_function;
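
  // The cell implements a small state machine: an uninitialized sentinel is
  // replaced by the callee (or by an AllocationSite when the callee is the
  // Array constructor), and any later mismatch moves the cell to the
  // megamorphic sentinel, where it stays.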

  // Load the cache state into ecx.
  __ mov(ecx, FieldOperand(ebx, Cell::kValueOffset));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  __ cmp(ecx, edi);
  __ j(equal, &done);
  __ cmp(ecx, Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
  __ j(equal, &done);

  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorphic
  // sentinel, then the cell holds either some other function or an
  // AllocationSite. Do a map check on the object in ecx.
  Handle<Map> allocation_site_map =
      masm->isolate()->factory()->allocation_site_map();
  __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
  __ j(not_equal, &miss);

  // Load the global or builtins object from the current context.
  __ LoadGlobalContext(ecx);
  // Make sure the function is the Array() function.
  __ cmp(edi, Operand(ecx,
                      Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
  __ j(not_equal, &megamorphic);
  __ jmp(&done);

  __ bind(&miss);

  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
  // megamorphic.
  __ cmp(ecx, Immediate(TypeFeedbackCells::UninitializedSentinel(isolate)));
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
  __ jmp(&done, Label::kNear);

  // An uninitialized cache is patched with the function, or with an
  // AllocationSite (indicating the ElementsKind) if the function is the
  // Array constructor.
  __ bind(&initialize);
  __ LoadGlobalContext(ecx);
  // Make sure the function is the Array() function.
  __ cmp(edi, Operand(ecx,
                      Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
  __ j(not_equal, &not_array_function);

  // The target function is the Array constructor.  Create an AllocationSite
  // if we don't already have one, and store it in the cell.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Arguments register must be smi-tagged to call out.
    __ SmiTag(eax);
    __ push(eax);
    __ push(edi);
    __ push(ebx);

    CreateAllocationSiteStub create_stub;
    __ CallStub(&create_stub);

    __ pop(ebx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
  __ jmp(&done);

  __ bind(&not_array_function);
  __ mov(FieldOperand(ebx, Cell::kValueOffset), edi);
  // No need for a write barrier here - cells are rescanned.

  __ bind(&done);
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
  // ebx : cache cell for call target
  // edi : the function to call
  Isolate* isolate = masm->isolate();
  Label slow, non_function;

  // The receiver might implicitly be the global object. This is
  // indicated by passing the hole as the receiver to the call
  // function stub.
  if (ReceiverMightBeImplicit()) {
    Label receiver_ok;
    // Get the receiver from the stack.
    // +1 ~ return address
    __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
    // Call as function is indicated with the hole.
    __ cmp(eax, isolate->factory()->the_hole_value());
    __ j(not_equal, &receiver_ok, Label::kNear);
    // Patch the receiver on the stack with the global receiver object.
    __ mov(ecx, GlobalObjectOperand());
    __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ecx);
    __ bind(&receiver_ok);
  }

  // Check that the function really is a JavaScript function.
  __ JumpIfSmi(edi, &non_function);
  // Go to the slow case if we do not have a function.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &slow);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);
  }

  // Fast-case: Just invoke the function.
  ParameterCount actual(argc_);

  if (ReceiverMightBeImplicit()) {
    Label call_as_function;
    __ cmp(eax, isolate->factory()->the_hole_value());
    __ j(equal, &call_as_function);
    __ InvokeFunction(edi,
                      actual,
                      JUMP_FUNCTION,
                      NullCallWrapper(),
                      CALL_AS_METHOD);
    __ bind(&call_as_function);
  }
  __ InvokeFunction(edi,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    CALL_AS_FUNCTION);

  // Slow-case: Non-function called.
  __ bind(&slow);
  if (RecordCallTarget()) {
    // If there is a call target cache, mark it megamorphic in the
    // non-function case.  MegamorphicSentinel is an immortal immovable
    // object (undefined) so no write barrier is needed.
    __ mov(FieldOperand(ebx, Cell::kValueOffset),
           Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
  }
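  // The proxy and non-function paths below tail-call the arguments adaptor,
  // which expects eax = actual argument count, ebx = expected argument count
  // (zero forces adaption), ecx = call kind, and edx = the builtin entry.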
  // Check for function proxy.
  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
  __ j(not_equal, &non_function);
  __ pop(ecx);
  __ push(edi);  // put proxy as additional argument under return address
  __ push(ecx);
  __ Set(eax, Immediate(argc_ + 1));
  __ Set(ebx, Immediate(0));
  __ SetCallKind(ecx, CALL_AS_FUNCTION);
  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
  {
    Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
    __ jmp(adaptor, RelocInfo::CODE_TARGET);
  }

  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ bind(&non_function);
  __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
  __ Set(eax, Immediate(argc_));
  __ Set(ebx, Immediate(0));
  __ SetCallKind(ecx, CALL_AS_METHOD);
  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
  Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
  __ jmp(adaptor, RelocInfo::CODE_TARGET);
}


void CallConstructStub::Generate(MacroAssembler* masm) {
  // eax : number of arguments
  // ebx : cache cell for call target
  // edi : constructor function
  Label slow, non_function_call;

  // Check that function is not a smi.
  __ JumpIfSmi(edi, &non_function_call);
  // Check that function is a JSFunction.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &slow);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);
  }

  // Jump to the function-specific construct stub.
  Register jmp_reg = ecx;
  __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(jmp_reg, FieldOperand(jmp_reg,
                               SharedFunctionInfo::kConstructStubOffset));
  __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
  __ jmp(jmp_reg);

  // edi: called object
  // eax: number of arguments
  // ecx: object map
  Label do_call;
  __ bind(&slow);
  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
  __ j(not_equal, &non_function_call);
  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
  __ jmp(&do_call);

  __ bind(&non_function_call);
  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ bind(&do_call);
  // Set expected number of arguments to zero (not changing eax).
  __ Set(ebx, Immediate(0));
  Handle<Code> arguments_adaptor =
      masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
  __ SetCallKind(ecx, CALL_AS_METHOD);
  __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
}


bool CEntryStub::NeedsImmovableCode() {
  return false;
}


bool CEntryStub::IsPregenerated(Isolate* isolate) {
  return (!save_doubles_ || isolate->fp_stubs_generated()) &&
          result_size_ == 1;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
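  // Under the serializer, BinaryOpStub is pregenerated inside a forced-SSE2
  // feature scope, presumably so the snapshot is built against a fixed
  // feature set rather than whatever the build machine happens to support.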
  if (Serializer::enabled()) {
    PlatformFeatureScope sse2(SSE2);
    BinaryOpStub::GenerateAheadOfTime(isolate);
  } else {
    BinaryOpStub::GenerateAheadOfTime(isolate);
  }
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  if (CpuFeatures::IsSupported(SSE2)) {
    CEntryStub save_doubles(1, kSaveFPRegs);
    // Stubs might already be in the snapshot; detect that and don't
    // regenerate, which would leave code stub initialization state messed up.
    Code* save_doubles_code;
    if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
      save_doubles_code = *(save_doubles.GetCode(isolate));
    }
    save_doubles_code->set_is_pregenerated(true);
    isolate->set_fp_stubs_generated(true);
  }
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(1, kDontSaveFPRegs);
  Handle<Code> code = stub.GetCode(isolate);
  code->set_is_pregenerated(true);
}


static void JumpIfOOM(MacroAssembler* masm,
                      Register value,
                      Register scratch,
                      Label* oom_label) {
  __ mov(scratch, value);
  STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
  STATIC_ASSERT(kFailureTag == 3);
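  // A failure keeps its tag (3) in the low two bits and its type in the next
  // two; since OUT_OF_MEMORY_EXCEPTION is also 3, a value whose low four bits
  // are all set is exactly an out-of-memory failure.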
  __ and_(scratch, 0xf);
  __ cmp(scratch, 0xf);
  __ j(equal, oom_label);
}


void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool do_gc,
                              bool always_allocate_scope) {
  // eax: result parameter for PerformGC, if any
  // ebx: pointer to C function  (C callee-saved)
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // edi: number of arguments including receiver  (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result_size_ is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  if (do_gc) {
    // Pass failure code returned from last attempt as first argument to
    // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
    // stack alignment is known to be correct. This function takes one argument
    // which is passed on the stack, and we know that the stack has been
    // prepared to pass at least one argument.
    __ mov(Operand(esp, 1 * kPointerSize),
           Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 0 * kPointerSize), eax);  // Result.
    __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
  }

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
  if (always_allocate_scope) {
    __ inc(Operand::StaticVariable(scope_depth));
  }

  // Call C function.
  __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ call(ebx);
  // Result is in eax or edx:eax - do not destroy these registers!

  if (always_allocate_scope) {
    __ dec(Operand::StaticVariable(scope_depth));
  }

  // Runtime functions should not return 'the hole'.  Allowing it to escape may
  // lead to crashes in the IC code later.
  if (FLAG_debug_code) {
    Label okay;
    __ cmp(eax, masm->isolate()->factory()->the_hole_value());
    __ j(not_equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

  // Check for failure result.
  Label failure_returned;
  STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
  __ lea(ecx, Operand(eax, 1));
  // Lower 2 bits of ecx are 0 iff eax has failure tag.
  __ test(ecx, Immediate(kFailureTagMask));
  __ j(zero, &failure_returned);

  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, masm->isolate());

  // Check that there is no pending exception; otherwise we
  // should have returned some failure value.
  if (FLAG_debug_code) {
    __ push(edx);
    __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
    Label okay;
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    // Cannot use check here, as it attempts to generate a call into the
    // runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles_ == kSaveFPRegs);
  __ ret(0);

  // Handling of failure.
  __ bind(&failure_returned);

  Label retry;
  // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
  STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
  __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
  __ j(zero, &retry, Label::kNear);

  // Special handling of out of memory exceptions.
  JumpIfOOM(masm, eax, ecx, throw_out_of_memory_exception);

  // Retrieve the pending exception.
  __ mov(eax, Operand::StaticVariable(pending_exception_address));

  // See if we just retrieved an OOM exception.
  JumpIfOOM(masm, eax, ecx, throw_out_of_memory_exception);

  // Clear the pending exception.
  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_exception_address), edx);

  // Special handling of termination exceptions, which are uncatchable
  // by JavaScript code.
  __ cmp(eax, masm->isolate()->factory()->termination_exception());
  __ j(equal, throw_termination_exception);

  // Handle normal exception.
  __ jmp(throw_normal_exception);

  // Retry.
  __ bind(&retry);
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function  (C callee-saved)
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // NOTE: Invocations of builtins may return failure objects instead
  // of a proper result. The builtin entry handles this by performing
  // a garbage collection and retrying the builtin (twice).

  // Enter the exit frame that transitions from JavaScript to C++.
  __ EnterExitFrame(save_doubles_ == kSaveFPRegs);

  // eax: result parameter for PerformGC, if any (set up below)
  // ebx: pointer to builtin function  (C callee-saved)
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: argv pointer (C callee-saved)

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

  // Call into the runtime system.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               false,
               false);

  // Do space-specific GC and retry runtime call.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               false);

  // Do full GC and retry runtime call one final time.
  Failure* failure = Failure::InternalError();
  __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               true);

  __ bind(&throw_out_of_memory_exception);
  // Set external caught exception to false.
  Isolate* isolate = masm->isolate();
  ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
                                    isolate);
  __ mov(Operand::StaticVariable(external_caught), Immediate(false));

  // Set pending exception and eax to out of memory exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate);
  Label already_have_failure;
  JumpIfOOM(masm, eax, ecx, &already_have_failure);
  __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException(0x1)));
  __ bind(&already_have_failure);
  __ mov(Operand::StaticVariable(pending_exception), eax);
  // Fall through to the next label.

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(eax);

  __ bind(&throw_normal_exception);
  __ Throw(eax);
}


void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Set up frame.
  __ push(ebp);
  __ mov(ebp, esp);

  // Push marker in two places.
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  __ push(Immediate(Smi::FromInt(marker)));  // context slot
  __ push(Immediate(Smi::FromInt(marker)));  // function slot
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);
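
  // The entry frame now looks like this (ebp was just set up above):
  //   ebp[+4] : return address
  //   ebp[ 0] : saved ebp
  //   ebp[-4] : entry frame marker (context slot)
  //   ebp[-8] : entry frame marker (function slot)
  // followed by the saved edi, esi and ebx.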

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, masm->isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress,
                                masm->isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      masm->isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.  There's only one
  // handler block in this code object, so its index is 0.
  __ bind(&invoke);
  __ PushTryHandler(StackHandler::JS_ENTRY, 0);

  // Clear any pending exceptions.
  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Fake a receiver (NULL).
  __ push(Immediate(0));  // receiver

  // Invoke the function by calling through the JS entry trampoline builtin
  // and pop the faked function when we return. Notice that we cannot store a
  // reference to the trampoline code directly in this stub, because the
  // builtin stubs may not have been generated yet.
  if (is_construct) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      masm->isolate());
    __ mov(edx, Immediate(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline,
                            masm->isolate());
    __ mov(edx, Immediate(entry));
  }
  __ mov(edx, Operand(edx, 0));  // deref address
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ call(edx);

  // Unlink this frame from the handler chain.
  __ PopTryHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(ebx);
  __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(ExternalReference(
      Isolate::kCEntryFPAddress,
      masm->isolate())));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(esp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}


// Generate stub code for instanceof.
// This code can patch a call-site inline cache of the instanceof check,
// which looks like this:
//
//   81 ff XX XX XX XX   cmp    edi, <the hole, patched to a map>
//   75 0a               jne    <some near label>
//   b8 XX XX XX XX      mov    eax, <the hole, patched to either true or false>
//
// If call site patching is requested, the stack will have the delta from the
// return address to the cmp instruction just below the return address. This
// also means that call site patching can only take place with arguments in
// registers. TOS looks like this when call site patching is requested:
//
//   esp[0] : return address
//   esp[4] : delta from return address to cmp instruction
//
void InstanceofStub::Generate(MacroAssembler* masm) {
  // Call site inlining and patching implies arguments in registers.
  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());

  // Fixed register usage throughout the stub.
  Register object = eax;  // Object (lhs).
  Register map = ebx;  // Map of the object.
  Register function = edx;  // Function (rhs).
  Register prototype = edi;  // Prototype of the function.
  Register scratch = ecx;

  // Constants describing the call site code to patch.
  static const int kDeltaToCmpImmediate = 2;
  static const int kDeltaToMov = 8;
  static const int kDeltaToMovImmediate = 9;
  static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
  static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
  static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
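  // These values follow from the site layout shown above: the cmp's 4-byte
  // operand starts 2 bytes into the 6-byte cmp, the mov begins after the cmp
  // plus the 2-byte jne (offset 8), and its immediate one byte later.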

  ASSERT_EQ(object.code(), InstanceofStub::left().code());
  ASSERT_EQ(function.code(), InstanceofStub::right().code());

  // Get the object and function - they are always both needed.
  Label slow, not_js_object;
  if (!HasArgsInRegisters()) {
    __ mov(object, Operand(esp, 2 * kPointerSize));
    __ mov(function, Operand(esp, 1 * kPointerSize));
  }

  // Check that the left hand is a JS object.
  __ JumpIfSmi(object, &not_js_object);
  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);

  // If there is a call site cache, don't look in the global cache, but do the
  // real lookup and update the call site cache.
  if (!HasCallSiteInlineCheck()) {
    // Look up the function and the map in the instanceof cache.
    Label miss;
    __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
    __ j(not_equal, &miss, Label::kNear);
    __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
    __ j(not_equal, &miss, Label::kNear);
    __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
    __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
    __ bind(&miss);
  }

  // Get the prototype of the function.
  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);

  // Check that the function prototype is a JS object.
  __ JumpIfSmi(prototype, &slow);
  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);

  // Update the global instanceof or call-site inline cache with the current
  // map and function. The cached answer will be set when it is known below.
  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
    __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
  } else {
    // The constants for the code patching are based on no push instructions
    // at the call site.
    ASSERT(HasArgsInRegisters());
    // Get return address and delta to inlined map check.
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
      __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
    }
    __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
    __ mov(Operand(scratch, 0), map);
  }

  // Loop through the prototype chain of the object looking for the function
  // prototype.
  __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
  Label loop, is_instance, is_not_instance;
  __ bind(&loop);
  __ cmp(scratch, prototype);
  __ j(equal, &is_instance, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, Immediate(factory->null_value()));
  __ j(equal, &is_not_instance, Label::kNear);
  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
  __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
  __ jmp(&loop);

  __ bind(&is_instance);
  if (!HasCallSiteInlineCheck()) {
    __ mov(eax, Immediate(0));
    __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
  } else {
    // Get return address and delta to inlined map check.
    __ mov(eax, factory->true_value());
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
    }
    __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
    if (!ReturnTrueFalseObject()) {
      __ Set(eax, Immediate(0));
    }
  }
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&is_not_instance);
  if (!HasCallSiteInlineCheck()) {
    __ mov(eax, Immediate(Smi::FromInt(1)));
    __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
  } else {
    // Get return address and delta to inlined map check.
    __ mov(eax, factory->false_value());
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
      __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
    }
    __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
    if (!ReturnTrueFalseObject()) {
      __ Set(eax, Immediate(Smi::FromInt(1)));
    }
  }
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  Label object_not_null, object_not_null_or_smi;
  __ bind(&not_js_object);
  // Before null, smi and string value checks, check that the rhs is a
  // function, as for a non-function rhs an exception needs to be thrown.
  __ JumpIfSmi(function, &slow, Label::kNear);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
  __ j(not_equal, &slow, Label::kNear);

  // Null is not an instance of anything.
  __ cmp(object, factory->null_value());
  __ j(not_equal, &object_not_null, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&object_not_null);
  // Smi values are not instances of anything.
  __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&object_not_null_or_smi);
  // String values are not instances of anything.
  Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
  __ j(NegateCondition(is_string), &slow, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  // Slow-case: Go through the JavaScript implementation.
  __ bind(&slow);
  if (!ReturnTrueFalseObject()) {
    // Tail call the builtin which returns 0 or 1.
    if (HasArgsInRegisters()) {
      // Push arguments below return address.
      __ pop(scratch);
      __ push(object);
      __ push(function);
      __ push(scratch);
    }
    __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
  } else {
    // Call the builtin and convert 0/1 to true/false.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(object);
      __ push(function);
      __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
    }
    Label true_value, done;
    __ test(eax, eax);
    __ j(zero, &true_value, Label::kNear);
    __ mov(eax, factory->false_value());
    __ jmp(&done, Label::kNear);
    __ bind(&true_value);
    __ mov(eax, factory->true_value());
    __ bind(&done);
    __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
  }
}


Register InstanceofStub::left() { return eax; }


Register InstanceofStub::right() { return edx; }


// -------------------------------------------------------------------------
// StringCharCodeAtGenerator

void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi, trigger the non-string case.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(object_, receiver_not_string_);

  // Fetch the instance type of the receiver into the result register.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  // If the receiver is not a string, trigger the non-string case.
  __ test(result_, Immediate(kIsNotStringMask));
  __ j(not_zero, receiver_not_string_);

  // If the index is a non-smi, trigger the non-smi case.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  __ SmiUntag(index_);

  Factory* factory = masm->isolate()->factory();
  StringCharLoadGenerator::Generate(
      masm, factory, object_, index_, result_, &call_runtime_);

  __ SmiTag(result_);
  __ bind(&exit_);
}


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              masm->isolate()->factory()->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi, 1);
  }
  if (!index_.is(eax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ mov(index_, eax);
  }
  __ pop(object_);
  // Reload the instance type.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code for getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ SmiTag(index_);
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiShiftSize == 0);
  ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1));
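  // Because kMaxOneByteCharCode + 1 is a power of two, the single test below
  // checks both conditions at once: any smi-tag bit or any payload bit above
  // the one-byte range makes the masked value non-zero.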
  __ test(code_,
          Immediate(kSmiTagMask |
                    ((~String::kMaxOneByteCharCode) << kSmiTagSize)));
  __ j(not_zero, &slow_case_);

  Factory* factory = masm->isolate()->factory();
  __ Set(result_, Immediate(factory->single_character_string_cache()));
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiShiftSize == 0);
  // At this point the code register contains a smi-tagged ASCII char code.
  __ mov(result_, FieldOperand(result_,
                               code_, times_half_pointer_size,
                               FixedArray::kHeaderSize));
  __ cmp(result_, factory->undefined_value());
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}


void StringAddStub::Generate(MacroAssembler* masm) {
  Label call_runtime, call_builtin;
  Builtins::JavaScript builtin_id = Builtins::ADD;

  // Load the two arguments.
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  // Otherwise, at least one of the arguments is definitely a string,
  // and we convert the one that is not known to be a string.
  if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
    ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
    ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
    __ JumpIfSmi(eax, &call_runtime);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &call_runtime);

    // First argument is a string, test second.
    __ JumpIfSmi(edx, &call_runtime);
    __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &call_runtime);
  } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
    GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi,
                            &call_builtin);
    builtin_id = Builtins::STRING_ADD_RIGHT;
  } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
    GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
                            &call_builtin);
    builtin_id = Builtins::STRING_ADD_LEFT;
  }

  // Both arguments are strings.
  // eax: first string
  // edx: second string
  // Check if either of the strings is empty. In that case return the other.
  Label second_not_zero_length, both_not_zero_length;
  __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ test(ecx, ecx);
  __ j(not_zero, &second_not_zero_length, Label::kNear);
  // Second string is empty, result is first string which is already in eax.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ test(ebx, ebx);
  __ j(not_zero, &both_not_zero_length, Label::kNear);
  // First string is empty, result is second string which is in edx.
  __ mov(eax, edx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // eax: first string
  // ebx: length of first string as a smi
  // ecx: length of second string as a smi
  // edx: second string
  // Look at the length of the result of adding the two strings.
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);
  __ add(ebx, ecx);
  STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
  // Handle exceptionally long strings in the runtime system.
  __ j(overflow, &call_runtime);
  // Use the string table when adding two one-character strings, as it
  // helps later optimizations to return an internalized string here.
  __ cmp(ebx, Immediate(Smi::FromInt(2)));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx, &call_runtime);

  // Get the two characters forming the new string.
  __ movzx_b(ebx, FieldOperand(eax, SeqOneByteString::kHeaderSize));
  __ movzx_b(ecx, FieldOperand(edx, SeqOneByteString::kHeaderSize));

  // Try to look up the two-character string in the string table. If it is
  // not found, just allocate a new one.
  Label make_two_character_string, make_two_character_string_no_reload;
  StringHelper::GenerateTwoCharacterStringTableProbe(
      masm, ebx, ecx, eax, edx, edi,
      &make_two_character_string_no_reload, &make_two_character_string);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Allocate a two-character string.
  __ bind(&make_two_character_string);
  // Reload the arguments.
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.
  // Get the two characters forming the new string.
  __ movzx_b(ebx, FieldOperand(eax, SeqOneByteString::kHeaderSize));
  __ movzx_b(ecx, FieldOperand(edx, SeqOneByteString::kHeaderSize));
  __ bind(&make_two_character_string_no_reload);
  __ IncrementCounter(counters->string_add_make_two_char(), 1);
  __ AllocateAsciiString(eax, 2, edi, edx, &call_runtime);
  // Pack both characters in ebx.
  __ shl(ecx, kBitsPerByte);
  __ or_(ebx, ecx);
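  // ebx now holds the first character in its low byte and the second in the
  // next byte, so the 16-bit store below writes them in string order on
  // little-endian ia32.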
  // Set the characters in the new string.
  __ mov_w(FieldOperand(eax, SeqOneByteString::kHeaderSize), ebx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ cmp(ebx, Immediate(Smi::FromInt(ConsString::kMinLength)));
  __ j(below, &string_add_flat_result);

  // If the result is not supposed to be flat, allocate a cons string object.
  // If both strings are ASCII the result is an ASCII cons string.
  Label non_ascii, allocated, ascii_data;
  __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
  __ and_(ecx, edi);
  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ test(ecx, Immediate(kStringEncodingMask));
  __ j(zero, &non_ascii);
  __ bind(&ascii_data);
  // Allocate an ASCII cons string.
  __ AllocateAsciiConsString(ecx, edi, no_reg, &call_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  __ AssertSmi(ebx);
  __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
  __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
         Immediate(String::kEmptyHashField));
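
  // The mode check below decides whether the two field stores need write
  // barriers; presumably, with high promotion mode active the cons string may
  // have been allocated directly in old space, while otherwise it is in new
  // space and the barriers can be skipped.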
3759

    
3760
  Label skip_write_barrier, after_writing;
3761
  ExternalReference high_promotion_mode = ExternalReference::
3762
      new_space_high_promotion_mode_active_address(masm->isolate());
3763
  __ test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
3764
  __ j(zero, &skip_write_barrier);
3765

    
3766
  __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
3767
  __ RecordWriteField(ecx,
3768
                     ConsString::kFirstOffset,
3769
                     eax,
3770
                     ebx,
3771
                     kDontSaveFPRegs);
3772
  __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
3773
  __ RecordWriteField(ecx,
3774
                     ConsString::kSecondOffset,
3775
                     edx,
3776
                     ebx,
3777
                     kDontSaveFPRegs);
3778
  __ jmp(&after_writing);
3779

    
3780
  __ bind(&skip_write_barrier);
3781
  __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
3782
  __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
3783

    
3784
  __ bind(&after_writing);
3785

    
3786
  __ mov(eax, ecx);
3787
  __ IncrementCounter(counters->string_add_native(), 1);
3788
  __ ret(2 * kPointerSize);
3789
  __ bind(&non_ascii);
3790
  // At least one of the strings is two-byte. Check whether it happens
3791
  // to contain only one byte characters.
3792
  // ecx: first instance type AND second instance type.
3793
  // edi: second instance type.
3794
  __ test(ecx, Immediate(kOneByteDataHintMask));
3795
  __ j(not_zero, &ascii_data);
3796
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
3797
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
3798
  __ xor_(edi, ecx);
3799
  STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
3800
  __ and_(edi, kOneByteStringTag | kOneByteDataHintTag);
3801
  __ cmp(edi, kOneByteStringTag | kOneByteDataHintTag);
3802
  __ j(equal, &ascii_data);
3803
  // Allocate a two byte cons string.
3804
  __ AllocateTwoByteConsString(ecx, edi, no_reg, &call_runtime);
3805
  __ jmp(&allocated);
3806

    
3807
  // We cannot encounter sliced strings or cons strings here since:
3808
  STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
3809
  // Handle creating a flat result from either external or sequential strings.
3810
  // Locate the first characters' locations.
3811
  // eax: first string
3812
  // ebx: length of resulting flat string as a smi
3813
  // edx: second string
3814
  Label first_prepared, second_prepared;
3815
  Label first_is_sequential, second_is_sequential;
3816
  __ bind(&string_add_flat_result);
3817
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
3818
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
3819
  // ecx: instance type of first string
3820
  STATIC_ASSERT(kSeqStringTag == 0);
3821
  __ test_b(ecx, kStringRepresentationMask);
3822
  __ j(zero, &first_is_sequential, Label::kNear);
3823
  // Rule out short external string and load string resource.
3824
  STATIC_ASSERT(kShortExternalStringTag != 0);
3825
  __ test_b(ecx, kShortExternalStringMask);
3826
  __ j(not_zero, &call_runtime);
3827
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
3828
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
3829
  __ jmp(&first_prepared, Label::kNear);
3830
  __ bind(&first_is_sequential);
3831
  __ add(eax, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3832
  __ bind(&first_prepared);
3833

    
3834
  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
3835
  __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
3836
  // Check whether both strings have same encoding.
3837
  // edi: instance type of second string
3838
  __ xor_(ecx, edi);
3839
  __ test_b(ecx, kStringEncodingMask);
3840
  __ j(not_zero, &call_runtime);
3841
  STATIC_ASSERT(kSeqStringTag == 0);
3842
  __ test_b(edi, kStringRepresentationMask);
3843
  __ j(zero, &second_is_sequential, Label::kNear);
3844
  // Rule out short external string and load string resource.
3845
  STATIC_ASSERT(kShortExternalStringTag != 0);
3846
  __ test_b(edi, kShortExternalStringMask);
3847
  __ j(not_zero, &call_runtime);
3848
  __ mov(edx, FieldOperand(edx, ExternalString::kResourceDataOffset));
3849
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
3850
  __ jmp(&second_prepared, Label::kNear);
3851
  __ bind(&second_is_sequential);
3852
  __ add(edx, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3853
  __ bind(&second_prepared);
3854

    
3855
  // Push the addresses of both strings' first characters onto the stack.
3856
  __ push(edx);
3857
  __ push(eax);
3858

    
3859
  Label non_ascii_string_add_flat_result, call_runtime_drop_two;
3860
  // edi: instance type of second string
3861
  // First string and second string have the same encoding.
3862
  STATIC_ASSERT(kTwoByteStringTag == 0);
3863
  __ test_b(edi, kStringEncodingMask);
3864
  __ j(zero, &non_ascii_string_add_flat_result);
3865

    
3866
  // Both strings are ASCII strings.
3867
  // ebx: length of resulting flat string as a smi
3868
  __ SmiUntag(ebx);
3869
  __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &call_runtime_drop_two);
3870
  // eax: result string
3871
  __ mov(ecx, eax);
3872
  // Locate first character of result.
3873
  __ add(ecx, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3874
  // Load first argument's length and first character location.  Account for
3875
  // values currently on the stack when fetching arguments from it.
3876
  __ mov(edx, Operand(esp, 4 * kPointerSize));
3877
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
3878
  __ SmiUntag(edi);
3879
  __ pop(edx);
3880
  // eax: result string
3881
  // ecx: first character of result
3882
  // edx: first char of first argument
3883
  // edi: length of first argument
3884
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
3885
  // Load second argument's length and first character location.  Account for
3886
  // values currently on the stack when fetching arguments from it.
3887
  __ mov(edx, Operand(esp, 2 * kPointerSize));
3888
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
3889
  __ SmiUntag(edi);
3890
  __ pop(edx);
3891
  // eax: result string
3892
  // ecx: next character of result
3893
  // edx: first char of second argument
3894
  // edi: length of second argument
3895
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
3896
  __ IncrementCounter(counters->string_add_native(), 1);
3897
  __ ret(2 * kPointerSize);
3898

    
3899
  // Handle creating a flat two byte result.
3900
  // eax: first string - known to be two byte
3901
  // ebx: length of resulting flat string as a smi
3902
  // edx: second string
3903
  __ bind(&non_ascii_string_add_flat_result);
3904
  // Both strings are two byte strings.
3905
  __ SmiUntag(ebx);
3906
  __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &call_runtime_drop_two);
3907
  // eax: result string
3908
  __ mov(ecx, eax);
3909
  // Locate first character of result.
3910
  __ add(ecx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3911
  // Load second argument's length and first character location.  Account for
3912
  // values currently on the stack when fetching arguments from it.
3913
  __ mov(edx, Operand(esp, 4 * kPointerSize));
3914
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
3915
  __ SmiUntag(edi);
3916
  __ pop(edx);
3917
  // eax: result string
3918
  // ecx: first character of result
3919
  // edx: first char of first argument
3920
  // edi: length of first argument
3921
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
3922
  // Load second argument's length and first character location.  Account for
3923
  // values currently on the stack when fetching arguments from it.
3924
  __ mov(edx, Operand(esp, 2 * kPointerSize));
3925
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
3926
  __ SmiUntag(edi);
3927
  __ pop(edx);
3928
  // eax: result string
3929
  // ecx: next character of result
3930
  // edx: first char of second argument
3931
  // edi: length of second argument
3932
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
3933
  __ IncrementCounter(counters->string_add_native(), 1);
3934
  __ ret(2 * kPointerSize);
3935

    
3936
  // Recover stack pointer before jumping to runtime.
3937
  __ bind(&call_runtime_drop_two);
3938
  __ Drop(2);
3939
  // Just jump to runtime to add the two strings.
3940
  __ bind(&call_runtime);
3941
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
3942

    
3943
  if (call_builtin.is_linked()) {
3944
    __ bind(&call_builtin);
3945
    __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
3946
  }
3947
}
3948

    
3949

    
3950
void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
3951
  __ push(eax);
3952
  __ push(edx);
3953
}
3954

    
3955

    
3956
void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm,
3957
                                            Register temp) {
3958
  __ pop(temp);
3959
  __ pop(edx);
3960
  __ pop(eax);
3961
  __ push(temp);
3962
}


void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Label* slow) {
  // First check if the argument is already a string.
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);
  __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
  __ j(below, &done);

  // Check the number to string cache.
  __ bind(&not_string);
  // Puts the cached result into scratch1.
  __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, slow);
  __ mov(arg, scratch1);
  __ mov(Operand(esp, stack_offset), arg);
  __ bind(&done);
}


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for very
  // short strings.
  if (ascii) {
    __ mov_b(scratch, Operand(src, 0));
    __ mov_b(Operand(dest, 0), scratch);
    __ add(src, Immediate(1));
    __ add(dest, Immediate(1));
  } else {
    __ mov_w(scratch, Operand(src, 0));
    __ mov_w(Operand(dest, 0), scratch);
    __ add(src, Immediate(2));
    __ add(dest, Immediate(2));
  }
  __ sub(count, Immediate(1));
  __ j(not_zero, &loop);
}
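

// A minimal C++ sketch of the loop the helper above emits (assumption:
// illustrative only and never instantiated; CharT stands for uint8_t in the
// ASCII case and uint16_t in the two-byte case).
template <typename CharT>
inline void CopyCharactersReference(CharT* dest, const CharT* src, int count) {
  do {  // The emitted loop assumes count > 0, hence the do-while.
    *dest++ = *src++;
  } while (--count != 0);
}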


void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             Register scratch,
                                             bool ascii) {
  // Copy characters using rep movs of doublewords.
  // The destination is aligned on a 4 byte boundary because we are
  // copying to the beginning of a newly allocated string.
  ASSERT(dest.is(edi));  // rep movs destination
  ASSERT(src.is(esi));  // rep movs source
  ASSERT(count.is(ecx));  // rep movs count
  ASSERT(!scratch.is(dest));
  ASSERT(!scratch.is(src));
  ASSERT(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, count);
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (!ascii) {
    __ shl(count, 1);
  }

  // Don't enter the rep movs if there are fewer than 4 bytes to copy.
  Label last_bytes;
  __ test(count, Immediate(~3));
  __ j(zero, &last_bytes, Label::kNear);

  // Copy from esi to edi using the rep movs instruction.
  __ mov(scratch, count);
  __ sar(count, 2);  // Number of doublewords to copy.
  __ cld();
  __ rep_movs();

  // Find number of bytes left.
  __ mov(count, scratch);
  __ and_(count, 3);

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ test(count, count);
  __ j(zero, &done);

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ add(src, Immediate(1));
  __ add(dest, Immediate(1));
  __ sub(count, Immediate(1));
  __ j(not_zero, &loop);

  __ bind(&done);
}
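

// A C++ sketch of the copy strategy above (assumption: illustrative only):
// move the bulk of the data in 4-byte doublewords, then finish the remaining
// count & 3 bytes one at a time, mirroring the rep movs plus tail loop.
inline void CopyBytesReference(uint8_t* dest, const uint8_t* src, int count) {
  for (int i = 0; i < (count >> 2); i++) {  // Whole doublewords.
    memcpy(dest, src, 4);
    dest += 4;
    src += 4;
  }
  for (int i = 0; i < (count & 3); i++) {  // Tail of 0-3 bytes.
    *dest++ = *src++;
  }
}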


void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Label* not_probed,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings have a
  // different hash algorithm. Don't try to look for these in the string table.
  Label not_array_index;
  __ mov(scratch, c1);
  __ sub(scratch, Immediate(static_cast<int>('0')));
  __ cmp(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index, Label::kNear);
  __ mov(scratch, c2);
  __ sub(scratch, Immediate(static_cast<int>('0')));
  __ cmp(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_probed);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, kBitsPerByte);
  __ or_(chars, c2);

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash:  hash of two character string.

  // Load the string table.
  Register string_table = c2;
  __ LoadRoot(string_table, Heap::kStringTableRootIndex);

  // Calculate capacity mask from the string table capacity.
  Register mask = scratch2;
  __ mov(mask, FieldOperand(string_table, StringTable::kCapacityOffset));
  __ SmiUntag(mask);
  __ sub(mask, Immediate(1));

  // Registers
  // chars:        two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash:         hash of two character string
  // string_table: string table
  // mask:         capacity mask
  // scratch:      -

  // Perform a number of probes in the string table.
  static const int kProbes = 4;
  Label found_in_string_table;
  Label next_probe[kProbes], next_probe_pop_mask[kProbes];
  Register candidate = scratch;  // Scratch register contains candidate.
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in string table.
    __ mov(scratch, hash);
    if (i > 0) {
      __ add(scratch, Immediate(StringTable::GetProbeOffset(i)));
    }
    __ and_(scratch, mask);

    // Load the entry from the string table.
    STATIC_ASSERT(StringTable::kEntrySize == 1);
    __ mov(candidate,
           FieldOperand(string_table,
                        scratch,
                        times_pointer_size,
                        StringTable::kElementsStartOffset));

    // If entry is undefined no string with this hash can be found.
    Factory* factory = masm->isolate()->factory();
    __ cmp(candidate, factory->undefined_value());
    __ j(equal, not_found);
    __ cmp(candidate, factory->the_hole_value());
    __ j(equal, &next_probe[i]);

    // If length is not 2 the string is not a candidate.
    __ cmp(FieldOperand(candidate, String::kLengthOffset),
           Immediate(Smi::FromInt(2)));
    __ j(not_equal, &next_probe[i]);

    // As we are out of registers save the mask on the stack and use that
    // register as a temporary.
    __ push(mask);
    Register temp = mask;

    // Check that the candidate is a non-external ASCII string.
    __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
    __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe_pop_mask[i]);

    // Check if the two characters match.
    __ mov(temp, FieldOperand(candidate, SeqOneByteString::kHeaderSize));
    __ and_(temp, 0x0000ffff);
    __ cmp(chars, temp);
    __ j(equal, &found_in_string_table);
    __ bind(&next_probe_pop_mask[i]);
    __ pop(mask);
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = candidate;
  __ bind(&found_in_string_table);
  __ pop(mask);  // Pop saved mask from the stack.
  if (!result.is(eax)) {
    __ mov(eax, result);
  }
}


void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = (seed + character) + ((seed + character) << 10);
  if (Serializer::enabled()) {
    __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
    __ SmiUntag(scratch);
    __ add(scratch, character);
    __ mov(hash, scratch);
    __ shl(scratch, 10);
    __ add(hash, scratch);
  } else {
    int32_t seed = masm->isolate()->heap()->HashSeed();
    __ lea(scratch, Operand(character, seed));
    __ shl(scratch, 10);
    __ lea(hash, Operand(scratch, character, times_1, seed));
  }
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ shr(scratch, 6);
  __ xor_(hash, scratch);
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ add(hash, character);
  // hash += hash << 10;
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, scratch);
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ shr(scratch, 6);
  __ xor_(hash, scratch);
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ mov(scratch, hash);
  __ shl(scratch, 3);
  __ add(hash, scratch);
  // hash ^= hash >> 11;
  __ mov(scratch, hash);
  __ shr(scratch, 11);
  __ xor_(hash, scratch);
  // hash += hash << 15;
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, scratch);

  __ and_(hash, String::kHashBitMask);

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ j(not_zero, &hash_not_zero, Label::kNear);
  __ mov(hash, Immediate(StringHasher::kZeroHash));
  __ bind(&hash_not_zero);
}
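

// Taken together, the three hash helpers above implement a seeded
// one-at-a-time hash. A C++ sketch for reference (assumption: illustrative
// only; kZeroHash replaces a zero result because a zero hash field has a
// special meaning):
inline uint32_t StringHashReference(const uint8_t* chars, int length,
                                    uint32_t seed) {
  uint32_t hash = seed;
  for (int i = 0; i < length; i++) {
    hash += chars[i];     // GenerateHashInit / GenerateHashAddCharacter.
    hash += hash << 10;
    hash ^= hash >> 6;
  }
  hash += hash << 3;      // GenerateHashGetHash finalization.
  hash ^= hash >> 11;
  hash += hash << 15;
  hash &= String::kHashBitMask;
  return hash == 0 ? StringHasher::kZeroHash : hash;
}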


void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: to
  //  esp[8]: from
  //  esp[12]: string

  // Make sure first argument is a string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);

  // eax: string
  // ebx: instance type

  // Calculate length of sub string using the smi values.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
  __ JumpIfNotSmi(ecx, &runtime);
  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
  __ JumpIfNotSmi(edx, &runtime);
  __ sub(ecx, edx);
  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
  Label not_original_string;
  // Shorter than original string's length: an actual substring.
  __ j(below, &not_original_string, Label::kNear);
  // Longer than original string's length or negative: unsafe arguments.
  __ j(above, &runtime);
  // Return original string.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);
  __ bind(&not_original_string);

  Label single_char;
  __ cmp(ecx, Immediate(Smi::FromInt(1)));
  __ j(equal, &single_char);

  // eax: string
  // ebx: instance type
  // ecx: sub string length (smi)
  // edx: from index (smi)
  // Deal with different string types: update the index if necessary
  // and put the underlying string into edi.
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  // If the string is not indirect, it can only be sequential or external.
  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
  STATIC_ASSERT(kIsIndirectStringMask != 0);
  __ test(ebx, Immediate(kIsIndirectStringMask));
  __ j(zero, &seq_or_external_string, Label::kNear);

  Factory* factory = masm->isolate()->factory();
  __ test(ebx, Immediate(kSlicedNotConsMask));
  __ j(not_zero, &sliced_string, Label::kNear);
  // Cons string.  Check whether it is flat, then fetch first part.
  // Flat cons strings have an empty second part.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
         factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
  // Update instance type.
  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&sliced_string);
  // Sliced string.  Fetch parent and adjust start index by offset.
  __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
  // Update instance type.
  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&seq_or_external_string);
  // Sequential or external string.  Just move string to the expected register.
  __ mov(edi, eax);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    Label copy_routine;
    // edi: underlying subject string
    // ebx: instance type of underlying subject string
    // edx: adjusted start index (smi)
    // ecx: length (smi)
    __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
    // Short slice.  Copy instead of slicing.
    __ j(less, &copy_routine);
    // Allocate new sliced string.  At this point we do not reload the instance
    // type including the string encoding because we simply rely on the info
    // provided by the original string.  It does not matter if the original
    // string's encoding is wrong because we always have to recheck encoding of
    // the newly created string's parent anyways due to externalized strings.
    Label two_byte_slice, set_slice_header;
    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
    __ test(ebx, Immediate(kStringEncodingMask));
    __ j(zero, &two_byte_slice, Label::kNear);
    __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
    __ jmp(&set_slice_header, Label::kNear);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
    __ bind(&set_slice_header);
    __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
    __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
           Immediate(String::kEmptyHashField));
    __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
    __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
    __ IncrementCounter(counters->sub_string_native(), 1);
    __ ret(3 * kPointerSize);

    __ bind(&copy_routine);
  }

  // edi: underlying subject string
  // ebx: instance type of underlying subject string
  // edx: adjusted start index (smi)
  // ecx: length (smi)
  // The subject string can only be external or sequential string of either
  // encoding at this point.
  Label two_byte_sequential, runtime_drop_two, sequential_string;
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test_b(ebx, kExternalStringTag);
  __ j(zero, &sequential_string);

  // Handle external string.
  // Rule out short external strings.
  STATIC_CHECK(kShortExternalStringTag != 0);
  __ test_b(ebx, kShortExternalStringMask);
  __ j(not_zero, &runtime);
  __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  __ bind(&sequential_string);
  // Stash away (adjusted) index and (underlying) string.
  __ push(edx);
  __ push(edi);
  __ SmiUntag(ecx);
  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
  __ test_b(ebx, kStringEncodingMask);
  __ j(zero, &two_byte_sequential);

  // Sequential ASCII string.  Allocate the result.
  __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime_drop_two);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ pop(esi);
  __ pop(ebx);
  __ SmiUntag(ebx);
  __ lea(esi, FieldOperand(esi, ebx, times_1, SeqOneByteString::kHeaderSize));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
  __ mov(esi, edx);  // Restore esi.
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  __ bind(&two_byte_sequential);
  // Sequential two-byte string.  Allocate the result.
  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(edi,
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ pop(esi);
  __ pop(ebx);
  // Since the from index is a smi, it is already twice the value, which
  // matches the size of a two-byte character.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ lea(esi, FieldOperand(esi, ebx, times_1, SeqTwoByteString::kHeaderSize));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
  __ mov(esi, edx);  // Restore esi.
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  // Drop pushed values on the stack before tail call.
  __ bind(&runtime_drop_two);
  __ Drop(2);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);

  __ bind(&single_char);
  // eax: string
  // ebx: instance type
  // ecx: sub string length (smi)
  // edx: from index (smi)
  StringCharAtGenerator generator(
      eax, edx, ecx, eax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm);
  __ ret(3 * kPointerSize);
  generator.SkipSlow(masm, &runtime);
}
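

// The fast-path selection above, summarized in C++ (assumption: illustrative
// only, with hypothetical return codes; every other case falls through to
// the runtime):
inline int SubStringStrategyReference(int from, int to, int length) {
  int sub_length = to - from;
  if (sub_length == length) return 0;  // Return the original string.
  if (sub_length == 1) return 1;       // Single-character fast path.
  if (FLAG_string_slices && sub_length >= SlicedString::kMinLength) {
    return 2;                          // Allocate a SlicedString.
  }
  return 3;                            // Copy into a new sequential string.
}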


void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                                      Register left,
                                                      Register right,
                                                      Register scratch1,
                                                      Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label strings_not_equal, check_zero_length;
  __ mov(length, FieldOperand(left, String::kLengthOffset));
  __ cmp(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ bind(&strings_not_equal);
  __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ test(length, length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
                                &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);
}


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);

  // Find minimum length.
  Label left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  __ j(less_equal, &left_shorter, Label::kNear);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, length_delta);
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  Label compare_lengths;
  __ test(min_length, min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare characters.
  Label result_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
                                &result_not_equal, Label::kNear);

  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, length_delta);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Set(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Set(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(0);
}
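

// A C++ sketch of the three-way comparison above (assumption: illustrative
// only): compare the common prefix, then break ties on length, yielding
// LESS, EQUAL or GREATER exactly like the generated code.
inline int CompareFlatStringsReference(const uint8_t* left, int left_length,
                                       const uint8_t* right,
                                       int right_length) {
  int min_length = left_length < right_length ? left_length : right_length;
  for (int i = 0; i < min_length; i++) {
    if (left[i] != right[i]) return left[i] < right[i] ? -1 : 1;
  }
  int length_delta = left_length - right_length;
  if (length_delta == 0) return 0;
  return length_delta < 0 ? -1 : 1;
}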


void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Register left,
    Register right,
    Register length,
    Register scratch,
    Label* chars_not_equal,
    Label::Distance chars_not_equal_near) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that the loop ends when the index reaches zero,
  // which doesn't need an additional compare.
  __ SmiUntag(length);
  __ lea(left,
         FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ lea(right,
         FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ neg(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, chars_not_equal_near);
  __ inc(index);
  __ j(not_zero, &loop);
}
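

// The negative-index trick above in plain C++ (assumption: illustrative
// only): bias both base pointers past the end and count an index up to
// zero, so the loop branch doubles as the termination test.
inline bool CharsEqualReference(const uint8_t* left, const uint8_t* right,
                                int length) {
  const uint8_t* left_end = left + length;
  const uint8_t* right_end = right + length;
  for (int index = -length; index != 0; index++) {
    if (left_end[index] != right_end[index]) return false;
  }
  return true;
}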


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: right string
  //  esp[8]: left string

  __ mov(edx, Operand(esp, 2 * kPointerSize));  // left
  __ mov(eax, Operand(esp, 1 * kPointerSize));  // right

  Label not_same;
  __ cmp(edx, eax);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both objects are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);

  // Compare flat ASCII strings.
  // Drop arguments from the stack.
  __ pop(ecx);
  __ add(esp, Immediate(2 * kPointerSize));
  __ push(ecx);
  GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}


void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMI);
  Label miss;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ sub(eax, edx);
  } else {
    Label done;
    __ sub(edx, eax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ not_(edx);
    __ bind(&done);
    __ mov(eax, edx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
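

// Why NOT (rather than NEG) repairs the overflow case above (assumption:
// illustrative only): if edx - eax overflows, the result has the wrong sign,
// and flipping all bits flips the sign bit while keeping the value non-zero.
// Non-zero-ness holds because both operands are tagged smis (even numbers),
// so the difference is even and ~result can never collapse to zero.
inline int32_t SmiCompareReference(int32_t left, int32_t right) {
  int32_t result = static_cast<int32_t>(
      static_cast<uint32_t>(left) - static_cast<uint32_t>(right));
  bool overflow = ((left ^ right) & (left ^ result)) < 0;
  if (overflow) result = ~result;  // Restore the correct sign.
  return result;                   // < 0, 0 or > 0, like the stub.
}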


void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left_ == CompareIC::SMI) {
    __ JumpIfNotSmi(edx, &miss);
  }
  if (right_ == CompareIC::SMI) {
    __ JumpIfNotSmi(eax, &miss);
  }

  // Inline the double comparison and fall back to the general compare
  // stub if NaN is involved or SSE2 or CMOV is unsupported.
  if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
    CpuFeatureScope scope1(masm, SSE2);
    CpuFeatureScope scope2(masm, CMOV);

    // Load left and right operand.
    Label done, left, left_smi, right_smi;
    __ JumpIfSmi(eax, &right_smi, Label::kNear);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, &maybe_undefined1, Label::kNear);
    __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ jmp(&left, Label::kNear);
    __ bind(&right_smi);
    __ mov(ecx, eax);  // Can't clobber eax because we can still jump away.
    __ SmiUntag(ecx);
    __ Cvtsi2sd(xmm1, ecx);

    __ bind(&left);
    __ JumpIfSmi(edx, &left_smi, Label::kNear);
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
    __ jmp(&done);
    __ bind(&left_smi);
    __ mov(ecx, edx);  // Can't clobber edx because we can still jump away.
    __ SmiUntag(ecx);
    __ Cvtsi2sd(xmm0, ecx);

    __ bind(&done);
    // Compare operands.
    __ ucomisd(xmm0, xmm1);

    // Don't base result on EFLAGS when a NaN is involved.
    __ j(parity_even, &unordered, Label::kNear);

    // Return a result of -1, 0, or 1, based on EFLAGS.
    // We use mov because xor would clobber the flags register.
    __ mov(eax, 0);  // equal
    __ mov(ecx, Immediate(Smi::FromInt(1)));
    __ cmov(above, eax, ecx);
    __ mov(ecx, Immediate(Smi::FromInt(-1)));
    __ cmov(below, eax, ecx);
    __ ret(0);
  } else {
    __ mov(ecx, edx);
    __ and_(ecx, eax);
    __ JumpIfSmi(ecx, &generic_stub, Label::kNear);

    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, &maybe_undefined1, Label::kNear);
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, &maybe_undefined2, Label::kNear);
  }

  __ bind(&unordered);
  __ bind(&generic_stub);
  ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
                     CompareIC::GENERIC);
  __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op_)) {
    __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value()));
    __ j(not_equal, &miss);
    __ JumpIfSmi(edx, &unordered);
    __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op_)) {
    __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value()));
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
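

// The NaN handling above in plain C++ (assumption: illustrative only):
// ucomisd raises the parity flag for unordered operands, which the stub
// routes to the generic path; ordered results map to -1, 0 or 1 via cmov.
inline int DoubleCompareReference(double left, double right, bool* unordered) {
  *unordered = (left != left) || (right != right);  // NaN never equals itself.
  if (*unordered) return 0;  // The stub defers to the generic stub here.
  if (left > right) return 1;
  if (left < right) return -1;
  return 0;
}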


void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.
  Label miss;
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ or_(tmp1, tmp2);
  __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmp(left, right);
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(eax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::UNIQUE_NAME);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.
  Label miss;
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmp(left, right);
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(eax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op_);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;
  Register tmp3 = edi;

  // Check that both operands are heap objects.
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ mov(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ or_(tmp3, tmp2);
  __ test(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized. If they are, we're done
  // because we already know they are not identical.  But in the case of
  // non-equality compare, we still need to determine the order. We
  // also know they are both strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ or_(tmp1, tmp2);
    __ test(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure eax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    ASSERT(right.is(eax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential ASCII.
  Label runtime;
  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat ASCII strings. Returns when done.
  if (equality) {
    StringCompareStub::GenerateFlatAsciiStringEquals(
        masm, left, right, tmp1, tmp2);
  } else {
    StringCompareStub::GenerateCompareFlatAsciiStrings(
        masm, left, right, tmp1, tmp2, tmp3);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ pop(tmp1);  // Return address.
  __ push(left);
  __ push(right);
  __ push(tmp1);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  } else {
    __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECT);
  Label miss;
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);

  ASSERT(GetCondition() == equal);
  __ sub(eax, edx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
  Label miss;
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ecx, known_map_);
  __ j(not_equal, &miss, Label::kNear);
  __ cmp(ebx, known_map_);
  __ j(not_equal, &miss, Label::kNear);

  __ sub(eax, edx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
                                               masm->isolate());
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(edx);  // Preserve edx and eax.
    __ push(eax);
    __ push(edx);  // And also use them as the arguments.
    __ push(eax);
    __ push(Immediate(Smi::FromInt(op_)));
    __ CallExternalReference(miss, 3);
    // Compute the entry point of the rewritten stub.
    __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
    __ pop(eax);
    __ pop(edx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(edi);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a unique name and receiver must be a heap object.
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  ASSERT(name->IsUniqueName());

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and the kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ mov(index, FieldOperand(properties, kCapacityOffset));
    __ dec(index);
    __ and_(index,
            Immediate(Smi::FromInt(name->Hash() +
                                   NameDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
                                kElementsStartOffset - kHeapObjectTag));
    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if we found the property.
    __ cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
                           miss);
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
  __ push(Immediate(Handle<Object>(name)));
  __ push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ test(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}


// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found leaving the
// index into the dictionary in |r0|. Jump to the |miss| label
// otherwise.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  ASSERT(!elements.is(r0));
  ASSERT(!elements.is(r1));
  ASSERT(!name.is(r0));
  ASSERT(!name.is(r1));

  __ AssertName(name);

  __ mov(r1, FieldOperand(elements, kCapacityOffset));
  __ shr(r1, kSmiTagSize);  // convert smi to int
  __ dec(r1);

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
    __ shr(r0, Name::kHashShift);
    if (i > 0) {
      __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(r0, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3

    // Check if the key is identical to the name.
    __ cmp(name, Operand(elements,
                         r0,
                         times_4,
                         kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  NameDictionaryLookupStub stub(elements, r1, r0, POSITIVE_LOOKUP);
  __ push(name);
  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shr(r0, Name::kHashShift);
  __ push(r0);
  __ CallStub(&stub);

  __ test(r1, r1);
  __ j(zero, miss);
  __ jmp(done);
}
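

// A C++ sketch of the probe sequence used above and in the full stub below
// (assumption: illustrative only): dictionary capacities are powers of two,
// so '& mask' implements the modulo, and successive probes are offset by
// NameDictionary::GetProbeOffset(i).
inline uint32_t ProbeSlotReference(uint32_t hash, uint32_t capacity, int i) {
  uint32_t mask = capacity - 1;
  return (hash + NameDictionary::GetProbeOffset(i)) & mask;
}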


void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false.  That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  esp[0 * kPointerSize]: return address.
  //  esp[1 * kPointerSize]: key's hash.
  //  esp[2 * kPointerSize]: key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result_;

  __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset));
  __ dec(scratch);
  __ SmiUntag(scratch);
  __ push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and the kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
    if (i > 0) {
      __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(esp, 0));

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index_, Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    __ mov(scratch, Operand(dictionary_,
                            index_,
                            times_pointer_size,
                            kElementsStartOffset - kHeapObjectTag));
    __ cmp(scratch, masm->isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if we found the property.
    __ cmp(scratch, Operand(esp, 3 * kPointerSize));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bail out, as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
                             &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode_ == POSITIVE_LOOKUP) {
    __ mov(result_, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ mov(result_, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ mov(result_, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}


struct AheadOfTimeWriteBarrierStubList {
  Register object, value, address;
  RememberedSetAction action;
};


#define REG(Name) { kRegister_ ## Name ## _Code }

static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
  // Used in RegExpExecStub.
  { REG(ebx), REG(eax), REG(edi), EMIT_REMEMBERED_SET },
  // Used in CompileArrayPushCall.
  { REG(ebx), REG(ecx), REG(edx), EMIT_REMEMBERED_SET },
  { REG(ebx), REG(edi), REG(edx), OMIT_REMEMBERED_SET },
  // Used in StoreStubCompiler::CompileStoreField and
  // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
  { REG(edx), REG(ecx), REG(ebx), EMIT_REMEMBERED_SET },
  // GenerateStoreField calls the stub with two different permutations of
  // registers.  This is the second.
  { REG(ebx), REG(ecx), REG(edx), EMIT_REMEMBERED_SET },
  // StoreIC::GenerateNormal via GenerateDictionaryStore
  { REG(ebx), REG(edi), REG(edx), EMIT_REMEMBERED_SET },
  // KeyedStoreIC::GenerateGeneric.
  { REG(ebx), REG(edx), REG(ecx), EMIT_REMEMBERED_SET },
  // KeyedStoreStubCompiler::GenerateStoreFastElement.
  { REG(edi), REG(ebx), REG(ecx), EMIT_REMEMBERED_SET },
  { REG(edx), REG(edi), REG(ebx), EMIT_REMEMBERED_SET },
  // ElementsTransitionGenerator::GenerateMapChangeElementTransition
  // and ElementsTransitionGenerator::GenerateSmiToDouble
  // and ElementsTransitionGenerator::GenerateDoubleToObject
  { REG(edx), REG(ebx), REG(edi), EMIT_REMEMBERED_SET },
  { REG(edx), REG(ebx), REG(edi), OMIT_REMEMBERED_SET },
  // ElementsTransitionGenerator::GenerateDoubleToObject
  { REG(eax), REG(edx), REG(esi), EMIT_REMEMBERED_SET },
  { REG(edx), REG(eax), REG(edi), EMIT_REMEMBERED_SET },
  // StoreArrayLiteralElementStub::Generate
  { REG(ebx), REG(eax), REG(ecx), EMIT_REMEMBERED_SET },
  // FastNewClosureStub and StringAddStub::Generate
  { REG(ecx), REG(edx), REG(ebx), EMIT_REMEMBERED_SET },
  // StringAddStub::Generate
  { REG(ecx), REG(eax), REG(ebx), EMIT_REMEMBERED_SET },
  // Null termination.
  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET }
};

#undef REG

bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
       entry++) {
    if (object_.is(entry->object) &&
        value_.is(entry->value) &&
        address_.is(entry->address) &&
        remembered_set_action_ == entry->action &&
        save_fp_regs_mode_ == kDontSaveFPRegs) {
      return true;
    }
  }
  return false;
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub(kDontSaveFPRegs);
  stub.GetCode(isolate)->set_is_pregenerated(true);
  if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
    StoreBufferOverflowStub stub2(kSaveFPRegs);
    stub2.GetCode(isolate)->set_is_pregenerated(true);
  }
}


void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
       entry++) {
    RecordWriteStub stub(entry->object,
                         entry->value,
                         entry->address,
                         entry->action,
                         kDontSaveFPRegs);
    stub.GetCode(isolate)->set_is_pregenerated(true);
  }
}


bool CodeStub::CanUseFPRegisters() {
  return CpuFeatures::IsSupported(SSE2);
}


// Takes the input in 3 registers: address_, value_ and object_.  A pointer to
// the value has just been written into the object; now this stub makes sure
// we keep the GC informed.  The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch them back
  // and forth between compare instructions (nops in this position) and the
  // real branches when we start and stop incremental heap marking.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
5372

    
5373

    
5374
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm,
        kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
        mode);
    InformIncrementalMarker(masm, mode);
    regs_.Restore(masm);
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm,
      kReturnOnNoNeedToInformIncrementalMarker,
      mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
  __ ret(0);
}


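// Calls out to the C++ record-write function with three arguments: the
// object, the slot address, and the isolate.  Caller-save registers are
// preserved around the call so the stub's register state survives.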
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
  __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));

  AllowExternalCallThatCantCauseGC scope(masm);
  if (mode == INCREMENTAL_COMPACTION) {
    __ CallCFunction(
        ExternalReference::incremental_evacuation_record_write_function(
            masm->isolate()),
        argument_count);
  } else {
    ASSERT(mode == INCREMENTAL);
    __ CallCFunction(
        ExternalReference::incremental_marking_record_write_function(
            masm->isolate()),
        argument_count);
  }
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}


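// Decides whether the incremental marker has to be told about this write.
// Decrementing the page's write barrier counter below zero forces the slow
// path; otherwise the marker only needs to know when a black object points
// at a white value (or at an evacuation candidate during compaction).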
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label object_is_black, need_incremental, need_incremental_pop_object;

  __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ and_(regs_.scratch0(), regs_.object());
  __ mov(regs_.scratch1(),
         Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset));
  __ sub(regs_.scratch1(), Immediate(1));
  __ mov(Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset),
         regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object:  If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &object_is_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&object_is_black);

  // Get the value from the slot.
  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     not_zero,
                     &ensure_not_white,
                     Label::kNear);

    __ jmp(&need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),  // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}


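// Stores eax into the array literal at index ecx, dispatching on the
// literal's elements kind: fast object, smi, and double elements are
// handled inline, everything else falls back to the runtime.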
void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : element value to store
  //  -- ecx    : element index as smi
  //  -- esp[0] : return address
  //  -- esp[4] : array literal index in function
  //  -- esp[8] : array literal
  // clobbers ebx, edx, edi
  // -----------------------------------

  Label element_done;
  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label slow_elements_from_double;
  Label fast_elements;

  // Get array literal index, array literal and its map.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));

  __ CheckFastElements(edi, &double_elements);

  // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements.
  __ JumpIfSmi(eax, &smi_element);
  __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);

  // Store into the array literal requires an elements transition. Call into
  // the runtime.

  __ bind(&slow_elements);
  __ pop(edi);  // Pop return address and remember to put back later for tail
                // call.
  __ push(ebx);
  __ push(ecx);
  __ push(eax);
  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
  __ push(edx);
  __ push(edi);  // Return return address so that tail call returns to right
                 // place.
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  __ bind(&slow_elements_from_double);
  __ pop(edx);
  __ jmp(&slow_elements);

  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
  __ bind(&fast_elements);
  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
  __ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
                           FixedArrayBase::kHeaderSize));
  __ mov(Operand(ecx, 0), eax);
  // Update the write barrier for the array store.
  __ RecordWrite(ebx, ecx, eax,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ ret(0);

  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
  // and value is Smi.
  __ bind(&smi_element);
  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
  __ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
                      FixedArrayBase::kHeaderSize), eax);
  __ ret(0);

  // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
  __ bind(&double_elements);

  __ push(edx);
  __ mov(edx, FieldOperand(ebx, JSObject::kElementsOffset));
  __ StoreNumberToDoubleElements(eax,
                                 edx,
                                 ecx,
                                 edi,
                                 xmm0,
                                 &slow_elements_from_double,
                                 false);
  __ pop(edx);
  __ ret(0);
}


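// Tail of a stub failure: re-enters the runtime through CEntryStub, then
// pops the trampoline frame and the caller's stack parameters (plus the
// implicit receiver for JS function stubs) before jumping back to the
// miss handler's continuation.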
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ mov(ebx, MemOperand(ebp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ pop(ecx);
  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
      ? kPointerSize
      : 0;
  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
  __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    // It's always safe to call the entry hook stub, as the hook itself
    // is not allowed to call back to V8.
    AllowStubCallsScope allow_stub_calls(masm, true);

    ProfileEntryHookStub stub;
    masm->CallStub(&stub);
  }
}


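// Saves the volatile registers, then invokes the entry hook as a C function
// with two stack arguments: the calling function's address and the stack
// pointer at the point of entry.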
void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  const int kNumSavedRegisters = 3;
  __ push(eax);
  __ push(ecx);
  __ push(edx);

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ push(eax);

  // Retrieve our return address and use it to calculate the calling
  // function's address.
  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  __ push(eax);

  // Call the entry hook.
  ASSERT(masm->isolate()->function_entry_hook() != NULL);
  __ call(FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
          RelocInfo::RUNTIME_ENTRY);
  __ add(esp, Immediate(2 * kPointerSize));

  // Restore the saved volatile registers.
  __ pop(edx);
  __ pop(ecx);
  __ pop(eax);

  __ ret(0);
}


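// Compares the elements kind in edx against every fast elements kind in
// sequence order and tail-calls the matching constructor stub; with
// DISABLE_ALLOCATION_SITES a single stub for the initial kind is used
// instead.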
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(GetInitialFastElementsKind(),
           CONTEXT_CHECK_REQUIRED,
           mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      T stub(kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // ebx - type info cell (if mode != DISABLE_ALLOCATION_SITES)
  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // eax - number of arguments
  // edi - constructor?
  // esp[0] - return address
  // esp[4] - last argument
  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    ASSERT(FAST_SMI_ELEMENTS == 0);
    ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    ASSERT(FAST_ELEMENTS == 2);
    ASSERT(FAST_HOLEY_ELEMENTS == 3);
    ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ test_b(edx, 1);
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  __ mov(ecx, Operand(esp, kPointerSize));
  __ test(ecx, ecx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(holey_initial,
                                                  CONTEXT_CHECK_REQUIRED,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(initial,
                                            CONTEXT_CHECK_REQUIRED,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry.
    __ inc(edx);
    __ mov(ecx, FieldOperand(ebx, Cell::kValueOffset));
    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
      __ Assert(equal, kExpectedAllocationSiteInCell);
    }

    // Save the resulting elements kind in type info.
    __ SmiTag(edx);
    __ mov(FieldOperand(ecx, AllocationSite::kTransitionInfoOffset), edx);
    __ SmiUntag(edx);

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


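// Pregenerates one stub of type T per fast elements kind, plus the
// DISABLE_ALLOCATION_SITES variant where allocation-site tracking can apply
// to that kind.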
template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  ElementsKind initial_kind = GetInitialFastElementsKind();
  ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind);

  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(kind);
    stub.GetCode(isolate)->set_is_pregenerated(true);
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE ||
        (!FLAG_track_allocation_sites &&
         (kind == initial_kind || kind == initial_holey_kind))) {
      T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
      stub1.GetCode(isolate)->set_is_pregenerated(true);
    }
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}


void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things.
    InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
    stubh1.GetCode(isolate)->set_is_pregenerated(true);
    InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
    stubh2.GetCode(isolate)->set_is_pregenerated(true);
    InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
    stubh3.GetCode(isolate)->set_is_pregenerated(true);
  }
}


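// Dispatches on the argument count: when it is not known statically (ANY),
// eax is tested at runtime to pick the zero-, one-, or N-argument
// constructor path.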
void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count_ == ANY) {
    Label not_zero_case, not_one_case;
    __ test(eax, eax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmp(eax, 1);
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count_ == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count_ == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count_ == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}


void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc (only if argument_count_ == ANY)
  //  -- ebx : type info cell
  //  -- edi : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in ebx or a valid cell.
    Label okay_here;
    Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
    __ cmp(ebx, Immediate(undefined_sentinel));
    __ j(equal, &okay_here);
    __ cmp(FieldOperand(ebx, 0), Immediate(cell_map));
    __ Assert(equal, kExpectedPropertyCellInRegisterEbx);
    __ bind(&okay_here);
  }

  Label no_info;
  // If the type cell is undefined, or contains anything other than an
  // AllocationSite, call an array constructor that doesn't use AllocationSites.
  __ cmp(ebx, Immediate(undefined_sentinel));
  __ j(equal, &no_info);
  __ mov(edx, FieldOperand(ebx, Cell::kValueOffset));
  __ cmp(FieldOperand(edx, 0), Immediate(
      masm->isolate()->factory()->allocation_site_map()));
  __ j(not_equal, &no_info);

  __ mov(edx, FieldOperand(edx, AllocationSite::kTransitionInfoOffset));
  __ SmiUntag(edx);
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
}


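// Generates the argc dispatch for one internal-array elements kind.  For a
// packed kind with a single non-zero length argument, the holey
// single-argument stub is used instead, since the new array will contain
// holes.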
void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ mov(ecx, Operand(esp, kPointerSize));
    __ test(ecx, ecx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(kind);
  __ TailCallStub(&stubN);
}


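// Reads the elements kind out of the constructor's initial map and
// dispatches to the FAST_ELEMENTS or FAST_HOLEY_ELEMENTS case.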
void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- ebx : type info cell
  //  -- edi : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into |ecx|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ and_(ecx, Map::kElementsKindMask);
  __ shr(ecx, Map::kElementsKindShift);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(ecx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(ecx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32