Revision f230a1cf deps/v8/src/full-codegen.cc

View differences:

deps/v8/src/full-codegen.cc
@@ -193,12 +193,16 @@
 }
 
 
+void BreakableStatementChecker::VisitCaseClause(CaseClause* clause) {
+}
+
+
 void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
 }
 
 
-void BreakableStatementChecker::VisitSharedFunctionInfoLiteral(
-    SharedFunctionInfoLiteral* expr) {
+void BreakableStatementChecker::VisitNativeFunctionLiteral(
+    NativeFunctionLiteral* expr) {
 }
 
 
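BreakableStatementChecker is an AST visitor that decides whether a statement or expression can be made breakable in the debugger: its Visit* methods are deliberately empty for constructs that never need a debug break slot, and only breakable constructs set a flag. A minimal sketch of that pattern (a simplified illustration, not the verbatim V8 class; the AstVisitor base and the choice of Call as the breakable example are assumptions):

    // Hypothetical reduction of the checker: visiting a node marks the
    // tree breakable only if a breakable construct is encountered.
    class BreakableStatementChecker : public AstVisitor {
     public:
      BreakableStatementChecker() : is_breakable_(false) {}
      bool is_breakable() const { return is_breakable_; }

      // Clauses and function literals are never breakable themselves,
      // so their visit methods do nothing (as in the hunk above).
      void VisitCaseClause(CaseClause* clause) {}
      void VisitFunctionLiteral(FunctionLiteral* expr) {}

      // A call site is breakable: record the fact.
      void VisitCall(Call* expr) { is_breakable_ = true; }

     private:
      bool is_breakable_;
    };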
@@ -341,8 +345,6 @@
   code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
   code->set_handler_table(*cgen.handler_table());
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  code->set_has_debug_break_slots(
-      info->isolate()->debugger()->IsDebuggerActive());
   code->set_compiled_optimizable(info->IsOptimizable());
 #endif  // ENABLE_DEBUGGER_SUPPORT
   code->set_allow_osr_at_loop_nesting_level(0);
@@ -826,7 +828,7 @@
 void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
 #ifdef ENABLE_DEBUGGER_SUPPORT
   if (!isolate()->debugger()->IsDebuggerActive()) {
-    CodeGenerator::RecordPositions(masm_, stmt->statement_pos());
+    CodeGenerator::RecordPositions(masm_, stmt->position());
   } else {
     // Check if the statement will be breakable without adding a debug break
     // slot.
@@ -836,7 +838,7 @@
     // breakable. For breakable statements the actual recording of the
     // position will be postponed to the breakable code (typically an IC).
     bool position_recorded = CodeGenerator::RecordPositions(
-        masm_, stmt->statement_pos(), !checker.is_breakable());
+        masm_, stmt->position(), !checker.is_breakable());
     // If the position recording did record a new position generate a debug
     // break slot to make the statement breakable.
     if (position_recorded) {
@@ -844,15 +846,15 @@
     }
   }
 #else
-  CodeGenerator::RecordPositions(masm_, stmt->statement_pos());
+  CodeGenerator::RecordPositions(masm_, stmt->position());
 #endif
 }
 
 
-void FullCodeGenerator::SetExpressionPosition(Expression* expr, int pos) {
+void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
 #ifdef ENABLE_DEBUGGER_SUPPORT
   if (!isolate()->debugger()->IsDebuggerActive()) {
-    CodeGenerator::RecordPositions(masm_, pos);
+    CodeGenerator::RecordPositions(masm_, expr->position());
   } else {
     // Check if the expression will be breakable without adding a debug break
     // slot.
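Both RecordPositions call sites change for the same reason: the source position is now read off the AST node itself (stmt->position(), expr->position()) instead of being fetched through per-node accessors such as statement_pos() or threaded through as an extra int pos argument. A before/after sketch of the effect on a caller (the accessor shape shown is an assumption, not the verbatim V8 declaration):

    // Before: the caller had to know which position accessor to use.
    //   SetExpressionPosition(stmt->cond(), stmt->condition_position());
    // After: every AST node carries one position, set by the parser.
    //   SetExpressionPosition(stmt->cond());
    //
    // Assumed shape of the shared accessor on the AST node base class:
    class AstNode {
     public:
      int position() const { return position_; }
     private:
      int position_;  // character offset into the script source
    };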
@@ -866,7 +868,7 @@
     // statement positions this is used for e.g. the condition expression of
     // a do while loop.
     bool position_recorded = CodeGenerator::RecordPositions(
-        masm_, pos, !checker.is_breakable());
+        masm_, expr->position(), !checker.is_breakable());
     // If the position recording did record a new position generate a debug
     // break slot to make the statement breakable.
     if (position_recorded) {
@@ -1293,7 +1295,7 @@
   // possible to break on the condition.
   __ bind(loop_statement.continue_label());
   PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
-  SetExpressionPosition(stmt->cond(), stmt->condition_position());
+  SetExpressionPosition(stmt->cond());
   VisitForControl(stmt->cond(),
                   &book_keeping,
                   loop_statement.break_label(),
@@ -1515,6 +1517,11 @@
 }
 
 
+void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
+  UNREACHABLE();
+}
+
+
 void FullCodeGenerator::VisitConditional(Conditional* expr) {
   Comment cmnt(masm_, "[ Conditional");
   Label true_case, false_case, done;
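The new VisitCaseClause contains only UNREACHABLE() because case clauses are never dispatched through the generic visitor: the switch code generator iterates over its clauses directly. A hypothetical sketch of why the method cannot be reached (illustrative only, not the actual VisitSwitchStatement body):

    // Sketch: the switch visitor walks clause labels and bodies itself,
    // so Visit(clause) is never invoked on a CaseClause node.
    void VisitSwitchStatement(SwitchStatement* stmt) {
      ZoneList<CaseClause*>* clauses = stmt->cases();
      for (int i = 0; i < clauses->length(); i++) {
        CaseClause* clause = clauses->at(i);
        if (!clause->is_default()) Visit(clause->label());
        VisitStatements(clause->statements());
      }
    }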
@@ -1522,8 +1529,7 @@
 
   PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
   __ bind(&true_case);
-  SetExpressionPosition(expr->then_expression(),
-                        expr->then_expression_position());
+  SetExpressionPosition(expr->then_expression());
   if (context()->IsTest()) {
     const TestContext* for_test = TestContext::cast(context());
     VisitForControl(expr->then_expression(),
@@ -1537,8 +1543,7 @@
 
   PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
   __ bind(&false_case);
-  SetExpressionPosition(expr->else_expression(),
-                        expr->else_expression_position());
+  SetExpressionPosition(expr->else_expression());
   VisitInDuplicateContext(expr->else_expression());
   // If control flow falls through Visit, merge it with true case here.
   if (!context()->IsTest()) {
@@ -1567,10 +1572,33 @@
 }
 
 
-void FullCodeGenerator::VisitSharedFunctionInfoLiteral(
-    SharedFunctionInfoLiteral* expr) {
-  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
-  EmitNewClosure(expr->shared_function_info(), false);
+void FullCodeGenerator::VisitNativeFunctionLiteral(
+    NativeFunctionLiteral* expr) {
+  Comment cmnt(masm_, "[ NativeFunctionLiteral");
+
+  // Compute the function template for the native function.
+  Handle<String> name = expr->name();
+  v8::Handle<v8::FunctionTemplate> fun_template =
+      expr->extension()->GetNativeFunction(v8::Utils::ToLocal(name));
+  ASSERT(!fun_template.IsEmpty());
+
+  // Instantiate the function and create a shared function info from it.
+  Handle<JSFunction> fun = Utils::OpenHandle(*fun_template->GetFunction());
+  const int literals = fun->NumberOfLiterals();
+  Handle<Code> code = Handle<Code>(fun->shared()->code());
+  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
+  bool is_generator = false;
+  Handle<SharedFunctionInfo> shared =
+      isolate()->factory()->NewSharedFunctionInfo(name, literals, is_generator,
+          code, Handle<ScopeInfo>(fun->shared()->scope_info()));
+  shared->set_construct_stub(*construct_stub);
+
+  // Copy the function data to the shared function info.
+  shared->set_function_data(fun->shared()->function_data());
+  int parameters = fun->shared()->formal_parameter_count();
+  shared->set_formal_parameter_count(parameters);
+
+  EmitNewClosure(shared, false);
 }
 
 
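For context, expr->extension() in the new VisitNativeFunctionLiteral refers to a registered v8::Extension, whose GetNativeFunction hook hands back the FunctionTemplate that the code above instantiates. A minimal sketch of such an extension (the class name, source string, and callback are made up for illustration; the override signature matches the public v8.h API of this period):

    // A v8::Extension whose source declares a native function; when the
    // parser sees the declaration it emits a NativeFunctionLiteral, and
    // full codegen asks the extension for the implementation.
    class AddExtension : public v8::Extension {
     public:
      AddExtension()
          : v8::Extension("v8/add", "native function NativeAdd();") {}

      virtual v8::Handle<v8::FunctionTemplate> GetNativeFunction(
          v8::Handle<v8::String> name) {
        return v8::FunctionTemplate::New(Add);
      }

      static void Add(const v8::FunctionCallbackInfo<v8::Value>& args) {
        args.GetReturnValue().Set(args[0]->NumberValue() +
                                  args[1]->NumberValue());
      }
    };

The extension would still need to be registered (v8::RegisterExtension) and named in the ExtensionConfiguration passed at context creation for the declaration to be visible to scripts.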
@@ -1615,6 +1643,100 @@
 }
 
 
+void BackEdgeTable::Patch(Isolate* isolate,
+                          Code* unoptimized) {
+  DisallowHeapAllocation no_gc;
+  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
+
+  // Iterate over the back edge table and patch every interrupt
+  // call to an unconditional call to the replacement code.
+  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
+
+  BackEdgeTable back_edges(unoptimized, &no_gc);
+  for (uint32_t i = 0; i < back_edges.length(); i++) {
+    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
+      ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate,
+                                            unoptimized,
+                                            back_edges.pc(i)));
+      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
+    }
+  }
+
+  unoptimized->set_back_edges_patched_for_osr(true);
+  ASSERT(Verify(isolate, unoptimized, loop_nesting_level));
+}
+
+
+void BackEdgeTable::Revert(Isolate* isolate,
+                           Code* unoptimized) {
+  DisallowHeapAllocation no_gc;
+  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);
+
+  // Iterate over the back edge table and revert the patched interrupt calls.
+  ASSERT(unoptimized->back_edges_patched_for_osr());
+  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
+
+  BackEdgeTable back_edges(unoptimized, &no_gc);
+  for (uint32_t i = 0; i < back_edges.length(); i++) {
+    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
+      ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate,
+                                            unoptimized,
+                                            back_edges.pc(i)));
+      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
+    }
+  }
+
+  unoptimized->set_back_edges_patched_for_osr(false);
+  unoptimized->set_allow_osr_at_loop_nesting_level(0);
+  // Assert that none of the back edges are patched anymore.
+  ASSERT(Verify(isolate, unoptimized, -1));
+}
+
+
+void BackEdgeTable::AddStackCheck(CompilationInfo* info) {
+  DisallowHeapAllocation no_gc;
+  Isolate* isolate = info->isolate();
+  Code* code = info->shared_info()->code();
+  Address pc = code->instruction_start() + info->osr_pc_offset();
+  ASSERT_EQ(ON_STACK_REPLACEMENT, GetBackEdgeState(isolate, code, pc));
+  Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
+  PatchAt(code, pc, OSR_AFTER_STACK_CHECK, patch);
+}
+
+
+void BackEdgeTable::RemoveStackCheck(CompilationInfo* info) {
+  DisallowHeapAllocation no_gc;
+  Isolate* isolate = info->isolate();
+  Code* code = info->shared_info()->code();
+  Address pc = code->instruction_start() + info->osr_pc_offset();
+  if (GetBackEdgeState(isolate, code, pc) == OSR_AFTER_STACK_CHECK) {
+    Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
+    PatchAt(code, pc, ON_STACK_REPLACEMENT, patch);
+  }
+}
+
+
+#ifdef DEBUG
+bool BackEdgeTable::Verify(Isolate* isolate,
+                           Code* unoptimized,
+                           int loop_nesting_level) {
+  DisallowHeapAllocation no_gc;
+  BackEdgeTable back_edges(unoptimized, &no_gc);
+  for (uint32_t i = 0; i < back_edges.length(); i++) {
+    uint32_t loop_depth = back_edges.loop_depth(i);
+    CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
+    // Assert that all back edges for shallower loops (and only those)
+    // have already been patched.
+    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
+             GetBackEdgeState(isolate,
+                              unoptimized,
+                              back_edges.pc(i)) != INTERRUPT);
+  }
+  return true;
+}
+#endif  // DEBUG
+
+
 #undef __
 
 
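Taken together, the new BackEdgeTable helpers centralize how back edges in unoptimized code are patched for on-stack replacement (OSR). Each back-edge call site is in one of the three states that GetBackEdgeState reports, and the functions above move edges between them. A summary sketch (the enumerator names come from the asserts in the diff; reproducing the enum here, and reading the functions as a three-state machine, are interpretations):

    // Sketch of the back-edge state machine implied by the asserts above.
    enum BackEdgeState {
      INTERRUPT,              // default: edge calls Builtins::kInterruptCheck
      ON_STACK_REPLACEMENT,   // Patch(): edge calls kOnStackReplacement
      OSR_AFTER_STACK_CHECK   // AddStackCheck(): one pc calls
                              // kOsrAfterStackCheck during an OSR attempt
    };
    // Patch()            : INTERRUPT -> ON_STACK_REPLACEMENT, for edges at
    //                      exactly the allowed loop nesting level.
    // Revert()           : back to INTERRUPT for all edges at or below it.
    // Add/RemoveStackCheck(): toggle a single pc between
    //                      ON_STACK_REPLACEMENT and OSR_AFTER_STACK_CHECK.
    // Verify() (DEBUG only) checks that exactly the edges of loops at or
    // below the current nesting level are patched.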
