deps/v8/src/compiler.cc @ f230a1cf

// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "compiler.h"

#include "bootstrapper.h"
#include "codegen.h"
#include "compilation-cache.h"
#include "cpu-profiler.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "gdb-jit.h"
#include "typing.h"
#include "hydrogen.h"
#include "isolate-inl.h"
#include "lithium.h"
#include "liveedit.h"
#include "parser.h"
#include "rewriter.h"
#include "runtime-profiler.h"
#include "scanner-character-streams.h"
#include "scopeinfo.h"
#include "scopes.h"
#include "vm-state-inl.h"

namespace v8 {
namespace internal {


CompilationInfo::CompilationInfo(Handle<Script> script,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE)),
      script_(script),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
  Initialize(script->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE) | IsLazy::encode(true)),
      shared_info_(shared_info),
      script_(Handle<Script>(Script::cast(shared_info->script()))),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
  Initialize(script_->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(Handle<JSFunction> closure,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE) | IsLazy::encode(true)),
      closure_(closure),
      shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
      script_(Handle<Script>(Script::cast(shared_info_->script()))),
      context_(closure->context()),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
  Initialize(script_->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(HydrogenCodeStub* stub,
                                 Isolate* isolate,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE) |
             IsLazy::encode(true)),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
  Initialize(isolate, STUB, zone);
  code_stub_ = stub;
}


void CompilationInfo::Initialize(Isolate* isolate,
                                 Mode mode,
                                 Zone* zone) {
  isolate_ = isolate;
  function_ = NULL;
  scope_ = NULL;
  global_scope_ = NULL;
  extension_ = NULL;
  pre_parse_data_ = NULL;
  zone_ = zone;
  deferred_handles_ = NULL;
  code_stub_ = NULL;
  prologue_offset_ = Code::kPrologueOffsetNotSet;
  opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count();
  no_frame_ranges_ = isolate->cpu_profiler()->is_profiling()
                   ? new List<OffsetRange>(2) : NULL;
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    dependencies_[i] = NULL;
  }
  if (mode == STUB) {
    mode_ = STUB;
    return;
  }
  mode_ = mode;
  abort_due_to_dependency_ = false;
  if (script_->type()->value() == Script::TYPE_NATIVE) {
    MarkAsNative();
  }
  if (!shared_info_.is_null()) {
    ASSERT(language_mode() == CLASSIC_MODE);
    SetLanguageMode(shared_info_->language_mode());
  }
  set_bailout_reason(kUnknown);
}


CompilationInfo::~CompilationInfo() {
  delete deferred_handles_;
  delete no_frame_ranges_;
#ifdef DEBUG
  // Check that no dependent maps have been added, or that any added
  // dependent maps have been rolled back or committed.
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ASSERT_EQ(NULL, dependencies_[i]);
  }
#endif  // DEBUG
}


void CompilationInfo::CommitDependencies(Handle<Code> code) {
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
    if (group_objects == NULL) continue;
    ASSERT(!object_wrapper_.is_null());
    for (int j = 0; j < group_objects->length(); j++) {
      DependentCode::DependencyGroup group =
          static_cast<DependentCode::DependencyGroup>(i);
      DependentCode* dependent_code =
          DependentCode::ForObject(group_objects->at(j), group);
      dependent_code->UpdateToFinishedCode(group, this, *code);
    }
    dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
  }
}


void CompilationInfo::RollbackDependencies() {
  // Unregister from all dependent maps if not yet committed.
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
    if (group_objects == NULL) continue;
    for (int j = 0; j < group_objects->length(); j++) {
      DependentCode::DependencyGroup group =
          static_cast<DependentCode::DependencyGroup>(i);
      DependentCode* dependent_code =
          DependentCode::ForObject(group_objects->at(j), group);
      dependent_code->RemoveCompilationInfo(group, this);
    }
    dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
  }
}


int CompilationInfo::num_parameters() const {
  ASSERT(!IsStub());
  return scope()->num_parameters();
}


int CompilationInfo::num_heap_slots() const {
  if (IsStub()) {
    return 0;
  } else {
    return scope()->num_heap_slots();
  }
}


Code::Flags CompilationInfo::flags() const {
  if (IsStub()) {
    return Code::ComputeFlags(code_stub()->GetCodeKind(),
                              code_stub()->GetICState(),
                              code_stub()->GetExtraICState(),
                              code_stub()->GetStubType(),
                              code_stub()->GetStubFlags());
  } else {
    return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
  }
}


// Disable optimization for the rest of the compilation pipeline.
void CompilationInfo::DisableOptimization() {
  bool is_optimizable_closure =
    FLAG_optimize_closures &&
    closure_.is_null() &&
    !scope_->HasTrivialOuterContext() &&
    !scope_->outer_scope_calls_non_strict_eval() &&
    !scope_->inside_with();
  SetMode(is_optimizable_closure ? BASE : NONOPT);
}


// Primitive functions are unlikely to be picked up by the stack-walking
// profiler, so they trigger their own optimization when they're called
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
bool CompilationInfo::ShouldSelfOptimize() {
  return FLAG_self_optimization &&
      FLAG_crankshaft &&
      !function()->flags()->Contains(kDontSelfOptimize) &&
      !function()->dont_optimize() &&
      function()->scope()->AllowsLazyCompilation() &&
      (shared_info().is_null() || !shared_info()->optimization_disabled());
}


// Determine whether to use the full compiler for all code. If the flag
// --always-full-compiler is specified this is the case. For the virtual frame
// based compiler the full compiler is also used if a debugger is connected, as
// the code from the full compiler supports more precise break points. For the
// crankshaft adaptive compiler, debugging the optimized code is not possible
// at all. However, crankshaft supports recompilation of functions, so in this
// case the full compiler need not be used if a debugger is attached, but only
// if break points have actually been set.
static bool IsDebuggerActive(Isolate* isolate) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  return isolate->use_crankshaft() ?
    isolate->debug()->has_break_points() :
    isolate->debugger()->IsDebuggerActive();
#else
  return false;
#endif
}


static bool AlwaysFullCompiler(Isolate* isolate) {
  return FLAG_always_full_compiler || IsDebuggerActive(isolate);
}


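// Bumps the function's optimization count and, depending on the tracing
// flags, prints per-function timing, cumulative optimization statistics,
// or hydrogen statistics for the just-finished optimization.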
void RecompileJob::RecordOptimizationStats() {
  Handle<JSFunction> function = info()->closure();
  int opt_count = function->shared()->opt_count();
  function->shared()->set_opt_count(opt_count + 1);
  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions,
           code_size,
           compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
                                                    time_taken_to_optimize_,
                                                    time_taken_to_codegen_);
  }
}


// A return value of true indicates the compilation pipeline is still
// going, not necessarily that we optimized the code.
static bool MakeCrankshaftCode(CompilationInfo* info) {
  RecompileJob job(info);
  RecompileJob::Status status = job.CreateGraph();

  if (status != RecompileJob::SUCCEEDED) {
    return status != RecompileJob::FAILED;
  }
  status = job.OptimizeGraph();
  if (status != RecompileJob::SUCCEEDED) {
    status = job.AbortOptimization();
    return status != RecompileJob::FAILED;
  }
  status = job.GenerateAndInstallCode();
  return status != RecompileJob::FAILED;
}


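// Variant of the optimized graph builder that records the source position
// of each expression and statement node before visiting it, so the
// generated optimized code can carry source position information.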
class HOptimizedGraphBuilderWithPotisions: public HOptimizedGraphBuilder {
 public:
  explicit HOptimizedGraphBuilderWithPotisions(CompilationInfo* info)
      : HOptimizedGraphBuilder(info) {
  }

#define DEF_VISIT(type)                                 \
  virtual void Visit##type(type* node) V8_OVERRIDE {    \
    if (node->position() != RelocInfo::kNoPosition) {   \
      SetSourcePosition(node->position());              \
    }                                                   \
    HOptimizedGraphBuilder::Visit##type(node);          \
  }
  EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

#define DEF_VISIT(type)                                          \
  virtual void Visit##type(type* node) V8_OVERRIDE {             \
    if (node->position() != RelocInfo::kNoPosition) {            \
      SetSourcePosition(node->position());                       \
    }                                                            \
    HOptimizedGraphBuilder::Visit##type(node);                   \
  }
  STATEMENT_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

#define DEF_VISIT(type)                                            \
  virtual void Visit##type(type* node) V8_OVERRIDE {               \
    HOptimizedGraphBuilder::Visit##type(node);                     \
  }
  MODULE_NODE_LIST(DEF_VISIT)
  DECLARATION_NODE_LIST(DEF_VISIT)
  AUXILIARY_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
};


RecompileJob::Status RecompileJob::CreateGraph() {
  ASSERT(isolate()->use_crankshaft());
  ASSERT(info()->IsOptimizing());
  ASSERT(!info()->IsCompilingForDebugging());

  // We should never arrive here if there is no code object on the
  // shared function object.
  ASSERT(info()->shared_info()->code()->kind() == Code::FUNCTION);

  // We should never arrive here if optimization has been disabled on the
  // shared function info.
  ASSERT(!info()->shared_info()->optimization_disabled());

  // Fall back to using the full code generator if it's not possible
  // to use the Hydrogen-based optimizing compiler. We already have
  // generated code for this from the shared function object.
  if (AlwaysFullCompiler(isolate())) {
    info()->AbortOptimization();
    return SetLastStatus(BAILED_OUT);
  }

  // Limit the number of times we re-compile a function with
  // the optimizing compiler.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info()->opt_count() > kMaxOptCount) {
    info()->set_bailout_reason(kOptimizedTooManyTimes);
    return AbortOptimization();
  }

  // Due to an encoding limit on LUnallocated operands in the Lithium
  // language, we cannot optimize functions with too many formal parameters
  // or perform on-stack replacement for functions with too many
  // stack-allocated local variables.
  //
  // The encoding is as a signed value, with parameters and receiver using
  // the negative indices and locals the non-negative ones.
  const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
  Scope* scope = info()->scope();
  if ((scope->num_parameters() + 1) > parameter_limit) {
    info()->set_bailout_reason(kTooManyParameters);
    return AbortOptimization();
  }

  const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
  if (info()->is_osr() &&
      scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
    info()->set_bailout_reason(kTooManyParametersLocals);
    return AbortOptimization();
  }

  // Take --hydrogen-filter into account.
  if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
    info()->AbortOptimization();
    return SetLastStatus(BAILED_OUT);
  }

  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support. Alternatively, we may decide to
  // run the full code generator to get a baseline for the compile-time
  // performance of the hydrogen-based compiler.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    CompilationInfoWithZone unoptimized(info()->shared_info());
    // Note that we use the same AST that we will use for generating the
    // optimized code.
    unoptimized.SetFunction(info()->function());
    unoptimized.SetScope(info()->scope());
    unoptimized.SetContext(info()->context());
    if (should_recompile) unoptimized.EnableDeoptimizationSupport();
    bool succeeded = FullCodeGenerator::MakeCode(&unoptimized);
    if (should_recompile) {
      if (!succeeded) return SetLastStatus(FAILED);
      Handle<SharedFunctionInfo> shared = info()->shared_info();
      shared->EnableDeoptimizationSupport(*unoptimized.code());
      // The existing unoptimized code was replaced with the new one.
      Compiler::RecordFunctionCompilation(
          Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
    }
    if (FLAG_hydrogen_stats) {
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }

  // Check that the unoptimized, shared code is ready for
  // optimizations.  When using the always_opt flag we disregard the
  // optimizable marker in the code object and optimize anyway. This
  // is safe as long as the unoptimized code has deoptimization
  // support.
  ASSERT(FLAG_always_opt || info()->shared_info()->code()->optimizable());
  ASSERT(info()->shared_info()->has_deoptimization_support());

  if (FLAG_trace_hydrogen) {
    Handle<String> name = info()->function()->debug_name();
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling method %s using hydrogen\n", *name->ToCString());
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Type-check the function.
  AstTyper::Run(info());

  graph_builder_ = FLAG_emit_opt_code_positions
      ? new(info()->zone()) HOptimizedGraphBuilderWithPotisions(info())
      : new(info()->zone()) HOptimizedGraphBuilder(info());

  Timer t(this, &time_taken_to_create_graph_);
  graph_ = graph_builder_->CreateGraph();

  if (isolate()->has_pending_exception()) {
    info()->SetCode(Handle<Code>::null());
    return SetLastStatus(FAILED);
  }

  // The function being compiled may have bailed out due to an inline
  // candidate bailing out.  In such a case, we don't disable
  // optimization on the shared_info.
  ASSERT(!graph_builder_->inline_bailout() || graph_ == NULL);
  if (graph_ == NULL) {
    if (graph_builder_->inline_bailout()) {
      info_->AbortOptimization();
      return SetLastStatus(BAILED_OUT);
    } else {
      return AbortOptimization();
    }
  }

  if (info()->HasAbortedDueToDependencyChange()) {
    info_->set_bailout_reason(kBailedOutDueToDependencyChange);
    info_->AbortOptimization();
    return SetLastStatus(BAILED_OUT);
  }

  return SetLastStatus(SUCCEEDED);
}


RecompileJob::Status RecompileJob::OptimizeGraph() {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
  DisallowCodeDependencyChange no_dependency_change;

  ASSERT(last_status() == SUCCEEDED);
  Timer t(this, &time_taken_to_optimize_);
  ASSERT(graph_ != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph_->Optimize(&bailout_reason)) {
    if (bailout_reason == kNoReason) graph_builder_->Bailout(bailout_reason);
    return SetLastStatus(BAILED_OUT);
  } else {
    chunk_ = LChunk::NewChunk(graph_);
    if (chunk_ == NULL) {
      return SetLastStatus(BAILED_OUT);
    }
  }
  return SetLastStatus(SUCCEEDED);
}


RecompileJob::Status RecompileJob::GenerateAndInstallCode() {
  ASSERT(last_status() == SUCCEEDED);
  ASSERT(!info()->HasAbortedDueToDependencyChange());
  DisallowCodeDependencyChange no_dependency_change;
  {  // Scope for timer.
    Timer timer(this, &time_taken_to_codegen_);
    ASSERT(chunk_ != NULL);
    ASSERT(graph_ != NULL);
    // Deferred handles reference objects that were accessible during
    // graph creation.  To make sure that we don't encounter inconsistencies
    // between graph creation and code generation, we disallow accessing
    // objects through deferred handles during the latter, with exceptions.
    DisallowDeferredHandleDereference no_deferred_handle_deref;
    Handle<Code> optimized_code = chunk_->Codegen();
    if (optimized_code.is_null()) {
      if (info()->bailout_reason() == kNoReason) {
        info()->set_bailout_reason(kCodeGenerationFailed);
      }
      return AbortOptimization();
    }
    info()->SetCode(optimized_code);
  }
  RecordOptimizationStats();
  // Add to the weak list of optimized code objects.
  info()->context()->native_context()->AddOptimizedCode(*info()->code());
  return SetLastStatus(SUCCEEDED);
}


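// Dispatches code generation: uses the Crankshaft pipeline when the info is
// marked for optimization and optimization is permitted, and the full code
// generator otherwise.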
static bool GenerateCode(CompilationInfo* info) {
  bool is_optimizing = info->isolate()->use_crankshaft() &&
                       !info->IsCompilingForDebugging() &&
                       info->IsOptimizing();
  if (is_optimizing) {
    Logger::TimerEventScope timer(
        info->isolate(), Logger::TimerEventScope::v8_recompile_synchronous);
    return MakeCrankshaftCode(info);
  } else {
    if (info->IsOptimizing()) {
      // Have the CompilationInfo decide if the compilation should be
      // BASE or NONOPT.
      info->DisableOptimization();
    }
    Logger::TimerEventScope timer(
        info->isolate(), Logger::TimerEventScope::v8_compile_full_code);
    return FullCodeGenerator::MakeCode(info);
  }
}


static bool MakeCode(CompilationInfo* info) {
  // Precondition: code has been parsed.  Postcondition: the code field in
  // the compilation info is set if compilation succeeded.
  ASSERT(info->function() != NULL);
  return Rewriter::Rewrite(info) && Scope::Analyze(info) && GenerateCode(info);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
bool Compiler::MakeCodeForLiveEdit(CompilationInfo* info) {
  // Precondition: code has been parsed.  Postcondition: the code field in
  // the compilation info is set if compilation succeeded.
  bool succeeded = MakeCode(info);
  if (!info->shared_info().is_null()) {
    Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope(),
                                                     info->zone());
    info->shared_info()->set_scope_info(*scope_info);
  }
  return succeeded;
}
#endif


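// Returns true when the debugger forces eager compilation, i.e. when live
// editing is active or break points are set and lazy compilation without a
// context is not allowed.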
static bool DebuggerWantsEagerCompilation(CompilationInfo* info,
                                          bool allow_lazy_without_ctx = false) {
  return LiveEditFunctionTracker::IsActive(info->isolate()) ||
         (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
}


// Sets the expected number of properties based on estimate from compiler.
void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
                                          int estimate) {
  // See the comment in SetExpectedNofProperties.
  if (shared->live_objects_may_exist()) return;

  // If no properties are added in the constructor, they are more likely
  // to be added later.
  if (estimate == 0) estimate = 2;

  // TODO(yangguo): check whether those heuristics are still up-to-date.
  // We do not shrink objects that go into a snapshot (yet), so we adjust
  // the estimate conservatively.
  if (Serializer::enabled()) {
    estimate += 2;
  } else if (FLAG_clever_optimizations) {
    // Inobject slack tracking will reclaim redundant inobject space later,
    // so we can afford to adjust the estimate generously.
    estimate += 8;
  } else {
    estimate += 3;
  }

  shared->set_expected_nof_properties(estimate);
}


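// Parses and compiles a top-level script or eval source and returns the
// resulting shared function info, or an empty handle if parsing or code
// generation fails.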
static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  PostponeInterruptsScope postpone(isolate);

  ASSERT(!isolate->native_context().is_null());
  Handle<Script> script = info->script();
  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(0));

#ifdef ENABLE_DEBUGGER_SUPPORT
  if (info->is_eval()) {
    script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
    // For eval scripts add information on the function from which eval was
    // called.
    if (info->is_eval()) {
      StackTraceFrameIterator it(isolate);
      if (!it.done()) {
        script->set_eval_from_shared(it.frame()->function()->shared());
        Code* code = it.frame()->LookupCode();
        int offset = static_cast<int>(
            it.frame()->pc() - code->instruction_start());
        script->set_eval_from_instructions_offset(Smi::FromInt(offset));
      }
    }
  }

  // Notify debugger
  isolate->debugger()->OnBeforeCompile(script);
#endif

  // Only allow non-global compiles for eval.
  ASSERT(info->is_eval() || info->is_global());
  {
    Parser parser(info);
    if ((info->pre_parse_data() != NULL ||
         String::cast(script->source())->length() > FLAG_min_preparse_length) &&
        !DebuggerWantsEagerCompilation(info))
      parser.set_allow_lazy(true);
    if (!parser.Parse()) {
      return Handle<SharedFunctionInfo>::null();
    }
  }

  FunctionLiteral* lit = info->function();
  LiveEditFunctionTracker live_edit_tracker(isolate, lit);
  Handle<SharedFunctionInfo> result;
  {
    // Measure how long it takes to do the compilation; only take the
    // rest of the function into account to avoid overlap with the
    // parsing statistics.
    HistogramTimer* rate = info->is_eval()
          ? info->isolate()->counters()->compile_eval()
          : info->isolate()->counters()->compile();
    HistogramTimerScope timer(rate);

    // Compile the code.
    if (!MakeCode(info)) {
      if (!isolate->has_pending_exception()) isolate->StackOverflow();
      return Handle<SharedFunctionInfo>::null();
    }

    // Allocate function.
    ASSERT(!info->code().is_null());
    result =
        isolate->factory()->NewSharedFunctionInfo(
            lit->name(),
            lit->materialized_literal_count(),
            lit->is_generator(),
            info->code(),
            ScopeInfo::Create(info->scope(), info->zone()));

    ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
    Compiler::SetFunctionInfo(result, lit, true, script);

    if (script->name()->IsString()) {
      PROFILE(isolate, CodeCreateEvent(
          info->is_eval()
          ? Logger::EVAL_TAG
              : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
                *info->code(),
                *result,
                info,
                String::cast(script->name())));
      GDBJIT(AddCode(Handle<String>(String::cast(script->name())),
                     script,
                     info->code(),
                     info));
    } else {
      PROFILE(isolate, CodeCreateEvent(
          info->is_eval()
          ? Logger::EVAL_TAG
              : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
                *info->code(),
                *result,
                info,
                isolate->heap()->empty_string()));
      GDBJIT(AddCode(Handle<String>(), script, info->code(), info));
    }

    // Hint to the runtime system used when allocating space for initial
    // properties by setting the expected number of properties for
    // the instances of the function.
    SetExpectedNofPropertiesFromEstimate(result,
                                         lit->expected_property_count());

    script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
  }

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Notify debugger
  isolate->debugger()->OnAfterCompile(
      script, Debugger::NO_AFTER_COMPILE_FLAGS);
#endif

  live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());

  return result;
}


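// Compiles a top-level script. Checks the compilation cache first and, on a
// miss, creates a Script object, compiles it and adds the result to the
// cache (unless an extension is supplied or caching is disallowed).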
Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
                                             Handle<Object> script_name,
                                             int line_offset,
                                             int column_offset,
                                             bool is_shared_cross_origin,
                                             Handle<Context> context,
                                             v8::Extension* extension,
                                             ScriptDataImpl* pre_data,
                                             Handle<Object> script_data,
                                             NativesFlag natives) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  // The VM is in the COMPILER state until exiting this function.
  VMState<COMPILER> state(isolate);

  CompilationCache* compilation_cache = isolate->compilation_cache();

  // Do a lookup in the compilation cache but not for extensions.
  Handle<SharedFunctionInfo> result;
  if (extension == NULL) {
    result = compilation_cache->LookupScript(source,
                                             script_name,
                                             line_offset,
                                             column_offset,
                                             is_shared_cross_origin,
                                             context);
  }

  if (result.is_null()) {
    // No cache entry found. Do pre-parsing, if it makes sense, and compile
    // the script.
    // Building preparse data that is only used immediately after is only a
    // saving if we might skip building the AST for lazily compiled functions.
    // I.e., preparse data isn't relevant when the lazy flag is off, and
    // for small sources, odds are that there aren't many functions
    // that would be compiled lazily anyway, so we skip the preparse step
    // in that case too.

    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    if (natives == NATIVES_CODE) {
      script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
    }
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(Smi::FromInt(line_offset));
      script->set_column_offset(Smi::FromInt(column_offset));
    }
    script->set_is_shared_cross_origin(is_shared_cross_origin);

    script->set_data(script_data.is_null() ? isolate->heap()->undefined_value()
                                           : *script_data);

    // Compile the function and add it to the cache.
    CompilationInfoWithZone info(script);
    info.MarkAsGlobal();
    info.SetExtension(extension);
    info.SetPreParseData(pre_data);
    info.SetContext(context);
    if (FLAG_use_strict) {
      info.SetLanguageMode(FLAG_harmony_scoping ? EXTENDED_MODE : STRICT_MODE);
    }
    result = MakeFunctionInfo(&info);
    if (extension == NULL && !result.is_null() && !result->dont_cache()) {
      compilation_cache->PutScript(source, context, result);
    }
  } else {
    if (result->ic_age() != isolate->heap()->global_ic_age()) {
      result->ResetForNewContext(isolate->heap()->global_ic_age());
    }
  }

  if (result.is_null()) isolate->ReportPendingMessages();
  return result;
}


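// Compiles an eval source string in the given context. Consults the eval
// compilation cache first and adds newly compiled, cacheable results to it.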
Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
                                                 Handle<Context> context,
                                                 bool is_global,
                                                 LanguageMode language_mode,
                                                 ParseRestriction restriction,
                                                 int scope_position) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_eval_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  // The VM is in the COMPILER state until exiting this function.
  VMState<COMPILER> state(isolate);

  // Do a lookup in the compilation cache; if the entry is not there, invoke
  // the compiler and add the result to the cache.
  Handle<SharedFunctionInfo> result;
  CompilationCache* compilation_cache = isolate->compilation_cache();
  result = compilation_cache->LookupEval(source,
                                         context,
                                         is_global,
                                         language_mode,
                                         scope_position);

  if (result.is_null()) {
    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    CompilationInfoWithZone info(script);
    info.MarkAsEval();
    if (is_global) info.MarkAsGlobal();
    info.SetLanguageMode(language_mode);
    info.SetParseRestriction(restriction);
    info.SetContext(context);
    result = MakeFunctionInfo(&info);
    if (!result.is_null()) {
      // Explicitly disable optimization for eval code. We're not yet prepared
      // to handle eval-code in the optimizing compiler.
      result->DisableOptimization(kEval);

      // If caller is strict mode, the result must be in strict mode or
      // extended mode as well, but not the other way around. Consider:
      // eval("'use strict'; ...");
      ASSERT(language_mode != STRICT_MODE || !result->is_classic_mode());
      // If caller is in extended mode, the result must also be in
      // extended mode.
      ASSERT(language_mode != EXTENDED_MODE ||
             result->is_extended_mode());
      if (!result->dont_cache()) {
        compilation_cache->PutEval(
            source, context, is_global, result, scope_position);
      }
    }
  } else {
    if (result->ic_age() != isolate->heap()->global_ic_age()) {
      result->ResetForNewContext(isolate->heap()->global_ic_age());
    }
  }

  return result;
}


static bool InstallFullCode(CompilationInfo* info) {
  // Update the shared function info with the compiled code and the
  // scope info.  Please note that the order of the shared function
  // info initialization is important since set_scope_info might
  // trigger a GC, causing the ASSERT below to be invalid if the code
  // was flushed. By setting the code object last we avoid this.
  Handle<SharedFunctionInfo> shared = info->shared_info();
  Handle<Code> code = info->code();
  CHECK(code->kind() == Code::FUNCTION);
  Handle<JSFunction> function = info->closure();
  Handle<ScopeInfo> scope_info =
      ScopeInfo::Create(info->scope(), info->zone());
  shared->set_scope_info(*scope_info);
  shared->ReplaceCode(*code);
  if (!function.is_null()) {
    function->ReplaceCode(*code);
    ASSERT(!function->IsOptimized());
  }

  // Set the expected number of properties for instances.
  FunctionLiteral* lit = info->function();
  int expected = lit->expected_property_count();
  SetExpectedNofPropertiesFromEstimate(shared, expected);

  // Check that the function has compiled code.
  ASSERT(shared->is_compiled());
  shared->set_dont_optimize_reason(lit->dont_optimize_reason());
  shared->set_dont_inline(lit->flags()->Contains(kDontInline));
  shared->set_ast_node_count(lit->ast_node_count());

  if (info->isolate()->use_crankshaft() &&
      !function.is_null() &&
      !shared->optimization_disabled()) {
    // If we're asked to always optimize, we compile the optimized
    // version of the function right away - unless the debugger is
    // active as it makes no sense to compile optimized code then.
    if (FLAG_always_opt &&
        !info->isolate()->DebuggerHasBreakPoints()) {
      CompilationInfoWithZone optimized(function);
      optimized.SetOptimizing(BailoutId::None());
      return Compiler::CompileLazy(&optimized);
    }
  }
  return true;
}


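// Post-compilation bookkeeping shared by several paths: clears the
// optimizable bit when optimization is disallowed and records the
// compilation for logging and profiling unless the code is unchanged.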
static void InstallCodeCommon(CompilationInfo* info) {
  Handle<SharedFunctionInfo> shared = info->shared_info();
  Handle<Code> code = info->code();
  ASSERT(!code.is_null());

  // Set optimizable to false if this is disallowed by the shared
  // function info, e.g., we might have flushed the code and must
  // reset this bit when lazy compiling the code again.
  if (shared->optimization_disabled()) code->set_optimizable(false);

  if (shared->code() == *code) {
    // Do not send compilation event for the same code twice.
    return;
  }
  Compiler::RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);
}


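// Caches newly generated optimized (non-OSR) code on the shared function
// info, keyed by the function's native context.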
static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
  Handle<Code> code = info->code();
  if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.

  // Cache non-OSR optimized code.
  if (FLAG_cache_optimized_code && !info->is_osr()) {
    Handle<JSFunction> function = info->closure();
    Handle<SharedFunctionInfo> shared(function->shared());
    Handle<FixedArray> literals(function->literals());
    Handle<Context> native_context(function->context()->native_context());
    SharedFunctionInfo::AddToOptimizedCodeMap(
        shared, native_context, code, literals);
  }
}


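// Tries to reuse optimized code cached for the function's native context;
// installs it on the closure and returns true on a cache hit.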
static bool InstallCodeFromOptimizedCodeMap(CompilationInfo* info) {
  if (!info->IsOptimizing()) return false;  // Nothing to look up.

  // Lookup non-OSR optimized code.
  if (FLAG_cache_optimized_code && !info->is_osr()) {
    Handle<SharedFunctionInfo> shared = info->shared_info();
    Handle<JSFunction> function = info->closure();
    ASSERT(!function.is_null());
    Handle<Context> native_context(function->context()->native_context());
    int index = shared->SearchOptimizedCodeMap(*native_context);
    if (index > 0) {
      if (FLAG_trace_opt) {
        PrintF("[found optimized code for ");
        function->ShortPrint();
        PrintF("]\n");
      }
      // Caching of optimized code enabled and optimized code found.
      shared->InstallFromOptimizedCodeMap(*function, index);
      return true;
    }
  }
  return false;
}


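// Compiles a function that was marked for lazy compilation. Reuses cached
// optimized code when possible; otherwise parses the function, generates
// code and installs it on the closure and the shared function info.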
bool Compiler::CompileLazy(CompilationInfo* info) {
  Isolate* isolate = info->isolate();

  // The VM is in the COMPILER state until exiting this function.
  VMState<COMPILER> state(isolate);

  PostponeInterruptsScope postpone(isolate);

  Handle<SharedFunctionInfo> shared = info->shared_info();
  int compiled_size = shared->end_position() - shared->start_position();
  isolate->counters()->total_compile_size()->Increment(compiled_size);

  if (InstallCodeFromOptimizedCodeMap(info)) return true;

  // Generate the AST for the lazily compiled function.
  if (Parser::Parse(info)) {
    // Measure how long it takes to do the lazy compilation; only take the
    // rest of the function into account to avoid overlap with the lazy
    // parsing statistics.
    HistogramTimerScope timer(isolate->counters()->compile_lazy());

    // After parsing we know the function's language mode. Remember it.
    LanguageMode language_mode = info->function()->language_mode();
    info->SetLanguageMode(language_mode);
    shared->set_language_mode(language_mode);

    // Compile the code.
    if (!MakeCode(info)) {
      if (!isolate->has_pending_exception()) {
        isolate->StackOverflow();
      }
    } else {
      InstallCodeCommon(info);

      if (info->IsOptimizing()) {
        // Optimized code successfully created.
        Handle<Code> code = info->code();
        ASSERT(shared->scope_info() != ScopeInfo::Empty(isolate));
        // TODO(titzer): Only replace the code if it was not an OSR compile.
        info->closure()->ReplaceCode(*code);
        InsertCodeIntoOptimizedCodeMap(info);
        return true;
      } else if (!info->is_osr()) {
        // Compilation failed. Replace with full code if not OSR compile.
        return InstallFullCode(info);
      }
    }
  }

  ASSERT(info->code().is_null());
  return false;
}


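// Prepares a function (optionally for OSR) for concurrent optimized
// recompilation and queues the job on the optimizing compiler thread.
// Returns false if the queue is full or graph creation does not succeed.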
bool Compiler::RecompileConcurrent(Handle<JSFunction> closure,
                                   uint32_t osr_pc_offset) {
  bool compiling_for_osr = (osr_pc_offset != 0);

  Isolate* isolate = closure->GetIsolate();
  // Here we prepare compile data for the concurrent recompilation thread, but
  // this still happens synchronously and interrupts execution.
  Logger::TimerEventScope timer(
      isolate, Logger::TimerEventScope::v8_recompile_synchronous);

  if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Compilation queue full, will retry optimizing ");
      closure->PrintName();
      PrintF(" on next run.\n");
    }
    return false;
  }

  SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(closure));
  Handle<SharedFunctionInfo> shared = info->shared_info();

  if (compiling_for_osr) {
    BailoutId osr_ast_id =
        shared->code()->TranslatePcOffsetToAstId(osr_pc_offset);
    ASSERT(!osr_ast_id.IsNone());
    info->SetOptimizing(osr_ast_id);
    info->set_osr_pc_offset(osr_pc_offset);

    if (FLAG_trace_osr) {
      PrintF("[COSR - attempt to queue ");
      closure->PrintName();
      PrintF(" at AST id %d]\n", osr_ast_id.ToInt());
    }
  } else {
    info->SetOptimizing(BailoutId::None());
  }

  VMState<COMPILER> state(isolate);
  PostponeInterruptsScope postpone(isolate);

  int compiled_size = shared->end_position() - shared->start_position();
  isolate->counters()->total_compile_size()->Increment(compiled_size);

  {
    CompilationHandleScope handle_scope(*info);

    if (!compiling_for_osr && InstallCodeFromOptimizedCodeMap(*info)) {
      return true;
    }

    if (Parser::Parse(*info)) {
      LanguageMode language_mode = info->function()->language_mode();
      info->SetLanguageMode(language_mode);
      shared->set_language_mode(language_mode);
      info->SaveHandles();

      if (Rewriter::Rewrite(*info) && Scope::Analyze(*info)) {
        RecompileJob* job = new(info->zone()) RecompileJob(*info);
        RecompileJob::Status status = job->CreateGraph();
        if (status == RecompileJob::SUCCEEDED) {
          info.Detach();
          shared->code()->set_profiler_ticks(0);
          isolate->optimizing_compiler_thread()->QueueForOptimization(job);
          ASSERT(!isolate->has_pending_exception());
          return true;
        } else if (status == RecompileJob::BAILED_OUT) {
          isolate->clear_pending_exception();
          InstallFullCode(*info);
        }
      }
    }
  }

  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  return false;
}


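// Installs the result of a completed concurrent recompilation job, or falls
// back to the unoptimized full code when the job bailed out, the debugger
// is active, or a code dependency changed.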
Handle<Code> Compiler::InstallOptimizedCode(RecompileJob* job) {
  SmartPointer<CompilationInfo> info(job->info());
  // The function may have already been optimized by OSR.  Simply continue.
  // Except when OSR already disabled optimization for some reason.
  if (info->shared_info()->optimization_disabled()) {
    info->AbortOptimization();
    InstallFullCode(*info);
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** aborting optimization for ");
      info->closure()->PrintName();
      PrintF(" as it has been disabled.\n");
    }
    ASSERT(!info->closure()->IsInRecompileQueue());
    return Handle<Code>::null();
  }

  Isolate* isolate = info->isolate();
  VMState<COMPILER> state(isolate);
  Logger::TimerEventScope timer(
      isolate, Logger::TimerEventScope::v8_recompile_synchronous);
  // If crankshaft succeeded, install the optimized code else install
  // the unoptimized code.
  RecompileJob::Status status = job->last_status();
  if (info->HasAbortedDueToDependencyChange()) {
    info->set_bailout_reason(kBailedOutDueToDependencyChange);
    status = job->AbortOptimization();
  } else if (status != RecompileJob::SUCCEEDED) {
    info->set_bailout_reason(kFailedBailedOutLastTime);
    status = job->AbortOptimization();
  } else if (isolate->DebuggerHasBreakPoints()) {
    info->set_bailout_reason(kDebuggerIsActive);
    status = job->AbortOptimization();
  } else {
    status = job->GenerateAndInstallCode();
    ASSERT(status == RecompileJob::SUCCEEDED ||
           status == RecompileJob::BAILED_OUT);
  }

  InstallCodeCommon(*info);
  if (status == RecompileJob::SUCCEEDED) {
    Handle<Code> code = info->code();
    ASSERT(info->shared_info()->scope_info() != ScopeInfo::Empty(isolate));
    info->closure()->ReplaceCode(*code);
    if (info->shared_info()->SearchOptimizedCodeMap(
            info->closure()->context()->native_context()) == -1) {
      InsertCodeIntoOptimizedCodeMap(*info);
    }
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Optimized code for ");
      info->closure()->PrintName();
      PrintF(" installed.\n");
    }
  } else {
    info->AbortOptimization();
    InstallFullCode(*info);
  }
  // Optimized code is finally replacing unoptimized code.  Reset the latter's
  // profiler ticks to prevent too soon re-opt after a deopt.
  info->shared_info()->code()->set_profiler_ticks(0);
  ASSERT(!info->closure()->IsInRecompileQueue());
  return (status == RecompileJob::SUCCEEDED) ? info->code()
                                             : Handle<Code>::null();
}


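// Creates a SharedFunctionInfo for a function literal encountered while
// compiling an enclosing script, either generating its code eagerly or
// marking it for lazy compilation.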
Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
                                                       Handle<Script> script) {
  // Precondition: code has been parsed and scopes have been analyzed.
  CompilationInfoWithZone info(script);
  info.SetFunction(literal);
  info.SetScope(literal->scope());
  info.SetLanguageMode(literal->scope()->language_mode());

  Isolate* isolate = info.isolate();
  Factory* factory = isolate->factory();
  LiveEditFunctionTracker live_edit_tracker(isolate, literal);
  // Determine if the function can be lazily compiled. This is necessary to
  // allow some of our builtin JS files to be lazily compiled. These
  // builtins cannot be handled lazily by the parser, since we have to know
  // if a function uses the special natives syntax, which is something the
  // parser records.
  // If the debugger requests compilation for break points, we cannot be
  // aggressive about lazy compilation, because it might trigger compilation
  // of functions without an outer context when setting a breakpoint through
  // Debug::FindSharedFunctionInfoInScript.
  bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
  bool allow_lazy = literal->AllowsLazyCompilation() &&
      !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);

  Handle<ScopeInfo> scope_info(ScopeInfo::Empty(isolate));

  // Generate code
  if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
    Handle<Code> code = isolate->builtins()->LazyCompile();
    info.SetCode(code);
  } else if (GenerateCode(&info)) {
    ASSERT(!info.code().is_null());
    scope_info = ScopeInfo::Create(info.scope(), info.zone());
  } else {
    return Handle<SharedFunctionInfo>::null();
  }

  // Create a shared function info object.
  Handle<SharedFunctionInfo> result =
      factory->NewSharedFunctionInfo(literal->name(),
                                     literal->materialized_literal_count(),
                                     literal->is_generator(),
                                     info.code(),
                                     scope_info);
  SetFunctionInfo(result, literal, false, script);
  RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
  result->set_allows_lazy_compilation(allow_lazy);
  result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);

  // Set the expected number of properties for instances and return
  // the resulting function.
  SetExpectedNofPropertiesFromEstimate(result,
                                       literal->expected_property_count());
  live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
  return result;
}


// Sets the function info on a function.
// The start_position points to the first '(' character after the function name
// in the full script source. When counting characters in the script source the
// first character is number 0 (not 1).
void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
                               FunctionLiteral* lit,
                               bool is_toplevel,
                               Handle<Script> script) {
  function_info->set_length(lit->parameter_count());
  function_info->set_formal_parameter_count(lit->parameter_count());
  function_info->set_script(*script);
  function_info->set_function_token_position(lit->function_token_position());
  function_info->set_start_position(lit->start_position());
  function_info->set_end_position(lit->end_position());
  function_info->set_is_expression(lit->is_expression());
  function_info->set_is_anonymous(lit->is_anonymous());
  function_info->set_is_toplevel(is_toplevel);
  function_info->set_inferred_name(*lit->inferred_name());
  function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
  function_info->set_allows_lazy_compilation_without_context(
      lit->AllowsLazyCompilationWithoutContext());
  function_info->set_language_mode(lit->language_mode());
  function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
  function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
  function_info->set_ast_node_count(lit->ast_node_count());
  function_info->set_is_function(lit->is_function());
  function_info->set_dont_optimize_reason(lit->dont_optimize_reason());
  function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
  function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
  function_info->set_is_generator(lit->is_generator());
}


void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                         CompilationInfo* info,
                                         Handle<SharedFunctionInfo> shared) {
  // SharedFunctionInfo is passed separately, because if CompilationInfo
  // was created using a Script object, it will not have it.

  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
      info->isolate()->cpu_profiler()->is_profiling()) {
    Handle<Script> script = info->script();
    Handle<Code> code = info->code();
    if (*code == info->isolate()->builtins()->builtin(Builtins::kLazyCompile))
      return;
    int line_num = GetScriptLineNumber(script, shared->start_position()) + 1;
    int column_num =
        GetScriptColumnNumber(script, shared->start_position()) + 1;
    USE(line_num);
    if (script->name()->IsString()) {
      PROFILE(info->isolate(),
              CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
                              *code,
                              *shared,
                              info,
                              String::cast(script->name()),
                              line_num,
                              column_num));
    } else {
      PROFILE(info->isolate(),
              CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
                              *code,
                              *shared,
                              info,
                              info->isolate()->heap()->empty_string(),
                              line_num,
                              column_num));
    }
  }

  GDBJIT(AddCode(Handle<String>(shared->DebugName()),
                 Handle<Script>(info->script()),
                 Handle<Code>(info->code()),
                 info));
}


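// CompilationPhase tracks the time and zone memory spent in a named
// compilation phase when --hydrogen-stats is enabled.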
CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
    : name_(name), info_(info), zone_(info->isolate()) {
  if (FLAG_hydrogen_stats) {
    info_zone_start_allocation_size_ = info->zone()->allocation_size();
    timer_.Start();
  }
}


CompilationPhase::~CompilationPhase() {
  if (FLAG_hydrogen_stats) {
    unsigned size = zone()->allocation_size();
    size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
  }
}


bool CompilationPhase::ShouldProduceTraceOutput() const {
  // Trace if the appropriate trace flag is set and the phase name's first
  // character is in the FLAG_trace_phase command line parameter.
  AllowHandleDereference allow_deref;
  bool tracing_on = info()->IsStub()
      ? FLAG_trace_hydrogen_stubs
      : (FLAG_trace_hydrogen &&
         info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
  return (tracing_on &&
      OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
}

} }  // namespace v8::internal