Revision f230a1cf deps/v8/src/isolate.cc

View differences:

deps/v8/src/isolate.cc
@@ -42,7 +42,6 @@
 #include "isolate-inl.h"
 #include "lithium-allocator.h"
 #include "log.h"
-#include "marking-thread.h"
 #include "messages.h"
 #include "platform.h"
 #include "regexp-stack.h"
@@ -121,11 +120,7 @@
 void ThreadLocalTop::Initialize() {
   InitializeInternal();
 #ifdef USE_SIMULATOR
-#if V8_TARGET_ARCH_ARM
   simulator_ = Simulator::current(isolate_);
-#elif V8_TARGET_ARCH_MIPS
-  simulator_ = Simulator::current(isolate_);
-#endif
 #endif
   thread_id_ = ThreadId::Current();
 }
@@ -147,8 +142,6 @@
     return number_of_threads;
   } else if (type == CONCURRENT_SWEEPING) {
     return number_of_threads - 1;
-  } else if (type == PARALLEL_MARKING) {
-    return number_of_threads;
   }
   return 1;
 }
@@ -345,6 +338,14 @@
 Thread::LocalStorageKey PerThreadAssertScopeBase::thread_local_key;
 #endif  // DEBUG
 Mutex Isolate::process_wide_mutex_;
+// TODO(dcarney): Remove with default isolate.
+enum DefaultIsolateStatus {
+  kDefaultIsolateUninitialized,
+  kDefaultIsolateInitialized,
+  kDefaultIsolateCrashIfInitialized
+};
+static DefaultIsolateStatus default_isolate_status_
+    = kDefaultIsolateUninitialized;
 Isolate::ThreadDataTable* Isolate::thread_data_table_ = NULL;
 Atomic32 Isolate::isolate_counter_ = 0;
 
@@ -382,8 +383,16 @@
 }
 
 
+void Isolate::SetCrashIfDefaultIsolateInitialized() {
+  LockGuard<Mutex> lock_guard(&process_wide_mutex_);
+  CHECK(default_isolate_status_ != kDefaultIsolateInitialized);
+  default_isolate_status_ = kDefaultIsolateCrashIfInitialized;
+}
+
+
 void Isolate::EnsureDefaultIsolate() {
   LockGuard<Mutex> lock_guard(&process_wide_mutex_);
+  CHECK(default_isolate_status_ != kDefaultIsolateCrashIfInitialized);
   if (default_isolate_ == NULL) {
     isolate_key_ = Thread::CreateThreadLocalKey();
     thread_id_key_ = Thread::CreateThreadLocalKey();
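
Note on the two hunks above: the new DefaultIsolateStatus enum turns the legacy default-isolate lifecycle into an explicit, checked state. A minimal sketch of the intended interaction, assuming a hypothetical embedder-startup call order (these call sites are not part of this revision):

    // Hypothetical startup path that opts out of the legacy default isolate:
    Isolate::SetCrashIfDefaultIsolateInitialized();
    // default_isolate_status_ is now kDefaultIsolateCrashIfInitialized

    // Any later attempt to create the default isolate lazily now fails loudly:
    Isolate::EnsureDefaultIsolate();
    // CHECK(default_isolate_status_ != kDefaultIsolateCrashIfInitialized) fires

The opposite ordering is guarded as well: SetCrashIfDefaultIsolateInitialized() itself CHECKs that the status is not kDefaultIsolateInitialized.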
@@ -1087,7 +1096,7 @@
   Handle<String> key = factory()->stack_overflow_string();
   Handle<JSObject> boilerplate =
       Handle<JSObject>::cast(GetProperty(this, js_builtins_object(), key));
-  Handle<JSObject> exception = Copy(boilerplate);
+  Handle<JSObject> exception = JSObject::Copy(boilerplate);
   DoThrow(*exception, NULL);
 
   // Get stack trace limit.
@@ -1657,11 +1666,7 @@
   // This might be just paranoia, but it seems to be needed in case a
   // thread_local_top_ is restored on a separate OS thread.
 #ifdef USE_SIMULATOR
-#if V8_TARGET_ARCH_ARM
   thread_local_top()->simulator_ = Simulator::current(this);
-#elif V8_TARGET_ARCH_MIPS
-  thread_local_top()->simulator_ = Simulator::current(this);
-#endif
 #endif
   ASSERT(context() == NULL || context()->IsContext());
   return from + sizeof(ThreadLocalTop);
@@ -1776,7 +1781,6 @@
       // TODO(bmeurer) Initialized lazily because it depends on flags; can
       // be fixed once the default isolate cleanup is done.
       random_number_generator_(NULL),
-      is_memory_constrained_(false),
       has_fatal_error_(false),
       use_crankshaft_(true),
       initialized_from_snapshot_(false),
@@ -1784,8 +1788,7 @@
       heap_profiler_(NULL),
       function_entry_hook_(NULL),
       deferred_handles_head_(NULL),
-      optimizing_compiler_thread_(this),
-      marking_thread_(NULL),
+      optimizing_compiler_thread_(NULL),
       sweeper_thread_(NULL),
       stress_deopt_count_(0) {
   id_ = NoBarrier_AtomicIncrement(&isolate_counter_, 1);
@@ -1879,7 +1882,10 @@
     debugger()->UnloadDebugger();
 #endif
 
-    if (FLAG_concurrent_recompilation) optimizing_compiler_thread_.Stop();
+    if (FLAG_concurrent_recompilation) {
+      optimizing_compiler_thread_->Stop();
+      delete optimizing_compiler_thread_;
+    }
 
     if (FLAG_sweeper_threads > 0) {
       for (int i = 0; i < FLAG_sweeper_threads; i++) {
@@ -1889,14 +1895,6 @@
       delete[] sweeper_thread_;
     }
 
-    if (FLAG_marking_threads > 0) {
-      for (int i = 0; i < FLAG_marking_threads; i++) {
-        marking_thread_[i]->Stop();
-        delete marking_thread_[i];
-      }
-      delete[] marking_thread_;
-    }
-
     if (FLAG_hydrogen_stats) GetHStatistics()->Print();
 
     if (FLAG_print_deopt_stress) {
@@ -1911,7 +1909,7 @@
     deoptimizer_data_ = NULL;
     if (FLAG_preemption) {
       v8::Locker locker(reinterpret_cast<v8::Isolate*>(this));
-      v8::Locker::StopPreemption();
+      v8::Locker::StopPreemption(reinterpret_cast<v8::Isolate*>(this));
     }
     builtins_.TearDown();
     bootstrapper_->TearDown();
@@ -2219,6 +2217,11 @@
 
   deoptimizer_data_ = new DeoptimizerData(memory_allocator_);
 
+  if (FLAG_concurrent_recompilation) {
+    optimizing_compiler_thread_ = new OptimizingCompilerThread(this);
+    optimizing_compiler_thread_->Start();
+  }
+
   const bool create_heap_objects = (des == NULL);
   if (create_heap_objects && !heap_.CreateHeapObjects()) {
     V8::FatalProcessOutOfMemory("heap object creation");
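
Note: taken together with the constructor and teardown hunks above, optimizing_compiler_thread_ changes from a by-value member into an owned pointer that exists only while FLAG_concurrent_recompilation is enabled. A condensed view of the new lifecycle, pieced together from this revision's hunks (not a contiguous excerpt of the file):

    optimizing_compiler_thread_(NULL),        // constructor: no thread by default

    if (FLAG_concurrent_recompilation) {      // during isolate initialization (hunk above)
      optimizing_compiler_thread_ = new OptimizingCompilerThread(this);
      optimizing_compiler_thread_->Start();
    }

    if (FLAG_concurrent_recompilation) {      // during teardown (earlier hunk): stop, then free
      optimizing_compiler_thread_->Stop();
      delete optimizing_compiler_thread_;
    }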
@@ -2248,7 +2251,7 @@
 
   if (FLAG_preemption) {
     v8::Locker locker(reinterpret_cast<v8::Isolate*>(this));
-    v8::Locker::StartPreemption(100);
+    v8::Locker::StartPreemption(reinterpret_cast<v8::Isolate*>(this), 100);
   }
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
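
Note: both preemption call sites now pass the target isolate explicitly instead of relying on the current one. A hedged embedder-side sketch with the signatures inferred from these call sites; the surrounding setup (v8::Isolate::New(), the script comment) is illustrative and not part of this revision:

    v8::Isolate* isolate = v8::Isolate::New();
    {
      v8::Locker locker(isolate);
      v8::Locker::StartPreemption(isolate, 100);  // preempt roughly every 100 ms
      // ... run scripts on this isolate ...
      v8::Locker::StopPreemption(isolate);
    }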
@@ -2318,21 +2321,13 @@
                                    DONT_TRACK_ALLOCATION_SITE, 0);
     stub.InitializeInterfaceDescriptor(
         this, code_stub_interface_descriptor(CodeStub::FastCloneShallowArray));
+    BinaryOpStub::InitializeForIsolate(this);
     CompareNilICStub::InitializeForIsolate(this);
     ToBooleanStub::InitializeForIsolate(this);
     ArrayConstructorStubBase::InstallDescriptors(this);
     InternalArrayConstructorStubBase::InstallDescriptors(this);
     FastNewClosureStub::InstallDescriptors(this);
-  }
-
-  if (FLAG_concurrent_recompilation) optimizing_compiler_thread_.Start();
-
-  if (FLAG_marking_threads > 0) {
-    marking_thread_ = new MarkingThread*[FLAG_marking_threads];
-    for (int i = 0; i < FLAG_marking_threads; i++) {
-      marking_thread_[i] = new MarkingThread(this);
-      marking_thread_[i]->Start();
-    }
+    NumberToStringStub::InstallDescriptors(this);
   }
 
   if (FLAG_sweeper_threads > 0) {
