Revision f230a1cf deps/v8/src/mark-compact.cc
 old   new    ('-' = line removed in this revision, '+' = line added)

  38    38     #include "ic-inl.h"
  39    39     #include "incremental-marking.h"
  40    40     #include "mark-compact.h"
  41          - #include "marking-thread.h"
  42    41     #include "objects-visiting.h"
  43    42     #include "objects-visiting-inl.h"
  44    43     #include "stub-cache.h"
  ...   ...
  92    91
  93    92       void VisitEmbeddedPointer(RelocInfo* rinfo) {
  94    93         ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  95          -     if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
  96          -         rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
  97          -         !rinfo->target_object()->IsMap() ||
  98          -         !Map::cast(rinfo->target_object())->CanTransition()) {
        94   +     if (!Code::IsWeakEmbeddedObject(rinfo->host()->kind(),
        95   +                                     rinfo->target_object())) {
  99    96           VisitPointer(rinfo->target_object_address());
 100    97         }
 101    98       }
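This hunk folds the four inlined guards (both flags, the host code kind, and the transitionable-map check) into a single Code::IsWeakEmbeddedObject predicate. As a reading aid, a sketch of such a predicate reconstructed purely from the condition removed above (the actual helper lives elsewhere in this revision and may recognize more kinds of weakly embedded objects) could look like:

    // Hypothetical reconstruction, for illustration only: it simply mirrors the
    // condition that used to be inlined in VisitEmbeddedPointer. The real
    // Code::IsWeakEmbeddedObject may accept additional kinds of embedded objects.
    static bool IsWeakEmbeddedObject(Code::Kind kind, Object* object) {
      if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps) {
        return false;  // Weak treatment of embedded maps is disabled.
      }
      if (kind != Code::OPTIMIZED_FUNCTION) return false;  // Optimized code only.
      // The old condition treated only transitionable maps as weak.
      return object->IsMap() && Map::cast(object)->CanTransition();
    }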
  ...   ...
 408   405       ASSERT(state_ == PREPARE_GC);
 409   406       ASSERT(encountered_weak_collections_ == Smi::FromInt(0));
 410   407
       408   +   heap()->allocation_mementos_found_ = 0;
       409   +
 411   410       MarkLiveObjects();
 412   411       ASSERT(heap_->incremental_marking()->IsStopped());
 413   412
  ...   ...
 432   431     #endif
 433   432
 434   433     #ifdef VERIFY_HEAP
 435          -   if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code &&
 436          -       heap()->weak_embedded_maps_verification_enabled()) {
 437          -     VerifyWeakEmbeddedMapsInOptimizedCode();
       434   +   if (heap()->weak_embedded_objects_verification_enabled()) {
       435   +     VerifyWeakEmbeddedObjectsInOptimizedCode();
 438   436       }
 439   437       if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
 440   438         VerifyOmittedMapChecks();
  ...   ...
 450   448         marking_parity_ = EVEN_MARKING_PARITY;
 451   449       }
 452   450
       451   +   if (FLAG_trace_track_allocation_sites &&
       452   +       heap()->allocation_mementos_found_ > 0) {
       453   +     PrintF("AllocationMementos found during mark-sweep = %d\n",
       454   +            heap()->allocation_mementos_found_);
       455   +   }
 453   456       tracer_ = NULL;
 454   457     }
 455   458
  ...   ...
 495   498     }
 496   499
 497   500
 498          - void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() {
       501   + void MarkCompactCollector::VerifyWeakEmbeddedObjectsInOptimizedCode() {
 499   502       HeapObjectIterator code_iterator(heap()->code_space());
 500   503       for (HeapObject* obj = code_iterator.Next();
 501   504            obj != NULL;
  ...   ...
 503   506         Code* code = Code::cast(obj);
 504   507         if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
 505   508         if (WillBeDeoptimized(code)) continue;
 506          -     code->VerifyEmbeddedMapsDependency();
       509   +     code->VerifyEmbeddedObjectsDependency();
 507   510       }
 508   511     }
 509   512
  ...   ...
 601   604     }
 602   605
 603   606
 604          - void MarkCompactCollector::MarkInParallel() {
 605          -   for (int i = 0; i < FLAG_marking_threads; i++) {
 606          -     isolate()->marking_threads()[i]->StartMarking();
 607          -   }
 608          - }
 609          -
 610          -
 611          - void MarkCompactCollector::WaitUntilMarkingCompleted() {
 612          -   for (int i = 0; i < FLAG_marking_threads; i++) {
 613          -     isolate()->marking_threads()[i]->WaitForMarkingThread();
 614          -   }
 615          - }
 616          -
 617          -
 618   607     bool Marking::TransferMark(Address old_start, Address new_start) {
 619   608       // This is only used when resizing an object.
 620   609       ASSERT(MemoryChunk::FromAddress(old_start) ==
  ...   ...
1481  1470       // Mark the backing hash table without pushing it on the marking stack.
1482  1471       Object* table_object = weak_collection->table();
1483  1472       if (!table_object->IsHashTable()) return;
1484          -   ObjectHashTable* table = ObjectHashTable::cast(table_object);
      1473   +   WeakHashTable* table = WeakHashTable::cast(table_object);
1485  1474       Object** table_slot =
1486  1475           HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset);
1487  1476       MarkBit table_mark = Marking::MarkBitFrom(table);
  ...   ...
1581  1570           fixed_array->map() != heap->fixed_double_array_map() &&
1582  1571           fixed_array != heap->empty_fixed_array()) {
1583  1572         if (fixed_array->IsDictionary()) {
1584          -       heap->RecordObjectStats(FIXED_ARRAY_TYPE,
1585          -                               dictionary_type,
1586          -                               fixed_array->Size());
      1573   +       heap->RecordFixedArraySubTypeStats(dictionary_type,
      1574   +                                          fixed_array->Size());
1587  1575         } else {
1588          -       heap->RecordObjectStats(FIXED_ARRAY_TYPE,
1589          -                               fast_type,
1590          -                               fixed_array->Size());
      1576   +       heap->RecordFixedArraySubTypeStats(fast_type,
      1577   +                                          fixed_array->Size());
1591  1578         }
1592  1579       }
1593  1580     }
  ...   ...
1597  1584         MarkCompactMarkingVisitor::VisitorId id, Map* map, HeapObject* obj) {
1598  1585       Heap* heap = map->GetHeap();
1599  1586       int object_size = obj->Size();
1600          -   heap->RecordObjectStats(map->instance_type(), -1, object_size);
      1587   +   heap->RecordObjectStats(map->instance_type(), object_size);
1601  1588       non_count_table_.GetVisitorById(id)(map, obj);
1602  1589       if (obj->IsJSObject()) {
1603  1590         JSObject* object = JSObject::cast(obj);
  ...   ...
1630  1617         if (map_obj->owns_descriptors() &&
1631  1618             array != heap->empty_descriptor_array()) {
1632  1619           int fixed_array_size = array->Size();
1633          -       heap->RecordObjectStats(FIXED_ARRAY_TYPE,
1634          -                               DESCRIPTOR_ARRAY_SUB_TYPE,
1635          -                               fixed_array_size);
      1620   +       heap->RecordFixedArraySubTypeStats(DESCRIPTOR_ARRAY_SUB_TYPE,
      1621   +                                          fixed_array_size);
1636  1622         }
1637  1623         if (map_obj->HasTransitionArray()) {
1638  1624           int fixed_array_size = map_obj->transitions()->Size();
1639          -       heap->RecordObjectStats(FIXED_ARRAY_TYPE,
1640          -                               TRANSITION_ARRAY_SUB_TYPE,
1641          -                               fixed_array_size);
      1625   +       heap->RecordFixedArraySubTypeStats(TRANSITION_ARRAY_SUB_TYPE,
      1626   +                                          fixed_array_size);
1642  1627         }
1643  1628         if (map_obj->has_code_cache()) {
1644  1629           CodeCache* cache = CodeCache::cast(map_obj->code_cache());
1645          -       heap->RecordObjectStats(
1646          -           FIXED_ARRAY_TYPE,
1647          -           MAP_CODE_CACHE_SUB_TYPE,
1648          -           cache->default_cache()->Size());
      1630   +       heap->RecordFixedArraySubTypeStats(MAP_CODE_CACHE_SUB_TYPE,
      1631   +                                          cache->default_cache()->Size());
1649  1632           if (!cache->normal_type_cache()->IsUndefined()) {
1650          -         heap->RecordObjectStats(
1651          -             FIXED_ARRAY_TYPE,
      1633   +         heap->RecordFixedArraySubTypeStats(
1652  1634               MAP_CODE_CACHE_SUB_TYPE,
1653  1635               FixedArray::cast(cache->normal_type_cache())->Size());
1654  1636           }
  ...   ...
1666  1648         Heap* heap = map->GetHeap();
1667  1649         int object_size = obj->Size();
1668  1650         ASSERT(map->instance_type() == CODE_TYPE);
1669          -     heap->RecordObjectStats(CODE_TYPE, Code::cast(obj)->kind(), object_size);
      1651   +     Code* code_obj = Code::cast(obj);
      1652   +     heap->RecordCodeSubTypeStats(code_obj->kind(), code_obj->GetAge(),
      1653   +                                  object_size);
1670  1654         ObjectStatsVisitBase(kVisitCode, map, obj);
1671  1655       }
1672  1656     };
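Across these object-stats hunks the overloaded three-argument RecordObjectStats calls are split into dedicated helpers: RecordObjectStats(instance_type, size) for plain per-type accounting, RecordFixedArraySubTypeStats(sub_type, size) for the FIXED_ARRAY_TYPE sub-buckets (descriptor arrays, transition arrays, code caches, scope info, string table), and RecordCodeSubTypeStats(kind, age, size) for code objects. A minimal, self-contained sketch of counter-backed helpers with these call shapes (class name and bucket layout here are assumptions for illustration, not V8's actual Heap members) might look like:

    #include <cstddef>
    #include <map>

    // Sketch only: stats recorders matching the call shapes used above.
    class ObjectStatsRecorder {
     public:
      // Per-instance-type object count and cumulative size.
      void RecordObjectStats(int instance_type, size_t size) {
        object_counts_[instance_type] += 1;
        object_sizes_[instance_type] += size;
      }
      // FIXED_ARRAY_TYPE is broken down further by sub type.
      void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
        fixed_array_counts_[array_sub_type] += 1;
        fixed_array_sizes_[array_sub_type] += size;
      }
      // Code objects are bucketed by both kind and age rather than by sub type.
      void RecordCodeSubTypeStats(int code_kind, int code_age, size_t size) {
        code_kind_sizes_[code_kind] += size;
        code_age_sizes_[code_age] += size;
      }

     private:
      std::map<int, size_t> object_counts_, object_sizes_;
      std::map<int, size_t> fixed_array_counts_, fixed_array_sizes_;
      std::map<int, size_t> code_kind_sizes_, code_age_sizes_;
    };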
  ...   ...
1680  1664         Heap* heap = map->GetHeap();
1681  1665         SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj);
1682  1666         if (sfi->scope_info() != heap->empty_fixed_array()) {
1683          -       heap->RecordObjectStats(
1684          -           FIXED_ARRAY_TYPE,
      1667   +       heap->RecordFixedArraySubTypeStats(
1685  1668               SCOPE_INFO_SUB_TYPE,
1686  1669               FixedArray::cast(sfi->scope_info())->Size());
1687  1670         }
  ...   ...
1698  1681         Heap* heap = map->GetHeap();
1699  1682         FixedArray* fixed_array = FixedArray::cast(obj);
1700  1683         if (fixed_array == heap->string_table()) {
1701          -       heap->RecordObjectStats(
1702          -           FIXED_ARRAY_TYPE,
      1684   +       heap->RecordFixedArraySubTypeStats(
1703  1685               STRING_TABLE_SUB_TYPE,
1704  1686               fixed_array->Size());
1705  1687         }
  ...   ...
2017  1999         int size = object->Size();
2018  2000         survivors_size += size;
2019  2001
      2002   +     if (FLAG_trace_track_allocation_sites && object->IsJSObject()) {
      2003   +       if (AllocationMemento::FindForJSObject(JSObject::cast(object), true)
      2004   +           != NULL) {
      2005   +         heap()->allocation_mementos_found_++;
      2006   +       }
      2007   +     }
      2008   +
2020  2009         offset++;
2021  2010         current_cell >>= 1;
2022  2011         // Aggressively promote young survivors to the old space.
  ...   ...
2116  2105       // Handle the string table specially.
2117  2106       MarkStringTable(visitor);
2118  2107
      2108   +   MarkWeakObjectToCodeTable();
      2109   +
2119  2110       // There may be overflowed objects in the heap. Visit them now.
2120  2111       while (marking_deque_.overflowed()) {
2121  2112         RefillMarkingDeque();
  ...   ...
2156  2147     }
2157  2148
2158  2149
      2150   + void MarkCompactCollector::MarkWeakObjectToCodeTable() {
      2151   +   HeapObject* weak_object_to_code_table =
      2152   +       HeapObject::cast(heap()->weak_object_to_code_table());
      2153   +   if (!IsMarked(weak_object_to_code_table)) {
      2154   +     MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table);
      2155   +     SetMark(weak_object_to_code_table, mark);
      2156   +   }
      2157   + }
      2158   +
      2159   +
2159  2160     // Mark all objects reachable from the objects on the marking stack.
2160  2161     // Before: the marking stack contains zero or more heap object pointers.
2161  2162     // After: the marking stack is empty, and all objects reachable from the
  ...   ...
2523  2524         if (map_mark.Get()) {
2524  2525           ClearNonLiveDependentCode(map->dependent_code());
2525  2526         } else {
2526          -       ClearAndDeoptimizeDependentCode(map);
      2527   +       ClearAndDeoptimizeDependentCode(map->dependent_code());
      2528   +       map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
2527  2529         }
2528  2530       }
2529  2531
  ...   ...
2537  2539           ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code());
2538  2540         }
2539  2541       }
      2542   +
      2543   +   if (heap_->weak_object_to_code_table()->IsHashTable()) {
      2544   +     WeakHashTable* table =
      2545   +         WeakHashTable::cast(heap_->weak_object_to_code_table());
      2546   +     uint32_t capacity = table->Capacity();
      2547   +     for (uint32_t i = 0; i < capacity; i++) {
      2548   +       uint32_t key_index = table->EntryToIndex(i);
      2549   +       Object* key = table->get(key_index);
      2550   +       if (!table->IsKey(key)) continue;
      2551   +       uint32_t value_index = table->EntryToValueIndex(i);
      2552   +       Object* value = table->get(value_index);
      2553   +       if (IsMarked(key)) {
      2554   +         if (!IsMarked(value)) {
      2555   +           HeapObject* obj = HeapObject::cast(value);
      2556   +           MarkBit mark = Marking::MarkBitFrom(obj);
      2557   +           SetMark(obj, mark);
      2558   +         }
      2559   +         ClearNonLiveDependentCode(DependentCode::cast(value));
      2560   +       } else {
      2561   +         ClearAndDeoptimizeDependentCode(DependentCode::cast(value));
      2562   +         table->set(key_index, heap_->the_hole_value());
      2563   +         table->set(value_index, heap_->the_hole_value());
      2564   +       }
      2565   +     }
      2566   +   }
2540  2567     }
2541  2568
2542  2569
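The new weak_object_to_code_table is handled in three places in this revision: it is marked up front (MarkWeakObjectToCodeTable, above), its dead entries are cleared here with their dependent code deoptimized, and after evacuation its slots are updated and the table is rehashed (see the final hunk below). A minimal, self-contained sketch of the sweep step alone, using std:: containers as stand-ins for WeakHashTable and DependentCode (names and types here are illustrative assumptions, not V8 code), is:

    #include <unordered_map>
    #include <unordered_set>
    #include <vector>

    // Stand-in for DependentCode: code that must be deoptimized when the object
    // it depends on dies.
    struct DependentCodeList { std::vector<int> code_ids; };

    // live:  objects the marker found reachable (stand-in for mark bits).
    // table: weak map object-id -> dependent code (stand-in for
    //        heap->weak_object_to_code_table()).
    // Entries whose key is dead are removed and their code queued for deopt;
    // entries whose key is alive are kept, which also keeps the value alive.
    void SweepWeakObjectToCodeTable(
        const std::unordered_set<int>& live,
        std::unordered_map<int, DependentCodeList>* table,
        std::vector<int>* deoptimize_queue) {
      for (auto it = table->begin(); it != table->end();) {
        if (live.count(it->first) != 0) {
          ++it;  // Key alive: keep the dependency entry.
        } else {
          for (int code_id : it->second.code_ids) {
            deoptimize_queue->push_back(code_id);  // Key dead: deoptimize dependents.
          }
          it = table->erase(it);  // Clear the entry (V8 writes the hole value).
        }
      }
    }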
  ...   ...
2602  2629     }
2603  2630
2604  2631
2605          - void MarkCompactCollector::ClearAndDeoptimizeDependentCode(Map* map) {
      2632   + void MarkCompactCollector::ClearAndDeoptimizeDependentCode(
      2633   +     DependentCode* entries) {
2606  2634       DisallowHeapAllocation no_allocation;
2607          -   DependentCode* entries = map->dependent_code();
2608  2635       DependentCode::GroupStartIndexes starts(entries);
2609  2636       int number_of_entries = starts.number_of_entries();
2610  2637       if (number_of_entries == 0) return;
  ...   ...
2620  2647         }
2621  2648         entries->clear_at(i);
2622  2649       }
2623          -   map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
2624  2650     }
2625  2651
2626  2652
  ...   ...
2726  2752                                              Address src,
2727  2753                                              int size,
2728  2754                                              AllocationSpace dest) {
2729          -   HEAP_PROFILE(heap(), ObjectMoveEvent(src, dst));
2730          -   // TODO(hpayer): Replace these checks with asserts.
2731          -   CHECK(heap()->AllowedToBeMigrated(HeapObject::FromAddress(src), dest));
2732          -   CHECK(dest != LO_SPACE && size <= Page::kMaxNonCodeHeapObjectSize);
      2755   +   HeapProfiler* heap_profiler = heap()->isolate()->heap_profiler();
      2756   +   if (heap_profiler->is_profiling()) {
      2757   +     heap_profiler->ObjectMoveEvent(src, dst, size);
      2758   +   }
      2759   +   ASSERT(heap()->AllowedToBeMigrated(HeapObject::FromAddress(src), dest));
      2760   +   ASSERT(dest != LO_SPACE && size <= Page::kMaxNonCodeHeapObjectSize);
2733  2761       if (dest == OLD_POINTER_SPACE) {
2734  2762         Address src_slot = src;
2735  2763         Address dst_slot = dst;
  ...   ...
2910  2938       ASSERT(target_space == heap()->old_pointer_space() ||
2911  2939              target_space == heap()->old_data_space());
2912  2940       Object* result;
2913          -   MaybeObject* maybe_result = target_space->AllocateRaw(object_size);
      2941   +   MaybeObject* maybe_result = target_space->AllocateRaw(
      2942   +       object_size,
      2943   +       PagedSpace::MOVE_OBJECT);
2914  2944       if (maybe_result->ToObject(&result)) {
2915  2945         HeapObject* target = HeapObject::cast(result);
2916  2946         MigrateObject(target->address(),
  ...   ...
2983  3013
2984  3014           int size = object->Size();
2985  3015
2986          -       MaybeObject* target = space->AllocateRaw(size);
      3016   +       MaybeObject* target = space->AllocateRaw(size, PagedSpace::MOVE_OBJECT);
2987  3017           if (target->IsFailure()) {
2988  3018             // OS refused to give us memory.
2989  3019             V8::FatalProcessOutOfMemory("Evacuation");
  ...   ...
3459  3489       updating_visitor.VisitPointer(heap_->native_contexts_list_address());
3460  3490
3461  3491       heap_->string_table()->Iterate(&updating_visitor);
      3492   +   updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address());
      3493   +   if (heap_->weak_object_to_code_table()->IsHashTable()) {
      3494   +     WeakHashTable* table =
      3495   +         WeakHashTable::cast(heap_->weak_object_to_code_table());
      3496   +     table->Iterate(&updating_visitor);
      3497   +     table->Rehash(heap_->undefined_value());
      3498   +   }
3462  3499
3463  3500       // Update pointers from external string table.
3464  3501       heap_->UpdateReferencesInExternalStringTable(
Also available in: Unified diff