deps/v8/src/objects-visiting-inl.h @ f230a1cf
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

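// Initialize() below fills the static dispatch table that maps each visitor
// id (derived from an object's map/instance type) to a specialized visit
// function, so the scavenger dispatches in constant time per object. The
// third template argument (int) is the visitors' return type: new-space
// visitors return the size in bytes of the object they visited.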
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                  Symbol::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                  FixedArray::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);

  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                  Context::ScavengeBodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                  SharedFunctionInfo::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSWeakSet, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


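// The array buffer, typed array and data view visitors below each visit the
// object's pointer fields in two ranges, deliberately skipping the weak
// fields (kWeakNextOffset, and for array buffers also kWeakFirstViewOffset,
// which the STATIC_ASSERT pins to the adjacent slot). Those links thread the
// heap's list of live array buffers and must not act as strong references.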
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


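// The full-GC marking table parallels the new-space table above, but its
// visit functions return void (marking does not need object sizes), and
// types such as Map, Code, SharedFunctionInfo and JSFunction get custom
// visitors that treat selected fields weakly.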
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                  Symbol::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite,
                  &FixedBodyVisitor<StaticVisitor,
                  AllocationSite::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitWeakCollection);

  table_.Register(kVisitJSWeakSet, &StaticVisitor::VisitWeakCollection);

  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor,
                  Oddball::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitCell,
                  &FixedBodyVisitor<StaticVisitor,
                  Cell::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  ASSERT(!rinfo->target_object()->IsConsString());
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  if (!Code::IsWeakEmbeddedObject(rinfo->host()->kind(), object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  StaticVisitor::MarkObject(heap, cell);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
      && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
          target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
          Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
    IC::Clear(target->GetIsolate(), rinfo->pc());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  ASSERT(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


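// Context::MarkCompactBodyDescriptor covers only the non-weak slots of a
// native context; the loop below records the weak slots (FIRST_WEAK_SLOT up
// to NATIVE_CONTEXT_SLOTS) with the collector so they can be updated during
// compaction without themselves keeping their targets alive.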
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor,
                   Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT;
       idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot =
        HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
    collector->RecordSlot(slot, slot, *slot);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark property cell dependent codes array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // codes when we iterate over property cells in ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    code->ClearTypeFeedbackCells(heap);
  }
  if (FLAG_age_code && !Serializer::enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


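// Code flushing: when the collector has code flushing enabled, a
// SharedFunctionInfo whose code passes IsFlushable() is queued on the code
// flusher and visited with the *WeakCode variant below, leaving its code
// field unmarked so that unused compiled code can be dropped and, since the
// function allows lazy compilation, recompiled on demand. Otherwise all
// fields are visited strongly.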
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cache_optimized_code &&
      FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
  int first_ptr_offset = constant_pool->OffsetOfElementAt(
      constant_pool->first_ptr_index());
  int last_ptr_offset = constant_pool->OffsetOfElementAt(
      constant_pool->first_ptr_index() + constant_pool->count_of_ptr_entries());
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, first_ptr_offset),
      HeapObject::RawField(object, last_ptr_offset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
    Map* map, HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  TransitionArray* transitions = map->unchecked_transition_array();
  if (transitions->IsTransitionArray()) {
    MarkTransitionArray(heap, transitions);
  } else {
    // Already marked by marking map->GetBackPointer() above.
    ASSERT(transitions->IsMap() || transitions->IsUndefined());
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetFirstElementAddress(),
        descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetDescriptorStartSlot(start),
        descriptors->GetDescriptorEndSlot(end));
  }

  // Mark prototype dependent codes array but do not push it onto marking
  // stack, this will make references from it weak. We will clean dead
  // codes when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
    Heap* heap, Code* code) {
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value();
       i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
      !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
      (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


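// The two IsFlushable() predicates encode when compiled code may safely be
// discarded: the code must not already be marked (it could be on the stack,
// in the compilation cache, or referenced by an optimized version), and the
// function must be an ordinary, lazily recompilable closure whose source is
// still available.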
template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


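// The strong/weak pair below differs only in the code entry field:
// VisitJSFunctionStrongCode visits it via VisitCodeEntry, keeping the
// compiled code alive, while VisitJSFunctionWeakCode skips it so the code
// flusher can later reclaim the code and reset the function for lazy
// recompilation.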
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


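// mode_mask in both CodeIterateBody variants selects exactly the RelocInfo
// modes that can hold GC-visible pointers (code targets, embedded objects,
// cells, external references, patched return/debug-break slots and runtime
// entries); the RelocIterator then walks only those entries. The variants
// differ in dispatch: the first calls a virtual ObjectVisitor, the second a
// statically dispatched StaticVisitor.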
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_