The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.
Please select the desired protocol below to get the URL.
This URL has Read-Only access.
main_repo / deps / v8 / src / global-handles.cc @ f230a1cf
History | View | Annotate | Download (33.6 KB)
1 |
// Copyright 2009 the V8 project authors. All rights reserved.
|
---|---|
2 |
// Redistribution and use in source and binary forms, with or without
|
3 |
// modification, are permitted provided that the following conditions are
|
4 |
// met:
|
5 |
//
|
6 |
// * Redistributions of source code must retain the above copyright
|
7 |
// notice, this list of conditions and the following disclaimer.
|
8 |
// * Redistributions in binary form must reproduce the above
|
9 |
// copyright notice, this list of conditions and the following
|
10 |
// disclaimer in the documentation and/or other materials provided
|
11 |
// with the distribution.
|
12 |
// * Neither the name of Google Inc. nor the names of its
|
13 |
// contributors may be used to endorse or promote products derived
|
14 |
// from this software without specific prior written permission.
|
15 |
//
|
16 |
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17 |
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18 |
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19 |
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20 |
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21 |
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22 |
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23 |
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24 |
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25 |
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26 |
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27 |
|
28 |
#include "v8.h" |
29 |
|
30 |
#include "api.h" |
31 |
#include "global-handles.h" |
32 |
|
33 |
#include "vm-state-inl.h" |
34 |
|
35 |
namespace v8 {
|
36 |
namespace internal {
|
37 |
|
38 |
|
39 |
// Frees the resources the group owns: the optional retained-object
// info (which this group is responsible for disposing) and the array
// of handle slots.
ObjectGroup::~ObjectGroup() {
  if (info != NULL) {
    info->Dispose();
  }
  delete[] objects;
}
43 |
|
44 |
|
45 |
// The group owns only its array of child handle slots; the parent slot
// itself is not owned here and must stay valid elsewhere.
ImplicitRefGroup::~ImplicitRefGroup() {
  delete[] children;
}
48 |
|
49 |
|
50 |
// A Node is the backing storage for one global handle. The first word
// is the handle slot itself (object_), so a handle location can be cast
// directly back to its Node (see FromLocation). Layout and bit-field
// positions are pinned by the STATIC_ASSERTs below so that the inlined
// accessors in the public v8.h Internals stay in sync.
class GlobalHandles::Node {
 public:
  // State transition diagram:
  // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
  enum State {
    FREE = 0,
    NORMAL,     // Normal global handle.
    WEAK,       // Flagged as weak but not yet finalized.
    PENDING,    // Has been recognized as only reachable by weak handles.
    NEAR_DEATH  // Callback has informed the handle is near death.
  };

  // Maps handle location (slot) to the containing node.
  static Node* FromLocation(Object** location) {
    // Valid only because object_ is the first member (offset 0).
    ASSERT(OFFSET_OF(Node, object_) == 0);
    return reinterpret_cast<Node*>(location);
  }

  // The constructor only verifies that this layout matches what the
  // public API (Internals) hard-codes; fields are set up in Initialize().
  Node() {
    ASSERT(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset);
    ASSERT(OFFSET_OF(Node, flags_) == Internals::kNodeFlagsOffset);
    STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
                  Internals::kNodeStateMask);
    STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
    STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
    STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
    STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
                  Internals::kNodeIsIndependentShift);
    STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
                  Internals::kNodeIsPartiallyDependentShift);
  }

#ifdef ENABLE_HANDLE_ZAPPING
  ~Node() {
    // TODO(1428): if it's a weak handle we should have invoked its callback.
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    index_ = 0;
    set_independent(false);
    set_partially_dependent(false);
    set_in_new_space_list(false);
    parameter_or_next_free_.next_free = NULL;
    weak_callback_ = NULL;
  }
#endif

  // Resets the node to FREE state and pushes it onto the free list
  // headed by *first_free. Called once per node when a block is created.
  void Initialize(int index, Node** first_free) {
    index_ = static_cast<uint8_t>(index);
    ASSERT(static_cast<int>(index_) == index);
    set_state(FREE);
    set_in_new_space_list(false);
    parameter_or_next_free_.next_free = *first_free;
    *first_free = this;
  }

  // Binds a FREE node to |object| and moves it to NORMAL state.
  // Also bumps the containing block's use count.
  void Acquire(Object* object) {
    ASSERT(state() == FREE);
    object_ = object;
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    set_independent(false);
    set_partially_dependent(false);
    set_state(NORMAL);
    parameter_or_next_free_.parameter = NULL;
    weak_callback_ = NULL;
    IncreaseBlockUses();
  }

  // Returns the node to the free list. Values are zapped so stale
  // handle dereferences trap eagerly.
  void Release() {
    ASSERT(state() != FREE);
    set_state(FREE);
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    set_independent(false);
    set_partially_dependent(false);
    weak_callback_ = NULL;
    DecreaseBlockUses();
  }

  // Object slot accessors.
  Object* object() const { return object_; }
  Object** location() { return &object_; }
  Handle<Object> handle() { return Handle<Object>(location()); }

  // Wrapper class ID accessors.
  bool has_wrapper_class_id() const {
    return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
  }

  uint16_t wrapper_class_id() const { return class_id_; }

  // State and flag accessors.

  State state() const {
    return NodeState::decode(flags_);
  }
  void set_state(State state) {
    flags_ = NodeState::update(flags_, state);
  }

  bool is_independent() {
    return IsIndependent::decode(flags_);
  }
  void set_independent(bool v) {
    flags_ = IsIndependent::update(flags_, v);
  }

  bool is_partially_dependent() {
    return IsPartiallyDependent::decode(flags_);
  }
  void set_partially_dependent(bool v) {
    flags_ = IsPartiallyDependent::update(flags_, v);
  }

  bool is_in_new_space_list() {
    return IsInNewSpaceList::decode(flags_);
  }
  void set_in_new_space_list(bool v) {
    flags_ = IsInNewSpaceList::update(flags_, v);
  }

  // True when weak_callback_ actually holds a RevivableCallback in
  // disguise (see MakeWeak below).
  bool is_revivable_callback() {
    return IsRevivableCallback::decode(flags_);
  }
  void set_revivable_callback(bool v) {
    flags_ = IsRevivableCallback::update(flags_, v);
  }

  bool IsNearDeath() const {
    // Check for PENDING to ensure correct answer when processing callbacks.
    return state() == PENDING || state() == NEAR_DEATH;
  }

  bool IsWeak() const { return state() == WEAK; }

  bool IsRetainer() const { return state() != FREE; }

  bool IsStrongRetainer() const { return state() == NORMAL; }

  bool IsWeakRetainer() const {
    return state() == WEAK || state() == PENDING || state() == NEAR_DEATH;
  }

  // WEAK -> PENDING: the handle has been found reachable only weakly.
  void MarkPending() {
    ASSERT(state() == WEAK);
    set_state(PENDING);
  }

  // Independent flag accessors.
  void MarkIndependent() {
    ASSERT(state() != FREE);
    set_independent(true);
  }

  // Only takes effect for objects currently in new space; old-space
  // objects are left unmarked.
  void MarkPartiallyDependent() {
    ASSERT(state() != FREE);
    if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) {
      set_partially_dependent(true);
    }
  }
  void clear_partially_dependent() { set_partially_dependent(false); }

  // Callback accessor.
  // TODO(svenpanne) Re-enable or nuke later.
  // WeakReferenceCallback callback() { return callback_; }

  // Callback parameter accessors.
  void set_parameter(void* parameter) {
    ASSERT(state() != FREE);
    parameter_or_next_free_.parameter = parameter;
  }
  void* parameter() const {
    ASSERT(state() != FREE);
    return parameter_or_next_free_.parameter;
  }

  // Accessors for next free node in the free list.
  Node* next_free() {
    ASSERT(state() == FREE);
    return parameter_or_next_free_.next_free;
  }
  void set_next_free(Node* value) {
    ASSERT(state() == FREE);
    parameter_or_next_free_.next_free = value;
  }

  // Flags the node weak with exactly one of the two callback flavors;
  // a revivable callback is stored in weak_callback_ via a cast and
  // distinguished by the is_revivable_callback() flag.
  void MakeWeak(void* parameter,
                WeakCallback weak_callback,
                RevivableCallback revivable_callback) {
    ASSERT((weak_callback == NULL) != (revivable_callback == NULL));
    ASSERT(state() != FREE);
    set_state(WEAK);
    set_parameter(parameter);
    if (weak_callback != NULL) {
      weak_callback_ = weak_callback;
      set_revivable_callback(false);
    } else {
      weak_callback_ =
          reinterpret_cast<WeakCallback>(revivable_callback);
      set_revivable_callback(true);
    }
  }

  // Reverts a weak handle back to an ordinary strong (NORMAL) handle.
  void ClearWeakness() {
    ASSERT(state() != FREE);
    set_state(NORMAL);
    set_parameter(NULL);
  }

  // Runs the weak callback for a PENDING node. Returns true when a
  // callback was actually invoked; false for non-PENDING nodes and for
  // pending nodes without a callback (which are simply released).
  bool PostGarbageCollectionProcessing(Isolate* isolate) {
    if (state() != Node::PENDING) return false;
    if (weak_callback_ == NULL) {
      Release();
      return false;
    }
    void* par = parameter();
    set_state(NEAR_DEATH);
    set_parameter(NULL);

    Object** object = location();
    {
      // Check that we are not passing a finalized external string to
      // the callback.
      ASSERT(!object_->IsExternalAsciiString() ||
             ExternalAsciiString::cast(object_)->resource() != NULL);
      ASSERT(!object_->IsExternalTwoByteString() ||
             ExternalTwoByteString::cast(object_)->resource() != NULL);
      // Leaving V8.
      VMState<EXTERNAL> state(isolate);
      HandleScope handle_scope(isolate);
      if (is_revivable_callback()) {
        RevivableCallback revivable =
            reinterpret_cast<RevivableCallback>(weak_callback_);
        revivable(reinterpret_cast<v8::Isolate*>(isolate),
                  reinterpret_cast<Persistent<Value>*>(&object),
                  par);
      } else {
        Handle<Object> handle(*object, isolate);
        v8::WeakCallbackData<v8::Value, void> data(
            reinterpret_cast<v8::Isolate*>(isolate),
            v8::Utils::ToLocal(handle),
            par);
        weak_callback_(data);
      }
    }
    // Absence of explicit cleanup or revival of weak handle
    // in most of the cases would lead to memory leak.
    ASSERT(state() != NEAR_DEATH);
    return true;
  }

  inline GlobalHandles* GetGlobalHandles();

 private:
  inline NodeBlock* FindBlock();
  inline void IncreaseBlockUses();
  inline void DecreaseBlockUses();

  // Storage for object pointer.
  // Placed first to avoid offset computation.
  Object* object_;

  // Next word stores class_id, index, state, and independent.
  // Note: the most aligned fields should go first.

  // Wrapper class ID.
  uint16_t class_id_;

  // Index in the containing handle block.
  uint8_t index_;

  // This stores three flags (independent, partially_dependent and
  // in_new_space_list) and a State.
  class NodeState: public BitField<State, 0, 4> {};
  class IsIndependent: public BitField<bool, 4, 1> {};
  class IsPartiallyDependent: public BitField<bool, 5, 1> {};
  class IsInNewSpaceList: public BitField<bool, 6, 1> {};
  class IsRevivableCallback: public BitField<bool, 7, 1> {};

  uint8_t flags_;

  // Handle specific callback - might be a weak reference in disguise.
  WeakCallback weak_callback_;

  // Provided data for callback. In FREE state, this is used for
  // the free list link.
  union {
    void* parameter;
    Node* next_free;
  } parameter_or_next_free_;

  DISALLOW_COPY_AND_ASSIGN(Node);
};
344 |
|
345 |
|
346 |
// A fixed-size array of kSize Nodes plus intrusive list links. All
// blocks form a singly-linked list via next_; blocks that contain at
// least one used node additionally form a doubly-linked list via
// next_used_/prev_used_, headed by GlobalHandles::first_used_block_.
class GlobalHandles::NodeBlock {
 public:
  static const int kSize = 256;

  explicit NodeBlock(GlobalHandles* global_handles, NodeBlock* next)
      : next_(next),
        used_nodes_(0),
        next_used_(NULL),
        prev_used_(NULL),
        global_handles_(global_handles) {}

  // Pushes all kSize nodes onto the free list in index order
  // (iterating backwards so node 0 ends up at the head).
  void PutNodesOnFreeList(Node** first_free) {
    for (int i = kSize - 1; i >= 0; --i) {
      nodes_[i].Initialize(i, first_free);
    }
  }

  Node* node_at(int index) {
    ASSERT(0 <= index && index < kSize);
    return &nodes_[index];
  }

  // Bumps the used-node count; on the 0 -> 1 transition the block is
  // linked in at the head of the used-blocks list.
  void IncreaseUses() {
    ASSERT(used_nodes_ < kSize);
    if (used_nodes_++ == 0) {
      NodeBlock* old_first = global_handles_->first_used_block_;
      global_handles_->first_used_block_ = this;
      next_used_ = old_first;
      prev_used_ = NULL;
      if (old_first == NULL) return;
      old_first->prev_used_ = this;
    }
  }

  // Decrements the used-node count; on the 1 -> 0 transition the block
  // is unlinked from the used-blocks list (the block itself is kept).
  void DecreaseUses() {
    ASSERT(used_nodes_ > 0);
    if (--used_nodes_ == 0) {
      if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
      if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
      if (this == global_handles_->first_used_block_) {
        global_handles_->first_used_block_ = next_used_;
      }
    }
  }

  GlobalHandles* global_handles() { return global_handles_; }

  // Next block in the list of all blocks.
  NodeBlock* next() const { return next_; }

  // Next/previous block in the list of blocks with used nodes.
  NodeBlock* next_used() const { return next_used_; }
  NodeBlock* prev_used() const { return prev_used_; }

 private:
  // nodes_ must stay the first member: Node::FindBlock() recovers the
  // block address by stepping back from a node to the array start.
  Node nodes_[kSize];
  NodeBlock* const next_;
  int used_nodes_;
  NodeBlock* next_used_;
  NodeBlock* prev_used_;
  GlobalHandles* global_handles_;
};
408 |
|
409 |
|
410 |
// Returns the GlobalHandles instance that owns this node, found via
// the node's containing block.
GlobalHandles* GlobalHandles::Node::GetGlobalHandles() {
  NodeBlock* owning_block = FindBlock();
  return owning_block->global_handles();
}
413 |
|
414 |
|
415 |
// Recovers the NodeBlock containing this node: stepping back index_
// nodes lands on the start of the block's nodes_ array, which is the
// block's first member.
GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
  intptr_t block_start =
      reinterpret_cast<intptr_t>(this) - index_ * sizeof(Node);
  NodeBlock* owning_block = reinterpret_cast<NodeBlock*>(block_start);
  ASSERT(owning_block->node_at(index_) == this);
  return owning_block;
}
422 |
|
423 |
|
424 |
void GlobalHandles::Node::IncreaseBlockUses() {
|
425 |
NodeBlock* node_block = FindBlock(); |
426 |
node_block->IncreaseUses(); |
427 |
GlobalHandles* global_handles = node_block->global_handles(); |
428 |
global_handles->isolate()->counters()->global_handles()->Increment(); |
429 |
global_handles->number_of_global_handles_++; |
430 |
} |
431 |
|
432 |
|
433 |
void GlobalHandles::Node::DecreaseBlockUses() {
|
434 |
NodeBlock* node_block = FindBlock(); |
435 |
GlobalHandles* global_handles = node_block->global_handles(); |
436 |
parameter_or_next_free_.next_free = global_handles->first_free_; |
437 |
global_handles->first_free_ = this;
|
438 |
node_block->DecreaseUses(); |
439 |
global_handles->isolate()->counters()->global_handles()->Decrement(); |
440 |
global_handles->number_of_global_handles_--; |
441 |
} |
442 |
|
443 |
|
444 |
// Iterates over every node slot of every block that currently has at
// least one used node (free slots within those blocks are included;
// callers filter by node state).
class GlobalHandles::NodeIterator {
 public:
  explicit NodeIterator(GlobalHandles* global_handles)
      : block_(global_handles->first_used_block_),
        index_(0) {}

  bool done() const { return block_ == NULL; }

  Node* node() const {
    ASSERT(!done());
    return block_->node_at(index_);
  }

  // Steps to the next slot, hopping to the next used block when the
  // current block is exhausted.
  void Advance() {
    ASSERT(!done());
    ++index_;
    if (index_ == NodeBlock::kSize) {
      index_ = 0;
      block_ = block_->next_used();
    }
  }

 private:
  NodeBlock* block_;
  int index_;

  DISALLOW_COPY_AND_ASSIGN(NodeIterator);
};
470 |
|
471 |
|
472 |
// Starts with no node blocks; the first block is allocated lazily by
// Create() when the free list is empty.
GlobalHandles::GlobalHandles(Isolate* isolate)
    : isolate_(isolate),
      number_of_global_handles_(0),
      first_block_(NULL),
      first_used_block_(NULL),
      first_free_(NULL),
      post_gc_processing_count_(0),
      object_group_connections_(kObjectGroupConnectionsCapacity) {}
480 |
|
481 |
|
482 |
// Frees every node block on the all-blocks list.
GlobalHandles::~GlobalHandles() {
  NodeBlock* current = first_block_;
  while (current != NULL) {
    NodeBlock* successor = current->next();
    delete current;
    current = successor;
  }
  first_block_ = NULL;
}
491 |
|
492 |
|
493 |
// Allocates a new global handle referring to |value|. New-space
// objects are additionally tracked in new_space_nodes_ so scavenges
// can visit them without walking all blocks.
Handle<Object> GlobalHandles::Create(Object* value) {
  // Replenish the free list with a fresh block when exhausted.
  if (first_free_ == NULL) {
    first_block_ = new NodeBlock(this, first_block_);
    first_block_->PutNodesOnFreeList(&first_free_);
  }
  ASSERT(first_free_ != NULL);
  // Pop the head of the free list and bind it to |value|.
  Node* node = first_free_;
  first_free_ = node->next_free();
  node->Acquire(value);
  bool track_in_new_space =
      isolate_->heap()->InNewSpace(value) && !node->is_in_new_space_list();
  if (track_in_new_space) {
    new_space_nodes_.Add(node);
    node->set_in_new_space_list(true);
  }
  return node->handle();
}
510 |
|
511 |
|
512 |
// Creates an independent duplicate of an existing global handle,
// allocated from the same GlobalHandles instance that owns |location|.
Handle<Object> GlobalHandles::CopyGlobal(Object** location) {
  ASSERT(location != NULL);
  GlobalHandles* owner = Node::FromLocation(location)->GetGlobalHandles();
  return owner->Create(*location);
}
516 |
|
517 |
|
518 |
// Releases the global handle at |location|; a NULL location is a no-op.
void GlobalHandles::Destroy(Object** location) {
  if (location == NULL) return;
  Node::FromLocation(location)->Release();
}
521 |
|
522 |
|
523 |
// Flags the handle at |location| weak. Exactly one of |weak_callback|
// and |revivable_callback| must be non-NULL (enforced in Node::MakeWeak).
void GlobalHandles::MakeWeak(Object** location,
                             void* parameter,
                             WeakCallback weak_callback,
                             RevivableCallback revivable_callback) {
  Node* node = Node::FromLocation(location);
  node->MakeWeak(parameter, weak_callback, revivable_callback);
}
530 |
|
531 |
|
532 |
// Turns a weak handle back into an ordinary strong handle.
void GlobalHandles::ClearWeakness(Object** location) {
  Node* node = Node::FromLocation(location);
  node->ClearWeakness();
}
535 |
|
536 |
|
537 |
// Marks the handle at |location| as independent.
void GlobalHandles::MarkIndependent(Object** location) {
  Node* node = Node::FromLocation(location);
  node->MarkIndependent();
}
540 |
|
541 |
|
542 |
// Marks the handle at |location| partially dependent (only effective
// for new-space objects; see Node::MarkPartiallyDependent).
void GlobalHandles::MarkPartiallyDependent(Object** location) {
  Node* node = Node::FromLocation(location);
  node->MarkPartiallyDependent();
}
545 |
|
546 |
|
547 |
// Returns whether the handle at |location| is marked independent.
bool GlobalHandles::IsIndependent(Object** location) {
  Node* node = Node::FromLocation(location);
  return node->is_independent();
}
550 |
|
551 |
|
552 |
// Returns whether the handle at |location| is pending or near death.
bool GlobalHandles::IsNearDeath(Object** location) {
  Node* node = Node::FromLocation(location);
  return node->IsNearDeath();
}
555 |
|
556 |
|
557 |
// Returns whether the handle at |location| is currently in WEAK state.
bool GlobalHandles::IsWeak(Object** location) {
  Node* node = Node::FromLocation(location);
  return node->IsWeak();
}
560 |
|
561 |
|
562 |
// Visits every handle that is weak, pending, or near death.
void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    Node* node = it.node();
    if (!node->IsWeakRetainer()) continue;
    v->VisitPointer(node->location());
  }
}
567 |
|
568 |
|
569 |
// Moves WEAK nodes whose slot is selected by |f| into PENDING state.
void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    Node* node = it.node();
    if (!node->IsWeak()) continue;
    if (f(node->location())) {
      node->MarkPending();
    }
  }
}
576 |
|
577 |
|
578 |
// Visits new-space handles that must be treated as roots during a
// scavenge: strong handles, plus weak handles that are neither
// independent nor partially dependent.
void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    bool dependent_weak = node->IsWeakRetainer() &&
                          !node->is_independent() &&
                          !node->is_partially_dependent();
    if (node->IsStrongRetainer() || dependent_weak) {
      v->VisitPointer(node->location());
    }
  }
}
588 |
|
589 |
|
590 |
// Marks as PENDING those new-space weak handles that are independent
// (or partially dependent) and whose slot is selected by |f|.
void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(
    WeakSlotCallbackWithHeap f) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    ASSERT(node->is_in_new_space_list());
    if (!node->is_independent() && !node->is_partially_dependent()) continue;
    if (!node->IsWeak()) continue;
    if (f(isolate_->heap(), node->location())) {
      node->MarkPending();
    }
  }
}
601 |
|
602 |
|
603 |
// Visits new-space weak handles that are independent (or partially
// dependent) and still retain their object.
void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    ASSERT(node->is_in_new_space_list());
    bool detached = node->is_independent() || node->is_partially_dependent();
    if (detached && node->IsWeakRetainer()) {
      v->VisitPointer(node->location());
    }
  }
}
613 |
|
614 |
|
615 |
// Visits all object groups containing at least one member that
// |can_skip| refuses to skip. Visited groups are deleted; surviving
// groups are compacted to the front of object_groups_ via |last|.
// Returns true if any group member was actually visited.
bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
                                        WeakSlotCallbackWithHeap can_skip) {
  ComputeObjectGroupsAndImplicitReferences();
  int last = 0;
  bool any_group_was_visited = false;
  for (int i = 0; i < object_groups_.length(); i++) {
    ObjectGroup* entry = object_groups_.at(i);
    ASSERT(entry != NULL);

    Object*** objects = entry->objects;
    // First pass: decide whether any member forces a visit.
    bool group_should_be_visited = false;
    for (size_t j = 0; j < entry->length; j++) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        if (!can_skip(isolate_->heap(), &object)) {
          group_should_be_visited = true;
          break;
        }
      }
    }

    if (!group_should_be_visited) {
      // Keep the group for a later iteration: compact it forward.
      object_groups_[last++] = entry;
      continue;
    }

    // An object in the group requires visiting, so iterate over all
    // objects in the group.
    for (size_t j = 0; j < entry->length; ++j) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        v->VisitPointer(&object);
        any_group_was_visited = true;
      }
    }

    // Once the entire group has been iterated over, set the object
    // group to NULL so it won't be processed again.
    delete entry;
    object_groups_.at(i) = NULL;
  }
  // Drop the (now NULL) visited entries past the compacted prefix.
  object_groups_.Rewind(last);
  return any_group_was_visited;
}
659 |
|
660 |
|
661 |
// Invokes weak callbacks for PENDING handles after a GC and prunes the
// new-space node list. Returns true when a handle died (suggesting the
// next GC may collect more). NOTE: callbacks may re-enter V8 and
// trigger nested GCs; post_gc_processing_count_ detects that and makes
// this function bail out, since the node being processed may have been
// freed by the nested round.
bool GlobalHandles::PostGarbageCollectionProcessing(
    GarbageCollector collector, GCTracer* tracer) {
  // Process weak global handle callbacks. This must be done after the
  // GC is completely done, because the callbacks may invoke arbitrary
  // API functions.
  ASSERT(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
  const int initial_post_gc_processing_count = ++post_gc_processing_count_;
  bool next_gc_likely_to_collect_more = false;
  if (collector == SCAVENGER) {
    // Minor GC: only new-space nodes can have died.
    for (int i = 0; i < new_space_nodes_.length(); ++i) {
      Node* node = new_space_nodes_[i];
      ASSERT(node->is_in_new_space_list());
      if (!node->IsRetainer()) {
        // Free nodes do not have weak callbacks. Do not use them to compute
        // the next_gc_likely_to_collect_more.
        continue;
      }
      // Skip dependent handles. Their weak callbacks might expect to be
      // called between two global garbage collection callbacks which
      // are not called for minor collections.
      if (!node->is_independent() && !node->is_partially_dependent()) {
        continue;
      }
      node->clear_partially_dependent();
      if (node->PostGarbageCollectionProcessing(isolate_)) {
        if (initial_post_gc_processing_count != post_gc_processing_count_) {
          // Weak callback triggered another GC and another round of
          // PostGarbageCollection processing. The current node might
          // have been deleted in that round, so we need to bail out (or
          // restart the processing).
          return next_gc_likely_to_collect_more;
        }
      }
      if (!node->IsRetainer()) {
        next_gc_likely_to_collect_more = true;
      }
    }
  } else {
    // Full GC: walk every node.
    for (NodeIterator it(this); !it.done(); it.Advance()) {
      if (!it.node()->IsRetainer()) {
        // Free nodes do not have weak callbacks. Do not use them to compute
        // the next_gc_likely_to_collect_more.
        continue;
      }
      it.node()->clear_partially_dependent();
      if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
        if (initial_post_gc_processing_count != post_gc_processing_count_) {
          // See the comment above.
          return next_gc_likely_to_collect_more;
        }
      }
      if (!it.node()->IsRetainer()) {
        next_gc_likely_to_collect_more = true;
      }
    }
  }
  // Update the list of new space nodes: keep only retained nodes whose
  // object is still in new space, compacting in place via |last|.
  int last = 0;
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    ASSERT(node->is_in_new_space_list());
    if (node->IsRetainer()) {
      if (isolate_->heap()->InNewSpace(node->object())) {
        new_space_nodes_[last++] = node;
        tracer->increment_nodes_copied_in_new_space();
      } else {
        // The object was promoted to old space.
        node->set_in_new_space_list(false);
        tracer->increment_nodes_promoted();
      }
    } else {
      // The handle was released; the object died in new space.
      node->set_in_new_space_list(false);
      tracer->increment_nodes_died_in_new_space();
    }
  }
  new_space_nodes_.Rewind(last);
  return next_gc_likely_to_collect_more;
}
738 |
|
739 |
|
740 |
// Visits only handles in NORMAL (strong) state.
void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    Node* node = it.node();
    if (!node->IsStrongRetainer()) continue;
    v->VisitPointer(node->location());
  }
}
747 |
|
748 |
|
749 |
// Visits every handle that is not on the free list.
void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    Node* node = it.node();
    if (!node->IsRetainer()) continue;
    v->VisitPointer(node->location());
  }
}
756 |
|
757 |
|
758 |
// Visits retained handles carrying an embedder-assigned class id.
void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    Node* node = it.node();
    if (!node->IsRetainer()) continue;
    if (!node->has_wrapper_class_id()) continue;
    v->VisitEmbedderReference(node->location(), node->wrapper_class_id());
  }
}
766 |
|
767 |
|
768 |
// Like IterateAllRootsWithClassIds, but restricted to the tracked
// new-space nodes.
void GlobalHandles::IterateAllRootsInNewSpaceWithClassIds(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    if (!node->IsRetainer()) continue;
    if (!node->has_wrapper_class_id()) continue;
    v->VisitEmbedderReference(node->location(), node->wrapper_class_id());
  }
}
777 |
|
778 |
|
779 |
int GlobalHandles::NumberOfWeakHandles() {
|
780 |
int count = 0; |
781 |
for (NodeIterator it(this); !it.done(); it.Advance()) { |
782 |
if (it.node()->IsWeakRetainer()) {
|
783 |
count++; |
784 |
} |
785 |
} |
786 |
return count;
|
787 |
} |
788 |
|
789 |
|
790 |
int GlobalHandles::NumberOfGlobalObjectWeakHandles() {
|
791 |
int count = 0; |
792 |
for (NodeIterator it(this); !it.done(); it.Advance()) { |
793 |
if (it.node()->IsWeakRetainer() &&
|
794 |
it.node()->object()->IsJSGlobalObject()) { |
795 |
count++; |
796 |
} |
797 |
} |
798 |
return count;
|
799 |
} |
800 |
|
801 |
|
802 |
// Fills |stats| with per-state handle counts. NORMAL handles are
// counted only in the overall total, matching the historical output.
void GlobalHandles::RecordStats(HeapStats* stats) {
  *stats->global_handle_count = 0;
  *stats->weak_global_handle_count = 0;
  *stats->pending_global_handle_count = 0;
  *stats->near_death_global_handle_count = 0;
  *stats->free_global_handle_count = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    *stats->global_handle_count += 1;
    switch (it.node()->state()) {
      case Node::WEAK:
        *stats->weak_global_handle_count += 1;
        break;
      case Node::PENDING:
        *stats->pending_global_handle_count += 1;
        break;
      case Node::NEAR_DEATH:
        *stats->near_death_global_handle_count += 1;
        break;
      case Node::FREE:
        *stats->free_global_handle_count += 1;
        break;
      default:
        break;
    }
  }
}
821 |
|
822 |
#ifdef DEBUG
|
823 |
|
824 |
void GlobalHandles::PrintStats() {
|
825 |
int total = 0; |
826 |
int weak = 0; |
827 |
int pending = 0; |
828 |
int near_death = 0; |
829 |
int destroyed = 0; |
830 |
|
831 |
for (NodeIterator it(this); !it.done(); it.Advance()) { |
832 |
total++; |
833 |
if (it.node()->state() == Node::WEAK) weak++;
|
834 |
if (it.node()->state() == Node::PENDING) pending++;
|
835 |
if (it.node()->state() == Node::NEAR_DEATH) near_death++;
|
836 |
if (it.node()->state() == Node::FREE) destroyed++;
|
837 |
} |
838 |
|
839 |
PrintF("Global Handle Statistics:\n");
|
840 |
PrintF(" allocated memory = %" V8_PTR_PREFIX "dB\n", sizeof(Node) * total); |
841 |
PrintF(" # weak = %d\n", weak);
|
842 |
PrintF(" # pending = %d\n", pending);
|
843 |
PrintF(" # near_death = %d\n", near_death);
|
844 |
PrintF(" # free = %d\n", destroyed);
|
845 |
PrintF(" # total = %d\n", total);
|
846 |
} |
847 |
|
848 |
|
849 |
void GlobalHandles::Print() {
|
850 |
PrintF("Global handles:\n");
|
851 |
for (NodeIterator it(this); !it.done(); it.Advance()) { |
852 |
PrintF(" handle %p to %p%s\n",
|
853 |
reinterpret_cast<void*>(it.node()->location()), |
854 |
reinterpret_cast<void*>(it.node()->object()), |
855 |
it.node()->IsWeak() ? " (weak)" : ""); |
856 |
} |
857 |
} |
858 |
|
859 |
#endif
|
860 |
|
861 |
|
862 |
|
863 |
// Registers an object group over |length| handle slots, taking
// ownership of |info| (disposed immediately if the group is empty).
void GlobalHandles::AddObjectGroup(Object*** handles,
                                   size_t length,
                                   v8::RetainedObjectInfo* info) {
#ifdef DEBUG
  for (size_t i = 0; i < length; ++i) {
    ASSERT(!Node::FromLocation(handles[i])->is_independent());
  }
#endif
  if (length == 0) {
    // Nothing to group, but we still own |info|.
    if (info != NULL) {
      info->Dispose();
    }
    return;
  }
  ObjectGroup* group = new ObjectGroup(length);
  for (size_t i = 0; i < length; ++i) {
    group->objects[i] = handles[i];
  }
  group->info = info;
  object_groups_.Add(group);
}
881 |
|
882 |
|
883 |
// Queues a (group id, handle) connection; connections are consumed
// when object groups are computed (see
// ComputeObjectGroupsAndImplicitReferences).
void GlobalHandles::SetObjectGroupId(Object** handle,
                                     UniqueId id) {
  object_group_connections_.Add(ObjectGroupConnection(id, handle));
}
887 |
|
888 |
|
889 |
// Associates |info| with the object group |id|. The stored infos are
// Dispose()d when groups are removed (see RemoveObjectGroups).
void GlobalHandles::SetRetainedObjectInfo(UniqueId id,
                                          RetainedObjectInfo* info) {
  retainer_infos_.Add(ObjectGroupRetainerInfo(id, info));
}
893 |
|
894 |
|
895 |
// Registers an implicit reference group: |parent| retains the |length|
// children. Empty groups are silently dropped.
void GlobalHandles::AddImplicitReferences(HeapObject** parent,
                                          Object*** children,
                                          size_t length) {
#ifdef DEBUG
  ASSERT(!Node::FromLocation(BitCast<Object**>(parent))->is_independent());
  for (size_t i = 0; i < length; ++i) {
    ASSERT(!Node::FromLocation(children[i])->is_independent());
  }
#endif
  if (length == 0) return;
  ImplicitRefGroup* group = new ImplicitRefGroup(parent, length);
  for (size_t i = 0; i < length; ++i) {
    group->children[i] = children[i];
  }
  implicit_ref_groups_.Add(group);
}
910 |
|
911 |
|
912 |
// Records that the object group |id| implicitly retains |child|. Buffered
// until ComputeObjectGroupsAndImplicitReferences() builds the final groups.
void GlobalHandles::SetReferenceFromGroup(UniqueId id, Object** child) {
  ASSERT(!Node::FromLocation(child)->is_independent());
  ObjectGroupConnection connection(id, child);
  implicit_ref_connections_.Add(connection);
}
916 |
|
917 |
|
918 |
// Registers a single implicit parent -> child retention edge, expressed as
// a one-element implicit reference group.
void GlobalHandles::SetReference(HeapObject** parent, Object** child) {
  ASSERT(!Node::FromLocation(child)->is_independent());
  ImplicitRefGroup* singleton = new ImplicitRefGroup(parent, 1);
  singleton->children[0] = child;
  implicit_ref_groups_.Add(singleton);
}
924 |
|
925 |
|
926 |
// Discards all buffered object-group state: deletes the groups themselves,
// disposes every pending RetainedObjectInfo (ownership was transferred to
// us), and resets the connection buffer to its initial capacity.
void GlobalHandles::RemoveObjectGroups() {
  for (int i = 0; i < object_groups_.length(); ++i) {
    delete object_groups_.at(i);
  }
  object_groups_.Clear();
  for (int i = 0; i < retainer_infos_.length(); ++i) {
    retainer_infos_[i].info->Dispose();
  }
  retainer_infos_.Clear();
  object_group_connections_.Clear();
  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
}
936 |
|
937 |
|
938 |
// Discards all buffered implicit-reference state: frees every group and
// clears the pending connection list.
void GlobalHandles::RemoveImplicitRefGroups() {
  for (int i = 0; i < implicit_ref_groups_.length(); ++i) {
    delete implicit_ref_groups_.at(i);
  }
  implicit_ref_groups_.Clear();
  implicit_ref_connections_.Clear();
}
945 |
|
946 |
|
947 |
// Isolate-shutdown hook. Deliberately a no-op for now: pending weak
// callbacks are not run during teardown.
void GlobalHandles::TearDown() {
  // TODO(1428): invoke weak callbacks.
}
950 |
|
951 |
|
952 |
// Converts the buffered connection lists into concrete ObjectGroup and
// ImplicitRefGroup instances.
//
// Algorithm: sort object_group_connections_, retainer_infos_ and
// implicit_ref_connections_ (all keyed by UniqueId), then sweep
// object_group_connections_ once. Each run of equal ids forms one group;
// matching ranges of the other two sorted lists are consumed in lock-step,
// so the whole pass is linear after sorting.
//
// Ownership: every RetainedObjectInfo in retainer_infos_ is either handed to
// the ObjectGroup that carries its id or Dispose()d here; none may leak.
// All three buffers are emptied before returning.
void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
  if (object_group_connections_.length() == 0) {
    // No groups were declared this cycle; still dispose any orphaned
    // retainer infos (we own them) and drop stray implicit connections.
    for (int i = 0; i < retainer_infos_.length(); ++i)
      retainer_infos_[i].info->Dispose();
    retainer_infos_.Clear();
    implicit_ref_connections_.Clear();
    return;
  }

  object_group_connections_.Sort();
  retainer_infos_.Sort();
  implicit_ref_connections_.Sort();

  int info_index = 0;  // For iterating retainer_infos_.
  UniqueId current_group_id(0);
  int current_group_start = 0;

  int current_implicit_refs_start = 0;
  int current_implicit_refs_end = 0;
  // Note: the loop runs to length() inclusive so the final group is flushed.
  for (int i = 0; i <= object_group_connections_.length(); ++i) {
    if (i == 0)
      current_group_id = object_group_connections_[i].id;
    if (i == object_group_connections_.length() ||
        current_group_id != object_group_connections_[i].id) {
      // Group detected: objects in indices [current_group_start, i[.

      // Find out which implicit references are related to this group. (We want
      // to ignore object groups which only have 1 object, but that object is
      // needed as a representative object for the implicit reference group.)
      while (current_implicit_refs_start < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_start].id <
                 current_group_id)
        ++current_implicit_refs_start;
      current_implicit_refs_end = current_implicit_refs_start;
      while (current_implicit_refs_end < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_end].id ==
                 current_group_id)
        ++current_implicit_refs_end;

      if (current_implicit_refs_end > current_implicit_refs_start) {
        // Find a representative object for the implicit references.
        HeapObject** representative = NULL;
        for (int j = current_group_start; j < i; ++j) {
          Object** object = object_group_connections_[j].object;
          if ((*object)->IsHeapObject()) {
            representative = reinterpret_cast<HeapObject**>(object);
            break;
          }
        }
        if (representative) {
          ImplicitRefGroup* group = new ImplicitRefGroup(
              representative,
              current_implicit_refs_end - current_implicit_refs_start);
          for (int j = current_implicit_refs_start;
               j < current_implicit_refs_end;
               ++j) {
            group->children[j - current_implicit_refs_start] =
                implicit_ref_connections_[j].object;
          }
          implicit_ref_groups_.Add(group);
        }
        current_implicit_refs_start = current_implicit_refs_end;
      }

      // Find a RetainedObjectInfo for the group.
      RetainedObjectInfo* info = NULL;
      // Infos whose id precedes this group match no group at all; dispose
      // them as we skip past (we own them).
      while (info_index < retainer_infos_.length() &&
             retainer_infos_[info_index].id < current_group_id) {
        retainer_infos_[info_index].info->Dispose();
        ++info_index;
      }
      if (info_index < retainer_infos_.length() &&
          retainer_infos_[info_index].id == current_group_id) {
        // This object group has an associated ObjectGroupRetainerInfo.
        info = retainer_infos_[info_index].info;
        ++info_index;
      }

      // Ignore groups which only contain one object.
      if (i > current_group_start + 1) {
        ObjectGroup* group = new ObjectGroup(i - current_group_start);
        for (int j = current_group_start; j < i; ++j) {
          group->objects[j - current_group_start] =
              object_group_connections_[j].object;
        }
        group->info = info;  // ObjectGroup takes ownership of |info|.
        object_groups_.Add(group);
      } else if (info) {
        // Single-object group was dropped, so its info must be disposed here.
        info->Dispose();
      }

      if (i < object_group_connections_.length()) {
        // Start the next run of equal ids.
        current_group_id = object_group_connections_[i].id;
        current_group_start = i;
      }
    }
  }
  // All buffered input has been consumed; reset for the next cycle.
  object_group_connections_.Clear();
  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
  retainer_infos_.Clear();
  implicit_ref_connections_.Clear();
}
1054 |
|
1055 |
|
1056 |
// Starts empty: no storage blocks allocated, and every singleton handle slot
// marked as not yet created.
EternalHandles::EternalHandles() : size_(0) {
  for (unsigned slot = 0; slot < ARRAY_SIZE(singleton_handles_); ++slot) {
    singleton_handles_[slot] = kInvalidIndex;
  }
}
1061 |
|
1062 |
|
1063 |
// Frees every backing block of handle storage.
EternalHandles::~EternalHandles() {
  for (int block = 0; block < blocks_.length(); ++block) {
    delete[] blocks_[block];
  }
}
1066 |
|
1067 |
|
1068 |
// Visits all live eternal handles. Every block except possibly the last is
// full (kSize entries); the last holds the remainder of size_.
void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
  int remaining = size_;
  for (int i = 0; i < blocks_.length(); ++i) {
    ASSERT(remaining > 0);
    Object** block = blocks_[i];
    visitor->VisitPointers(block, block + Min(remaining, kSize));
    remaining -= kSize;
  }
}
1077 |
|
1078 |
|
1079 |
// Visits only the eternal handles known to point into new space (tracked in
// new_space_indices_), so scavenges need not walk every block.
void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
  for (int pos = 0; pos < new_space_indices_.length(); ++pos) {
    visitor->VisitPointer(GetLocation(new_space_indices_[pos]));
  }
}
1084 |
|
1085 |
|
1086 |
// After a GC, prunes new_space_indices_: entries whose object was promoted
// out of new space are dropped by compacting survivors in place and
// trimming the list.
void EternalHandles::PostGarbageCollectionProcessing(Heap* heap) {
  int surviving = 0;
  for (int i = 0; i < new_space_indices_.length(); ++i) {
    int index = new_space_indices_[i];
    if (heap->InNewSpace(*GetLocation(index))) {
      new_space_indices_[surviving++] = index;
    }
  }
  new_space_indices_.Rewind(surviving);
}
1096 |
|
1097 |
|
1098 |
// Allocates a new eternal handle for |object| and stores its index in
// |*index| (which must currently be kInvalidIndex). NULL objects are
// ignored. Unused slots in the backing blocks are pre-filled with the hole
// value so stale entries are detectable.
void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
  ASSERT_EQ(kInvalidIndex, *index);
  if (object == NULL) return;
  ASSERT_NE(isolate->heap()->the_hole_value(), object);
  int block_index = size_ >> kShift;
  int slot = size_ & kMask;
  if (slot == 0) {
    // Current block is full (or none exists yet): grow by one block,
    // hole-initialized.
    Object** fresh_block = new Object*[kSize];
    Object* hole = isolate->heap()->the_hole_value();
    MemsetPointer(fresh_block, hole, kSize);
    blocks_.Add(fresh_block);
  }
  ASSERT_EQ(isolate->heap()->the_hole_value(), blocks_[block_index][slot]);
  blocks_[block_index][slot] = object;
  if (isolate->heap()->InNewSpace(object)) {
    // Track for the cheaper new-space-only iteration.
    new_space_indices_.Add(size_);
  }
  *index = size_++;
}
1118 |
|
1119 |
|
1120 |
} } // namespace v8::internal
|