@@ -125,16 +125,14 @@ class ChunkedStack {
         }
     }

-    inline void _push(void* p) {
-        *cur++ = p;
-        if (cur == end) {
-            chunks.push_back(start);
-            get_chunk();
-        }
-    }
-
 public:
     ChunkedStack() { get_chunk(); }
+    ~ChunkedStack() {
+        RELEASE_ASSERT(end - cur == CHUNK_SIZE, "destroying non-empty ChunkedStack");
+        // We always have a block available in case we want to push items onto the TraversalWorklist,
+        // but that chunk needs to be released after use to avoid a memory leak.
+        release_chunk(start);
+    }

     void* pop() {
         if (cur > start) {
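For context, here is a minimal, self-contained sketch of the chunked-stack idea this class implements: fixed-size chunks of pointers recycled through a static free list, so the hot path of push/pop rarely touches the allocator. The class name, the CHUNK_SIZE value, and the plain asserts are illustrative assumptions, not Pyston's actual code:

#include <cassert>
#include <vector>

class MiniChunkedStack {
    static const int CHUNK_SIZE = 256;      // illustrative; Pyston defines its own
    static std::vector<void**> free_chunks; // process-wide pool of retired chunks

    std::vector<void**> chunks; // full chunks, oldest first
    void** start;               // current chunk
    void** cur;                 // next free slot in the current chunk
    void** end;                 // one past the end of the current chunk

    void get_chunk() {
        if (!free_chunks.empty()) {
            start = free_chunks.back(); // reuse a pooled chunk if possible
            free_chunks.pop_back();
        } else {
            start = new void*[CHUNK_SIZE];
        }
        cur = start;
        end = start + CHUNK_SIZE;
    }

    static void release_chunk(void** chunk) { free_chunks.push_back(chunk); }

public:
    MiniChunkedStack() { get_chunk(); }
    ~MiniChunkedStack() {
        // Same invariant as the RELEASE_ASSERT above: an empty stack means cur
        // is back at start, i.e. end - cur == CHUNK_SIZE.
        assert(cur == start && chunks.empty());
        release_chunk(start); // return the spare chunk to the pool
    }

    void push(void* p) {
        *cur++ = p;
        if (cur == end) { // current chunk is full: archive it, grab a fresh one
            chunks.push_back(start);
            get_chunk();
        }
    }

    void* pop() {
        if (cur > start)
            return *--cur;
        if (!chunks.empty()) { // current chunk drained: fall back to the previous one
            release_chunk(start);
            start = chunks.back();
            chunks.pop_back();
            end = start + CHUNK_SIZE;
            cur = end; // the archived chunk was full, so resume from its end
            return *--cur;
        }
        return nullptr; // empty stack; callers treat nullptr as "done"
    }
};

std::vector<void**> MiniChunkedStack::free_chunks;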
@@ -145,41 +143,50 @@ class ChunkedStack {

         return previous_pop;
     }
+
+    void push(void* p) {
+        *cur++ = p;
+        if (cur == end) {
+            chunks.push_back(start);
+            get_chunk();
+        }
+    }
 };
 std::vector<void**> ChunkedStack::free_chunks;

-enum TraceStackType {
+enum TraversalType {
     MarkPhase,
     FinalizationOrderingFindReachable,
     FinalizationOrderingRemoveTemporaries,
     MapReferencesPhase,
 };

-class TraceStack : public ChunkedStack {
-    TraceStackType visit_type;
+class Worklist {
+protected:
+    ChunkedStack stack;
+
+public:
+    void* next() { return stack.pop(); }
+};
+
+class TraversalWorklist : public Worklist {
+    TraversalType visit_type;

 public:
-    TraceStack(TraceStackType type) : ChunkedStack(), visit_type(type) {}
-    TraceStack(TraceStackType type, const std::unordered_set<void*>& roots) : TraceStack(type) {
+    TraversalWorklist(TraversalType type) : visit_type(type) {}
+    TraversalWorklist(TraversalType type, const std::unordered_set<void*>& roots) : TraversalWorklist(type) {
         for (void* p : roots) {
             ASSERT(!isMarked(GCAllocation::fromUserData(p)), "");
-            push(p);
+            addWork(p);
         }
     }
-    ~TraceStack() {
-        RELEASE_ASSERT(end - cur == CHUNK_SIZE, "destroying non-empty TraceStack");

-        // We always have a block available in case we want to push items onto the TraceStack,
-        // but that chunk needs to be released after use to avoid a memory leak.
-        release_chunk(start);
-    }
-
-    void push(void* p) {
+    void addWork(void* p) {
         GC_TRACE_LOG("Pushing %p\n", p);
         GCAllocation* al = GCAllocation::fromUserData(p);

         switch (visit_type) {
-            case TraceStackType::MarkPhase:
+            case TraversalType::MarkPhase:
 // Use this to print the directed edges of the GC graph traversal.
 // i.e. print every a -> b where a is a pointer and b is something a references
 #if 0
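The structural change in this hunk is composition over inheritance: instead of TraceStack deriving from ChunkedStack, the new Worklist base owns a ChunkedStack and exposes only next(), and each subclass decides in its own addWork() whether an object actually enters the stack. A hedged sketch of that layering, reusing MiniChunkedStack from the sketch above (MarkWorklist and its seen-set are illustrative stand-ins, not Pyston's classes):

#include <unordered_set>

class Worklist {
protected:
    MiniChunkedStack stack; // subclasses push; consumers only drain

public:
    void* next() { return stack.pop(); } // nullptr once the worklist is empty
};

class MarkWorklist : public Worklist { // stand-in for TraversalWorklist
    std::unordered_set<void*> seen;    // stands in for Pyston's mark bits

public:
    void addWork(void* p) {
        // TraversalWorklist filters here too (via mark bits or finalization
        // state) so that each object enters the worklist at most once.
        if (seen.insert(p).second)
            stack.push(p);
    }
};

Note that addWork() is deliberately non-virtual: the two concrete worklists take different arguments (a bare void* here, a GCAllocation* plus its source below), so producers hold the concrete type and only the drain loop works through the Worklist base.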
@@ -208,7 +215,7 @@ class TraceStack : public ChunkedStack {
                 break;
             // See PyPy's finalization ordering algorithm:
             // http://pypy.readthedocs.org/en/latest/discussion/finalizer-order.html
-            case TraceStackType::FinalizationOrderingFindReachable:
+            case TraversalType::FinalizationOrderingFindReachable:
                 if (orderingState(al) == FinalizationState::UNREACHABLE) {
                     setOrderingState(al, FinalizationState::TEMPORARY);
                 } else if (orderingState(al) == FinalizationState::REACHABLE_FROM_FINALIZER) {
@@ -217,7 +224,7 @@ class TraceStack : public ChunkedStack {
                     return;
                 }
                 break;
-            case TraceStackType::FinalizationOrderingRemoveTemporaries:
+            case TraversalType::FinalizationOrderingRemoveTemporaries:
                 if (orderingState(al) == FinalizationState::TEMPORARY) {
                     setOrderingState(al, FinalizationState::REACHABLE_FROM_FINALIZER);
                 } else {
@@ -228,22 +235,22 @@ class TraceStack : public ChunkedStack {
                 assert(false);
         }

-        _push(p);
+        stack.push(p);
     }
 };

-class ReferenceMapStack : public ChunkedStack {
+class ReferenceMapWorklist : public Worklist {
     ReferenceMap* refmap;

 public:
-    ReferenceMapStack(ReferenceMap* refmap) : ChunkedStack(), refmap(refmap) {}
-    ReferenceMapStack(ReferenceMap* refmap, const std::unordered_set<void*>& roots) : ChunkedStack(), refmap(refmap) {
+    ReferenceMapWorklist(ReferenceMap* refmap) : refmap(refmap) {}
+    ReferenceMapWorklist(ReferenceMap* refmap, const std::unordered_set<void*>& roots) : refmap(refmap) {
         for (void* p : roots) {
-            push(GCAllocation::fromUserData(p), NULL);
+            addWork(GCAllocation::fromUserData(p), NULL);
         }
     }

-    void push(GCAllocation* al, GCAllocation* source) {
+    void addWork(GCAllocation* al, GCAllocation* source) {
         assert(refmap);

         auto it = refmap->references.find(al);
@@ -270,7 +277,7 @@ class ReferenceMapStack : public ChunkedStack {
                 }
             }

-            _push(al->user_data);
+            stack.push(al->user_data);
         } else {
             if (source) {
                 // We found that there exists a pointer from `source` to `al`
@@ -435,7 +442,7 @@ void GCVisitor::_visit(void** ptr_address) {
     }

     ASSERT(global_heap.getAllocationFromInteriorPointer(p)->user_data == p, "%p", p);
-    stack->push(p);
+    worklist->addWork(p);
 }

 void GCVisitor::_visitRange(void** start, void** end) {
@@ -454,7 +461,7 @@ void GCVisitor::_visitRange(void** start, void** end) {
 void GCVisitor::visitPotential(void* p) {
     GCAllocation* a = global_heap.getAllocationFromInteriorPointer(p);
     if (a) {
-        stack->push(a->user_data);
+        worklist->addWork(a->user_data);
     }
 }
@@ -491,14 +498,14 @@ void GCVisitorPinning::_visit(void** ptr_address) {

     GCAllocation* al = global_heap.getAllocationFromInteriorPointer(p);
     ASSERT(al->user_data == p, "%p", p);
-    stack->push(al, source);
+    worklist->addWork(al, source);
 }

 void GCVisitorPinning::visitPotential(void* p) {
     GCAllocation* a = global_heap.getAllocationFromInteriorPointer(p);
     if (a) {
-        stack->pin(a);
-        stack->push(a, source);
+        worklist->pin(a);
+        worklist->addWork(a, source);
     }
 }
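The visitor hunks above are mostly mechanical renames, but they show the wiring: each visitor holds a pointer to a concrete worklist and forwards every discovered pointer through addWork(), letting the worklist decide whether to enqueue it. A minimal sketch of that relationship, continuing the illustrative classes from earlier (MiniGCVisitor and its shape are assumptions, not Pyston's exact code):

class MiniGCVisitor {
    MarkWorklist* worklist; // concrete type, so the right addWork() overload is visible

public:
    explicit MiniGCVisitor(MarkWorklist* w) : worklist(w) {}

    // Called for every candidate pointer found while scanning an object.
    void visit(void* p) {
        if (p)
            worklist->addWork(p); // the worklist filters duplicates and non-objects
    }
};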
@@ -538,11 +545,11 @@ static void finalizationOrderingFindReachable(Box* obj) {
     static StatCounter sc_us("us_gc_mark_finalizer_ordering_1");
     Timer _t("finalizationOrderingFindReachable", /* min_usec=*/ 10000);

-    TraceStack stack(TraceStackType::FinalizationOrderingFindReachable);
-    GCVisitor visitor(&stack);
+    TraversalWorklist worklist(TraversalType::FinalizationOrderingFindReachable);
+    GCVisitor visitor(&worklist);

-    stack.push(obj);
-    while (void* p = stack.pop()) {
+    worklist.addWork(obj);
+    while (void* p = worklist.next()) {
         sc_marked_objs.log();

         visitByGCKind(p, visitor);
@@ -556,11 +563,11 @@ static void finalizationOrderingRemoveTemporaries(Box* obj) {
     static StatCounter sc_us("us_gc_mark_finalizer_ordering_2");
     Timer _t("finalizationOrderingRemoveTemporaries", /* min_usec=*/ 10000);

-    TraceStack stack(TraceStackType::FinalizationOrderingRemoveTemporaries);
-    GCVisitor visitor(&stack);
+    TraversalWorklist worklist(TraversalType::FinalizationOrderingRemoveTemporaries);
+    GCVisitor visitor(&worklist);

-    stack.push(obj);
-    while (void* p = stack.pop()) {
+    worklist.addWork(obj);
+    while (void* p = worklist.next()) {
         GCAllocation* al = GCAllocation::fromUserData(p);
         assert(orderingState(al) != FinalizationState::UNREACHABLE);
         visitByGCKind(p, visitor);
@@ -606,12 +613,12 @@ static void orderFinalizers() {
     sc_us.log(us);
 }

-static void graphTraversalMarking(ChunkedStack& stack, GCVisitor& visitor) {
+static void graphTraversalMarking(Worklist& worklist, GCVisitor& visitor) {
     static StatCounter sc_us("us_gc_mark_phase_graph_traversal");
     static StatCounter sc_marked_objs("gc_marked_object_count");
     Timer _t("traversing", /* min_usec=*/ 10000);

-    while (void* p = stack.pop()) {
+    while (void* p = worklist.next()) {
         sc_marked_objs.log();

         GCAllocation* al = GCAllocation::fromUserData(p);
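This hunk is the payoff of the refactor: graphTraversalMarking() now depends only on the Worklist interface, so the same drain loop serves the mark phase, both finalization-ordering passes, and the reference-mapping phase. A sketch of the shape, with a hypothetical name (drainWorklist) and the illustrative classes from above:

// Any concrete worklist (MarkWorklist, etc.) can be passed by base reference.
static void drainWorklist(Worklist& worklist) {
    while (void* p = worklist.next()) {
        // Scan p's fields here; in Pyston, visitByGCKind(p, visitor) does the
        // scan, and each discovered child re-enters the concrete worklist via
        // addWork(), so the loop runs until the reachable set is exhausted.
        (void)p;
    }
}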
@@ -741,12 +748,12 @@ static void markPhase() {
     GC_TRACE_LOG("Starting collection %d\n", ncollections);

     GC_TRACE_LOG("Looking at roots\n");
-    TraceStack stack(TraceStackType::MarkPhase, roots);
-    GCVisitor visitor(&stack);
+    TraversalWorklist worklist(TraversalType::MarkPhase, roots);
+    GCVisitor visitor(&worklist);

     visitRoots(visitor);

-    graphTraversalMarking(stack, visitor);
+    graphTraversalMarking(worklist, visitor);

     // Objects with finalizers cannot be freed in any order. During the call to a finalizer
     // of an object, the finalizer expects the object's references to still point to valid
@@ -775,16 +782,16 @@ static void sweepPhase(std::vector<Box*>& weakly_referenced) {
 }

 static void mapReferencesPhase(ReferenceMap& refmap) {
-    ReferenceMapStack stack(&refmap, roots);
-    GCVisitorPinning visitor(&stack);
+    ReferenceMapWorklist worklist(&refmap, roots);
+    GCVisitorPinning visitor(&worklist);

     visitRoots(visitor);

     for (auto obj : objects_with_ordered_finalizers) {
         visitor.visit(&obj);
     }

-    graphTraversalMarking(stack, visitor);
+    graphTraversalMarking(worklist, visitor);
 }

 // Move objects around memory randomly. The purpose is to test whether the rest