/*
 * Mark-and-sweep garbage collection.
 */

#include "duk_internal.h"

#ifdef DUK_USE_MARK_AND_SWEEP

DUK_LOCAL_DECL void duk__mark_heaphdr(duk_heap *heap, duk_heaphdr *h);
DUK_LOCAL_DECL void duk__mark_tval(duk_heap *heap, duk_tval *tv);

/*
 * Misc
 */

/* Select a thread for mark-and-sweep use.
 *
 * XXX: This needs to change later.
 */
DUK_LOCAL duk_hthread *duk__get_temp_hthread(duk_heap *heap) {
	if (heap->curr_thread) {
		return heap->curr_thread;
	}
	return heap->heap_thread; /* may be NULL, too */
}

/*
 * Marking functions for heap types: mark children recursively
 */
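
#if 0
/* Minimal sketch (illustrative only, not compiled) of the marking pattern
 * the functions below implement: mark a node once via its REACHABLE flag,
 * then recurse into children based on the heap header type.  The real
 * implementation additionally bounds recursion depth; see duk__mark_heaphdr()
 * and the temproot fallback further below.
 */
DUK_LOCAL void duk__mark_sketch(duk_heap *heap, duk_heaphdr *h) {
	if (h == NULL || DUK_HEAPHDR_HAS_REACHABLE(h)) {
		return; /* nothing to do, or already marked */
	}
	DUK_HEAPHDR_SET_REACHABLE(h);
	/* ... recurse into children depending on DUK_HEAPHDR_GET_TYPE(h) ... */
	DUK_UNREF(heap);
}
#endif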

DUK_LOCAL void duk__mark_hstring(duk_heap *heap, duk_hstring *h) {
	DUK_UNREF(heap);
	DUK_UNREF(h);

	DUK_DDD(DUK_DDDPRINT("duk__mark_hstring: %p", (void *) h));
	DUK_ASSERT(h);

	/* nothing to process */
}

DUK_LOCAL void duk__mark_hobject(duk_heap *heap, duk_hobject *h) {
	duk_uint_fast32_t i;

	DUK_DDD(DUK_DDDPRINT("duk__mark_hobject: %p", (void *) h));

	DUK_ASSERT(h);

	/* XXX: use advancing pointers instead of index macros -> faster and smaller? */

	for (i = 0; i < (duk_uint_fast32_t) DUK_HOBJECT_GET_ENEXT(h); i++) {
		duk_hstring *key = DUK_HOBJECT_E_GET_KEY(heap, h, i);
		if (!key) {
			continue;
		}
		duk__mark_heaphdr(heap, (duk_heaphdr *) key);
		if (DUK_HOBJECT_E_SLOT_IS_ACCESSOR(heap, h, i)) {
			duk__mark_heaphdr(heap, (duk_heaphdr *) DUK_HOBJECT_E_GET_VALUE_PTR(heap, h, i)->a.get);
			duk__mark_heaphdr(heap, (duk_heaphdr *) DUK_HOBJECT_E_GET_VALUE_PTR(heap, h, i)->a.set);
		} else {
			duk__mark_tval(heap, &DUK_HOBJECT_E_GET_VALUE_PTR(heap, h, i)->v);
		}
	}

	for (i = 0; i < (duk_uint_fast32_t) DUK_HOBJECT_GET_ASIZE(h); i++) {
		duk__mark_tval(heap, DUK_HOBJECT_A_GET_VALUE_PTR(heap, h, i));
	}

	/* hash part is a 'weak reference' and does not contribute */

	duk__mark_heaphdr(heap, (duk_heaphdr *) DUK_HOBJECT_GET_PROTOTYPE(heap, h));

	if (DUK_HOBJECT_IS_COMPILEDFUNCTION(h)) {
		duk_hcompiledfunction *f = (duk_hcompiledfunction *) h;
		duk_tval *tv, *tv_end;
		duk_hobject **fn, **fn_end;

		/* 'data' is reachable through every compiled function which
		 * contains a reference.
		 */

		duk__mark_heaphdr(heap, (duk_heaphdr *) DUK_HCOMPILEDFUNCTION_GET_DATA(heap, f));

		if (DUK_HCOMPILEDFUNCTION_GET_DATA(heap, f) != NULL) {
			tv = DUK_HCOMPILEDFUNCTION_GET_CONSTS_BASE(heap, f);
			tv_end = DUK_HCOMPILEDFUNCTION_GET_CONSTS_END(heap, f);
			while (tv < tv_end) {
				duk__mark_tval(heap, tv);
				tv++;
			}

			fn = DUK_HCOMPILEDFUNCTION_GET_FUNCS_BASE(heap, f);
			fn_end = DUK_HCOMPILEDFUNCTION_GET_FUNCS_END(heap, f);
			while (fn < fn_end) {
				duk__mark_heaphdr(heap, (duk_heaphdr *) *fn);
				fn++;
			}
		} else {
			/* May happen in some out-of-memory corner cases. */
			DUK_D(DUK_DPRINT("duk_hcompiledfunction 'data' is NULL, skipping marking"));
		}
	} else if (DUK_HOBJECT_IS_NATIVEFUNCTION(h)) {
		duk_hnativefunction *f = (duk_hnativefunction *) h;
		DUK_UNREF(f);
		/* nothing to mark */
	} else if (DUK_HOBJECT_IS_BUFFEROBJECT(h)) {
		duk_hbufferobject *b = (duk_hbufferobject *) h;
		duk__mark_heaphdr(heap, (duk_heaphdr *) b->buf);
	} else if (DUK_HOBJECT_IS_THREAD(h)) {
		duk_hthread *t = (duk_hthread *) h;
		duk_tval *tv;

		tv = t->valstack;
		while (tv < t->valstack_top) {
			duk__mark_tval(heap, tv);
			tv++;
		}

		for (i = 0; i < (duk_uint_fast32_t) t->callstack_top; i++) {
			duk_activation *act = t->callstack + i;
			duk__mark_heaphdr(heap, (duk_heaphdr *) DUK_ACT_GET_FUNC(act));
			duk__mark_heaphdr(heap, (duk_heaphdr *) act->var_env);
			duk__mark_heaphdr(heap, (duk_heaphdr *) act->lex_env);
#ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
			duk__mark_heaphdr(heap, (duk_heaphdr *) act->prev_caller);
#endif
		}

#if 0 /* nothing now */
		for (i = 0; i < (duk_uint_fast32_t) t->catchstack_top; i++) {
			duk_catcher *cat = t->catchstack + i;
		}
#endif

		duk__mark_heaphdr(heap, (duk_heaphdr *) t->resumer);

		/* XXX: duk_small_uint_t would be enough for this loop */
		for (i = 0; i < DUK_NUM_BUILTINS; i++) {
			duk__mark_heaphdr(heap, (duk_heaphdr *) t->builtins[i]);
		}
	}
}

/* recursion tracking happens here only */
DUK_LOCAL void duk__mark_heaphdr(duk_heap *heap, duk_heaphdr *h) {
	DUK_DDD(DUK_DDDPRINT("duk__mark_heaphdr %p, type %ld",
	                     (void *) h,
	                     (h != NULL ? (long) DUK_HEAPHDR_GET_TYPE(h) : (long) -1)));
	if (!h) {
		return;
	}
#if defined(DUK_USE_ROM_OBJECTS)
	if (DUK_HEAPHDR_HAS_READONLY(h)) {
		DUK_DDD(DUK_DDDPRINT("readonly object %p, skip", (void *) h));
		return;
	}
#endif
	if (DUK_HEAPHDR_HAS_REACHABLE(h)) {
		DUK_DDD(DUK_DDDPRINT("already marked reachable, skip"));
		return;
	}
	DUK_HEAPHDR_SET_REACHABLE(h);

	if (heap->mark_and_sweep_recursion_depth >= DUK_USE_MARK_AND_SWEEP_RECLIMIT) {
		/* log this with a normal debug level because this should be relatively rare */
		DUK_D(DUK_DPRINT("mark-and-sweep recursion limit reached, marking as temproot: %p", (void *) h));
		DUK_HEAP_SET_MARKANDSWEEP_RECLIMIT_REACHED(heap);
		DUK_HEAPHDR_SET_TEMPROOT(h);
		return;
	}

	heap->mark_and_sweep_recursion_depth++;

	switch ((int) DUK_HEAPHDR_GET_TYPE(h)) {
	case DUK_HTYPE_STRING:
		duk__mark_hstring(heap, (duk_hstring *) h);
		break;
	case DUK_HTYPE_OBJECT:
		duk__mark_hobject(heap, (duk_hobject *) h);
		break;
	case DUK_HTYPE_BUFFER:
		/* nothing to mark */
		break;
	default:
		DUK_D(DUK_DPRINT("attempt to mark heaphdr %p with invalid htype %ld", (void *) h, (long) DUK_HEAPHDR_GET_TYPE(h)));
		DUK_UNREACHABLE();
	}

	heap->mark_and_sweep_recursion_depth--;
}

DUK_LOCAL void duk__mark_tval(duk_heap *heap, duk_tval *tv) {
	DUK_DDD(DUK_DDDPRINT("duk__mark_tval %p", (void *) tv));
	if (!tv) {
		return;
	}
	if (DUK_TVAL_IS_HEAP_ALLOCATED(tv)) {
		duk__mark_heaphdr(heap, DUK_TVAL_GET_HEAPHDR(tv));
	}
}

/*
 * Mark the heap.
 */

DUK_LOCAL void duk__mark_roots_heap(duk_heap *heap) {
	duk_small_uint_t i;

	DUK_DD(DUK_DDPRINT("duk__mark_roots_heap: %p", (void *) heap));

	duk__mark_heaphdr(heap, (duk_heaphdr *) heap->heap_thread);
	duk__mark_heaphdr(heap, (duk_heaphdr *) heap->heap_object);

	for (i = 0; i < DUK_HEAP_NUM_STRINGS; i++) {
		duk_hstring *h = DUK_HEAP_GET_STRING(heap, i);
		duk__mark_heaphdr(heap, (duk_heaphdr *) h);
	}

	duk__mark_tval(heap, &heap->lj.value1);
	duk__mark_tval(heap, &heap->lj.value2);

#if defined(DUK_USE_DEBUGGER_SUPPORT)
	for (i = 0; i < heap->dbg_breakpoint_count; i++) {
		duk__mark_heaphdr(heap, (duk_heaphdr *) heap->dbg_breakpoints[i].filename);
	}
#endif
}

/*
 * Mark refzero_list objects.
 *
 * Objects on the refzero_list have no inbound references. They might have
 * outbound references to objects that we might free, which would invalidate
 * any references held by the refzero objects. A refzero object might also
 * be rescued by refcount finalization. Refzero objects are treated as
 * reachability roots to ensure they (or anything they point to) are not
 * freed in mark-and-sweep.
 */

#ifdef DUK_USE_REFERENCE_COUNTING
DUK_LOCAL void duk__mark_refzero_list(duk_heap *heap) {
	duk_heaphdr *hdr;

	DUK_DD(DUK_DDPRINT("duk__mark_refzero_list: %p", (void *) heap));

	hdr = heap->refzero_list;
	while (hdr) {
		duk__mark_heaphdr(heap, hdr);
		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}
}
#endif

/*
 * Mark unreachable, finalizable objects.
 *
 * Such objects will be moved aside and their finalizers run later. They have
 * to be treated as reachability roots for their properties etc to remain
 * allocated. This marking is only done for unreachable values which would
 * be swept later (refzero_list is thus excluded).
 *
 * Objects are first marked FINALIZABLE and only then marked as reachability
 * roots; otherwise circular references might be handled inconsistently.
 */
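
#if 0
/* Illustration only (not compiled): what makes an object "finalizable" from
 * the application's point of view.  A finalizer is attached through the
 * public API (duk_set_finalizer()); duk__mark_finalizable() below then
 * detects it via the internal finalizer property lookup.  The nargs value
 * and function names are assumptions for this sketch.
 */
static duk_ret_t my_finalizer(duk_context *ctx) {
	/* index 0: the object being finalized */
	return 0;
}

static void attach_finalizer_example(duk_context *ctx) {
	duk_push_object(ctx);                                 /* [ ... obj ] */
	duk_push_c_function(ctx, my_finalizer, 1 /*nargs*/);  /* [ ... obj func ] */
	duk_set_finalizer(ctx, -2);                           /* [ ... obj ] */
}
#endif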

DUK_LOCAL void duk__mark_finalizable(duk_heap *heap) {
	duk_hthread *thr;
	duk_heaphdr *hdr;
	duk_size_t count_finalizable = 0;

	DUK_DD(DUK_DDPRINT("duk__mark_finalizable: %p", (void *) heap));

	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

	hdr = heap->heap_allocated;
	while (hdr) {
		/* A finalizer is looked up from the object and up its prototype chain
		 * (which allows inherited finalizers). A prototype loop must not cause
		 * an error to be thrown here; duk_hobject_hasprop_raw() will ignore a
		 * prototype loop silently and indicate that the property doesn't exist.
		 */

		if (!DUK_HEAPHDR_HAS_REACHABLE(hdr) &&
		    DUK_HEAPHDR_GET_TYPE(hdr) == DUK_HTYPE_OBJECT &&
		    !DUK_HEAPHDR_HAS_FINALIZED(hdr) &&
		    duk_hobject_hasprop_raw(thr, (duk_hobject *) hdr, DUK_HTHREAD_STRING_INT_FINALIZER(thr))) {

			/* heaphdr:
			 *  - is not reachable
			 *  - is an object
			 *  - is not a finalized object
			 *  - has a finalizer
			 */

			DUK_DD(DUK_DDPRINT("unreachable heap object will be "
			                   "finalized -> mark as finalizable "
			                   "and treat as a reachability root: %p",
			                   (void *) hdr));
			DUK_ASSERT(!DUK_HEAPHDR_HAS_READONLY(hdr));
			DUK_HEAPHDR_SET_FINALIZABLE(hdr);
			count_finalizable++;
		}

		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}

	if (count_finalizable == 0) {
		return;
	}

	DUK_DD(DUK_DDPRINT("marked %ld heap objects as finalizable, now mark them reachable",
	                   (long) count_finalizable));

	hdr = heap->heap_allocated;
	while (hdr) {
		if (DUK_HEAPHDR_HAS_FINALIZABLE(hdr)) {
			duk__mark_heaphdr(heap, hdr);
		}

		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}

	/* Caller will finish the marking process if we hit a recursion limit. */
}

/*
 * Mark objects on finalize_list.
 */

DUK_LOCAL void duk__mark_finalize_list(duk_heap *heap) {
	duk_heaphdr *hdr;
#ifdef DUK_USE_DEBUG
	duk_size_t count_finalize_list = 0;
#endif

	DUK_DD(DUK_DDPRINT("duk__mark_finalize_list: %p", (void *) heap));

	hdr = heap->finalize_list;
	while (hdr) {
		duk__mark_heaphdr(heap, hdr);
		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
#ifdef DUK_USE_DEBUG
		count_finalize_list++;
#endif
	}

#ifdef DUK_USE_DEBUG
	if (count_finalize_list > 0) {
		DUK_D(DUK_DPRINT("marked %ld objects on the finalize_list as reachable (previous finalizer run skipped)",
		                 (long) count_finalize_list));
	}
#endif
}

/*
 * Fallback marking handler if recursion limit is reached.
 *
 * Iterates 'temproots' until the recursion limit is no longer hit. Note
 * that temproots may reside either in the heap allocated list or in the
 * refzero work list. This is a slow scan, but guarantees that we finish
 * with a bounded C stack.
 *
 * Note that nodes may have been marked as temproots before this scan
 * began, OR they may have been marked during the scan (as we process
 * nodes recursively also during the scan). This is intended behavior.
 */
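
#if 0
/* Worked example (illustrative only): with DUK_USE_MARK_AND_SWEEP_RECLIMIT=1
 * and a root chain A -> B -> C, marking A recurses into B at the depth
 * limit, so B is flagged as a TEMPROOT.  The scan below clears B's TEMPROOT
 * and REACHABLE flags and restarts marking from B at depth zero, which in
 * turn flags C as a TEMPROOT; the next scan pass finishes C.  The process
 * terminates because a restarted temproot begins at depth zero and cannot
 * become a temproot again, while new temproots only arise from previously
 * unmarked objects.
 */
#endif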

#ifdef DUK_USE_DEBUG
DUK_LOCAL void duk__handle_temproot(duk_heap *heap, duk_heaphdr *hdr, duk_size_t *count) {
#else
DUK_LOCAL void duk__handle_temproot(duk_heap *heap, duk_heaphdr *hdr) {
#endif
	if (!DUK_HEAPHDR_HAS_TEMPROOT(hdr)) {
		DUK_DDD(DUK_DDDPRINT("not a temp root: %p", (void *) hdr));
		return;
	}

	DUK_DDD(DUK_DDDPRINT("found a temp root: %p", (void *) hdr));
	DUK_HEAPHDR_CLEAR_TEMPROOT(hdr);
	DUK_HEAPHDR_CLEAR_REACHABLE(hdr); /* done so that duk__mark_heaphdr() works correctly */
	duk__mark_heaphdr(heap, hdr);

#ifdef DUK_USE_DEBUG
	(*count)++;
#endif
}

DUK_LOCAL void duk__mark_temproots_by_heap_scan(duk_heap *heap) {
	duk_heaphdr *hdr;
#ifdef DUK_USE_DEBUG
	duk_size_t count;
#endif

	DUK_DD(DUK_DDPRINT("duk__mark_temproots_by_heap_scan: %p", (void *) heap));

	while (DUK_HEAP_HAS_MARKANDSWEEP_RECLIMIT_REACHED(heap)) {
		DUK_DD(DUK_DDPRINT("recursion limit reached, doing heap scan to continue from temproots"));

#ifdef DUK_USE_DEBUG
		count = 0;
#endif
		DUK_HEAP_CLEAR_MARKANDSWEEP_RECLIMIT_REACHED(heap);

		hdr = heap->heap_allocated;
		while (hdr) {
#ifdef DUK_USE_DEBUG
			duk__handle_temproot(heap, hdr, &count);
#else
			duk__handle_temproot(heap, hdr);
#endif
			hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
		}

		/* must also check refzero_list */
#ifdef DUK_USE_REFERENCE_COUNTING
		hdr = heap->refzero_list;
		while (hdr) {
#ifdef DUK_USE_DEBUG
			duk__handle_temproot(heap, hdr, &count);
#else
			duk__handle_temproot(heap, hdr);
#endif
			hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
		}
#endif /* DUK_USE_REFERENCE_COUNTING */

#ifdef DUK_USE_DEBUG
		DUK_DD(DUK_DDPRINT("temproot mark heap scan processed %ld temp roots", (long) count));
#endif
	}
}

/*
 * Finalize refcounts for heap elements just about to be freed.
 * This must be done for all objects before freeing to avoid any
 * stale pointer dereferences.
 *
 * Note that this must deduce the set of objects to be freed
 * identically to duk__sweep_heap().
 */

#ifdef DUK_USE_REFERENCE_COUNTING
DUK_LOCAL void duk__finalize_refcounts(duk_heap *heap) {
	duk_hthread *thr;
	duk_heaphdr *hdr;

	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

	DUK_DD(DUK_DDPRINT("duk__finalize_refcounts: heap=%p, hthread=%p",
	                   (void *) heap, (void *) thr));

	hdr = heap->heap_allocated;
	while (hdr) {
		if (!DUK_HEAPHDR_HAS_REACHABLE(hdr)) {
			/*
			 * Unreachable object about to be swept. Finalize target refcounts
			 * (objects which the unreachable object points to) without doing
			 * refzero processing. Recursive decrefs are also prevented when
			 * refzero processing is disabled.
			 *
			 * Value cannot be a finalizable object, as they have been made
			 * temporarily reachable for this round.
			 */

			DUK_DDD(DUK_DDDPRINT("unreachable object, refcount finalize before sweeping: %p", (void *) hdr));
			duk_heaphdr_refcount_finalize(thr, hdr);
		}

		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}
}
#endif /* DUK_USE_REFERENCE_COUNTING */

/*
 * Clear (reachable) flags of refzero work list.
 */

#ifdef DUK_USE_REFERENCE_COUNTING
DUK_LOCAL void duk__clear_refzero_list_flags(duk_heap *heap) {
	duk_heaphdr *hdr;

	DUK_DD(DUK_DDPRINT("duk__clear_refzero_list_flags: %p", (void *) heap));

	hdr = heap->refzero_list;
	while (hdr) {
		DUK_HEAPHDR_CLEAR_REACHABLE(hdr);
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(hdr));
		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}
}
#endif /* DUK_USE_REFERENCE_COUNTING */

/*
 * Clear (reachable) flags of finalize_list.
 *
 * We could mostly do this in the sweep phase when we move objects from the
 * heap into the finalize_list. However, if a finalizer run is skipped
 * during a mark-and-sweep, the objects on the finalize_list will be marked
 * reachable during the next mark-and-sweep. Since they're already on the
 * finalize_list, no one will be clearing their REACHABLE flag, so we do it
 * here. (This now overlaps with the sweep handling in a harmless way.)
 */

DUK_LOCAL void duk__clear_finalize_list_flags(duk_heap *heap) {
	duk_heaphdr *hdr;

	DUK_DD(DUK_DDPRINT("duk__clear_finalize_list_flags: %p", (void *) heap));

	hdr = heap->finalize_list;
	while (hdr) {
		DUK_HEAPHDR_CLEAR_REACHABLE(hdr);
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(hdr));
		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}
}

/*
 * Sweep stringtable
 */

#if defined(DUK_USE_STRTAB_CHAIN)

/* XXX: skip count_free w/o debug? */
#if defined(DUK_USE_HEAPPTR16)
DUK_LOCAL void duk__sweep_string_chain16(duk_heap *heap, duk_uint16_t *slot, duk_size_t *count_keep, duk_size_t *count_free) {
	duk_uint16_t h16 = *slot;
	duk_hstring *h;
	duk_uint16_t null16 = heap->heapptr_null16;

	if (h16 == null16) {
		/* nop */
		return;
	}
	h = (duk_hstring *) DUK_USE_HEAPPTR_DEC16(heap->heap_udata, h16);
	DUK_ASSERT(h != NULL);

	if (DUK_HEAPHDR_HAS_REACHABLE((duk_heaphdr *) h)) {
		DUK_HEAPHDR_CLEAR_REACHABLE((duk_heaphdr *) h);
		(*count_keep)++;
	} else {
#if defined(DUK_USE_REFERENCE_COUNTING)
		DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT((duk_heaphdr *) h) == 0);
#endif
		/* deal with weak references first */
		duk_heap_strcache_string_remove(heap, (duk_hstring *) h);
		*slot = null16;

		/* free inner references (these exist e.g. when external
		 * strings are enabled)
		 */
		duk_free_hstring_inner(heap, h);
		DUK_FREE(heap, h);
		(*count_free)++;
	}
}
#else /* DUK_USE_HEAPPTR16 */
DUK_LOCAL void duk__sweep_string_chain(duk_heap *heap, duk_hstring **slot, duk_size_t *count_keep, duk_size_t *count_free) {
	duk_hstring *h = *slot;

	if (h == NULL) {
		/* nop */
		return;
	}

	if (DUK_HEAPHDR_HAS_REACHABLE((duk_heaphdr *) h)) {
		DUK_HEAPHDR_CLEAR_REACHABLE((duk_heaphdr *) h);
		(*count_keep)++;
	} else {
#if defined(DUK_USE_REFERENCE_COUNTING)
		DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT((duk_heaphdr *) h) == 0);
#endif
		/* deal with weak references first */
		duk_heap_strcache_string_remove(heap, (duk_hstring *) h);
		*slot = NULL;

		/* free inner references (these exist e.g. when external
		 * strings are enabled)
		 */
		duk_free_hstring_inner(heap, h);
		DUK_FREE(heap, h);
		(*count_free)++;
	}
}
#endif /* DUK_USE_HEAPPTR16 */

DUK_LOCAL void duk__sweep_stringtable_chain(duk_heap *heap, duk_size_t *out_count_keep) {
	duk_strtab_entry *e;
	duk_uint_fast32_t i;
	duk_size_t count_free = 0;
	duk_size_t count_keep = 0;
	duk_size_t j, n;
#if defined(DUK_USE_HEAPPTR16)
	duk_uint16_t *lst;
#else
	duk_hstring **lst;
#endif

	DUK_DD(DUK_DDPRINT("duk__sweep_stringtable: %p", (void *) heap));

	/* Non-zero refcounts should not happen for unreachable strings,
	 * because we refcount finalize all unreachable objects which
	 * should have decreased unreachable string refcounts to zero
	 * (even for cycles).
	 */

	for (i = 0; i < DUK_STRTAB_CHAIN_SIZE; i++) {
		e = heap->strtable + i;
		if (e->listlen == 0) {
#if defined(DUK_USE_HEAPPTR16)
			duk__sweep_string_chain16(heap, &e->u.str16, &count_keep, &count_free);
#else
			duk__sweep_string_chain(heap, &e->u.str, &count_keep, &count_free);
#endif
		} else {
#if defined(DUK_USE_HEAPPTR16)
			lst = (duk_uint16_t *) DUK_USE_HEAPPTR_DEC16(heap->heap_udata, e->u.strlist16);
#else
			lst = e->u.strlist;
#endif
			for (j = 0, n = e->listlen; j < n; j++) {
#if defined(DUK_USE_HEAPPTR16)
				duk__sweep_string_chain16(heap, lst + j, &count_keep, &count_free);
#else
				duk__sweep_string_chain(heap, lst + j, &count_keep, &count_free);
#endif
			}
		}
	}

	DUK_D(DUK_DPRINT("mark-and-sweep sweep stringtable: %ld freed, %ld kept",
	                 (long) count_free, (long) count_keep));
	*out_count_keep = count_keep;
}
#endif /* DUK_USE_STRTAB_CHAIN */

#if defined(DUK_USE_STRTAB_PROBE)
DUK_LOCAL void duk__sweep_stringtable_probe(duk_heap *heap, duk_size_t *out_count_keep) {
	duk_hstring *h;
	duk_uint_fast32_t i;
#ifdef DUK_USE_DEBUG
	duk_size_t count_free = 0;
#endif
	duk_size_t count_keep = 0;

	DUK_DD(DUK_DDPRINT("duk__sweep_stringtable: %p", (void *) heap));

	for (i = 0; i < heap->st_size; i++) {
#if defined(DUK_USE_HEAPPTR16)
		h = (duk_hstring *) DUK_USE_HEAPPTR_DEC16(heap->heap_udata, heap->strtable16[i]);
#else
		h = heap->strtable[i];
#endif
		if (h == NULL || h == DUK_STRTAB_DELETED_MARKER(heap)) {
			continue;
		} else if (DUK_HEAPHDR_HAS_REACHABLE((duk_heaphdr *) h)) {
			DUK_HEAPHDR_CLEAR_REACHABLE((duk_heaphdr *) h);
			count_keep++;
			continue;
		}

#ifdef DUK_USE_DEBUG
		count_free++;
#endif

#if defined(DUK_USE_REFERENCE_COUNTING)
		/* Non-zero refcounts should not happen for unreachable strings,
		 * because we refcount finalize all unreachable objects which
		 * should have decreased unreachable string refcounts to zero
		 * (even for cycles).
		 */
		DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT((duk_heaphdr *) h) == 0);
#endif

		DUK_DDD(DUK_DDDPRINT("sweep string, not reachable: %p", (void *) h));

		/* deal with weak references first */
		duk_heap_strcache_string_remove(heap, (duk_hstring *) h);

		/* remove the string (mark DELETED), could also call
		 * duk_heap_string_remove() but that would be slow and
		 * pointless because we already know the slot.
		 */
#if defined(DUK_USE_HEAPPTR16)
		heap->strtable16[i] = heap->heapptr_deleted16;
#else
		heap->strtable[i] = DUK_STRTAB_DELETED_MARKER(heap);
#endif

		/* free inner references (these exist e.g. when external
		 * strings are enabled)
		 */
		duk_free_hstring_inner(heap, (duk_hstring *) h);

		/* finally free the struct itself */
		DUK_FREE(heap, h);
	}

#ifdef DUK_USE_DEBUG
	DUK_D(DUK_DPRINT("mark-and-sweep sweep stringtable: %ld freed, %ld kept",
	                 (long) count_free, (long) count_keep));
#endif
	*out_count_keep = count_keep;
}
#endif /* DUK_USE_STRTAB_PROBE */

/*
 * Sweep heap
 */

DUK_LOCAL void duk__sweep_heap(duk_heap *heap, duk_int_t flags, duk_size_t *out_count_keep) {
	duk_heaphdr *prev; /* last element that was left in the heap */
	duk_heaphdr *curr;
	duk_heaphdr *next;
#ifdef DUK_USE_DEBUG
	duk_size_t count_free = 0;
	duk_size_t count_finalize = 0;
	duk_size_t count_rescue = 0;
#endif
	duk_size_t count_keep = 0;

	DUK_UNREF(flags);
	DUK_DD(DUK_DDPRINT("duk__sweep_heap: %p", (void *) heap));

	prev = NULL;
	curr = heap->heap_allocated;
	heap->heap_allocated = NULL;
	while (curr) {
		/* Strings and ROM objects are never placed on the heap allocated list. */
		DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr) != DUK_HTYPE_STRING);
		DUK_ASSERT(!DUK_HEAPHDR_HAS_READONLY(curr));

		next = DUK_HEAPHDR_GET_NEXT(heap, curr);

		if (DUK_HEAPHDR_HAS_REACHABLE(curr)) {
			/*
			 * Reachable object, keep
			 */

			DUK_DDD(DUK_DDDPRINT("sweep, reachable: %p", (void *) curr));

			if (DUK_HEAPHDR_HAS_FINALIZABLE(curr)) {
				/*
				 * If object has been marked finalizable, move it to the
				 * "to be finalized" work list. It will be collected on
				 * the next mark-and-sweep if it is still unreachable
				 * after running the finalizer.
				 */

				DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr));
				DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr) == DUK_HTYPE_OBJECT);
				DUK_DDD(DUK_DDDPRINT("object has finalizer, move to finalization work list: %p", (void *) curr));

#ifdef DUK_USE_DOUBLE_LINKED_HEAP
				if (heap->finalize_list) {
					DUK_HEAPHDR_SET_PREV(heap, heap->finalize_list, curr);
				}
				DUK_HEAPHDR_SET_PREV(heap, curr, NULL);
#endif
				DUK_HEAPHDR_SET_NEXT(heap, curr, heap->finalize_list);
				DUK_ASSERT_HEAPHDR_LINKS(heap, curr);
				heap->finalize_list = curr;
#ifdef DUK_USE_DEBUG
				count_finalize++;
#endif
			} else {
				/*
				 * Object will be kept; queue object back to heap_allocated (to tail)
				 */

				if (DUK_HEAPHDR_HAS_FINALIZED(curr)) {
					/*
					 * Object's finalizer was executed on last round, and
					 * object has been happily rescued.
					 */

					DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr));
					DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr) == DUK_HTYPE_OBJECT);
					DUK_DD(DUK_DDPRINT("object rescued during mark-and-sweep finalization: %p", (void *) curr));
#ifdef DUK_USE_DEBUG
					count_rescue++;
#endif
				} else {
					/*
					 * Plain, boring reachable object.
					 */
					DUK_DD(DUK_DDPRINT("keep object: %!iO", curr));
					count_keep++;
				}

				if (!heap->heap_allocated) {
					heap->heap_allocated = curr;
				}
				if (prev) {
					DUK_HEAPHDR_SET_NEXT(heap, prev, curr);
				}
#ifdef DUK_USE_DOUBLE_LINKED_HEAP
				DUK_HEAPHDR_SET_PREV(heap, curr, prev);
#endif
				DUK_ASSERT_HEAPHDR_LINKS(heap, prev);
				DUK_ASSERT_HEAPHDR_LINKS(heap, curr);
				prev = curr;
			}

			DUK_HEAPHDR_CLEAR_REACHABLE(curr);
			DUK_HEAPHDR_CLEAR_FINALIZED(curr);
			DUK_HEAPHDR_CLEAR_FINALIZABLE(curr);

			DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(curr));
			DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr));
			DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr));

			curr = next;
		} else {
			/*
			 * Unreachable object, free
			 */

			DUK_DDD(DUK_DDDPRINT("sweep, not reachable: %p", (void *) curr));

#if defined(DUK_USE_REFERENCE_COUNTING)
			/* Non-zero refcounts should not happen because we refcount
			 * finalize all unreachable objects which should cancel out
			 * refcounts (even for cycles).
			 */
			DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(curr) == 0);
#endif
			DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr));

			if (DUK_HEAPHDR_HAS_FINALIZED(curr)) {
				DUK_DDD(DUK_DDDPRINT("finalized object not rescued: %p", (void *) curr));
			}

			/* Note: object cannot be a finalizable unreachable object, as
			 * they have been marked temporarily reachable for this round,
			 * and are handled above.
			 */

#ifdef DUK_USE_DEBUG
			count_free++;
#endif

			/* weak refs should be handled here, but no weak refs for
			 * any non-string objects exist right now.
			 */

			/* free object and all auxiliary (non-heap) allocs */
			duk_heap_free_heaphdr_raw(heap, curr);

			curr = next;
		}
	}
	if (prev) {
		DUK_HEAPHDR_SET_NEXT(heap, prev, NULL);
	}
	DUK_ASSERT_HEAPHDR_LINKS(heap, prev);

#ifdef DUK_USE_DEBUG
	DUK_D(DUK_DPRINT("mark-and-sweep sweep objects (non-string): %ld freed, %ld kept, %ld rescued, %ld queued for finalization",
	                 (long) count_free, (long) count_keep, (long) count_rescue, (long) count_finalize));
#endif
	*out_count_keep = count_keep;
}

/*
 * Run (object) finalizers in the "to be finalized" work list.
 */

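#if 0
/* Illustration only (not compiled): a finalizer can "rescue" its object by
 * making it reachable again, for example by stashing a reference to it.
 * The object then survives with FINALIZED set and is counted as rescued by
 * duk__sweep_heap() on the next round.  The property name is arbitrary.
 */
static duk_ret_t rescuing_finalizer(duk_context *ctx) {
	duk_push_global_stash(ctx);
	duk_dup(ctx, 0); /* index 0: the object being finalized */
	duk_put_prop_string(ctx, -2, "rescued");
	return 0;
}
#endif
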
DUK_LOCAL void duk__run_object_finalizers(duk_heap *heap, duk_small_uint_t flags) {
	duk_heaphdr *curr;
	duk_heaphdr *next;
#ifdef DUK_USE_DEBUG
	duk_size_t count = 0;
#endif
	duk_hthread *thr;

	DUK_DD(DUK_DDPRINT("duk__run_object_finalizers: %p", (void *) heap));

	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

	curr = heap->finalize_list;
	while (curr) {
		DUK_DDD(DUK_DDDPRINT("mark-and-sweep finalize: %p", (void *) curr));

		DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr) == DUK_HTYPE_OBJECT); /* only objects have finalizers */
		DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(curr));               /* flags have been already cleared */
		DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(curr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_READONLY(curr)); /* no finalizers for ROM objects */

		if (DUK_LIKELY((flags & DUK_MS_FLAG_SKIP_FINALIZERS) == 0)) {
			/* Run the finalizer, duk_hobject_run_finalizer() sets FINALIZED.
			 * Next mark-and-sweep will collect the object unless it has
			 * become reachable (i.e. rescued). FINALIZED prevents the
			 * finalizer from being executed again before that.
			 */
			duk_hobject_run_finalizer(thr, (duk_hobject *) curr); /* must never longjmp */
			DUK_ASSERT(DUK_HEAPHDR_HAS_FINALIZED(curr));
		} else {
			/* Used during heap destruction: don't actually run finalizers
			 * because we're heading into forced finalization. Instead,
			 * queue finalizable objects back to the heap_allocated list.
			 */
			DUK_D(DUK_DPRINT("skip finalizers flag set, queue object to heap_allocated without finalizing"));
			DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr));
		}

		/* queue back to heap_allocated */
		next = DUK_HEAPHDR_GET_NEXT(heap, curr);
		DUK_HEAP_INSERT_INTO_HEAP_ALLOCATED(heap, curr);

		curr = next;
#ifdef DUK_USE_DEBUG
		count++;
#endif
	}

	/* finalize_list will always be processed completely */
	heap->finalize_list = NULL;

#ifdef DUK_USE_DEBUG
	DUK_D(DUK_DPRINT("mark-and-sweep finalize objects: %ld finalizers called", (long) count));
#endif
}

/*
 * Object compaction.
 *
 * Compaction is assumed to never throw an error.
 */

DUK_LOCAL int duk__protected_compact_object(duk_context *ctx) {
	/* XXX: for threads, compact value stack, call stack, catch stack? */

	duk_hobject *obj = duk_get_hobject(ctx, -1);
	DUK_ASSERT(obj != NULL);
	duk_hobject_compact_props((duk_hthread *) ctx, obj);
	return 0;
}

#ifdef DUK_USE_DEBUG
DUK_LOCAL void duk__compact_object_list(duk_heap *heap, duk_hthread *thr, duk_heaphdr *start, duk_size_t *p_count_check, duk_size_t *p_count_compact, duk_size_t *p_count_bytes_saved) {
#else
DUK_LOCAL void duk__compact_object_list(duk_heap *heap, duk_hthread *thr, duk_heaphdr *start) {
#endif
	duk_heaphdr *curr;
#ifdef DUK_USE_DEBUG
	duk_size_t old_size, new_size;
#endif
	duk_hobject *obj;

	DUK_UNREF(heap);

	curr = start;
	while (curr) {
		DUK_DDD(DUK_DDDPRINT("mark-and-sweep compact: %p", (void *) curr));

		if (DUK_HEAPHDR_GET_TYPE(curr) != DUK_HTYPE_OBJECT) {
			goto next;
		}
		obj = (duk_hobject *) curr;

#ifdef DUK_USE_DEBUG
		old_size = DUK_HOBJECT_P_COMPUTE_SIZE(DUK_HOBJECT_GET_ESIZE(obj),
		                                      DUK_HOBJECT_GET_ASIZE(obj),
		                                      DUK_HOBJECT_GET_HSIZE(obj));
#endif

		DUK_DD(DUK_DDPRINT("compact object: %p", (void *) obj));
		duk_push_hobject((duk_context *) thr, obj);
		/* XXX: disable error handlers for duration of compaction? */
		duk_safe_call((duk_context *) thr, duk__protected_compact_object, 1, 0);

#ifdef DUK_USE_DEBUG
		new_size = DUK_HOBJECT_P_COMPUTE_SIZE(DUK_HOBJECT_GET_ESIZE(obj),
		                                      DUK_HOBJECT_GET_ASIZE(obj),
		                                      DUK_HOBJECT_GET_HSIZE(obj));
#endif

#ifdef DUK_USE_DEBUG
		(*p_count_compact)++;
		(*p_count_bytes_saved) += (duk_size_t) (old_size - new_size);
#endif

	 next:
		curr = DUK_HEAPHDR_GET_NEXT(heap, curr);
#ifdef DUK_USE_DEBUG
		(*p_count_check)++;
#endif
	}
}

DUK_LOCAL void duk__compact_objects(duk_heap *heap) {
	/* XXX: which lists should participate? to be finalized? */
#ifdef DUK_USE_DEBUG
	duk_size_t count_check = 0;
	duk_size_t count_compact = 0;
	duk_size_t count_bytes_saved = 0;
#endif
	duk_hthread *thr;

	DUK_DD(DUK_DDPRINT("duk__compact_objects: %p", (void *) heap));

	thr = duk__get_temp_hthread(heap);
	DUK_ASSERT(thr != NULL);

#ifdef DUK_USE_DEBUG
	duk__compact_object_list(heap, thr, heap->heap_allocated, &count_check, &count_compact, &count_bytes_saved);
	duk__compact_object_list(heap, thr, heap->finalize_list, &count_check, &count_compact, &count_bytes_saved);
#ifdef DUK_USE_REFERENCE_COUNTING
	duk__compact_object_list(heap, thr, heap->refzero_list, &count_check, &count_compact, &count_bytes_saved);
#endif
#else
	duk__compact_object_list(heap, thr, heap->heap_allocated);
	duk__compact_object_list(heap, thr, heap->finalize_list);
#ifdef DUK_USE_REFERENCE_COUNTING
	duk__compact_object_list(heap, thr, heap->refzero_list);
#endif
#endif

#ifdef DUK_USE_DEBUG
	DUK_D(DUK_DPRINT("mark-and-sweep compact objects: %ld checked, %ld compaction attempts, %ld bytes saved by compaction",
	                 (long) count_check, (long) count_compact, (long) count_bytes_saved));
#endif
}

/*
 * Assertion helpers.
 */

#ifdef DUK_USE_ASSERTIONS
DUK_LOCAL void duk__assert_heaphdr_flags(duk_heap *heap) {
	duk_heaphdr *hdr;

	hdr = heap->heap_allocated;
	while (hdr) {
		DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(hdr));
		/* may have FINALIZED */
		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}

#ifdef DUK_USE_REFERENCE_COUNTING
	hdr = heap->refzero_list;
	while (hdr) {
		DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(hdr));
		DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(hdr));
		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}
#endif /* DUK_USE_REFERENCE_COUNTING */
}

#ifdef DUK_USE_REFERENCE_COUNTING
DUK_LOCAL void duk__assert_valid_refcounts(duk_heap *heap) {
	duk_heaphdr *hdr = heap->heap_allocated;
	while (hdr) {
		if (DUK_HEAPHDR_GET_REFCOUNT(hdr) == 0 &&
		    DUK_HEAPHDR_HAS_FINALIZED(hdr)) {
			/* An object may be in heap_allocated list with a zero
			 * refcount if it has just been finalized and is waiting
			 * to be collected by the next cycle.
			 */
		} else if (DUK_HEAPHDR_GET_REFCOUNT(hdr) == 0) {
			/* An object may be in heap_allocated list with a zero
			 * refcount also if it is a temporary object created by
			 * a finalizer; because finalization now runs inside
			 * mark-and-sweep, such objects will not be queued to
			 * refzero_list and will thus appear here with refcount
			 * zero.
			 */
#if 0 /* this case can no longer occur because refcount is unsigned */
		} else if (DUK_HEAPHDR_GET_REFCOUNT(hdr) < 0) {
			DUK_D(DUK_DPRINT("invalid refcount: %ld, %p -> %!O",
			                 (hdr != NULL ? (long) DUK_HEAPHDR_GET_REFCOUNT(hdr) : (long) 0),
			                 (void *) hdr, (duk_heaphdr *) hdr));
			DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(hdr) > 0);
#endif
		}
		hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
	}
}
#endif /* DUK_USE_REFERENCE_COUNTING */
#endif /* DUK_USE_ASSERTIONS */

/*
 * Finalizer torture. Do one fake finalizer call which causes side effects
 * similar to one or more finalizers on actual objects.
 */

#if defined(DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE)
DUK_LOCAL duk_ret_t duk__markandsweep_fake_finalizer(duk_context *ctx) {
	DUK_D(DUK_DPRINT("fake mark-and-sweep torture finalizer executed"));

	/* Require a lot of stack to force a value stack grow/shrink.
	 * Recursive mark-and-sweep is prevented by allocation macros
	 * so this won't trigger another mark-and-sweep.
	 */
	duk_require_stack(ctx, 100000);

	/* XXX: do something to force a callstack grow/shrink, perhaps
	 * just a manual forced resize or a forced relocating realloc?
	 */

	return 0;
}

DUK_LOCAL void duk__markandsweep_torture_finalizer(duk_hthread *thr) {
	duk_context *ctx;
	duk_int_t rc;

	DUK_ASSERT(thr != NULL);
	ctx = (duk_context *) thr;

	/* Avoid fake finalization when the callstack limit has been reached;
	 * otherwise a callstack limit error would be created and then refzero'ed.
	 */
	if (thr->heap->call_recursion_depth >= thr->heap->call_recursion_limit ||
	    thr->callstack_size + 2 * DUK_CALLSTACK_GROW_STEP >= thr->callstack_max /*approximate*/) {
		DUK_D(DUK_DPRINT("call recursion depth reached, avoid fake mark-and-sweep torture finalizer"));
		return;
	}

	/* Run fake finalizer. Avoid creating unnecessary garbage. */
	duk_push_c_function(ctx, duk__markandsweep_fake_finalizer, 0 /*nargs*/);
	rc = duk_pcall(ctx, 0 /*nargs*/);
	DUK_UNREF(rc); /* ignored */
	duk_pop(ctx);
}
#endif /* DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE */

/*
 * Main mark-and-sweep function.
 *
 * 'flags' represents the features requested by the caller. The current
 * heap->mark_and_sweep_base_flags is ORed automatically into the flags;
 * the base flags mask typically disables certain mark-and-sweep operations
 * in order to avoid trouble.
 */
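
#if 0
/* Illustration only (not compiled): mark-and-sweep normally runs as
 * voluntary GC (allocation count trigger) or emergency GC (allocation
 * failure), but an application can also request a collection explicitly
 * through the public API, which ends up in duk_heap_mark_and_sweep():
 */
#include "duktape.h"

void force_gc_example(duk_context *ctx) {
	duk_gc(ctx, 0 /*flags*/);
}
#endif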

DUK_INTERNAL duk_bool_t duk_heap_mark_and_sweep(duk_heap *heap, duk_small_uint_t flags) {
	duk_hthread *thr;
	duk_size_t count_keep_obj;
	duk_size_t count_keep_str;
#ifdef DUK_USE_VOLUNTARY_GC
	duk_size_t tmp;
#endif

	/* XXX: thread selection for mark-and-sweep is currently a hack.
	 * If we don't have a thread, the entire mark-and-sweep is now
	 * skipped (although we could just skip finalizations).
	 */

	/* If thr != NULL, it may still be in the middle of initialization.
	 * XXX: Improve the thread viability test.
	 */
	thr = duk__get_temp_hthread(heap);
	if (thr == NULL) {
		DUK_D(DUK_DPRINT("gc skipped because we don't have a temp thread"));

		/* reset voluntary gc trigger count */
#ifdef DUK_USE_VOLUNTARY_GC
		heap->mark_and_sweep_trigger_counter = DUK_HEAP_MARK_AND_SWEEP_TRIGGER_SKIP;
#endif
		return 0; /* OK */
	}

	/* If debugger is paused, garbage collection is disabled by default. */
	/* XXX: will need a force flag if garbage collection is triggered
	 * explicitly during paused state.
	 */
#if defined(DUK_USE_DEBUGGER_SUPPORT)
	if (DUK_HEAP_IS_PAUSED(heap)) {
		/* Checking this here rather than in memory alloc primitives
		 * reduces checking code there but means a failed allocation
		 * will go through a few retries before giving up. That's
		 * fine because this only happens during debugging.
		 */
		DUK_D(DUK_DPRINT("gc skipped because debugger is paused"));
		return 0;
	}
#endif

	DUK_D(DUK_DPRINT("garbage collect (mark-and-sweep) starting, requested flags: 0x%08lx, effective flags: 0x%08lx",
	                 (unsigned long) flags, (unsigned long) (flags | heap->mark_and_sweep_base_flags)));

	flags |= heap->mark_and_sweep_base_flags;

	/*
	 * Assertions before
	 */

#ifdef DUK_USE_ASSERTIONS
	DUK_ASSERT(!DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap));
	DUK_ASSERT(!DUK_HEAP_HAS_MARKANDSWEEP_RECLIMIT_REACHED(heap));
	DUK_ASSERT(heap->mark_and_sweep_recursion_depth == 0);
	duk__assert_heaphdr_flags(heap);
#ifdef DUK_USE_REFERENCE_COUNTING
	/* Note: DUK_HEAP_HAS_REFZERO_FREE_RUNNING(heap) may be true; a refcount
	 * finalizer may trigger a mark-and-sweep.
	 */
	duk__assert_valid_refcounts(heap);
#endif /* DUK_USE_REFERENCE_COUNTING */
#endif /* DUK_USE_ASSERTIONS */

	/*
	 * Begin
	 */

	DUK_HEAP_SET_MARKANDSWEEP_RUNNING(heap);

	/*
	 * Mark roots, hoping that recursion limit is not normally hit.
	 * If recursion limit is hit, run additional reachability rounds
	 * starting from "temproots" until marking is complete.
	 *
	 * Marking happens in two phases: first we mark actual reachability
	 * roots (and run "temproots" to complete the process). Then we
	 * check which objects are unreachable and finalizable; such objects
	 * are marked FINALIZABLE and marked as reachability roots (and
	 * "temproots" is run again to complete the process).
	 *
	 * The heap finalize_list must also be marked as a reachability root.
	 * There may be objects on the list from a previous round if the
	 * previous run had the finalizer skip flag.
	 */

	duk__mark_roots_heap(heap);              /* main reachability roots */
#ifdef DUK_USE_REFERENCE_COUNTING
	duk__mark_refzero_list(heap);            /* refzero_list treated as reachability roots */
#endif
	duk__mark_temproots_by_heap_scan(heap);  /* temproots */

	duk__mark_finalizable(heap);             /* mark finalizable as reachability roots */
	duk__mark_finalize_list(heap);           /* mark finalizer work list as reachability roots */
	duk__mark_temproots_by_heap_scan(heap);  /* temproots */

	/*
	 * Sweep garbage and remove marking flags, and move objects with
	 * finalizers to the finalizer work list.
	 *
	 * Objects to be swept need to get their refcounts finalized before
	 * they are swept. In other words, their target object refcounts
	 * need to be decreased. This has to be done before freeing any
	 * objects to avoid decref'ing dangling pointers (which may happen
	 * even without bugs, e.g. with reference loops).
	 *
	 * Because strings don't point to other heap objects, similar
	 * finalization is not necessary for strings.
	 */

	/* XXX: more emergency behavior, e.g. find smaller hash sizes etc */

#ifdef DUK_USE_REFERENCE_COUNTING
	duk__finalize_refcounts(heap);
#endif
	duk__sweep_heap(heap, flags, &count_keep_obj);
#if defined(DUK_USE_STRTAB_CHAIN)
	duk__sweep_stringtable_chain(heap, &count_keep_str);
#elif defined(DUK_USE_STRTAB_PROBE)
	duk__sweep_stringtable_probe(heap, &count_keep_str);
#else
#error internal error, invalid strtab options
#endif
#ifdef DUK_USE_REFERENCE_COUNTING
	duk__clear_refzero_list_flags(heap);
#endif
	duk__clear_finalize_list_flags(heap);

	/*
	 * Object compaction (emergency only).
	 *
	 * Object compaction is a separate step after sweeping, as there is
	 * more free memory for it to work with. Also, currently compaction
	 * may insert new objects into the heap allocated list and the string
	 * table which we don't want to do during a sweep (the reachability
	 * flags of such objects would be incorrect). The objects inserted
	 * are currently:
	 *
	 *   - a temporary duk_hbuffer for a new properties allocation
	 *   - if array part is abandoned, string keys are interned
	 *
	 * The object insertions go to the front of the list, so they do not
	 * cause an infinite loop (they are not compacted).
	 */

	if ((flags & DUK_MS_FLAG_EMERGENCY) &&
	    !(flags & DUK_MS_FLAG_NO_OBJECT_COMPACTION)) {
		duk__compact_objects(heap);
	}

	/*
	 * String table resize check.
	 *
	 * Note: this may silently (and safely) fail if GC is caused by an
	 * allocation call in stringtable resize_hash(). Resize_hash()
	 * will prevent a recursive call to itself by setting the
	 * DUK_MS_FLAG_NO_STRINGTABLE_RESIZE in heap->mark_and_sweep_base_flags.
	 */

	/* XXX: stringtable emergency compaction? */

	/* XXX: remove this feature entirely? it would only matter for
	 * emergency GC. Disable for lowest memory builds.
	 */
#if defined(DUK_USE_MS_STRINGTABLE_RESIZE)
	if (!(flags & DUK_MS_FLAG_NO_STRINGTABLE_RESIZE)) {
		DUK_DD(DUK_DDPRINT("resize stringtable: %p", (void *) heap));
		duk_heap_force_strtab_resize(heap);
	} else {
		DUK_D(DUK_DPRINT("stringtable resize skipped because DUK_MS_FLAG_NO_STRINGTABLE_RESIZE is set"));
	}
#endif

	/*
	 * Finalize objects in the finalization work list. Finalized
	 * objects are queued back to heap_allocated with FINALIZED set.
	 *
	 * Since finalizers may cause arbitrary side effects, they are
	 * prevented during string table and object property allocation
	 * resizing using the DUK_MS_FLAG_NO_FINALIZERS flag in
	 * heap->mark_and_sweep_base_flags. In this case the objects
	 * remain in the finalization work list after mark-and-sweep
	 * exits and they may be finalized on the next pass.
	 *
	 * Finalization currently happens inside "MARKANDSWEEP_RUNNING"
	 * protection (no mark-and-sweep may be triggered by the
	 * finalizers). As a side effect:
	 *
	 *   1) an out-of-memory error inside a finalizer will not
	 *      cause a mark-and-sweep and may cause the finalizer
	 *      to fail unnecessarily
	 *
	 *   2) any temporary objects whose refcount decreases to zero
	 *      during finalization will not be put into refzero_list;
	 *      they can only be collected by another mark-and-sweep
	 *
	 * This is not optimal, but since the sweep for this phase has
	 * already happened, this is probably good enough for now.
	 */

#if defined(DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE)
	/* Cannot simulate individual finalizers because finalize_list only
	 * contains objects with actual finalizers. But simulate side effects
	 * from finalization by doing a bogus function call and resizing the
	 * stacks.
	 */
	if (flags & DUK_MS_FLAG_NO_FINALIZERS) {
		DUK_D(DUK_DPRINT("skip mark-and-sweep torture finalizer, DUK_MS_FLAG_NO_FINALIZERS is set"));
	} else if (!(thr->valstack != NULL && thr->callstack != NULL && thr->catchstack != NULL)) {
		DUK_D(DUK_DPRINT("skip mark-and-sweep torture finalizer, thread not yet viable"));
	} else {
		DUK_D(DUK_DPRINT("run mark-and-sweep torture finalizer"));
		duk__markandsweep_torture_finalizer(thr);
	}
#endif /* DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE */

	if (flags & DUK_MS_FLAG_NO_FINALIZERS) {
		DUK_D(DUK_DPRINT("finalizer run skipped because DUK_MS_FLAG_NO_FINALIZERS is set"));
	} else {
		duk__run_object_finalizers(heap, flags);
	}

	/*
	 * Finish
	 */

	DUK_HEAP_CLEAR_MARKANDSWEEP_RUNNING(heap);

	/*
	 * Assertions after
	 */

#ifdef DUK_USE_ASSERTIONS
	DUK_ASSERT(!DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap));
	DUK_ASSERT(!DUK_HEAP_HAS_MARKANDSWEEP_RECLIMIT_REACHED(heap));
	DUK_ASSERT(heap->mark_and_sweep_recursion_depth == 0);
	duk__assert_heaphdr_flags(heap);
#ifdef DUK_USE_REFERENCE_COUNTING
	/* Note: DUK_HEAP_HAS_REFZERO_FREE_RUNNING(heap) may be true; a refcount
	 * finalizer may trigger a mark-and-sweep.
	 */
	duk__assert_valid_refcounts(heap);
#endif /* DUK_USE_REFERENCE_COUNTING */
#endif /* DUK_USE_ASSERTIONS */

	/*
	 * Reset trigger counter
	 */

#ifdef DUK_USE_VOLUNTARY_GC
	tmp = (count_keep_obj + count_keep_str) / 256;
	heap->mark_and_sweep_trigger_counter = (duk_int_t) (
	    (tmp * DUK_HEAP_MARK_AND_SWEEP_TRIGGER_MULT) +
	    DUK_HEAP_MARK_AND_SWEEP_TRIGGER_ADD);
	DUK_D(DUK_DPRINT("garbage collect (mark-and-sweep) finished: %ld objects kept, %ld strings kept, trigger reset to %ld",
	                 (long) count_keep_obj, (long) count_keep_str, (long) heap->mark_and_sweep_trigger_counter));
#else
	DUK_D(DUK_DPRINT("garbage collect (mark-and-sweep) finished: %ld objects kept, %ld strings kept, no voluntary trigger",
	                 (long) count_keep_obj, (long) count_keep_str));
#endif
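
	/* Worked example (illustrative values only): with count_keep_obj=900
	 * and count_keep_str=380, tmp = (900 + 380) / 256 = 5 and the counter
	 * is reset to 5 * DUK_HEAP_MARK_AND_SWEEP_TRIGGER_MULT +
	 * DUK_HEAP_MARK_AND_SWEEP_TRIGGER_ADD, so the voluntary GC interval
	 * scales with the amount of live data kept by this round.
	 */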

	return 0; /* OK */
}

#else /* DUK_USE_MARK_AND_SWEEP */

/* no mark-and-sweep gc */

#endif /* DUK_USE_MARK_AND_SWEEP */