From f487f27c72ab054e8f9101c0696c24836a18eb9b Mon Sep 17 00:00:00 2001
From: Ivan Maidanski
Date: Tue, 26 Sep 2017 00:52:49 +0300
Subject: [PATCH] Do not disable parallel mark for WRAP_MARK_SOME

Issue #179 (bdwgc).

Now, if there is a chance of unmapping of root segments, the latter
ones are scanned immediately by GC_push_roots thus preventing memory
protection faults in GC_mark_local.

* include/private/gc_priv.h (GC_PUSH_CONDITIONAL): Move definition to
mark_rts.c.
* include/private/gcconfig.h [WRAP_MARK_SOME && PARALLEL_MARK]
(PARALLEL_MARK): Do not undefine; remove TODO item.
* mark.c [WRAP_MARK_SOME && PARALLEL_MARK] (GC_push_conditional_eager):
New internal function; add TODO item.
* mark_rts.c [WRAP_MARK_SOME && PARALLEL_MARK]
(GC_push_conditional_eager): Declare function.
* mark_rts.c [WRAP_MARK_SOME && PARALLEL_MARK] (GC_PUSH_CONDITIONAL):
Define to GC_push_conditional_eager if GC_parallel.
---
 include/private/gc_priv.h  |  9 ---------
 include/private/gcconfig.h |  5 -----
 mark.c                     | 29 +++++++++++++++++++++++++++++
 mark_rts.c                 | 18 ++++++++++++++++++
 4 files changed, 47 insertions(+), 14 deletions(-)

diff --git a/include/private/gc_priv.h b/include/private/gc_priv.h
index 3a6eca6..215a6fa 100644
--- a/include/private/gc_priv.h
+++ b/include/private/gc_priv.h
@@ -1657,15 +1657,6 @@ GC_INNER void GC_initiate_gc(void);
 GC_INNER GC_bool GC_collection_in_progress(void);
                         /* Collection is in progress, or was abandoned. */
 
-#ifndef GC_DISABLE_INCREMENTAL
-# define GC_PUSH_CONDITIONAL(b, t, all) \
-                GC_push_conditional((ptr_t)(b), (ptr_t)(t), all)
-                        /* Do either of GC_push_all or GC_push_selected */
-                        /* depending on the third arg.                  */
-#else
-# define GC_PUSH_CONDITIONAL(b, t, all) GC_push_all((ptr_t)(b), (ptr_t)(t))
-#endif
-
 #define GC_PUSH_ALL_SYM(sym) \
                 GC_push_all((ptr_t)&(sym), (ptr_t)&(sym) + sizeof(sym))
 
diff --git a/include/private/gcconfig.h b/include/private/gcconfig.h
index e217d9a..33b482c 100644
--- a/include/private/gcconfig.h
+++ b/include/private/gcconfig.h
@@ -3007,11 +3007,6 @@
 # define WRAP_MARK_SOME
 #endif
 
-#if defined(WRAP_MARK_SOME) && defined(PARALLEL_MARK)
-  /* TODO: GC_mark_local does not handle memory protection faults yet. */
-# undef PARALLEL_MARK
-#endif
-
 #if defined(PARALLEL_MARK) && !defined(DEFAULT_STACK_MAYBE_SMALL) \
     && (defined(HPUX) || defined(GC_DGUX386_THREADS) \
         || defined(NO_GETCONTEXT) /* e.g. musl */)
diff --git a/mark.c b/mark.c
index 28fea0e..abdb9a2 100644
--- a/mark.c
+++ b/mark.c
@@ -1628,6 +1628,35 @@ GC_INNER void GC_push_all_stack(ptr_t bottom, ptr_t top)
 # endif
 }
 
+#if defined(WRAP_MARK_SOME) && defined(PARALLEL_MARK)
+  /* Similar to GC_push_conditional but scans the whole region immediately. */
+  GC_INNER void GC_push_conditional_eager(ptr_t bottom, ptr_t top,
+                                          GC_bool all)
+                GC_ATTR_NO_SANITIZE_ADDR GC_ATTR_NO_SANITIZE_MEMORY
+  {
+    word * b = (word *)(((word) bottom + ALIGNMENT-1) & ~(ALIGNMENT-1));
+    word * t = (word *)(((word) top) & ~(ALIGNMENT-1));
+    register word *p;
+    register word *lim;
+    register ptr_t greatest_ha = GC_greatest_plausible_heap_addr;
+    register ptr_t least_ha = GC_least_plausible_heap_addr;
+#   define GC_greatest_plausible_heap_addr greatest_ha
+#   define GC_least_plausible_heap_addr least_ha
+
+    if (top == NULL)
+      return;
+    (void)all; /* TODO: If !all then scan only dirty pages. */
+
+    lim = t - 1;
+    for (p = b; (word)p <= (word)lim; p = (word *)((ptr_t)p + ALIGNMENT)) {
+      register word q = *p;
+      GC_PUSH_ONE_HEAP(q, p, GC_mark_stack_top);
+    }
+#   undef GC_greatest_plausible_heap_addr
+#   undef GC_least_plausible_heap_addr
+  }
+#endif /* WRAP_MARK_SOME && PARALLEL_MARK */
+
 #if !defined(SMALL_CONFIG) && !defined(USE_MARK_BYTES) && \
     defined(MARK_BIT_PER_GRANULE)
 # if GC_GRANULE_WORDS == 1
diff --git a/mark_rts.c b/mark_rts.c
index eb2e0e9..00d5450 100644
--- a/mark_rts.c
+++ b/mark_rts.c
@@ -497,6 +497,24 @@ GC_API void GC_CALL GC_exclude_static_roots(void *b, void *e)
     UNLOCK();
 }
 
+#if defined(WRAP_MARK_SOME) && defined(PARALLEL_MARK)
+  /* GC_mark_local does not handle memory protection faults yet.  So,  */
+  /* the static data regions are scanned immediately by GC_push_roots. */
+  GC_INNER void GC_push_conditional_eager(ptr_t bottom, ptr_t top,
+                                          GC_bool all);
+# define GC_PUSH_CONDITIONAL(b, t, all) \
+                (GC_parallel \
+                    ? GC_push_conditional_eager(b, t, all) \
+                    : GC_push_conditional((ptr_t)(b), (ptr_t)(t), all))
+#elif defined(GC_DISABLE_INCREMENTAL)
+# define GC_PUSH_CONDITIONAL(b, t, all) GC_push_all((ptr_t)(b), (ptr_t)(t))
+#else
+# define GC_PUSH_CONDITIONAL(b, t, all) \
+                GC_push_conditional((ptr_t)(b), (ptr_t)(t), all)
+                        /* Do either of GC_push_all or GC_push_selected */
+                        /* depending on the third arg.                  */
+#endif
+
 /* Invoke push_conditional on ranges that are not excluded. */
 STATIC void GC_push_conditional_with_exclusions(ptr_t bottom, ptr_t top,
                                                 GC_bool all GC_ATTR_UNUSED)
-- 
2.7.4