* The following bugs are resolved with this release:
4719, 13064, 14094, 14841, 14906, 15319, 15467, 15790, 15969, 16351,
- 16512, 16560, 16783, 17090, 17269, 17523, 17542, 17569, 17588, 17620,
- 17621, 17628, 17631, 17711, 17776, 17779, 17792, 17836, 17912, 17916,
- 17932, 17944, 17949, 17964, 17965, 17967, 17969, 17978, 17987, 17991,
- 17996, 17998, 17999, 18019, 18020, 18029, 18030, 18032, 18036, 18038,
- 18039, 18042, 18043, 18046, 18047, 18068, 18080, 18093, 18100, 18104,
- 18110, 18111, 18128, 18138, 18185.
+ 16512, 16560, 16783, 17090, 17195, 17269, 17523, 17542, 17569, 17588,
+ 17620, 17621, 17628, 17631, 17711, 17776, 17779, 17792, 17836, 17912,
+ 17916, 17932, 17944, 17949, 17964, 17965, 17967, 17969, 17978, 17987,
+ 17991, 17996, 17998, 17999, 18019, 18020, 18029, 18030, 18032, 18036,
+ 18038, 18039, 18042, 18043, 18046, 18047, 18068, 18080, 18093, 18100,
+ 18104, 18110, 18111, 18128, 18138, 18185.
* A powerpc and powerpc64 optimization for TLS, similar to TLS descriptors
  for LD and GD on x86 and x86-64, has been implemented. You will need
  binutils-2.24 or later to enable this optimization.
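  By way of illustration, here is a minimal sketch (hypothetical code, not
  from glibc) of the kind of access the optimization targets:

    /* Built with -fPIC into a shared object, the global-dynamic TLS
       access below normally goes through __tls_get_addr; with
       binutils-2.24 or later the powerpc/powerpc64 linker and glibc can
       short-circuit that call once the thread's TLS block is allocated,
       much as TLS descriptors do for LD and GD on x86 and x86-64.  */
    __thread int counter;

    int
    bump (void)
    {
      return ++counter;
    }

The hunk below, against heap_trim in glibc's malloc/arena.c, is the code
change for bug 17195 from the list above: it makes per-thread arena
trimming honor mp_.trim_threshold the way the main arena's systrim does.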
unsigned long pagesz = GLRO (dl_pagesize);
mchunkptr top_chunk = top (ar_ptr), p, bck, fwd;
heap_info *prev_heap;
- long new_size, top_size, extra, prev_size, misalign;
+ long new_size, top_size, top_area, extra, prev_size, misalign;
/* Can this heap go away completely? */
while (top_chunk == chunk_at_offset (heap, sizeof (*heap)))
set_head (top_chunk, new_size | PREV_INUSE);
/*check_chunk(ar_ptr, top_chunk);*/
}
+
+ /* Uses similar logic for per-thread arenas as the main arena with systrim
+ and _int_free by preserving the top pad and rounding down to the nearest
+ page. */
top_size = chunksize (top_chunk);
- extra = (top_size - pad - MINSIZE - 1) & ~(pagesz - 1);
- if (extra < (long) pagesz)
+ top_area = top_size - MINSIZE - 1;
+ if (top_area <= pad)
+ return 0;
+
+ extra = ALIGN_DOWN (top_area - pad, pagesz);
+ if ((unsigned long) extra < mp_.trim_threshold)
return 0;
/* Try to shrink. */
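To see what changes in practice, here is a minimal standalone sketch of
the new trimming arithmetic, using made-up numbers; MINSIZE, pagesz, pad
and trim_threshold are hypothetical stand-ins for the glibc internals.
Under the old check, any page-aligned surplus past the pad was trimmed;
under the new one the surplus must also reach mp_.trim_threshold, the
same bar the main arena's systrim applies.

    #include <stdio.h>

    /* Hypothetical stand-ins for the glibc internals used by heap_trim;
       ALIGN_DOWN mirrors glibc's macro of the same name.  */
    #define MINSIZE 32UL
    #define ALIGN_DOWN(v, a) ((v) & ~((a) - 1))

    int
    main (void)
    {
      unsigned long pagesz = 4096;               /* GLRO (dl_pagesize) */
      unsigned long trim_threshold = 128 * 1024; /* default M_TRIM_THRESHOLD */
      long pad = 0;                              /* mp_.top_pad */
      long top_size = 200 * 1024;                /* free bytes in top chunk */

      /* Bail out early if the usable top area cannot cover the pad...  */
      long top_area = top_size - MINSIZE - 1;
      if (top_area <= pad)
        {
          puts ("keep: top area does not exceed the pad");
          return 0;
        }

      /* ...then round the surplus down to a page boundary and trim only
         when it reaches the threshold.  */
      long extra = ALIGN_DOWN (top_area - pad, pagesz);
      if ((unsigned long) extra < trim_threshold)
        puts ("keep: surplus below trim_threshold");
      else
        printf ("trim: release %ld bytes to the kernel\n", extra);
      return 0;
    }

With these defaults the 200 KiB top chunk is trimmed (its 196 KiB
page-aligned surplus exceeds the 128 KiB threshold), whereas a top chunk
only a few pages large, which the old check would have shrunk, is now
kept for reuse by the arena.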