Lines Matching refs:heap

407 #define _rpmalloc_stat_inc_alloc(heap, class_idx) \ argument
410 atomic_incr32(&heap->size_class_use[class_idx].alloc_current); \
411 if (alloc_current > heap->size_class_use[class_idx].alloc_peak) \
412 heap->size_class_use[class_idx].alloc_peak = alloc_current; \
413 atomic_incr32(&heap->size_class_use[class_idx].alloc_total); \
415 #define _rpmalloc_stat_inc_free(heap, class_idx) \ argument
417 atomic_decr32(&heap->size_class_use[class_idx].alloc_current); \
418 atomic_incr32(&heap->size_class_use[class_idx].free_total); \
439 #define _rpmalloc_stat_inc_alloc(heap, class_idx) \ argument
442 #define _rpmalloc_stat_inc_free(heap, class_idx) \ argument
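
Lines 407-418 are the per-size-class statistics macros: on allocation the current counter is atomically incremented, the peak is updated when exceeded, and a running total is bumped; on free the current counter is decremented and free_total incremented. Lines 439-442 appear to be the empty variants used when statistics are compiled out. Below is a minimal standalone sketch of the same peak-tracking idea using C11 atomics; the type and function names are invented for illustration and are not the rpmalloc macros themselves. Note the peak update is deliberately racy, as in the original, so the recorded peak is best-effort.

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical per-size-class counters, mirroring the idea of
       alloc_current / alloc_peak / alloc_total / free_total. */
    typedef struct {
      atomic_int_fast32_t alloc_current;
      int32_t alloc_peak; /* unsynchronized peak: best-effort only */
      atomic_int_fast32_t alloc_total;
      atomic_int_fast32_t free_total;
    } class_stats_t;

    static void stat_inc_alloc(class_stats_t *st) {
      int32_t current = (int32_t)atomic_fetch_add(&st->alloc_current, 1) + 1;
      if (current > st->alloc_peak) /* benign race: peak may lag slightly */
        st->alloc_peak = current;
      atomic_fetch_add(&st->alloc_total, 1);
    }

    static void stat_inc_free(class_stats_t *st) {
      atomic_fetch_sub(&st->alloc_current, 1);
      atomic_fetch_add(&st->free_total, 1);
    }

    int main(void) {
      static class_stats_t st; /* zero-initialized */
      stat_inc_alloc(&st);
      stat_inc_alloc(&st);
      stat_inc_free(&st);
      printf("current=%d peak=%d total=%d freed=%d\n",
             (int)atomic_load(&st.alloc_current), (int)st.alloc_peak,
             (int)atomic_load(&st.alloc_total), (int)atomic_load(&st.free_total));
      return 0;
    }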
633 heap_t *heap; member
883 heap_t *heap = get_thread_heap_raw(); in get_thread_heap() local
885 if (EXPECTED(heap != 0)) in get_thread_heap()
886 return heap; in get_thread_heap()
890 return heap; in get_thread_heap()
927 static void set_thread_heap(heap_t *heap) { in set_thread_heap() argument
930 pthread_setspecific(_memory_thread_heap, heap); in set_thread_heap()
932 _memory_thread_heap = heap; in set_thread_heap()
934 if (heap) in set_thread_heap()
935 heap->owner_thread = get_thread_id(); in set_thread_heap()
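
Lines 883-935 show how the per-thread heap pointer is read and written: get_thread_heap() returns the raw TLS value when one is present, and set_thread_heap() stores the pointer (via pthread_setspecific or a plain TLS variable, depending on platform) and records the owning thread id in heap->owner_thread. A simplified standalone sketch of that pattern follows, using C11 _Thread_local and invented names (my_heap_t, tls_heap, the counter-based thread id); it is not the platform-specific code rpmalloc actually uses.

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct my_heap { uintptr_t owner_thread; } my_heap_t;

    static atomic_uintptr_t next_thread_id = 1;
    static _Thread_local uintptr_t tls_thread_id; /* lazily assigned id */
    static _Thread_local my_heap_t *tls_heap;     /* stands in for _memory_thread_heap */

    static uintptr_t get_thread_id(void) {
      if (!tls_thread_id)
        tls_thread_id = atomic_fetch_add(&next_thread_id, 1);
      return tls_thread_id;
    }

    static my_heap_t *get_thread_heap_raw(void) { return tls_heap; }

    static void set_thread_heap(my_heap_t *heap) {
      tls_heap = heap;
      if (heap)
        heap->owner_thread = get_thread_id();
    }

    int main(void) {
      static my_heap_t heap;
      set_thread_heap(&heap);
      printf("owner thread id: %lu\n",
             (unsigned long)get_thread_heap_raw()->owner_thread);
      return 0;
    }

Recording the owner thread id is what later lets the deallocation path (lines 2756-2760 and onward) detect cross-thread frees and defer them.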
1251 static void _rpmalloc_heap_cache_insert(heap_t *heap, span_t *span);
1253 static void _rpmalloc_heap_finalize(heap_t *heap);
1255 static void _rpmalloc_heap_set_reserved_spans(heap_t *heap, span_t *master,
1278 static span_t *_rpmalloc_span_map_from_reserve(heap_t *heap, argument
1281 span_t *span = heap->span_reserve;
1282 heap->span_reserve =
1284 heap->spans_reserved -= (uint32_t)span_count;
1286 _rpmalloc_span_mark_as_subspan_unless_master(heap->span_reserve_master, span,
1289 _rpmalloc_stat_inc(&heap->span_use[span_count - 1].spans_from_reserved);
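
Lines 1278-1289 (_rpmalloc_span_map_from_reserve) carve the requested spans off the heap's reserved run: the current span_reserve pointer becomes the mapped span, the reserve pointer advances by span_count spans, spans_reserved is decremented, and the remainder is marked as subspans of the reserve master. A toy sketch of carving fixed-size spans from a pre-mapped run is shown below; the names, the 64 KiB span size, and the use of malloc in place of an aligned OS mapping are illustrative assumptions only.

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define SPAN_SIZE (64 * 1024) /* illustration; rpmalloc's span size is configurable */

    /* Hypothetical reserve state: next unused span plus how many spans remain. */
    typedef struct {
      void *span_reserve;
      uint32_t spans_reserved;
    } reserve_t;

    /* Carve span_count contiguous spans from the reserve, or fail. */
    static void *span_map_from_reserve(reserve_t *r, size_t span_count) {
      if (span_count > r->spans_reserved)
        return NULL;
      void *span = r->span_reserve;
      r->span_reserve = (char *)r->span_reserve + span_count * SPAN_SIZE;
      r->spans_reserved -= (uint32_t)span_count;
      return span;
    }

    int main(void) {
      void *run = malloc(8 * SPAN_SIZE); /* stands in for the OS mapping */
      reserve_t r = {run, 8};
      void *a = span_map_from_reserve(&r, 2);
      void *b = span_map_from_reserve(&r, 1);
      printf("a=%p b=%p remaining=%u\n", a, b, (unsigned)r.spans_reserved);
      free(run);
      return 0;
    }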
1321 static span_t *_rpmalloc_span_map_aligned_count(heap_t *heap, argument
1335 _rpmalloc_stat_inc(&heap->span_use[span_count - 1].spans_map_calls);
1340 if (heap->spans_reserved) {
1342 heap->span_reserve_master, heap->span_reserve, heap->spans_reserved);
1343 _rpmalloc_heap_cache_insert(heap, heap->span_reserve);
1362 _rpmalloc_heap_set_reserved_spans(heap, span, reserved_spans,
1370 static span_t *_rpmalloc_span_map(heap_t *heap, size_t span_count) { argument
1371 if (span_count <= heap->spans_reserved)
1372 return _rpmalloc_span_map_from_reserve(heap, span_count);
1383 (!heap->spans_reserved ? _memory_heap_reserve_count : span_count);
1391 _rpmalloc_heap_set_reserved_spans(heap, _memory_global_reserve_master,
1401 span = _rpmalloc_span_map_aligned_count(heap, span_count);
1461 static void _rpmalloc_span_release_to_cache(heap_t *heap, span_t *span) { argument
1462 rpmalloc_assert(heap == span->heap, "Span heap pointer corrupted");
1467 atomic_decr32(&heap->span_use[0].current);
1469 _rpmalloc_stat_dec(&heap->size_class_use[span->size_class].spans_current);
1470 if (!heap->finalize) {
1471 _rpmalloc_stat_inc(&heap->span_use[0].spans_to_cache);
1472 _rpmalloc_stat_inc(&heap->size_class_use[span->size_class].spans_to_cache);
1473 if (heap->size_class[span->size_class].cache)
1474 _rpmalloc_heap_cache_insert(heap,
1475 heap->size_class[span->size_class].cache);
1476 heap->size_class[span->size_class].cache = span;
1519 static void *_rpmalloc_span_initialize_new(heap_t *heap, argument
1525 span->heap = heap;
1547 _rpmalloc_span_double_link_list_add(&heap->full_span[class_idx], span);
1549 ++heap->full_span_count;
1574 static int _rpmalloc_span_finalize(heap_t *heap, size_t iclass, span_t *span, argument
1576 void *free_list = heap->size_class[iclass].free_list;
1597 heap->size_class[iclass].free_list = 0;
1603 _rpmalloc_stat_dec(&heap->span_use[0].current);
1604 _rpmalloc_stat_dec(&heap->size_class_use[iclass].spans_current);
1775 static void _rpmalloc_heap_set_reserved_spans(heap_t *heap, span_t *master, argument
1778 heap->span_reserve_master = master;
1779 heap->span_reserve = reserve;
1780 heap->spans_reserved = (uint32_t)reserve_span_count;
1785 static void _rpmalloc_heap_cache_adopt_deferred(heap_t *heap, argument
1788 &heap->span_free_deferred, 0));
1791 rpmalloc_assert(span->heap == heap, "Span heap pointer corrupted");
1793 rpmalloc_assert(heap->full_span_count, "Heap span counter corrupted");
1794 --heap->full_span_count;
1795 _rpmalloc_stat_dec(&heap->span_use[0].spans_deferred);
1797 _rpmalloc_span_double_link_list_remove(&heap->full_span[span->size_class],
1800 _rpmalloc_stat_dec(&heap->span_use[0].current);
1801 _rpmalloc_stat_dec(&heap->size_class_use[span->size_class].spans_current);
1805 _rpmalloc_heap_cache_insert(heap, span);
1812 rpmalloc_assert(heap->full_span_count, "Heap span counter corrupted");
1813 --heap->full_span_count;
1815 _rpmalloc_span_double_link_list_remove(&heap->large_huge_span, span);
1818 _rpmalloc_stat_dec(&heap->span_use[idx].spans_deferred);
1819 _rpmalloc_stat_dec(&heap->span_use[idx].current);
1823 _rpmalloc_heap_cache_insert(heap, span);
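
Lines 1785-1823 (_rpmalloc_heap_cache_adopt_deferred) drain the heap's span_free_deferred list: the list head is taken in a single atomic operation, then each span is walked, removed from the full-span bookkeeping, and either handed back to the caller or inserted into the heap cache. The producer side of this list appears at lines 2711-2717. Below is a minimal sketch of the consumer half, i.e. atomically taking ownership of an intrusive singly linked list; the node type and names are invented for illustration.

    #include <stdatomic.h>
    #include <stdio.h>

    /* Hypothetical deferred-free node; in rpmalloc the span's free_list field
       doubles as the "next" link while the span sits on the deferred list. */
    typedef struct node {
      struct node *next;
      int payload;
    } node_t;

    /* Drain: one atomic exchange takes every node published so far. */
    static node_t *adopt_deferred(_Atomic(node_t *) *head) {
      return atomic_exchange(head, NULL);
    }

    int main(void) {
      static node_t a = {0, 1}, b = {0, 2};
      _Atomic(node_t *) deferred = NULL;
      /* Pretend two remote frees were already published (normally via CAS push). */
      a.next = &b;
      atomic_store(&deferred, &a);

      for (node_t *n = adopt_deferred(&deferred); n; n = n->next)
        printf("adopted deferred block %d\n", n->payload);
      printf("list empty again: %d\n", atomic_load(&deferred) == NULL);
      return 0;
    }

Because the exchange empties the list in one step, the owning thread never blocks producers: remote frees pushed after the exchange simply wait for the next adoption pass.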
1830 static void _rpmalloc_heap_unmap(heap_t *heap) { argument
1831 if (!heap->master_heap) {
1832 if ((heap->finalize > 1) && !atomic_load32(&heap->child_count)) {
1833 span_t *span = (span_t *)((uintptr_t)heap & _memory_span_mask);
1837 if (atomic_decr32(&heap->master_heap->child_count) == 0) {
1838 _rpmalloc_heap_unmap(heap->master_heap);
1843 static void _rpmalloc_heap_global_finalize(heap_t *heap) { argument
1844 if (heap->finalize++ > 1) {
1845 --heap->finalize;
1849 _rpmalloc_heap_finalize(heap);
1855 span_cache = &heap->span_cache;
1857 span_cache = (span_cache_t *)(heap->span_large_cache + (iclass - 1));
1864 if (heap->full_span_count) {
1865 --heap->finalize;
1870 if (heap->size_class[iclass].free_list ||
1871 heap->size_class[iclass].partial_span) {
1872 --heap->finalize;
1877 size_t list_idx = (size_t)heap->id % HEAP_ARRAY_SIZE;
1879 if (list_heap == heap) {
1880 _memory_heaps[list_idx] = heap->next_heap;
1882 while (list_heap->next_heap != heap)
1884 list_heap->next_heap = heap->next_heap;
1887 _rpmalloc_heap_unmap(heap);
1892 static void _rpmalloc_heap_cache_insert(heap_t *heap, span_t *span) { argument
1893 if (UNEXPECTED(heap->finalize != 0)) {
1895 _rpmalloc_heap_global_finalize(heap);
1900 _rpmalloc_stat_inc(&heap->span_use[span_count - 1].spans_to_cache);
1902 span_cache_t *span_cache = &heap->span_cache;
1908 _rpmalloc_stat_add64(&heap->thread_to_global,
1910 _rpmalloc_stat_add(&heap->span_use[span_count - 1].spans_to_global,
1923 span_large_cache_t *span_cache = heap->span_large_cache + cache_idx;
1935 _rpmalloc_stat_add64(&heap->thread_to_global,
1937 _rpmalloc_stat_add(&heap->span_use[span_count - 1].spans_to_global,
1949 (void)sizeof(heap);
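
Lines 1892-1949 (_rpmalloc_heap_cache_insert) put a freed span into the per-heap cache for its span count; once the thread cache fills up, a batch is flushed to the global cache and the thread_to_global / spans_to_global statistics are updated (the (void)sizeof(heap) at line 1949 is apparently the variant left when the thread cache is compiled out). A simplified sketch of a bounded per-thread cache that overflows into a shared pool follows; the types, the cache limit, and the counter standing in for the global cache are assumptions for illustration.

    #include <stddef.h>
    #include <stdio.h>

    #define CACHE_LIMIT 4 /* illustration; rpmalloc sizes this per span count */
    #define FLUSH_BATCH (CACHE_LIMIT / 2)

    typedef struct { int id; } span_t; /* stand-in span */
    typedef struct {
      span_t *slot[CACHE_LIMIT];
      size_t count;
    } thread_cache_t;

    static size_t spans_in_global; /* stands in for the real global cache */

    static void global_cache_insert(span_t **spans, size_t n) {
      (void)spans; /* real code links the spans into a shared structure */
      spans_in_global += n;
    }

    static void heap_cache_insert(thread_cache_t *cache, span_t *span) {
      cache->slot[cache->count++] = span;
      if (cache->count == CACHE_LIMIT) {
        /* Flush the newest FLUSH_BATCH entries to the global cache. */
        cache->count -= FLUSH_BATCH;
        global_cache_insert(cache->slot + cache->count, FLUSH_BATCH);
      }
    }

    int main(void) {
      thread_cache_t cache = {0};
      span_t spans[6] = {{1}, {2}, {3}, {4}, {5}, {6}};
      for (int i = 0; i < 6; ++i)
        heap_cache_insert(&cache, &spans[i]);
      printf("thread cache: %zu spans, global cache: %zu spans\n",
             cache.count, spans_in_global);
      return 0;
    }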
1955 static span_t *_rpmalloc_heap_thread_cache_extract(heap_t *heap, argument
1961 span_cache = &heap->span_cache;
1963 span_cache = (span_cache_t *)(heap->span_large_cache + (span_count - 2));
1965 _rpmalloc_stat_inc(&heap->span_use[span_count - 1].spans_from_cache);
1972 static span_t *_rpmalloc_heap_thread_cache_deferred_extract(heap_t *heap, argument
1976 _rpmalloc_heap_cache_adopt_deferred(heap, &span);
1978 _rpmalloc_heap_cache_adopt_deferred(heap, 0);
1979 span = _rpmalloc_heap_thread_cache_extract(heap, span_count);
1984 static span_t *_rpmalloc_heap_reserved_extract(heap_t *heap, argument
1986 if (heap->spans_reserved >= span_count)
1987 return _rpmalloc_span_map(heap, span_count);
1992 static span_t *_rpmalloc_heap_global_cache_extract(heap_t *heap, argument
1999 span_cache = &heap->span_cache;
2002 span_cache = (span_cache_t *)(heap->span_large_cache + (span_count - 2));
2008 _rpmalloc_stat_add64(&heap->global_to_thread,
2010 _rpmalloc_stat_add(&heap->span_use[span_count - 1].spans_from_global,
2018 _rpmalloc_stat_add64(&heap->global_to_thread,
2020 _rpmalloc_stat_add(&heap->span_use[span_count - 1].spans_from_global,
2026 (void)sizeof(heap);
2031 static void _rpmalloc_inc_span_statistics(heap_t *heap, size_t span_count, argument
2033 (void)sizeof(heap);
2039 (uint32_t)atomic_incr32(&heap->span_use[idx].current);
2040 if (current_count > (uint32_t)atomic_load32(&heap->span_use[idx].high))
2041 atomic_store32(&heap->span_use[idx].high, (int32_t)current_count);
2042 _rpmalloc_stat_add_peak(&heap->size_class_use[class_idx].spans_current, 1,
2043 heap->size_class_use[class_idx].spans_peak);
2050 _rpmalloc_heap_extract_new_span(heap_t *heap, argument
2058 (heap->span_cache.count
2059 ? heap->span_cache.span[--heap->span_cache.count]
2061 _rpmalloc_inc_span_statistics(heap, span_count, class_idx);
2073 span = _rpmalloc_heap_thread_cache_extract(heap, span_count);
2075 _rpmalloc_stat_inc(&heap->size_class_use[class_idx].spans_from_cache);
2076 _rpmalloc_inc_span_statistics(heap, span_count, class_idx);
2079 span = _rpmalloc_heap_thread_cache_deferred_extract(heap, span_count);
2081 _rpmalloc_stat_inc(&heap->size_class_use[class_idx].spans_from_cache);
2082 _rpmalloc_inc_span_statistics(heap, span_count, class_idx);
2085 span = _rpmalloc_heap_global_cache_extract(heap, span_count);
2087 _rpmalloc_stat_inc(&heap->size_class_use[class_idx].spans_from_cache);
2088 _rpmalloc_inc_span_statistics(heap, span_count, class_idx);
2091 span = _rpmalloc_heap_reserved_extract(heap, span_count);
2093 _rpmalloc_stat_inc(&heap->size_class_use[class_idx].spans_from_reserved);
2094 _rpmalloc_inc_span_statistics(heap, span_count, class_idx);
2100 span = _rpmalloc_span_map(heap, base_span_count);
2101 _rpmalloc_inc_span_statistics(heap, base_span_count, class_idx);
2102 _rpmalloc_stat_inc(&heap->size_class_use[class_idx].spans_map_calls);
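
Lines 2050-2102 (_rpmalloc_heap_extract_new_span) show the order in which a heap looks for spans: the thread cache first, then the deferred-free list, then the global cache, then the heap's reserved run, and finally a fresh mapping, with statistics recorded at each step. A compact sketch of that fallback chain with stubbed-out sources follows; all names are hypothetical and only the last two sources succeed here so the ordering is visible in the output.

    #include <stdio.h>

    typedef struct { const char *origin; } span_t; /* stand-in span */

    static span_t *from_thread_cache(void) { return NULL; }
    static span_t *from_deferred(void)     { return NULL; }
    static span_t *from_global_cache(void) { return NULL; }
    static span_t *from_reserved(void)     { static span_t s = {"reserved"}; return &s; }
    static span_t *map_from_os(void)       { static span_t s = {"os map"};   return &s; }

    static span_t *heap_extract_new_span(void) {
      span_t *span;
      if ((span = from_thread_cache())) return span; /* cheapest path */
      if ((span = from_deferred()))     return span; /* adopt cross-thread frees */
      if ((span = from_global_cache())) return span; /* shared cache */
      if ((span = from_reserved()))     return span; /* carve from reserve */
      return map_from_os();                          /* last resort: new mapping */
    }

    int main(void) {
      printf("span came from: %s\n", heap_extract_new_span()->origin);
      return 0;
    }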
2106 static void _rpmalloc_heap_initialize(heap_t *heap) { argument
2107 _rpmalloc_memset_const(heap, 0, sizeof(heap_t));
2109 heap->id = 1 + atomic_incr32(&_memory_heap_id);
2112 size_t list_idx = (size_t)heap->id % HEAP_ARRAY_SIZE;
2113 heap->next_heap = _memory_heaps[list_idx];
2114 _memory_heaps[list_idx] = heap;
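
Lines 2106-2114 (_rpmalloc_heap_initialize) zero the heap, assign it a unique id from a global counter, and link it into the _memory_heaps bucket selected by id % HEAP_ARRAY_SIZE, so all heaps can later be walked for finalization and statistics dumps. A small sketch of that registration pattern follows; the bucket count, types, and the absence of locking are illustrative assumptions.

    #include <stdatomic.h>
    #include <stdio.h>
    #include <string.h>

    #define HEAP_BUCKETS 47 /* illustration; the real HEAP_ARRAY_SIZE differs */

    typedef struct my_heap {
      int id;
      struct my_heap *next_heap; /* intrusive bucket link */
    } my_heap_t;

    static atomic_int heap_id_counter;
    static my_heap_t *heap_buckets[HEAP_BUCKETS];

    /* Initialize a heap and register it in its bucket. */
    static void heap_initialize(my_heap_t *heap) {
      memset(heap, 0, sizeof(*heap));
      heap->id = 1 + atomic_fetch_add(&heap_id_counter, 1);
      size_t bucket = (size_t)heap->id % HEAP_BUCKETS;
      heap->next_heap = heap_buckets[bucket];
      heap_buckets[bucket] = heap;
    }

    int main(void) {
      static my_heap_t h1, h2;
      heap_initialize(&h1);
      heap_initialize(&h2);
      printf("h1 id=%d bucket=%zu, h2 id=%d bucket=%zu\n",
             h1.id, (size_t)h1.id % HEAP_BUCKETS,
             h2.id, (size_t)h2.id % HEAP_BUCKETS);
      return 0;
    }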
2117 static void _rpmalloc_heap_orphan(heap_t *heap, int first_class) { argument
2118 heap->owner_thread = (uintptr_t)-1;
2126 heap->next_orphan = *heap_list;
2127 *heap_list = heap;
2177 heap_t *heap = (heap_t *)pointer_offset(span, sizeof(span_t)); local
2178 _rpmalloc_heap_initialize(heap);
2184 atomic_store32(&heap->child_count, (int32_t)num_heaps - 1);
2185 heap_t *extra_heap = (heap_t *)pointer_offset(heap, aligned_heap_size);
2188 extra_heap->master_heap = heap;
2202 _rpmalloc_heap_set_reserved_spans(heap, span, remain_span, reserve_count);
2213 return heap;
2217 heap_t *heap = *heap_list; local
2218 *heap_list = (heap ? heap->next_orphan : 0);
2219 return heap;
2224 heap_t *heap = 0; local
2228 heap = _rpmalloc_heap_extract_orphan(&_memory_orphan_heaps);
2230 if (!heap)
2231 heap = _rpmalloc_heap_extract_orphan(&_memory_first_class_orphan_heaps);
2233 if (!heap)
2234 heap = _rpmalloc_heap_allocate_new();
2236 if (heap)
2237 _rpmalloc_heap_cache_adopt_deferred(heap, 0);
2238 return heap;
2243 heap_t *heap = (heap_t *)heapptr; local
2244 if (!heap)
2247 _rpmalloc_heap_cache_adopt_deferred(heap, 0);
2248 if (release_cache || heap->finalize) {
2253 span_cache = &heap->span_cache;
2255 span_cache = (span_cache_t *)(heap->span_large_cache + (iclass - 1));
2259 if (heap->finalize) {
2263 _rpmalloc_stat_add64(&heap->thread_to_global, span_cache->count *
2266 _rpmalloc_stat_add(&heap->span_use[iclass].spans_to_global,
2280 if (get_thread_heap_raw() == heap)
2295 _rpmalloc_heap_orphan(heap, first_class);
2307 static void _rpmalloc_heap_finalize(heap_t *heap) { argument
2308 if (heap->spans_reserved) {
2309 span_t *span = _rpmalloc_span_map(heap, heap->spans_reserved);
2311 heap->spans_reserved = 0;
2314 _rpmalloc_heap_cache_adopt_deferred(heap, 0);
2317 if (heap->size_class[iclass].cache)
2318 _rpmalloc_span_unmap(heap->size_class[iclass].cache);
2319 heap->size_class[iclass].cache = 0;
2320 span_t *span = heap->size_class[iclass].partial_span;
2323 _rpmalloc_span_finalize(heap, iclass, span,
2324 &heap->size_class[iclass].partial_span);
2328 if (heap->size_class[iclass].free_list) {
2330 (span_t *)((uintptr_t)heap->size_class[iclass].free_list &
2334 list = &heap->full_span[iclass];
2336 --heap->full_span_count;
2337 if (!_rpmalloc_span_finalize(heap, iclass, class_span, list)) {
2341 &heap->size_class[iclass].partial_span, class_span);
2350 span_cache = &heap->span_cache;
2352 span_cache = (span_cache_t *)(heap->span_large_cache + (iclass - 1));
2358 rpmalloc_assert(!atomic_load_ptr(&heap->span_free_deferred),
2377 heap_t *heap, heap_size_class_t *heap_size_class, uint32_t class_idx) { argument
2379 rpmalloc_assume(heap != 0);
2422 _rpmalloc_span_double_link_list_add(&heap->full_span[class_idx], span);
2424 ++heap->full_span_count;
2429 span = _rpmalloc_heap_extract_new_span(heap, heap_size_class, 1, class_idx);
2432 return _rpmalloc_span_initialize_new(heap, heap_size_class, span,
2440 static void *_rpmalloc_allocate_small(heap_t *heap, size_t size) { argument
2441 rpmalloc_assert(heap, "No thread heap");
2445 heap_size_class_t *heap_size_class = heap->size_class + class_idx;
2446 _rpmalloc_stat_inc_alloc(heap, class_idx);
2449 return _rpmalloc_allocate_from_heap_fallback(heap, heap_size_class,
2454 static void *_rpmalloc_allocate_medium(heap_t *heap, size_t size) { argument
2455 rpmalloc_assert(heap, "No thread heap");
2462 heap_size_class_t *heap_size_class = heap->size_class + class_idx;
2463 _rpmalloc_stat_inc_alloc(heap, class_idx);
2466 return _rpmalloc_allocate_from_heap_fallback(heap, heap_size_class,
2471 static void *_rpmalloc_allocate_large(heap_t *heap, size_t size) { argument
2472 rpmalloc_assert(heap, "No thread heap");
2483 _rpmalloc_heap_extract_new_span(heap, 0, span_count, SIZE_CLASS_LARGE);
2490 span->heap = heap;
2493 _rpmalloc_span_double_link_list_add(&heap->large_huge_span, span);
2495 ++heap->full_span_count;
2501 static void *_rpmalloc_allocate_huge(heap_t *heap, size_t size) { argument
2502 rpmalloc_assert(heap, "No thread heap");
2503 _rpmalloc_heap_cache_adopt_deferred(heap, 0);
2518 span->heap = heap;
2522 _rpmalloc_span_double_link_list_add(&heap->large_huge_span, span);
2524 ++heap->full_span_count;
2530 static void *_rpmalloc_allocate(heap_t *heap, size_t size) { argument
2533 return _rpmalloc_allocate_small(heap, size);
2535 return _rpmalloc_allocate_medium(heap, size);
2537 return _rpmalloc_allocate_large(heap, size);
2538 return _rpmalloc_allocate_huge(heap, size);
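
Lines 2440-2538 show the allocation entry point splitting requests by size: small sizes go through a direct size-class lookup, medium sizes through a coarser class computation, large sizes get a dedicated multi-span allocation, and anything bigger becomes a huge allocation mapped on its own. A sketch of that dispatch shape follows; the thresholds are made up for illustration (the real limits derive from the configured span size) and the branch bodies are stubs.

    #include <stddef.h>
    #include <stdio.h>

    /* Illustrative thresholds only; rpmalloc computes these from the span size. */
    #define SMALL_SIZE_LIMIT  2048
    #define MEDIUM_SIZE_LIMIT (32 * 1024)
    #define LARGE_SIZE_LIMIT  (4 * 1024 * 1024)

    static const char *allocate_small(size_t n)  { (void)n; return "small";  }
    static const char *allocate_medium(size_t n) { (void)n; return "medium"; }
    static const char *allocate_large(size_t n)  { (void)n; return "large";  }
    static const char *allocate_huge(size_t n)   { (void)n; return "huge";   }

    /* Route a request by size, in the spirit of _rpmalloc_allocate. */
    static const char *allocate(size_t size) {
      if (size <= SMALL_SIZE_LIMIT)
        return allocate_small(size);
      if (size <= MEDIUM_SIZE_LIMIT)
        return allocate_medium(size);
      if (size <= LARGE_SIZE_LIMIT)
        return allocate_large(size);
      return allocate_huge(size);
    }

    int main(void) {
      size_t sizes[] = {64, 10000, 1000000, 16777216};
      for (size_t i = 0; i < sizeof(sizes) / sizeof(sizes[0]); ++i)
        printf("%zu bytes -> %s path\n", sizes[i], allocate(sizes[i]));
      return 0;
    }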
2541 static void *_rpmalloc_aligned_allocate(heap_t *heap, size_t alignment, argument
2544 return _rpmalloc_allocate(heap, size);
2568 return _rpmalloc_allocate(heap, multiple_size);
2574 ptr = _rpmalloc_allocate(heap, size + alignment);
2650 span->heap = heap;
2654 _rpmalloc_span_double_link_list_add(&heap->large_huge_span, span);
2656 ++heap->full_span_count;
2673 heap_t *heap = span->heap; local
2674 rpmalloc_assert(heap->owner_thread == get_thread_id() ||
2675 !heap->owner_thread || heap->finalize,
2681 _rpmalloc_span_double_link_list_remove(&heap->full_span[span->size_class],
2685 &heap->size_class[span->size_class].partial_span, span);
2686 --heap->full_span_count;
2706 &heap->size_class[span->size_class].partial_span, span);
2707 _rpmalloc_span_release_to_cache(heap, span);
2711 static void _rpmalloc_deallocate_defer_free_span(heap_t *heap, span_t *span) { argument
2713 _rpmalloc_stat_inc(&heap->span_use[span->span_count - 1].spans_deferred);
2716 span->free_list = (void *)atomic_load_ptr(&heap->span_free_deferred);
2717 } while (!atomic_cas_ptr(&heap->span_free_deferred, span, span->free_list));
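
Lines 2711-2717 (_rpmalloc_deallocate_defer_free_span) are the producer side of the deferred list drained at lines 1785-1823: a span freed by a thread that does not own the heap is pushed onto heap->span_free_deferred with a compare-and-swap loop, reusing the span's free_list field as the link. A minimal sketch of that lock-free push follows, with the same invented node type as the drain sketch above.

    #include <stdatomic.h>
    #include <stdio.h>

    typedef struct node {
      struct node *next; /* stands in for the span's reused free_list link */
      int payload;
    } node_t;

    /* CAS-push: publish a node onto a shared singly linked list, retrying
       if another thread pushed concurrently (mirrors the do/while above). */
    static void defer_free_push(_Atomic(node_t *) *head, node_t *n) {
      do {
        n->next = atomic_load(head);
      } while (!atomic_compare_exchange_weak(head, &n->next, n));
    }

    int main(void) {
      _Atomic(node_t *) deferred = NULL;
      static node_t a = {0, 1}, b = {0, 2};
      defer_free_push(&deferred, &a);
      defer_free_push(&deferred, &b);
      for (node_t *n = atomic_load(&deferred); n; n = n->next)
        printf("deferred block %d\n", n->payload);
      return 0;
    }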
2741 _rpmalloc_deallocate_defer_free_span(span->heap, span);
2746 _rpmalloc_stat_inc_free(span->heap, span->size_class);
2756 (span->heap->owner_thread &&
2757 (span->heap->owner_thread != get_thread_id()) && !span->heap->finalize);
2760 ((span->heap->owner_thread != get_thread_id()) && !span->heap->finalize);
2781 (span->heap->owner_thread &&
2782 (span->heap->owner_thread != get_thread_id()) && !span->heap->finalize);
2785 ((span->heap->owner_thread != get_thread_id()) && !span->heap->finalize);
2788 _rpmalloc_deallocate_defer_free_span(span->heap, span);
2791 rpmalloc_assert(span->heap->full_span_count, "Heap span counter corrupted");
2792 --span->heap->full_span_count;
2794 _rpmalloc_span_double_link_list_remove(&span->heap->large_huge_span, span);
2799 atomic_decr32(&span->heap->span_use[idx].current);
2801 heap_t *heap = span->heap; local
2802 rpmalloc_assert(heap, "No thread heap");
2805 ((span->span_count > 1) && (heap->span_cache.count == 0) &&
2806 !heap->finalize && !heap->spans_reserved);
2809 ((span->span_count > 1) && !heap->finalize && !heap->spans_reserved);
2812 heap->span_reserve = span;
2813 heap->spans_reserved = span->span_count;
2815 heap->span_reserve_master = span;
2820 heap->span_reserve_master = master;
2826 _rpmalloc_stat_inc(&heap->span_use[idx].spans_to_reserved);
2829 _rpmalloc_heap_cache_insert(heap, span);
2835 rpmalloc_assert(span->heap, "No span heap");
2838 (span->heap->owner_thread &&
2839 (span->heap->owner_thread != get_thread_id()) && !span->heap->finalize);
2842 ((span->heap->owner_thread != get_thread_id()) && !span->heap->finalize);
2845 _rpmalloc_deallocate_defer_free_span(span->heap, span);
2848 rpmalloc_assert(span->heap->full_span_count, "Heap span counter corrupted");
2849 --span->heap->full_span_count;
2851 _rpmalloc_span_double_link_list_remove(&span->heap->large_huge_span, span);
2885 static void *_rpmalloc_reallocate(heap_t *heap, void *p, size_t size, argument
2959 void *block = _rpmalloc_allocate(heap, new_size);
2969 static void *_rpmalloc_aligned_reallocate(heap_t *heap, void *ptr, argument
2973 return _rpmalloc_reallocate(heap, ptr, size, oldsize, flags);
2983 (!no_alloc ? _rpmalloc_aligned_allocate(heap, alignment, size) : 0);
3308 heap_t *heap = _memory_heaps[list_idx]; local
3309 while (heap) {
3310 heap_t *next_heap = heap->next_heap;
3311 heap->finalize = 1;
3312 _rpmalloc_heap_global_finalize(heap);
3313 heap = next_heap;
3344 heap_t *heap = _rpmalloc_heap_allocate(0); local
3345 if (heap) {
3347 set_thread_heap(heap);
3349 FlsSetValue(fls_key, heap);
3357 heap_t *heap = get_thread_heap_raw(); local
3358 if (heap)
3359 _rpmalloc_heap_release_raw(heap, release_caches);
3381 heap_t *heap = get_thread_heap(); local
3382 return _rpmalloc_allocate(heap, size);
3406 heap_t *heap = get_thread_heap(); local
3407 void *block = _rpmalloc_allocate(heap, total);
3420 heap_t *heap = get_thread_heap(); local
3421 return _rpmalloc_reallocate(heap, ptr, size, 0, 0);
3433 heap_t *heap = get_thread_heap(); local
3434 return _rpmalloc_aligned_reallocate(heap, ptr, alignment, size, oldsize,
3439 heap_t *heap = get_thread_heap(); local
3440 return _rpmalloc_aligned_allocate(heap, alignment, size);
3491 heap_t *heap = get_thread_heap_raw(); local
3492 if (!heap)
3497 span_t *span = heap->size_class[iclass].partial_span;
3513 span_cache = &heap->span_cache;
3515 span_cache = (span_cache_t *)(heap->span_large_cache + (iclass - 1));
3520 span_t *deferred = (span_t *)atomic_load_ptr(&heap->span_free_deferred);
3528 stats->thread_to_global = (size_t)atomic_load64(&heap->thread_to_global);
3529 stats->global_to_thread = (size_t)atomic_load64(&heap->global_to_thread);
3533 (size_t)atomic_load32(&heap->span_use[iclass].current);
3535 (size_t)atomic_load32(&heap->span_use[iclass].high);
3537 (size_t)atomic_load32(&heap->span_use[iclass].spans_to_global);
3539 (size_t)atomic_load32(&heap->span_use[iclass].spans_from_global);
3541 (size_t)atomic_load32(&heap->span_use[iclass].spans_to_cache);
3543 (size_t)atomic_load32(&heap->span_use[iclass].spans_from_cache);
3545 (size_t)atomic_load32(&heap->span_use[iclass].spans_to_reserved);
3547 (size_t)atomic_load32(&heap->span_use[iclass].spans_from_reserved);
3549 (size_t)atomic_load32(&heap->span_use[iclass].spans_map_calls);
3553 (size_t)atomic_load32(&heap->size_class_use[iclass].alloc_current);
3555 (size_t)heap->size_class_use[iclass].alloc_peak;
3557 (size_t)atomic_load32(&heap->size_class_use[iclass].alloc_total);
3559 (size_t)atomic_load32(&heap->size_class_use[iclass].free_total);
3561 (size_t)atomic_load32(&heap->size_class_use[iclass].spans_to_cache);
3563 (size_t)atomic_load32(&heap->size_class_use[iclass].spans_from_cache);
3565 &heap->size_class_use[iclass].spans_from_reserved);
3567 (size_t)atomic_load32(&heap->size_class_use[iclass].spans_map_calls);
3606 static void _memory_heap_dump_statistics(heap_t *heap, void *file) { argument
3607 fprintf(file, "Heap %d stats:\n", heap->id);
3612 if (!atomic_load32(&heap->size_class_use[iclass].alloc_total))
3619 atomic_load32(&heap->size_class_use[iclass].alloc_current),
3620 heap->size_class_use[iclass].alloc_peak,
3621 atomic_load32(&heap->size_class_use[iclass].alloc_total),
3622 atomic_load32(&heap->size_class_use[iclass].free_total),
3625 atomic_load32(&heap->size_class_use[iclass].spans_current),
3626 heap->size_class_use[iclass].spans_peak,
3627 ((size_t)heap->size_class_use[iclass].alloc_peak *
3630 ((size_t)atomic_load32(&heap->size_class_use[iclass].spans_to_cache) *
3633 ((size_t)atomic_load32(&heap->size_class_use[iclass].spans_from_cache) *
3637 &heap->size_class_use[iclass].spans_from_reserved) *
3640 atomic_load32(&heap->size_class_use[iclass].spans_map_calls));
3646 if (!atomic_load32(&heap->span_use[iclass].high) &&
3647 !atomic_load32(&heap->span_use[iclass].spans_map_calls))
3652 (uint32_t)(iclass + 1), atomic_load32(&heap->span_use[iclass].current),
3653 atomic_load32(&heap->span_use[iclass].high),
3654 atomic_load32(&heap->span_use[iclass].spans_deferred),
3655 ((size_t)atomic_load32(&heap->span_use[iclass].high) *
3659 (unsigned int)(!iclass ? heap->span_cache.count
3660 : heap->span_large_cache[iclass - 1].count),
3661 ((size_t)atomic_load32(&heap->span_use[iclass].spans_to_cache) *
3664 ((size_t)atomic_load32(&heap->span_use[iclass].spans_from_cache) *
3670 ((size_t)atomic_load32(&heap->span_use[iclass].spans_to_reserved) *
3673 ((size_t)atomic_load32(&heap->span_use[iclass].spans_from_reserved) *
3676 ((size_t)atomic_load32(&heap->span_use[iclass].spans_to_global) *
3679 ((size_t)atomic_load32(&heap->span_use[iclass].spans_from_global) *
3682 atomic_load32(&heap->span_use[iclass].spans_map_calls));
3684 fprintf(file, "Full spans: %zu\n", heap->full_span_count);
3688 (size_t)atomic_load64(&heap->thread_to_global) / (size_t)(1024 * 1024),
3689 (size_t)atomic_load64(&heap->global_to_thread) / (size_t)(1024 * 1024));
3697 heap_t *heap = _memory_heaps[list_idx]; local
3698 while (heap) {
3702 if (!atomic_load32(&heap->size_class_use[iclass].alloc_total)) {
3704 !atomic_load32(&heap->size_class_use[iclass].free_total),
3707 !atomic_load32(&heap->size_class_use[iclass].spans_map_calls),
3715 if (!atomic_load32(&heap->span_use[iclass].high) &&
3716 !atomic_load32(&heap->span_use[iclass].spans_map_calls))
3721 _memory_heap_dump_statistics(heap, file);
3722 heap = heap->next_heap;
3793 heap_t *heap = _rpmalloc_heap_allocate(1); local
3794 rpmalloc_assume(heap != NULL);
3795 heap->owner_thread = 0;
3797 return heap;
3800 extern inline void rpmalloc_heap_release(rpmalloc_heap_t *heap) { argument
3801 if (heap)
3802 _rpmalloc_heap_release(heap, 1, 1);
3806 rpmalloc_heap_alloc(rpmalloc_heap_t *heap, size_t size) { argument
3813 return _rpmalloc_allocate(heap, size);
3817 rpmalloc_heap_aligned_alloc(rpmalloc_heap_t *heap, size_t alignment, argument
3825 return _rpmalloc_aligned_allocate(heap, alignment, size);
3829 rpmalloc_heap_calloc(rpmalloc_heap_t *heap, size_t num, size_t size) { argument
3830 return rpmalloc_heap_aligned_calloc(heap, 0, num, size);
3834 rpmalloc_heap_aligned_calloc(rpmalloc_heap_t *heap, size_t alignment, argument
3854 void *block = _rpmalloc_aligned_allocate(heap, alignment, total);
3861 rpmalloc_heap_realloc(rpmalloc_heap_t *heap, void *ptr, size_t size, argument
3869 return _rpmalloc_reallocate(heap, ptr, size, 0, flags);
3873 rpmalloc_heap_aligned_realloc(rpmalloc_heap_t *heap, void *ptr, argument
3882 return _rpmalloc_aligned_reallocate(heap, ptr, alignment, size, 0, flags);
3885 extern inline void rpmalloc_heap_free(rpmalloc_heap_t *heap, void *ptr) { argument
3886 (void)sizeof(heap);
3890 extern inline void rpmalloc_heap_free_all(rpmalloc_heap_t *heap) { argument
3894 _rpmalloc_heap_cache_adopt_deferred(heap, 0);
3897 span = heap->size_class[iclass].partial_span;
3900 _rpmalloc_heap_cache_insert(heap, span);
3903 heap->size_class[iclass].partial_span = 0;
3904 span = heap->full_span[iclass];
3907 _rpmalloc_heap_cache_insert(heap, span);
3911 span = heap->size_class[iclass].cache;
3913 _rpmalloc_heap_cache_insert(heap, span);
3914 heap->size_class[iclass].cache = 0;
3916 memset(heap->size_class, 0, sizeof(heap->size_class));
3917 memset(heap->full_span, 0, sizeof(heap->full_span));
3919 span = heap->large_huge_span;
3925 _rpmalloc_heap_cache_insert(heap, span);
3928 heap->large_huge_span = 0;
3929 heap->full_span_count = 0;
3935 span_cache = &heap->span_cache;
3937 span_cache = (span_cache_t *)(heap->span_large_cache + (iclass - 1));
3941 _rpmalloc_stat_add64(&heap->thread_to_global,
3943 _rpmalloc_stat_add(&heap->span_use[iclass].spans_to_global,
3957 atomic_store32(&heap->size_class_use[iclass].alloc_current, 0);
3958 atomic_store32(&heap->size_class_use[iclass].spans_current, 0);
3961 atomic_store32(&heap->span_use[iclass].current, 0);
3966 extern inline void rpmalloc_heap_thread_set_current(rpmalloc_heap_t *heap) { argument
3968 if (prev_heap != heap) {
3969 set_thread_heap(heap);
3979 return span->heap;
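
Lines 3793-3979 cover the public first-class heap API: a heap is acquired with its owner_thread cleared (lines 3793-3797), rpmalloc_heap_alloc/calloc/realloc/free operate on it explicitly, rpmalloc_heap_free_all returns every span the heap owns to the caches (lines 3890-3961), and rpmalloc_heap_release gives the heap back. A short usage sketch follows, assuming rpmalloc was built with first-class heaps enabled (RPMALLOC_FIRST_CLASS_HEAPS) and initialized before use.

    #include <stdio.h>
    #include "rpmalloc.h"

    /* Usage sketch only: requires rpmalloc built with RPMALLOC_FIRST_CLASS_HEAPS=1
       so the rpmalloc_heap_* entry points shown in the listing are compiled in. */
    int main(void) {
      rpmalloc_initialize();

      rpmalloc_heap_t *heap = rpmalloc_heap_acquire();
      void *a = rpmalloc_heap_alloc(heap, 128);
      void *b = rpmalloc_heap_calloc(heap, 16, 64);
      printf("a=%p b=%p\n", a, b);

      rpmalloc_heap_free(heap, a);  /* free a single block */
      rpmalloc_heap_free_all(heap); /* or drop everything the heap owns */
      rpmalloc_heap_release(heap);  /* return the heap itself */

      rpmalloc_finalize();
      return 0;
    }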