Lines matching refs: allocator_data (GCC libgomp, allocator.c)

213 struct omp_allocator_data *allocator_data; in ialias() local
232 allocator_data = (struct omp_allocator_data *) allocator; in ialias()
233 if (new_alignment < allocator_data->alignment) in ialias()
234 new_alignment = allocator_data->alignment; in ialias()
238 allocator_data = NULL; in ialias()
249 if (__builtin_expect (allocator_data in ialias()
250 && allocator_data->pool_size < ~(uintptr_t) 0, 0)) in ialias()
253 if (new_size > allocator_data->pool_size) in ialias()
256 used_pool_size = __atomic_load_n (&allocator_data->used_pool_size, in ialias()
263 || new_pool_size > allocator_data->pool_size) in ialias()
265 if (__atomic_compare_exchange_n (&allocator_data->used_pool_size, in ialias()
273 gomp_mutex_lock (&allocator_data->lock); in ialias()
274 if (__builtin_add_overflow (allocator_data->used_pool_size, new_size, in ialias()
276 || used_pool_size > allocator_data->pool_size) in ialias()
278 gomp_mutex_unlock (&allocator_data->lock); in ialias()
281 allocator_data->used_pool_size = used_pool_size; in ialias()
282 gomp_mutex_unlock (&allocator_data->lock); in ialias()
288 __atomic_add_fetch (&allocator_data->used_pool_size, -new_size, in ialias()
291 gomp_mutex_lock (&allocator_data->lock); in ialias()
292 allocator_data->used_pool_size -= new_size; in ialias()
293 gomp_mutex_unlock (&allocator_data->lock); in ialias()
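
The block above is the allocation path (the indexer labels the enclosing scope ialias(), the alias-defining macro; in libgomp sources these line numbers fall inside omp_aligned_alloc). For an allocator with a bounded pool, i.e. pool_size < ~(uintptr_t) 0, the request is charged against used_pool_size before any memory is obtained: a relaxed compare-and-exchange loop where sync builtins are available, otherwise the gomp_mutex_lock path, and the charge is backed out with an __atomic_add_fetch of the negated size if the underlying allocation then fails. A minimal sketch of that accounting, assuming a hypothetical struct pool_data that keeps only the two fields these lines touch:

#include <stdbool.h>
#include <stdint.h>

struct pool_data
{
  uintptr_t pool_size;       /* Capacity; ~(uintptr_t) 0 means unbounded.  */
  uintptr_t used_pool_size;  /* Bytes currently charged to the pool.  */
};

static bool
pool_reserve (struct pool_data *d, uintptr_t new_size)
{
  uintptr_t used = __atomic_load_n (&d->used_pool_size, __ATOMIC_RELAXED);
  uintptr_t new_used;
  do
    {
      /* Refuse if the sum wraps or would exceed the pool limit, as in
         the __builtin_add_overflow / pool_size checks above.  */
      if (__builtin_add_overflow (used, new_size, &new_used)
          || new_used > d->pool_size)
        return false;
    }
  /* On failure the builtin reloads the current value into USED, so the
     loop retries against fresh state.  */
  while (!__atomic_compare_exchange_n (&d->used_pool_size, &used, new_used,
                                       true, __ATOMIC_RELAXED,
                                       __ATOMIC_RELAXED));
  return true;
}

static void
pool_release (struct pool_data *d, uintptr_t new_size)
{
  /* Back out a reservation after a failed allocation, mirroring
     __atomic_add_fetch (&...->used_pool_size, -new_size, ...).  */
  __atomic_add_fetch (&d->used_pool_size, -new_size, __ATOMIC_RELAXED);
}
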
318 if (allocator_data) in ialias()
320 switch (allocator_data->fallback) in ialias()
324 || (allocator_data in ialias()
325 && allocator_data->pool_size < ~(uintptr_t) 0)) in ialias()
341 allocator = allocator_data->fb_data; in ialias()
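
Lines 318-341 handle failure according to the allocator's fallback trait: retry with the default memory allocator, re-enter with allocator = allocator_data->fb_data, or give up and return NULL. A sketch of that control flow, reusing struct pool_data, pool_reserve, and pool_release from the sketch above; the enum and field names here are illustrative stand-ins, not libgomp's omp_atv_* trait values:

#include <stddef.h>
#include <stdlib.h>

enum fb_kind { FB_NULL, FB_DEFAULT_MEM, FB_ALLOCATOR };

struct alloc_data
{
  struct pool_data pool;        /* From the sketch above.  */
  enum fb_kind fallback;
  struct alloc_data *fb_data;   /* Next allocator for FB_ALLOCATOR.  */
};

static void *
try_alloc (struct alloc_data *a, size_t size)
{
  bool bounded = a->pool.pool_size < ~(uintptr_t) 0;
  /* Charge a bounded pool first; fail without touching malloc if the
     reservation is refused.  */
  if (bounded && !pool_reserve (&a->pool, size))
    return NULL;
  void *p = malloc (size);
  if (p == NULL && bounded)
    pool_release (&a->pool, size);   /* Refund on failure.  */
  return p;
}

static void *
alloc_with_fallback (struct alloc_data *a, size_t size)
{
  void *p = try_alloc (a, size);
  if (p != NULL)
    return p;
  switch (a->fallback)
    {
    case FB_DEFAULT_MEM:
      return malloc (size);      /* Retry with the default allocator.  */
    case FB_ALLOCATOR:
      /* Retry with the configured fallback allocator, the
         allocator = allocator_data->fb_data case above.  */
      return alloc_with_fallback (a->fb_data, size);
    default:
      return NULL;               /* Null fallback: report failure.  */
    }
}
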
383 struct omp_allocator_data *allocator_data in omp_free() local
385 if (allocator_data->pool_size < ~(uintptr_t) 0) in omp_free()
388 __atomic_add_fetch (&allocator_data->used_pool_size, -data->size, in omp_free()
391 gomp_mutex_lock (&allocator_data->lock); in omp_free()
392 allocator_data->used_pool_size -= data->size; in omp_free()
393 gomp_mutex_unlock (&allocator_data->lock); in omp_free()
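
In omp_free, the size recorded at allocation time (data->size) is refunded to the bounded pool, again atomically or under the allocator's lock. A pthread-based sketch of the locked variant; in libgomp the lock is a gomp_mutex_t member of struct omp_allocator_data, and the structure here is hypothetical:

#include <pthread.h>
#include <stdint.h>

struct locked_pool_data
{
  uintptr_t pool_size;
  uintptr_t used_pool_size;
  pthread_mutex_t lock;           /* Stands in for gomp_mutex_t.  */
};

static void
pool_release_locked (struct locked_pool_data *d, uintptr_t size)
{
  pthread_mutex_lock (&d->lock);
  d->used_pool_size -= size;      /* used_pool_size -= data->size above.  */
  pthread_mutex_unlock (&d->lock);
}
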
412 struct omp_allocator_data *allocator_data; in omp_aligned_calloc() local
431 allocator_data = (struct omp_allocator_data *) allocator; in omp_aligned_calloc()
432 if (new_alignment < allocator_data->alignment) in omp_aligned_calloc()
433 new_alignment = allocator_data->alignment; in omp_aligned_calloc()
437 allocator_data = NULL; in omp_aligned_calloc()
450 if (__builtin_expect (allocator_data in omp_aligned_calloc()
451 && allocator_data->pool_size < ~(uintptr_t) 0, 0)) in omp_aligned_calloc()
454 if (new_size > allocator_data->pool_size) in omp_aligned_calloc()
457 used_pool_size = __atomic_load_n (&allocator_data->used_pool_size, in omp_aligned_calloc()
464 || new_pool_size > allocator_data->pool_size) in omp_aligned_calloc()
466 if (__atomic_compare_exchange_n (&allocator_data->used_pool_size, in omp_aligned_calloc()
474 gomp_mutex_lock (&allocator_data->lock); in omp_aligned_calloc()
475 if (__builtin_add_overflow (allocator_data->used_pool_size, new_size, in omp_aligned_calloc()
477 || used_pool_size > allocator_data->pool_size) in omp_aligned_calloc()
479 gomp_mutex_unlock (&allocator_data->lock); in omp_aligned_calloc()
482 allocator_data->used_pool_size = used_pool_size; in omp_aligned_calloc()
483 gomp_mutex_unlock (&allocator_data->lock); in omp_aligned_calloc()
489 __atomic_add_fetch (&allocator_data->used_pool_size, -new_size, in omp_aligned_calloc()
492 gomp_mutex_lock (&allocator_data->lock); in omp_aligned_calloc()
493 allocator_data->used_pool_size -= new_size; in omp_aligned_calloc()
494 gomp_mutex_unlock (&allocator_data->lock); in omp_aligned_calloc()
519 if (allocator_data) in omp_aligned_calloc()
521 switch (allocator_data->fallback) in omp_aligned_calloc()
525 || (allocator_data in omp_aligned_calloc()
526 && allocator_data->pool_size < ~(uintptr_t) 0)) in omp_aligned_calloc()
542 allocator = allocator_data->fb_data; in omp_aligned_calloc()
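
The omp_aligned_calloc lines repeat the alloc path's accounting verbatim; what differs sits outside the matched lines: an overflow-checked element-count multiply before the accounting and zero-filling afterward. A sketch of that wrapper, assuming the hypothetical alloc_with_fallback above (the multiply-and-memset shape follows calloc semantics, not a quoted libgomp excerpt):

#include <string.h>

static void *
calloc_with_fallback (struct alloc_data *a, size_t nmemb, size_t size)
{
  size_t total;
  if (__builtin_mul_overflow (nmemb, size, &total))
    return NULL;                  /* nmemb * size would overflow.  */
  void *p = alloc_with_fallback (a, total);
  if (p != NULL)
    memset (p, '\0', total);      /* calloc semantics: zero the block.  */
  return p;
}
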
561 struct omp_allocator_data *allocator_data, *free_allocator_data; in omp_realloc() local
585 allocator_data = (struct omp_allocator_data *) allocator; in omp_realloc()
586 if (new_alignment < allocator_data->alignment) in omp_realloc()
587 new_alignment = allocator_data->alignment; in omp_realloc()
590 allocator_data = NULL; in omp_realloc()
604 if (__builtin_expect (allocator_data in omp_realloc()
605 && allocator_data->pool_size < ~(uintptr_t) 0, 0)) in omp_realloc()
614 && free_allocator_data == allocator_data in omp_realloc()
619 && new_size - prev_size > allocator_data->pool_size) in omp_realloc()
622 used_pool_size = __atomic_load_n (&allocator_data->used_pool_size, in omp_realloc()
631 || new_pool_size > allocator_data->pool_size) in omp_realloc()
636 if (__atomic_compare_exchange_n (&allocator_data->used_pool_size, in omp_realloc()
644 gomp_mutex_lock (&allocator_data->lock); in omp_realloc()
647 if (__builtin_add_overflow (allocator_data->used_pool_size, in omp_realloc()
650 || used_pool_size > allocator_data->pool_size) in omp_realloc()
652 gomp_mutex_unlock (&allocator_data->lock); in omp_realloc()
657 used_pool_size = (allocator_data->used_pool_size in omp_realloc()
659 allocator_data->used_pool_size = used_pool_size; in omp_realloc()
660 gomp_mutex_unlock (&allocator_data->lock); in omp_realloc()
669 __atomic_add_fetch (&allocator_data->used_pool_size, in omp_realloc()
673 gomp_mutex_lock (&allocator_data->lock); in omp_realloc()
674 allocator_data->used_pool_size -= new_size - prev_size; in omp_realloc()
675 gomp_mutex_unlock (&allocator_data->lock); in omp_realloc()
738 if (allocator_data) in omp_realloc()
740 switch (allocator_data->fallback) in omp_realloc()
744 || (allocator_data in omp_realloc()
745 && allocator_data->pool_size < ~(uintptr_t) 0)) in omp_realloc()
761 allocator = allocator_data->fb_data; in omp_realloc()
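
omp_realloc adds one twist to the same bookkeeping: when the old and new handles resolve to the same bounded pool (the free_allocator_data == allocator_data test above), only the size delta moves, so growth reserves new_size - prev_size and shrinkage refunds prev_size - new_size. A sketch using the hypothetical pool helpers from the first sketch:

static bool
pool_resize (struct pool_data *d, uintptr_t prev_size, uintptr_t new_size)
{
  if (new_size > prev_size)
    /* Grow: reserve only the extra bytes; the delta alone must fit, cf.
       the new_size - prev_size > allocator_data->pool_size check.  */
    return pool_reserve (d, new_size - prev_size);
  if (prev_size > new_size)
    /* Shrink: refund the difference; this cannot fail.  */
    pool_release (d, prev_size - new_size);
  return true;
}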