Lines Matching defs:chunk_state

93 atomic_uint8_t chunk_state;
237 if (!atomic_compare_exchange_strong(&m->chunk_state, &old_chunk_state,
406 if (ac && atomic_load(&ac->chunk_state, memory_order_acquire) ==
495 u8 left_state = atomic_load(&left_chunk->chunk_state, memory_order_relaxed);
497 atomic_load(&right_chunk->chunk_state, memory_order_relaxed);
508 // Same chunk_state: choose based on offset.
520 if (atomic_load(&m->chunk_state, memory_order_acquire) != CHUNK_ALLOCATED)
654 atomic_store(&m->chunk_state, CHUNK_ALLOCATED, memory_order_release);
668 // Flip the chunk_state atomically to avoid race on double-free.
669 if (!atomic_compare_exchange_strong(&m->chunk_state, &old_chunk_state,
685 CHECK_EQ(atomic_load(&m->chunk_state, memory_order_relaxed),
770 u8 chunk_state = atomic_load(&m->chunk_state, memory_order_acquire);
771 if (chunk_state != CHUNK_ALLOCATED)
772 ReportInvalidFree(old_ptr, chunk_state, stack);
797 void ReportInvalidFree(void *ptr, u8 chunk_state, BufferedStackTrace *stack) {
798 if (chunk_state == CHUNK_QUARANTINE)
825 u8 state = atomic_load(&p->chunk_state, memory_order_relaxed);
848 if (atomic_load(&m->chunk_state, memory_order_acquire) != CHUNK_ALLOCATED)
918 return chunk_ && atomic_load(&chunk_->chunk_state, memory_order_relaxed) !=
922 return chunk_ && atomic_load(&chunk_->chunk_state, memory_order_relaxed) ==
926 return chunk_ && atomic_load(&chunk_->chunk_state, memory_order_relaxed) ==
1142 if (!m || atomic_load(&m->chunk_state, memory_order_acquire) !=
1173 return atomic_load(&m->chunk_state, memory_order_relaxed) ==
1208 (atomic_load(&m->chunk_state, memory_order_acquire) !=
1228 if (atomic_load(&m->chunk_state, memory_order_acquire) != CHUNK_ALLOCATED)
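Taken together, these matches outline a small atomic state machine: chunk_state is a one-byte field stored with release semantics when a chunk is handed out (line 654), flipped with a single compare-and-swap on free so that racing double frees lose and get reported (lines 668-672, 770-772, 797-798), and read with acquire semantics wherever the rest of the chunk header must also be visible (lines 520, 848, 1228). Below is a minimal sketch of that pattern, using C11 <stdatomic.h> in place of the sanitizer_common atomic wrappers shown above; the CHUNK_* numeric values, the simplified AsanChunk layout, and the helper names are assumptions for illustration, not the real allocator code.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/* State values are assumed for illustration; only the names appear above. */
enum {
  CHUNK_AVAILABLE = 0,   /* never allocated, or returned to the allocator */
  CHUNK_ALLOCATED = 2,   /* currently owned by the user */
  CHUNK_QUARANTINE = 3,  /* freed, parked in quarantine */
};

/* Stand-in for the real chunk header; only the field from line 93 matters. */
typedef struct {
  _Atomic uint8_t chunk_state;
} AsanChunk;

/* Mirrors the CAS at lines 668-669: exactly one of two racing frees
 * succeeds in moving ALLOCATED -> QUARANTINE; the loser observes the stale
 * state and can report an invalid (double) free, as at lines 770-772. */
static int quarantine_chunk(AsanChunk *m) {
  uint8_t old_chunk_state = CHUNK_ALLOCATED;
  return atomic_compare_exchange_strong(&m->chunk_state, &old_chunk_state,
                                        CHUNK_QUARANTINE);
}

int main(void) {
  AsanChunk m;
  /* Release store on allocation, as at line 654: it publishes the rest of
   * the header to any thread that later does an acquire load and sees
   * CHUNK_ALLOCATED. */
  atomic_store_explicit(&m.chunk_state, CHUNK_ALLOCATED, memory_order_release);

  printf("first free:  %s\n", quarantine_chunk(&m) ? "ok" : "invalid free");
  printf("second free: %s\n", quarantine_chunk(&m) ? "ok" : "invalid free");

  /* Acquire load on the read side, as at lines 520, 848, and 1228. */
  if (atomic_load_explicit(&m.chunk_state, memory_order_acquire) !=
      CHUNK_ALLOCATED)
    puts("chunk is no longer allocated");
  return 0;
}

The CAS, rather than a plain store, is what makes double-free detection deterministic under concurrency: both frees cannot succeed, so one is guaranteed to see CHUNK_QUARANTINE and report it. The relaxed loads in the listing (lines 495, 497, 685, 825, 918-926, 1173) fit the same picture: they are used where only the state value itself is consulted and no other header fields need to be ordered behind it.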