Lines matching defs:chunk_state

92   atomic_uint8_t chunk_state;
236 if (!atomic_compare_exchange_strong(&m->chunk_state, &old_chunk_state,
405 if (ac && atomic_load(&ac->chunk_state, memory_order_acquire) ==
494 u8 left_state = atomic_load(&left_chunk->chunk_state, memory_order_relaxed);
496 atomic_load(&right_chunk->chunk_state, memory_order_relaxed);
507 // Same chunk_state: choose based on offset.
519 if (atomic_load(&m->chunk_state, memory_order_acquire) != CHUNK_ALLOCATED)
653 atomic_store(&m->chunk_state, CHUNK_ALLOCATED, memory_order_release);
667 // Flip the chunk_state atomically to avoid race on double-free.
668 if (!atomic_compare_exchange_strong(&m->chunk_state, &old_chunk_state,
684 CHECK_EQ(atomic_load(&m->chunk_state, memory_order_relaxed),
768 u8 chunk_state = atomic_load(&m->chunk_state, memory_order_acquire);
769 if (chunk_state != CHUNK_ALLOCATED)
770 ReportInvalidFree(old_ptr, chunk_state, stack);
795 void ReportInvalidFree(void *ptr, u8 chunk_state, BufferedStackTrace *stack) {
796 if (chunk_state == CHUNK_QUARANTINE)
823 u8 state = atomic_load(&p->chunk_state, memory_order_relaxed);
846 if (atomic_load(&m->chunk_state, memory_order_acquire) != CHUNK_ALLOCATED)
916 return chunk_ && atomic_load(&chunk_->chunk_state, memory_order_relaxed) !=
920 return chunk_ && atomic_load(&chunk_->chunk_state, memory_order_relaxed) ==
924 return chunk_ && atomic_load(&chunk_->chunk_state, memory_order_relaxed) ==
1140 if (!m || atomic_load(&m->chunk_state, memory_order_acquire) !=
1171 return atomic_load(&m->chunk_state, memory_order_relaxed) ==
1206 (atomic_load(&m->chunk_state, memory_order_acquire) !=
1226 if (atomic_load(&m->chunk_state, memory_order_acquire) != CHUNK_ALLOCATED)
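
The matches around lines 494-507 show a disambiguation heuristic: when an address could belong to either of two neighboring chunks, the chunk in the "stronger" state wins, and only when both are in the same chunk_state does the choice fall back to offset. The following is a minimal C11 sketch of that pattern, not the allocator's actual code: the enum values, the beg field, and the distance tie-break are assumptions for illustration, and the real source uses sanitizer-internal atomic wrappers rather than <stdatomic.h>.

#include <stdatomic.h>
#include <stdint.h>

/* Chunk states as implied by the matches; the exact values are assumed. */
enum {
  CHUNK_INVALID    = 0,  /* metadata not (yet) trustworthy */
  CHUNK_ALLOCATED  = 2,  /* live allocation */
  CHUNK_QUARANTINE = 3   /* freed, parked in quarantine */
};

/* Cut-down chunk header; the real AsanChunk carries much more metadata. */
typedef struct AsanChunk {
  _Atomic uint8_t chunk_state;
  uintptr_t beg;           /* start of user memory (illustrative field) */
} AsanChunk;

/* Prefer an allocated chunk over a quarantined one, and a quarantined one
 * over an invalid one; only equal states fall back to distance. Relaxed
 * loads suffice here: this is a heuristic pick, not a publication point. */
static AsanChunk *choose_chunk(uintptr_t addr,
                               AsanChunk *left, AsanChunk *right) {
  if (!left)  return right;
  if (!right) return left;
  uint8_t ls = atomic_load_explicit(&left->chunk_state,
                                    memory_order_relaxed);
  uint8_t rs = atomic_load_explicit(&right->chunk_state,
                                    memory_order_relaxed);
  if (ls != rs) {
    if (ls == CHUNK_ALLOCATED)  return left;
    if (rs == CHUNK_ALLOCATED)  return right;
    if (ls == CHUNK_QUARANTINE) return left;
    return right;
  }
  /* Same chunk_state: choose based on offset (assumes
   * left->beg <= addr <= right->beg). */
  return (addr - left->beg) <= (right->beg - addr) ? left : right;
}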
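Lines 653 and 667-668 show the core lifecycle discipline: allocation publishes a chunk with a release store of CHUNK_ALLOCATED, and deallocation flips the state to CHUNK_QUARANTINE with a single compare-and-swap, so of two racing frees exactly one succeeds and the loser is routed to ReportInvalidFree (lines 768-770, 795-796). Below is a hedged stdatomic sketch of both sides; the state values and the reporter are stand-ins, not the allocator's real definitions.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

enum { CHUNK_ALLOCATED = 2, CHUNK_QUARANTINE = 3 };  /* values assumed */

typedef struct AsanChunk {
  _Atomic uint8_t chunk_state;
  /* size, alloc stack id, etc. elided */
} AsanChunk;

/* Hypothetical stand-in for the allocator's ReportInvalidFree. */
static void report_invalid_free(void *ptr, uint8_t state) {
  if (state == CHUNK_QUARANTINE)
    fprintf(stderr, "attempting double-free on %p\n", ptr);
  else
    fprintf(stderr, "attempting free on unallocated %p (state %u)\n",
            ptr, (unsigned)state);
}

/* Allocation path: fill in the chunk's metadata first, then publish it.
 * The release store pairs with acquire loads elsewhere, so any thread
 * that observes CHUNK_ALLOCATED also observes the metadata writes. */
static void publish_chunk(AsanChunk *m) {
  /* ...initialize metadata here... */
  atomic_store_explicit(&m->chunk_state, CHUNK_ALLOCATED,
                        memory_order_release);
}

/* Free path: flip ALLOCATED -> QUARANTINE with one CAS. If two threads
 * race to free the same pointer, exactly one CAS succeeds; the loser
 * sees the state it lost to and reports an invalid free. */
static int quarantine_chunk(AsanChunk *m, void *ptr) {
  uint8_t old_state = CHUNK_ALLOCATED;
  if (!atomic_compare_exchange_strong_explicit(
          &m->chunk_state, &old_state, CHUNK_QUARANTINE,
          memory_order_acquire, memory_order_relaxed)) {
    report_invalid_free(ptr, old_state);
    return 0;
  }
  /* Success: this thread owns the transition and may record the free
   * stack and hand the chunk to the quarantine. */
  return 1;
}

The memory orders in the listing appear to follow from this split: paths that go on to read chunk metadata after checking the state (lines 405, 519, 768, 846, 1140, 1206, 1226) load with acquire to synchronize with the release store at line 653, while paths that only inspect the state byte itself, such as the tie-break at 494-496, the CHECK at 684, and the predicate views at 823, 916-924, and 1171, get by with relaxed loads.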