// -*- C++ -*-
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___STOP_TOKEN_STOP_STATE_H
#define _LIBCPP___STOP_TOKEN_STOP_STATE_H

#include <__assert>
#include <__config>
#include <__stop_token/atomic_unique_lock.h>
#include <__stop_token/intrusive_list_view.h>
#include <__thread/id.h>
#include <atomic>
#include <cstdint>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

#if _LIBCPP_STD_VER >= 20 && !defined(_LIBCPP_HAS_NO_THREADS)

struct __stop_callback_base : __intrusive_node_base<__stop_callback_base> {
  using __callback_fn_t = void(__stop_callback_base*) noexcept;
  _LIBCPP_HIDE_FROM_ABI explicit __stop_callback_base(__callback_fn_t* __callback_fn) : __callback_fn_(__callback_fn) {}

  _LIBCPP_HIDE_FROM_ABI void __invoke() noexcept { __callback_fn_(this); }

  __callback_fn_t* __callback_fn_;
  atomic<bool> __completed_ = false;
  bool* __destroyed_        = nullptr;
};

class __stop_state {
  static constexpr uint32_t __stop_requested_bit        = 1;
  static constexpr uint32_t __callback_list_locked_bit  = 1 << 1;
  static constexpr uint32_t __stop_source_counter_shift = 2;

  // The "stop_source counter" is not used for lifetime reference counting.
  // When the number of stop_sources reaches 0, stop_possible on the remaining
  // stop_tokens returns false. We need this counter to track that.
  //
  // The "callback list locked" bit implements the atomic_unique_lock that
  // guards the operations on the callback list.
  //
  //        31 - 2        |          1           |       0        |
  //  stop_source counter | callback list locked | stop_requested |
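  //
  // For illustration only (hypothetical value): with two associated stop_sources and a stop
  // already requested, __state_ would hold (2 << __stop_source_counter_shift) | __stop_requested_bit,
  // i.e. 0b1001 == 9. The callback-list-locked bit (0b10) is only set transiently while the
  // callback list is being inspected or modified.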
  atomic<uint32_t> __state_ = 0;

  // Reference count for stop_token + stop_callback + stop_source.
  // When the counter reaches zero, the state is destroyed.
  // It is used by __intrusive_shared_ptr, but it is stored here for better layout.
  atomic<uint32_t> __ref_count_ = 0;

  using __state_t            = uint32_t;
  using __callback_list_lock = __atomic_unique_lock<__state_t, __callback_list_locked_bit>;
  using __callback_list      = __intrusive_list_view<__stop_callback_base>;

  __callback_list __callback_list_;
  __thread_id __requesting_thread_;

public:
  _LIBCPP_HIDE_FROM_ABI __stop_state() noexcept = default;

  _LIBCPP_HIDE_FROM_ABI void __increment_stop_source_counter() noexcept {
    _LIBCPP_ASSERT_UNCATEGORIZED(
        __state_.load(std::memory_order_relaxed) <= static_cast<__state_t>(~(1 << __stop_source_counter_shift)),
        "stop_source's counter has reached the maximum; incrementing the counter will overflow");
    __state_.fetch_add(1 << __stop_source_counter_shift, std::memory_order_relaxed);
  }

  // We are not destroying the object when the counter reaches zero, nor do we have operations
  // that depend on the ordering of the decrement, so relaxed is enough.
  _LIBCPP_HIDE_FROM_ABI void __decrement_stop_source_counter() noexcept {
    _LIBCPP_ASSERT_UNCATEGORIZED(
        __state_.load(std::memory_order_relaxed) >= static_cast<__state_t>(1 << __stop_source_counter_shift),
        "stop_source's counter is 0; decrementing the counter will underflow");
    __state_.fetch_sub(1 << __stop_source_counter_shift, std::memory_order_relaxed);
  }

  _LIBCPP_HIDE_FROM_ABI bool __stop_requested() const noexcept {
    // acquire because [thread.stoptoken.intro]: a call to request_stop that returns true
    // synchronizes with a call to stop_requested on an associated stop_token or stop_source
    // object that returns true.
    // request_stop's compare_exchange_weak has release, which synchronizes with this acquire.
    return (__state_.load(std::memory_order_acquire) & __stop_requested_bit) != 0;
  }

  _LIBCPP_HIDE_FROM_ABI bool __stop_possible_for_stop_token() const noexcept {
    // [stoptoken.mem] false if "a stop request was not made and there are no associated stop_source objects"
    // TODO: Can this be std::memory_order_relaxed, given that the standard requires nothing beyond
    // not introducing a data race?
    __state_t __current_state = __state_.load(std::memory_order_acquire);
    return ((__current_state & __stop_requested_bit) != 0) || ((__current_state >> __stop_source_counter_shift) != 0);
  }

  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI bool __request_stop() noexcept {
    auto __cb_list_lock = __try_lock_for_request_stop();
    if (!__cb_list_lock.__owns_lock()) {
      return false;
    }
    __requesting_thread_ = this_thread::get_id();

    while (!__callback_list_.__empty()) {
      auto __cb = __callback_list_.__pop_front();

      // allow other callbacks to be removed while invoking the current callback
      __cb_list_lock.__unlock();

      bool __destroyed   = false;
      __cb->__destroyed_ = &__destroyed;

      __cb->__invoke();

      // __cb's invoke function could potentially delete __cb itself. We need to check before
      // accessing __cb's members again.
      if (!__destroyed) {
        // Reset the __destroyed_ pointer to nullptr; otherwise it would keep pointing to a local
        // variable that is destroyed at the end of this loop iteration.
        __cb->__destroyed_ = nullptr;

        // [stopcallback.cons] If callback is concurrently executing on another thread, then the return
        // from the invocation of callback strongly happens before ([intro.races]) callback is destroyed.
        // This release synchronizes with the acquire in __remove_callback.
        __cb->__completed_.store(true, std::memory_order_release);
        __cb->__completed_.notify_all();
      }

      __cb_list_lock.__lock();
    }

    return true;
  }

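  // Illustrative (non-normative) summary of the hand-off between __request_stop above and
  // __remove_callback below; see the comments in each for details: __request_stop pops a callback,
  // unlocks the list, and invokes it. A concurrent ~stop_callback that finds its node already popped
  // either waits on __completed_ (when running on a different thread than the requester) or, when the
  // callback is destroying itself from within its own invocation, sets *__destroyed_ so that
  // __request_stop does not touch the freed object afterwards.
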
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI bool __add_callback(__stop_callback_base* __cb) noexcept {
    // If stop has already been requested, or can never be requested, give up without locking the list.
    const auto __give_up_trying_to_lock_condition = [__cb](__state_t __state) {
      if ((__state & __stop_requested_bit) != 0) {
        // Stop has already been requested: synchronously run the callback; there is no need to lock the list.
        __cb->__invoke();
        return true;
      }
      // No stop_source left: there is no need to lock the list to add the callback, as it can never be invoked.
      return (__state >> __stop_source_counter_shift) == 0;
    };

    __callback_list_lock __cb_list_lock(__state_, __give_up_trying_to_lock_condition);

    if (!__cb_list_lock.__owns_lock()) {
      return false;
    }

    __callback_list_.__push_front(__cb);

    return true;
    // unlock here: [thread.stoptoken.intro] Registration of a callback synchronizes with the invocation of
    // that callback.
    // Note: this release synchronizes with the acquire in request_stop's __try_lock_for_request_stop.
  }

  // called by the destructor of stop_callback
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __remove_callback(__stop_callback_base* __cb) noexcept {
    __callback_list_lock __cb_list_lock(__state_);

    // Under the condition below, the request_stop call has just popped __cb from the list and could be
    // executing it right now.
    bool __potentially_executing_now = __cb->__prev_ == nullptr && !__callback_list_.__is_head(__cb);

    if (__potentially_executing_now) {
      auto __requested_thread = __requesting_thread_;
      __cb_list_lock.__unlock();

      if (std::this_thread::get_id() != __requested_thread) {
        // [stopcallback.cons] If callback is concurrently executing on another thread, then the return
        // from the invocation of callback strongly happens before ([intro.races]) callback is destroyed.
        __cb->__completed_.wait(false, std::memory_order_acquire);
      } else {
        // The destructor of stop_callback runs on the same thread as the one that invokes the callback,
        // i.e. the callback is potentially invoking its own destructor. Set the flag to avoid accessing
        // destroyed members on the invoking side.
        if (__cb->__destroyed_) {
          *__cb->__destroyed_ = true;
        }
      }
    } else {
      __callback_list_.__remove(__cb);
    }
  }

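  // Rough usage sketch (hypothetical caller, not libc++'s actual stop_callback implementation):
  // a stop_callback-like type would register itself on construction and deregister on destruction.
  //
  //   struct __my_callback : __stop_callback_base {
  //     __my_callback() : __stop_callback_base([](__stop_callback_base*) noexcept { /* user code */ }) {}
  //   };
  //
  //   __my_callback __cb;
  //   bool __registered = __state.__add_callback(&__cb); // false: already run synchronously, or never runnable
  //   // ... later, when the owner is destroyed:
  //   if (__registered)
  //     __state.__remove_callback(&__cb);
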
private:
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI __callback_list_lock __try_lock_for_request_stop() noexcept {
    // If stop has already been requested, do not try to request stop or lock the list again.
    const auto __lock_fail_condition = [](__state_t __state) { return (__state & __stop_requested_bit) != 0; };

    // set the locked bit and the stop_requested bit at the same time
    const auto __after_lock_state = [](__state_t __state) {
      return __state | __callback_list_locked_bit | __stop_requested_bit;
    };

    // acq because [thread.stoptoken.intro] Registration of a callback synchronizes with the invocation of that
    // callback. We are going to invoke the callbacks after getting the lock; acquire so that we can see the
    // registration of a callback (and any other writes that happen-before the add_callback).
    // Note: the rel (unlock) in add_callback synchronizes with this acq.
    // rel because [thread.stoptoken.intro] A call to request_stop that returns true synchronizes with a call
    // to stop_requested on an associated stop_token or stop_source object that returns true.
    // We need to make sure that all writes (including user code) before request_stop are made visible
    // to the threads that are waiting for `stop_requested == true`.
    // Note: this rel synchronizes with the acq in `stop_requested`.
    const auto __locked_ordering = std::memory_order_acq_rel;

    return __callback_list_lock(__state_, __lock_fail_condition, __after_lock_state, __locked_ordering);
  }

  template <class _Tp>
  friend struct __intrusive_shared_ptr_traits;
};

template <class _Tp>
struct __intrusive_shared_ptr_traits;

template <>
struct __intrusive_shared_ptr_traits<__stop_state> {
  _LIBCPP_HIDE_FROM_ABI static atomic<uint32_t>& __get_atomic_ref_count(__stop_state& __state) {
    return __state.__ref_count_;
  }
};

#endif // _LIBCPP_STD_VER >= 20 && !defined(_LIBCPP_HAS_NO_THREADS)

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___STOP_TOKEN_STOP_STATE_H