xref: /minix3/external/bsd/libc++/dist/libcxxrt/src/guard.cc (revision 0a6a1f1d05b60e214de2f05a7310ddd1f0e590e7)
14684ddb6SLionel Sambuc /*
24684ddb6SLionel Sambuc  * Copyright 2010-2012 PathScale, Inc. All rights reserved.
34684ddb6SLionel Sambuc  *
44684ddb6SLionel Sambuc  * Redistribution and use in source and binary forms, with or without
54684ddb6SLionel Sambuc  * modification, are permitted provided that the following conditions are met:
64684ddb6SLionel Sambuc  *
74684ddb6SLionel Sambuc  * 1. Redistributions of source code must retain the above copyright notice,
84684ddb6SLionel Sambuc  *    this list of conditions and the following disclaimer.
94684ddb6SLionel Sambuc  *
104684ddb6SLionel Sambuc  * 2. Redistributions in binary form must reproduce the above copyright notice,
114684ddb6SLionel Sambuc  *    this list of conditions and the following disclaimer in the documentation
124684ddb6SLionel Sambuc  *    and/or other materials provided with the distribution.
134684ddb6SLionel Sambuc  *
144684ddb6SLionel Sambuc  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS
154684ddb6SLionel Sambuc  * IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
164684ddb6SLionel Sambuc  * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
174684ddb6SLionel Sambuc  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
184684ddb6SLionel Sambuc  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
194684ddb6SLionel Sambuc  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
204684ddb6SLionel Sambuc  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
214684ddb6SLionel Sambuc  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
224684ddb6SLionel Sambuc  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
234684ddb6SLionel Sambuc  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
244684ddb6SLionel Sambuc  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
254684ddb6SLionel Sambuc  */
264684ddb6SLionel Sambuc 
274684ddb6SLionel Sambuc /**
284684ddb6SLionel Sambuc  * guard.cc: Functions for thread-safe static initialisation.
294684ddb6SLionel Sambuc  *
304684ddb6SLionel Sambuc  * Static values in C++ can be initialised lazily their first use.  This file
314684ddb6SLionel Sambuc  * contains functions that are used to ensure that two threads attempting to
324684ddb6SLionel Sambuc  * initialize the same static do not call the constructor twice.  This is
334684ddb6SLionel Sambuc  * important because constructors can have side effects, so calling the
344684ddb6SLionel Sambuc  * constructor twice may be very bad.
354684ddb6SLionel Sambuc  *
364684ddb6SLionel Sambuc  * Statics that require initialisation are protected by a 64-bit value.  Any
374684ddb6SLionel Sambuc  * platform that can do 32-bit atomic test and set operations can use this
384684ddb6SLionel Sambuc  * value as a low-overhead lock.  Because statics (in most sane code) are
394684ddb6SLionel Sambuc  * accessed far more times than they are initialised, this lock implementation
404684ddb6SLionel Sambuc  * is heavily optimised towards the case where the static has already been
414684ddb6SLionel Sambuc  * initialised.
424684ddb6SLionel Sambuc  */
434684ddb6SLionel Sambuc #include <stdint.h>
444684ddb6SLionel Sambuc #include <stdlib.h>
454684ddb6SLionel Sambuc #include <stdio.h>
464684ddb6SLionel Sambuc 
#if !defined(__minix)
#include <pthread.h>
#else
#define _MTHREADIFY_PTHREADS 1
#include <minix/mthread.h>
// On MINIX, sched_yield() may not be linked in.  Declare it as a weak
// symbol and wrap calls in a macro that first tests whether the symbol
// resolved to a non-null address, turning the call into a no-op when it
// is absent.  Two preprocessor subtleties make this legal:
//  - `if (sched_yield)` uses the bare identifier, which a function-like
//    macro does not expand (no following parenthesis);
//  - the inner `sched_yield()` is not re-expanded while the macro itself
//    is being expanded, so it calls the real (weak) function.
#pragma weak sched_yield
#define sched_yield() do {\
	if (sched_yield) sched_yield();\
	} while(0)
#endif /* !defined(__minix) */
574684ddb6SLionel Sambuc 
584684ddb6SLionel Sambuc #include <assert.h>
594684ddb6SLionel Sambuc #include "atomic.h"
604684ddb6SLionel Sambuc 
614684ddb6SLionel Sambuc // Older GCC doesn't define __LITTLE_ENDIAN__
624684ddb6SLionel Sambuc #ifndef __LITTLE_ENDIAN__
634684ddb6SLionel Sambuc 	// If __BYTE_ORDER__ is defined, use that instead
644684ddb6SLionel Sambuc #	ifdef __BYTE_ORDER__
654684ddb6SLionel Sambuc #		if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
664684ddb6SLionel Sambuc #			define __LITTLE_ENDIAN__
674684ddb6SLionel Sambuc #		endif
684684ddb6SLionel Sambuc 	// x86 and ARM are the most common little-endian CPUs, so let's have a
694684ddb6SLionel Sambuc 	// special case for them (ARM is already special cased).  Assume everything
704684ddb6SLionel Sambuc 	// else is big endian.
714684ddb6SLionel Sambuc #	elif defined(__x86_64) || defined(__i386)
724684ddb6SLionel Sambuc #		define __LITTLE_ENDIAN__
734684ddb6SLionel Sambuc #	endif
744684ddb6SLionel Sambuc #endif
754684ddb6SLionel Sambuc 
764684ddb6SLionel Sambuc 
774684ddb6SLionel Sambuc /*
784684ddb6SLionel Sambuc  * The least significant bit of the guard variable indicates that the object
794684ddb6SLionel Sambuc  * has been initialised, the most significant bit is used for a spinlock.
804684ddb6SLionel Sambuc  */
/*
 * Guard layout per target ABI.  LOCK_PART()/INIT_PART() yield pointers to
 * the word holding the spinlock bit and the word holding the initialised
 * bit respectively; on single-word layouts they are the same pointer, and
 * the lock/init fast paths below test that equality to pick a strategy.
 */
#ifdef __arm__
// ARM ABI - 32-bit guards.  Lock and init flags share one word.
typedef uint32_t guard_t;
typedef uint32_t guard_lock_t;
static const uint32_t LOCKED = static_cast<guard_t>(1) << 31;
static const uint32_t INITIALISED = 1;
#define LOCK_PART(guard) (guard)
#define INIT_PART(guard) (guard)
#elif defined(_LP64)
// 64-bit targets: one 64-bit word holds both flags.  The initialised flag
// must live in the first byte of the guard (Itanium ABI), so its bit
// position depends on endianness.
typedef uint64_t guard_t;
typedef uint64_t guard_lock_t;
#	if defined(__LITTLE_ENDIAN__)
static const guard_t LOCKED = static_cast<guard_t>(1) << 63;
static const guard_t INITIALISED = 1;
#	else
static const guard_t LOCKED = 1;
static const guard_t INITIALISED = static_cast<guard_t>(1) << 56;
#	endif
#define LOCK_PART(guard) (guard)
#define INIT_PART(guard) (guard)
#else
// 32-bit targets with a 64-bit guard: split it into two 32-bit halves so
// that 32-bit atomics suffice.  The init half must be the one containing
// the guard's first byte.
typedef uint32_t guard_lock_t;
#	if defined(__LITTLE_ENDIAN__)
typedef struct {
	uint32_t init_half;
	uint32_t lock_half;
} guard_t;
// Keep the layout check consistent with the big-endian branch: the split
// struct must still occupy exactly the 64 bits the ABI reserves.
static_assert(sizeof(guard_t) == sizeof(uint64_t), "");
static const uint32_t LOCKED = static_cast<guard_lock_t>(1) << 31;
static const uint32_t INITIALISED = 1;
#	else
typedef struct {
	uint32_t init_half;
	uint32_t lock_half;
} guard_t;
static_assert(sizeof(guard_t) == sizeof(uint64_t), "");
static const uint32_t LOCKED = 1;
static const uint32_t INITIALISED = static_cast<guard_lock_t>(1) << 24;
#	endif
#define LOCK_PART(guard) (&(guard)->lock_half)
#define INIT_PART(guard) (&(guard)->init_half)
#endif
// A guard word that is neither locked nor initialised.
static const guard_lock_t INITIAL = 0;
1234684ddb6SLionel Sambuc 
1244684ddb6SLionel Sambuc /**
1254684ddb6SLionel Sambuc  * Acquires a lock on a guard, returning 0 if the object has already been
1264684ddb6SLionel Sambuc  * initialised, and 1 if it has not.  If the object is already constructed then
1274684ddb6SLionel Sambuc  * this function just needs to read a byte from memory and return.
1284684ddb6SLionel Sambuc  */
// Returns 1 when the caller has acquired the guard and must run the
// initialiser (then call __cxa_guard_release() on success or
// __cxa_guard_abort() on exception), and 0 when the static is already
// initialised and the caller must skip construction.
extern "C" int __cxa_guard_acquire(volatile guard_t *guard_object)
{
	guard_lock_t old;
	// Not an atomic read, doesn't establish a happens-before relationship, but
	// if one is already established and we end up seeing an initialised state
	// then it's a fast path, otherwise we'll do something more expensive than
	// this test anyway...
	if (INITIALISED == *INIT_PART(guard_object))
		return 0;
	// Spin trying to do the initialisation
	for (;;)
	{
		// Loop trying to move the value of the guard from 0 (not
		// locked, not initialised) to the locked-uninitialised
		// position.
		old = __sync_val_compare_and_swap(LOCK_PART(guard_object),
		    INITIAL, LOCKED);
		if (old == INITIAL) {
			// Lock obtained.  If lock and init bit are
			// in separate words, check for init race.
			if (INIT_PART(guard_object) == LOCK_PART(guard_object))
				return 1;
			if (INITIALISED != *INIT_PART(guard_object))
				return 1;

			// Another thread initialised the object between our
			// fast-path read and taking the lock: release the
			// lock word and report "already done".
			// No need for a memory barrier here,
			// see first comment.
			*LOCK_PART(guard_object) = INITIAL;
			return 0;
		}
		// If lock and init bit are in the same word, check again
		// if we are done.  (In the single-word layout the CAS above
		// returns INITIALISED once the static is constructed.)
		if (INIT_PART(guard_object) == LOCK_PART(guard_object) &&
		    old == INITIALISED)
			return 0;

		// Only remaining legal state: some other thread holds the
		// lock and is running the initialiser right now.
		assert(old == LOCKED);
		// Another thread holds the lock.
		// If lock and init bit are in different words, check
		// if we are done before yielding and looping.
		if (INIT_PART(guard_object) != LOCK_PART(guard_object) &&
		    INITIALISED == *INIT_PART(guard_object))
			return 0;
		sched_yield();
	}
}
1754684ddb6SLionel Sambuc 
1764684ddb6SLionel Sambuc /**
1774684ddb6SLionel Sambuc  * Releases the lock without marking the object as initialised.  This function
1784684ddb6SLionel Sambuc  * is called if initialising a static causes an exception to be thrown.
1794684ddb6SLionel Sambuc  */
__cxa_guard_abort(volatile guard_t * guard_object)1804684ddb6SLionel Sambuc extern "C" void __cxa_guard_abort(volatile guard_t *guard_object)
1814684ddb6SLionel Sambuc {
1824684ddb6SLionel Sambuc 	__attribute__((unused))
183*0a6a1f1dSLionel Sambuc 	bool reset = __sync_bool_compare_and_swap(LOCK_PART(guard_object),
184*0a6a1f1dSLionel Sambuc 	    LOCKED, INITIAL);
1854684ddb6SLionel Sambuc 	assert(reset);
1864684ddb6SLionel Sambuc }
1874684ddb6SLionel Sambuc /**
1884684ddb6SLionel Sambuc  * Releases the guard and marks the object as initialised.  This function is
1894684ddb6SLionel Sambuc  * called after successful initialisation of a static.
1904684ddb6SLionel Sambuc  */
__cxa_guard_release(volatile guard_t * guard_object)1914684ddb6SLionel Sambuc extern "C" void __cxa_guard_release(volatile guard_t *guard_object)
1924684ddb6SLionel Sambuc {
193*0a6a1f1dSLionel Sambuc 	guard_lock_t old;
194*0a6a1f1dSLionel Sambuc 	if (INIT_PART(guard_object) == LOCK_PART(guard_object))
195*0a6a1f1dSLionel Sambuc 		old = LOCKED;
196*0a6a1f1dSLionel Sambuc 	else
197*0a6a1f1dSLionel Sambuc 		old = INITIAL;
1984684ddb6SLionel Sambuc 	__attribute__((unused))
199*0a6a1f1dSLionel Sambuc 	bool reset = __sync_bool_compare_and_swap(INIT_PART(guard_object),
200*0a6a1f1dSLionel Sambuc 	    old, INITIALISED);
2014684ddb6SLionel Sambuc 	assert(reset);
202*0a6a1f1dSLionel Sambuc 	if (INIT_PART(guard_object) != LOCK_PART(guard_object))
203*0a6a1f1dSLionel Sambuc 		*LOCK_PART(guard_object) = INITIAL;
2044684ddb6SLionel Sambuc }
205