New atomic API definition based on C11 atomic

This commit is contained in:
WeiY
2013-07-15 23:31:08 +08:00
committed by Sebastian Huber
parent 3923472555
commit 1180099ed9

View File

@@ -8,7 +8,7 @@
*/ */
/* /*
* COPYRIGHT (c) 2012 Deng Hengyi. * COPYRIGHT (c) 2012-2013 Deng Hengyi.
* *
* The license and distribution terms for this file may be * The license and distribution terms for this file may be
* found in the file LICENSE in this distribution or at * found in the file LICENSE in this distribution or at
@@ -32,241 +32,260 @@ extern "C" {
/**@{*/ /**@{*/
/** /**
* @brief the enumeration Atomic_Memory_barrier specifies the detailed regular * @brief Atomically load an atomic type value from atomic object.
* memory synchronization operations used in the atomic operation API *
* definitions. * @param object an atomic type pointer of object.
* @param order a type of Atomic_Order.
*
* The order shall not be ATOMIC_ORDER_RELEASE.
*/ */
typedef enum { RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Load_uint(
/** no operation orders memory. */ volatile Atomic_Uint *object,
ATOMIC_RELAXED_BARRIER, Atomic_Order order
/** a load operation performs an acquire operation on the affected memory )
* location. This flag guarantees that the effects of load operation are {
* completed before the effects of any later data accesses. return _CPU_atomic_Load_uint( object, order );
*/ }
ATOMIC_ACQUIRE_BARRIER,
/** a store operation performs a release operation on the affected memory RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Load_ptr(
* location. This flag guarantee that all effects of all previous data volatile Atomic_Pointer *object,
* accesses are completed before the store operation takes place. Atomic_Order order
*/ )
ATOMIC_RELEASE_BARRIER {
} Atomic_Memory_barrier; return _CPU_atomic_Load_ptr( object, order );
}
/** /**
* @brief Atomically load an atomic type value from address @a address with * @brief Atomically store an atomic type value into a atomic object.
* a type of Atomic_Memory_barrier @a memory_barrier. The @a memory_barrier *
* shall not be ATOMIC_RELEASE_BARRIER. * @param object an atomic type pointer of object.
* @param value a value to be stored into object.
* @param order a type of Atomic_Order.
*
* The order shall not be ATOMIC_ORDER_ACQUIRE.
*/ */
RTEMS_INLINE_ROUTINE Atomic_Int _Atomic_Load_int( RTEMS_INLINE_ROUTINE void _Atomic_Store_uint(
volatile Atomic_Int *address, volatile Atomic_Uint *object,
Atomic_Memory_barrier memory_barrier uint_fast32_t value,
); Atomic_Order order
RTEMS_INLINE_ROUTINE Atomic_Long _Atomic_Load_long( )
volatile Atomic_Long *address, {
Atomic_Memory_barrier memory_barrier _CPU_atomic_Store_uint( object, value, order );
); }
RTEMS_INLINE_ROUTINE Atomic_Pointer _Atomic_Load_ptr(
volatile Atomic_Pointer *address,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE Atomic_Int32 _Atomic_Load_32(
volatile Atomic_Int32 *address,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64(
volatile Atomic_Int64 *address,
Atomic_Memory_barrier memory_barrier
);
/**
* @brief Atomically store an atomic type value @a value into address @a
* address with a type of Atomic_Memory_barrier @a memory_barrier. The @a
* memory_barrier shall not be ATOMIC_ACQUIRE_BARRIER.
*/
RTEMS_INLINE_ROUTINE void _Atomic_Store_int(
volatile Atomic_Int *address,
Atomic_Int value,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE void _Atomic_Store_long(
volatile Atomic_Long *address,
Atomic_Long value,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr( RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr(
volatile Atomic_Pointer *address, volatile Atomic_Pointer *object,
Atomic_Pointer value, uintptr_t value,
Atomic_Memory_barrier memory_barrier Atomic_Order order
); )
RTEMS_INLINE_ROUTINE void _Atomic_Store_32( {
volatile Atomic_Int32 *address, _CPU_atomic_Store_ptr( object, value, order );
Atomic_Int32 value, }
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE void _Atomic_Store_64(
volatile Atomic_Int64 *address,
Atomic_Int64 value,
Atomic_Memory_barrier memory_barrier
);
/** /**
* @brief Atomically load-add-store an atomic type value @a value into address * @brief Atomically load-add-store an atomic type value into object
* @a address with a type of Atomic_Memory_barrier @a memory_barrier. *
* @param object a atomic type pointer of object.
* @param value a value to be add and store into object.
* @param order a type of Atomic_Order.
*
* @retval a result value after add ops.
*/ */
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_int( RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Fetch_add_uint(
volatile Atomic_Int *address, volatile Atomic_Uint *object,
Atomic_Int value, uint_fast32_t value,
Atomic_Memory_barrier memory_barrier Atomic_Order order
); )
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_long( {
volatile Atomic_Long *address, return _CPU_atomic_Fetch_add_uint( object, value, order );
Atomic_Long value, }
Atomic_Memory_barrier memory_barrier
); RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Fetch_add_ptr(
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_ptr( volatile Atomic_Pointer *object,
volatile Atomic_Pointer *address, uintptr_t value,
Atomic_Pointer value, Atomic_Order order
Atomic_Memory_barrier memory_barrier )
); {
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_32( return _CPU_atomic_Fetch_add_ptr( object, value, order );
volatile Atomic_Int32 *address, }
Atomic_Int32 value,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_64(
volatile Atomic_Int64 *address,
Atomic_Int64 value,
Atomic_Memory_barrier memory_barrier
);
/** /**
* @brief Atomically load-sub-store an atomic type value @a value into address * @brief Atomically load-sub-store an atomic type value into object
* @a address with a type of Atomic_Memory_barrier @a memory_barrier. *
* @param object a atomic type pointer of object.
* @param value a value to be sub and store into object.
* @param order a type of Atomic_Order.
*
* @retval a result value after sub ops.
*/ */
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_int( RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Fetch_sub_uint(
volatile Atomic_Int *address, volatile Atomic_Uint *object,
Atomic_Int value, uint_fast32_t value,
Atomic_Memory_barrier memory_barrier Atomic_Order order
); )
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_long( {
volatile Atomic_Long *address, return _CPU_atomic_Fetch_sub_uint( object, value, order );
Atomic_Long value, }
Atomic_Memory_barrier memory_barrier
); RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Fetch_sub_ptr(
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_ptr( volatile Atomic_Pointer *object,
volatile Atomic_Pointer *address, uintptr_t value,
Atomic_Pointer value, Atomic_Order order
Atomic_Memory_barrier memory_barrier )
); {
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_32( return _CPU_atomic_Fetch_sub_ptr( object, value, order );
volatile Atomic_Int32 *address, }
Atomic_Int32 value,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_64(
volatile Atomic_Int64 *address,
Atomic_Int64 value,
Atomic_Memory_barrier memory_barrier
);
/** /**
* @brief Atomically load-or-store an atomic type value @a value into address * @brief Atomically load-or-store an atomic type value into object
* @a address with a type of Atomic_Memory_barrier @a memory_barrier. *
* @param object a atomic type pointer of object.
* @param value a value to be or and store into object.
* @param order a type of Atomic_Order.
*
* @retval a result value after or ops.
*/ */
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_int( RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Fetch_or_uint(
volatile Atomic_Int *address, volatile Atomic_Uint *object,
Atomic_Int value, uint_fast32_t value,
Atomic_Memory_barrier memory_barrier Atomic_Order order
); )
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_long( {
volatile Atomic_Long *address, return _CPU_atomic_Fetch_or_uint( object, value, order );
Atomic_Long value, }
Atomic_Memory_barrier memory_barrier
); RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Fetch_or_ptr(
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_ptr( volatile Atomic_Pointer *object,
volatile Atomic_Pointer *address, uintptr_t value,
Atomic_Pointer value, Atomic_Order order
Atomic_Memory_barrier memory_barrier )
); {
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_32( return _CPU_atomic_Fetch_or_ptr( object, value, order );
volatile Atomic_Int32 *address, }
Atomic_Int32 value,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_64(
volatile Atomic_Int64 *address,
Atomic_Int64 value,
Atomic_Memory_barrier memory_barrier
);
/** /**
* @brief Atomically load-and-store an atomic type value @a value into address * @brief Atomically load-and-store an atomic type value into object
* @a address with a type of Atomic_Memory_barrier @a memory_barrier. *
* @param object a atomic type pointer of object.
* @param value a value to be and and store into object.
* @param order a type of Atomic_Order.
*
* @retval a result value after and ops.
*/ */
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_int( RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Fetch_and_uint(
volatile Atomic_Int *address, volatile Atomic_Uint *object,
Atomic_Int value, uint_fast32_t value,
Atomic_Memory_barrier memory_barrier Atomic_Order order
); )
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_long( {
volatile Atomic_Long *address, return _CPU_atomic_Fetch_and_uint( object, value, order );
Atomic_Long value, }
Atomic_Memory_barrier memory_barrier
); RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Fetch_and_ptr(
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_ptr( volatile Atomic_Pointer *object,
volatile Atomic_Pointer *address, uintptr_t value,
Atomic_Pointer value, Atomic_Order order
Atomic_Memory_barrier memory_barrier )
); {
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_32( return _CPU_atomic_Fetch_and_ptr( object, value, order );
volatile Atomic_Int32 *address, }
Atomic_Int32 value,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_64(
volatile Atomic_Int64 *address,
Atomic_Int64 value,
Atomic_Memory_barrier memory_barrier
);
/** /**
* @brief Atomically compare the value stored at @a address with @a * @brief Atomically exchange an atomic type value into object
* old_value and if the two values are equal, update the value of @a *
* address with @a new_value. Returns zero if the compare failed, * @param object a atomic type pointer of object.
* nonzero otherwise. The operation uses a type of Atomic_Memory_barrier * @param value a value to exchange and and store into object.
* @a memory_barrier. * @param order a type of Atomic_Order.
*
* @retval a result value after exchange ops.
*/ */
RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_int( RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Exchange_uint(
volatile Atomic_Int *address, volatile Atomic_Uint *object,
Atomic_Int old_value, uint_fast32_t value,
Atomic_Int new_value, Atomic_Order order
Atomic_Memory_barrier memory_barrier )
); {
RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_long( return _CPU_atomic_Exchange_uint( object, value, order );
volatile Atomic_Long *address, }
Atomic_Long old_value,
Atomic_Long new_value,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_ptr(
volatile Atomic_Pointer *address,
Atomic_Pointer old_value,
Atomic_Pointer new_value,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_32(
volatile Atomic_Int32 *address,
Atomic_Int32 old_value,
Atomic_Int32 new_value,
Atomic_Memory_barrier memory_barrier
);
RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_64(
volatile Atomic_Int64 *address,
Atomic_Int64 old_value,
Atomic_Int64 new_value,
Atomic_Memory_barrier memory_barrier
);
#include <rtems/score/atomic.inl> RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Exchange_ptr(
volatile Atomic_Pointer *object,
uintptr_t value,
Atomic_Order order
)
{
return _CPU_atomic_Exchange_ptr( object, value, order );
}
/**
 * @brief Atomically compare and exchange an atomic unsigned integer value.
 *
 * Compares the value stored in @a object with the expected value pointed to
 * by @a old_value.  If they are equal, @a object is updated to @a new_value.
 * NOTE(review): presumably follows C11 compare_exchange semantics, i.e. on
 * failure the current value of @a object is written back into @a old_value —
 * confirm against the CPU port's _CPU_atomic_Compare_exchange_uint.
 *
 * @param[in, out] object an atomic type pointer of object.
 * @param[in, out] old_value pointer to the expected value.
 * @param new_value the value to store on a successful comparison.
 * @param order_succ the Atomic_Order used for a successful exchange.
 * @param order_fail the Atomic_Order used for a failed exchange.
 *
 * @retval true if the compare exchange succeeded.
 * @retval false if the compare exchange failed.
 */
RTEMS_INLINE_ROUTINE bool _Atomic_Compare_exchange_uint(
volatile Atomic_Uint *object,
uint_fast32_t *old_value,
uint_fast32_t new_value,
Atomic_Order order_succ,
Atomic_Order order_fail
)
{
return _CPU_atomic_Compare_exchange_uint( object, old_value, new_value,
order_succ, order_fail );
}
/**
 * @brief Atomically compare and exchange an atomic pointer value.
 *
 * Pointer-typed counterpart of _Atomic_Compare_exchange_uint: compares the
 * value stored in @a object with the expected value pointed to by
 * @a old_value and, if equal, stores @a new_value.  Delegates to the CPU
 * port's _CPU_atomic_Compare_exchange_ptr.
 *
 * @param[in, out] object an atomic type pointer of object.
 * @param[in, out] old_value pointer to the expected value.
 * @param new_value the value to store on a successful comparison.
 * @param order_succ the Atomic_Order used for a successful exchange.
 * @param order_fail the Atomic_Order used for a failed exchange.
 *
 * @retval true if the compare exchange succeeded.
 * @retval false if the compare exchange failed.
 */
RTEMS_INLINE_ROUTINE bool _Atomic_Compare_exchange_ptr(
volatile Atomic_Pointer *object,
uintptr_t *old_value,
uintptr_t new_value,
Atomic_Order order_succ,
Atomic_Order order_fail
)
{
return _CPU_atomic_Compare_exchange_ptr( object, old_value, new_value,
order_succ, order_fail );
}
/**
 * @brief Atomically clear the value of an atomic flag type object.
 *
 * Delegates to the CPU port's _CPU_atomic_Clear_flag.
 *
 * @param[in, out] object an atomic flag type pointer of object.
 * @param order a type of Atomic_Order.
 */
RTEMS_INLINE_ROUTINE void _Atomic_Clear_flag(
volatile Atomic_Flag *object,
Atomic_Order order
)
{
_CPU_atomic_Clear_flag( object, order );
}
/**
 * @brief Atomically test and set the value of an atomic flag type object.
 *
 * Delegates to the CPU port's _CPU_atomic_Test_set_flag.
 * NOTE(review): presumably follows C11 atomic_flag_test_and_set semantics,
 * returning the flag's previous value — confirm against the CPU port.
 *
 * @param[in, out] object an atomic flag type pointer of object.
 * @param order a type of Atomic_Order.
 *
 * @retval true if the flag was already set before this call.
 * @retval false if the flag was clear before this call (the caller
 *   acquired it).
 */
RTEMS_INLINE_ROUTINE bool _Atomic_Test_set_flag(
volatile Atomic_Flag *object,
Atomic_Order order
)
{
return _CPU_atomic_Test_set_flag( object, order );
}
#ifdef __cplusplus #ifdef __cplusplus
} }