diff --git a/arch/arm/Kconfig b/arch/arm/Kconfig
index 44f55bffe82ac..74b7f01da1894 100644
--- a/arch/arm/Kconfig
+++ b/arch/arm/Kconfig
@@ -731,6 +731,7 @@ config ARCH_CHIP_CXD32XX
 	bool "Sony CXD32xx"
 	select ARCH_CORTEXM4
 	select ARCH_HAVE_FPU
+	select LIBC_ARCH_NXATOMIC
 	---help---
 		Sony CXD32XX (ARM Cortex-M4) architectures
diff --git a/include/nuttx/atomic.h b/include/nuttx/atomic.h
index 1c430f963f9c1..29c3decf26bd8 100644
--- a/include/nuttx/atomic.h
+++ b/include/nuttx/atomic.h
@@ -27,6 +27,9 @@
  * Included Files
  ****************************************************************************/
 
+#ifdef CONFIG_LIBC_ARCH_NXATOMIC
+#  include <nuttx/lib/stdatomic.h>
+#else
 #ifdef __has_include
 #  if defined(__cplusplus) && __has_include(<atomic>)
 extern "C++"
@@ -92,6 +95,7 @@ extern "C++"
 #else
 #  include <nuttx/lib/stdatomic.h>
 #endif
+#endif
 
 /****************************************************************************
  * Public Function Prototypes
diff --git a/include/nuttx/lib/stdatomic.h b/include/nuttx/lib/stdatomic.h
index 07f14911fb9c0..95b21f7822a0b 100644
--- a/include/nuttx/lib/stdatomic.h
+++ b/include/nuttx/lib/stdatomic.h
@@ -62,6 +62,109 @@
 #define ATOMIC_FLAG_INIT 0
 #define ATOMIC_VAR_INIT(value) (value)
 
+#ifdef CONFIG_LIBC_ARCH_NXATOMIC
+
+#define atomic_store_n(obj, val, type) \
+  (sizeof(*(obj)) == 1 ? __nx_atomic_store_1(obj, val, type) : \
+   sizeof(*(obj)) == 2 ? __nx_atomic_store_2(obj, val, type) : \
+   sizeof(*(obj)) == 4 ? __nx_atomic_store_4(obj, val, type) : \
+   __nx_atomic_store_8(obj, val, type))
+
+#define atomic_store(obj, val) atomic_store_n(obj, val, __ATOMIC_RELAXED)
+#define atomic_store_explicit(obj, val, type) atomic_store_n(obj, val, type)
+#define atomic_init(obj, val) atomic_store(obj, val)
+
+#define atomic_load_n(obj, type) \
+  (sizeof(*(obj)) == 1 ? __nx_atomic_load_1(obj, type) : \
+   sizeof(*(obj)) == 2 ? __nx_atomic_load_2(obj, type) : \
+   sizeof(*(obj)) == 4 ? __nx_atomic_load_4(obj, type) : \
+   __nx_atomic_load_8(obj, type))
+
+#define atomic_load(obj) atomic_load_n(obj, __ATOMIC_RELAXED)
+#define atomic_load_explicit(obj, type) atomic_load_n(obj, type)
+
+#define atomic_exchange_n(obj, val, type) \
+  (sizeof(*(obj)) == 1 ? __nx_atomic_exchange_1(obj, val, type) : \
+   sizeof(*(obj)) == 2 ? __nx_atomic_exchange_2(obj, val, type) : \
+   sizeof(*(obj)) == 4 ? __nx_atomic_exchange_4(obj, val, type) : \
+   __nx_atomic_exchange_8(obj, val, type))
+
+#define atomic_exchange(obj, val) atomic_exchange_n(obj, val, __ATOMIC_RELAXED)
+#define atomic_exchange_explicit(obj, val, type) atomic_exchange_n(obj, val, type)
+
+#define atomic_compare_exchange_n(obj, expected, desired, weak, success, failure) \
+  (sizeof(*(obj)) == 1 ? __nx_atomic_compare_exchange_1(obj, expected, desired, weak, success, failure) : \
+   sizeof(*(obj)) == 2 ? __nx_atomic_compare_exchange_2(obj, expected, desired, weak, success, failure) : \
+   sizeof(*(obj)) == 4 ? __nx_atomic_compare_exchange_4(obj, expected, desired, weak, success, failure) : \
+   __nx_atomic_compare_exchange_8(obj, expected, desired, weak, success, failure))
+
+#define atomic_compare_exchange_strong(obj, expected, desired) \
+  atomic_compare_exchange_n(obj, expected, desired, false, __ATOMIC_RELAXED, __ATOMIC_RELAXED)
+#define atomic_compare_exchange_strong_explicit(obj, expected, desired, success, failure) \
+  atomic_compare_exchange_n(obj, expected, desired, false, success, failure)
+#define atomic_compare_exchange_weak(obj, expected, desired) \
+  atomic_compare_exchange_n(obj, expected, desired, true, __ATOMIC_RELAXED, __ATOMIC_RELAXED)
+#define atomic_compare_exchange_weak_explicit(obj, expected, desired, success, failure) \
+  atomic_compare_exchange_n(obj, expected, desired, true, success, failure)
+
+#define atomic_flag_test_and_set_n(obj, type) \
+  (sizeof(*(obj)) == 1 ? __nx_atomic_flag_test_and_set_1(obj, type) : \
+   sizeof(*(obj)) == 2 ? __nx_atomic_flag_test_and_set_2(obj, type) : \
+   sizeof(*(obj)) == 4 ? __nx_atomic_flag_test_and_set_4(obj, type) : \
+   __nx_atomic_flag_test_and_set_8(obj, type))
+
+#define atomic_flag_test_and_set(obj) atomic_flag_test_and_set_n(obj, __ATOMIC_RELAXED)
+#define atomic_flag_test_and_set_explicit(obj, type) atomic_flag_test_and_set_n(obj, type)
+#define atomic_flag_clear(obj) atomic_store(obj, 0)
+#define atomic_flag_clear_explicit(obj, type) atomic_store_explicit(obj, 0, type)
+
+#define atomic_fetch_and_n(obj, val, type) \
+  (sizeof(*(obj)) == 1 ? __nx_atomic_fetch_and_1(obj, val, type) : \
+   sizeof(*(obj)) == 2 ? __nx_atomic_fetch_and_2(obj, val, type) : \
+   sizeof(*(obj)) == 4 ? __nx_atomic_fetch_and_4(obj, val, type) : \
+   __nx_atomic_fetch_and_8(obj, val, type))
+
+#define atomic_fetch_and(obj, val) atomic_fetch_and_n(obj, val, __ATOMIC_RELAXED)
+#define atomic_fetch_and_explicit(obj, val, type) atomic_fetch_and_n(obj, val, type)
+
+#define atomic_fetch_or_n(obj, val, type) \
+  (sizeof(*(obj)) == 1 ? __nx_atomic_fetch_or_1(obj, val, type) : \
+   sizeof(*(obj)) == 2 ? __nx_atomic_fetch_or_2(obj, val, type) : \
+   sizeof(*(obj)) == 4 ? __nx_atomic_fetch_or_4(obj, val, type) : \
+   __nx_atomic_fetch_or_8(obj, val, type))
+
+#define atomic_fetch_or(obj, val) atomic_fetch_or_n(obj, val, __ATOMIC_RELAXED)
+#define atomic_fetch_or_explicit(obj, val, type) atomic_fetch_or_n(obj, val, type)
+
+#define atomic_fetch_xor_n(obj, val, type) \
+  (sizeof(*(obj)) == 1 ? __nx_atomic_fetch_xor_1(obj, val, type) : \
+   sizeof(*(obj)) == 2 ? __nx_atomic_fetch_xor_2(obj, val, type) : \
+   sizeof(*(obj)) == 4 ? __nx_atomic_fetch_xor_4(obj, val, type) : \
+   __nx_atomic_fetch_xor_8(obj, val, type))
+
+#define atomic_fetch_xor(obj, val) atomic_fetch_xor_n(obj, val, __ATOMIC_RELAXED)
+#define atomic_fetch_xor_explicit(obj, val, type) atomic_fetch_xor_n(obj, val, type)
+
+#define atomic_fetch_add_n(obj, val, type) \
+  (sizeof(*(obj)) == 1 ? __nx_atomic_fetch_add_1(obj, val, type) : \
+   sizeof(*(obj)) == 2 ? __nx_atomic_fetch_add_2(obj, val, type) : \
+   sizeof(*(obj)) == 4 ? __nx_atomic_fetch_add_4(obj, val, type) : \
+   __nx_atomic_fetch_add_8(obj, val, type))
+
+#define atomic_fetch_add(obj, val) atomic_fetch_add_n(obj, val, __ATOMIC_RELAXED)
+#define atomic_fetch_add_explicit(obj, val, type) atomic_fetch_add_n(obj, val, type)
+
+#define atomic_fetch_sub_n(obj, val, type) \
+  (sizeof(*(obj)) == 1 ? __nx_atomic_fetch_sub_1(obj, val, type) : \
+   sizeof(*(obj)) == 2 ? __nx_atomic_fetch_sub_2(obj, val, type) : \
+   sizeof(*(obj)) == 4 ? __nx_atomic_fetch_sub_4(obj, val, type) : \
+   __nx_atomic_fetch_sub_8(obj, val, type))
+
+#define atomic_fetch_sub(obj, val) atomic_fetch_sub_n(obj, val, __ATOMIC_RELAXED)
+#define atomic_fetch_sub_explicit(obj, val, type) atomic_fetch_sub_n(obj, val, type)
+
+#else
+
 #define atomic_store_n(obj, val, type) \
   (sizeof(*(obj)) == 1 ? __atomic_store_1(obj, val, type) : \
    sizeof(*(obj)) == 2 ? __atomic_store_2(obj, val, type) : \
@@ -161,6 +264,8 @@
 #define atomic_fetch_sub(obj, val) atomic_fetch_sub_n(obj, val, __ATOMIC_RELAXED)
 #define atomic_fetch_sub_explicit(obj, val, type) atomic_fetch_sub_n(obj, val, type)
 
+#endif
+
 /****************************************************************************
  * Public Types
  ****************************************************************************/
@@ -194,6 +299,91 @@ typedef volatile wchar_t atomic_wchar_t;
  * Public Function Prototypes
  ****************************************************************************/
 
+#ifdef CONFIG_LIBC_ARCH_NXATOMIC
+
+void __nx_atomic_store_1(FAR volatile void *ptr, uint8_t value,
+                         int memorder);
+void __nx_atomic_store_2(FAR volatile void *ptr, uint16_t value,
+                         int memorder);
+void __nx_atomic_store_4(FAR volatile void *ptr, uint32_t value,
+                         int memorder);
+void __nx_atomic_store_8(FAR volatile void *ptr, uint64_t value,
+                         int memorder);
+uint8_t __nx_atomic_load_1(FAR const volatile void *ptr, int memorder);
+uint16_t __nx_atomic_load_2(FAR const volatile void *ptr, int memorder);
+uint32_t __nx_atomic_load_4(FAR const volatile void *ptr, int memorder);
+uint64_t __nx_atomic_load_8(FAR const volatile void *ptr, int memorder);
+uint8_t __nx_atomic_exchange_1(FAR volatile void *ptr, uint8_t value,
+                               int memorder);
+uint16_t __nx_atomic_exchange_2(FAR volatile void *ptr, uint16_t value,
+                                int memorder);
+uint32_t __nx_atomic_exchange_4(FAR volatile void *ptr, uint32_t value,
+                                int memorder);
+uint64_t __nx_atomic_exchange_8(FAR volatile void *ptr, uint64_t value,
+                                int memorder);
+bool __nx_atomic_compare_exchange_1(FAR volatile void *mem, FAR void *expect,
+                                    uint8_t desired, bool weak, int success,
+                                    int failure);
+bool __nx_atomic_compare_exchange_2(FAR volatile void *mem, FAR void *expect,
+                                    uint16_t desired, bool weak, int success,
+                                    int failure);
+bool __nx_atomic_compare_exchange_4(FAR volatile void *mem, FAR void *expect,
+                                    uint32_t desired, bool weak, int success,
+                                    int failure);
+bool __nx_atomic_compare_exchange_8(FAR volatile void *mem, FAR void *expect,
+                                    uint64_t desired, bool weak, int success,
+                                    int failure);
+uint8_t __nx_atomic_flag_test_and_set_1(FAR volatile void *ptr,
+                                        int memorder);
+uint16_t __nx_atomic_flag_test_and_set_2(FAR volatile void *ptr,
+                                         int memorder);
+uint32_t __nx_atomic_flag_test_and_set_4(FAR volatile void *ptr,
+                                         int memorder);
+uint64_t __nx_atomic_flag_test_and_set_8(FAR volatile void *ptr,
+                                         int memorder);
+uint8_t __nx_atomic_fetch_add_1(FAR volatile void *ptr, uint8_t value,
+                                int memorder);
+uint16_t __nx_atomic_fetch_add_2(FAR volatile void *ptr, uint16_t value,
+                                 int memorder);
+uint32_t __nx_atomic_fetch_add_4(FAR volatile void *ptr, uint32_t value,
+                                 int memorder);
+uint64_t __nx_atomic_fetch_add_8(FAR volatile void *ptr, uint64_t value,
+                                 int memorder);
+uint8_t __nx_atomic_fetch_sub_1(FAR volatile void *ptr, uint8_t value,
+                                int memorder);
+uint16_t __nx_atomic_fetch_sub_2(FAR volatile void *ptr, uint16_t value,
+                                 int memorder);
+uint32_t __nx_atomic_fetch_sub_4(FAR volatile void *ptr, uint32_t value,
+                                 int memorder);
+uint64_t __nx_atomic_fetch_sub_8(FAR volatile void *ptr, uint64_t value,
+                                 int memorder);
+uint8_t __nx_atomic_fetch_and_1(FAR volatile void *ptr, uint8_t value,
+                                int memorder);
+uint16_t __nx_atomic_fetch_and_2(FAR volatile void *ptr, uint16_t value,
+                                 int memorder);
+uint32_t __nx_atomic_fetch_and_4(FAR volatile void *ptr, uint32_t value,
+                                 int memorder);
+uint64_t __nx_atomic_fetch_and_8(FAR volatile void *ptr, uint64_t value,
+                                 int memorder);
+uint8_t __nx_atomic_fetch_or_1(FAR volatile void *ptr, uint8_t value,
+                               int memorder);
+uint16_t __nx_atomic_fetch_or_2(FAR volatile void *ptr, uint16_t value,
+                                int memorder);
+uint32_t __nx_atomic_fetch_or_4(FAR volatile void *ptr, uint32_t value,
+                                int memorder);
+uint64_t __nx_atomic_fetch_or_8(FAR volatile void *ptr, uint64_t value,
+                                int memorder);
+uint8_t __nx_atomic_fetch_xor_1(FAR volatile void *ptr, uint8_t value,
+                                int memorder);
+uint16_t __nx_atomic_fetch_xor_2(FAR volatile void *ptr, uint16_t value,
+                                 int memorder);
+uint32_t __nx_atomic_fetch_xor_4(FAR volatile void *ptr, uint32_t value,
+                                 int memorder);
+uint64_t __nx_atomic_fetch_xor_8(FAR volatile void *ptr, uint64_t value,
+                                 int memorder);
+
+#else
+
 void __atomic_store_1(FAR volatile void *ptr, uint8_t value, int memorder);
 void __atomic_store_2(FAR volatile void *ptr, uint16_t value, int memorder);
 void __atomic_store_4(FAR volatile void *ptr, uint32_t value, int memorder);
@@ -271,4 +461,6 @@ uint32_t __atomic_fetch_xor_4(FAR volatile void *ptr, uint32_t value,
 uint64_t __atomic_fetch_xor_8(FAR volatile void *ptr, uint64_t value,
                               int memorder);
 
+#endif
+
 #endif /* __INCLUDE_NUTTX_LIB_STDATOMIC_H */
diff --git a/libs/libc/machine/Kconfig b/libs/libc/machine/Kconfig
index 5b0caacccdabf..2918ecb380202 100644
--- a/libs/libc/machine/Kconfig
+++ b/libs/libc/machine/Kconfig
@@ -9,6 +9,14 @@
+config LIBC_ARCH_NXATOMIC
+	bool "arch_nxatomic"
+	default n
+	---help---
+		If this configuration is selected and <nuttx/atomic.h> is
+		included, arch_atomic.c will be linked instead of the
+		built-in atomic functions.
+
 config ARCH_LOWPUTC
 	bool "Low-level console output"
 	default y
diff --git a/libs/libc/machine/arch_atomic.c b/libs/libc/machine/arch_atomic.c
index 5da80f452c424..3b4a9b152d633 100644
--- a/libs/libc/machine/arch_atomic.c
+++ b/libs/libc/machine/arch_atomic.c
@@ -34,6 +34,289 @@
  * Pre-processor Definitions
  ****************************************************************************/
 
+#ifdef CONFIG_LIBC_ARCH_NXATOMIC
+
+#define STORE(n, type) \
+ \
+  void weak_function __nx_atomic_store_##n (FAR volatile void *ptr, \
+                                            type value, int memorder) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+ \
+    *(FAR type *)ptr = value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+  }
+
+#define LOAD(n, type) \
+ \
+  type weak_function __nx_atomic_load_##n (FAR const volatile void *ptr, \
+                                           int memorder) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+ \
+    type ret = *(FAR type *)ptr; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#define EXCHANGE(n, type) \
+ \
+  type weak_function __nx_atomic_exchange_##n (FAR volatile void *ptr, \
+                                               type value, int memorder) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+ \
+    type ret = *tmp; \
+    *tmp = value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#define CMP_EXCHANGE(n, type) \
+ \
+  bool weak_function __nx_atomic_compare_exchange_##n (FAR volatile void *mem, \
+                                                       FAR void *expect, \
+                                                       type desired, bool weak, \
+                                                       int success, int failure) \
+  { \
+    bool ret = false; \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmpmem = (FAR type *)mem; \
+    FAR type *tmpexp = (FAR type *)expect; \
+ \
+    if (*tmpmem == *tmpexp) \
+      { \
+        ret = true; \
+        *tmpmem = desired; \
+      } \
+    else \
+      { \
+        *tmpexp = *tmpmem; \
+      } \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#define FLAG_TEST_AND_SET(n, type) \
+ \
+  type weak_function __nx_atomic_flag_test_and_set_##n (FAR volatile void *ptr, \
+                                                        int memorder) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+    type ret = *tmp; \
+ \
+    *(FAR type *)ptr = 1; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#define FETCH_ADD(n, type) \
+ \
+  type weak_function __nx_atomic_fetch_add_##n (FAR volatile void *ptr, \
+                                                type value, int memorder) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+    type ret = *tmp; \
+ \
+    *tmp = *tmp + value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#define FETCH_SUB(n, type) \
+ \
+  type weak_function __nx_atomic_fetch_sub_##n (FAR volatile void *ptr, \
+                                                type value, int memorder) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+    type ret = *tmp; \
+ \
+    *tmp = *tmp - value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#define FETCH_AND(n, type) \
+ \
+  type weak_function __nx_atomic_fetch_and_##n (FAR volatile void *ptr, \
+                                                type value, int memorder) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+    type ret = *tmp; \
+ \
+    *tmp = *tmp & value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#define FETCH_OR(n, type) \
+ \
+  type weak_function __nx_atomic_fetch_or_##n (FAR volatile void *ptr, \
+                                               type value, int memorder) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+    type ret = *tmp; \
+ \
+    *tmp = *tmp | value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#define FETCH_XOR(n, type) \
+ \
+  type weak_function __nx_atomic_fetch_xor_##n (FAR volatile void *ptr, \
+                                                type value, int memorder) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+    type ret = *tmp; \
+ \
+    *tmp = *tmp ^ value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#define SYNC_ADD_FETCH(n, type) \
+ \
+  type weak_function __sync_add_and_fetch_##n (FAR volatile void *ptr, \
+                                               type value) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+ \
+    *tmp = *tmp + value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return *tmp; \
+  }
+
+#define SYNC_SUB_FETCH(n, type) \
+ \
+  type weak_function __sync_sub_and_fetch_##n (FAR volatile void *ptr, \
+                                               type value) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+ \
+    *tmp = *tmp - value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return *tmp; \
+  }
+
+#define SYNC_OR_FETCH(n, type) \
+ \
+  type weak_function __sync_or_and_fetch_##n (FAR volatile void *ptr, \
+                                              type value) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+ \
+    *tmp = *tmp | value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return *tmp; \
+  }
+
+#define SYNC_AND_FETCH(n, type) \
+ \
+  type weak_function __sync_and_and_fetch_##n (FAR volatile void *ptr, \
+                                               type value) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+ \
+    *tmp = *tmp & value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return *tmp; \
+  }
+
+#define SYNC_XOR_FETCH(n, type) \
+ \
+  type weak_function __sync_xor_and_fetch_##n (FAR volatile void *ptr, \
+                                               type value) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+ \
+    *tmp = *tmp ^ value; \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return *tmp; \
+  }
+
+#define SYNC_NAND_FETCH(n, type) \
+ \
+  type weak_function __sync_nand_and_fetch_##n (FAR volatile void *ptr, \
+                                                type value) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+ \
+    *tmp = ~(*tmp & value); \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return *tmp; \
+  }
+
+#define SYNC_BOOL_CMP_SWAP(n, type) \
+ \
+  bool weak_function __sync_bool_compare_and_swap_##n (FAR volatile void *ptr, \
+                                                       type oldvalue, \
+                                                       type newvalue) \
+  { \
+    bool ret = false; \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+ \
+    if (*tmp == oldvalue) \
+      { \
+        ret = true; \
+        *tmp = newvalue; \
+      } \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#define SYNC_VAL_CMP_SWAP(n, type) \
+ \
+  type weak_function __sync_val_compare_and_swap_##n (FAR volatile void *ptr, \
+                                                      type oldvalue, \
+                                                      type newvalue) \
+  { \
+    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    FAR type *tmp = (FAR type *)ptr; \
+    type ret = *tmp; \
+ \
+    if (*tmp == oldvalue) \
+      { \
+        *tmp = newvalue; \
+      } \
+ \
+    spin_unlock_irqrestore(NULL, irqstate); \
+    return ret; \
+  }
+
+#else
+
 #define STORE(n, type) \
 \
   void weak_function __atomic_store_##n (FAR volatile void *ptr, \
@@ -313,6 +596,8 @@
       return ret; \
     }
 
+#endif
+
 /****************************************************************************
  * Public Functions
  ****************************************************************************/
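
A minimal usage sketch follows (illustrative only, not part of the patch;
ref_take(), ref_release() and g_refs are hypothetical names, and atomic_int
is the typedef family declared alongside atomic_wchar_t above). With
CONFIG_LIBC_ARCH_NXATOMIC=y, <nuttx/atomic.h> resolves to
<nuttx/lib/stdatomic.h>, and each generic call dispatches on operand size:
atomic_fetch_add() on a 4-byte atomic_int expands to
__nx_atomic_fetch_add_4(), the spinlock-protected function from
arch_atomic.c.

#include <stdbool.h>
#include <nuttx/atomic.h>

static atomic_int g_refs;  /* 4 bytes wide, so the _4 variants apply */

static inline void ref_take(void)
{
  /* Expands to __nx_atomic_fetch_add_4(&g_refs, 1, __ATOMIC_RELAXED) */

  atomic_fetch_add(&g_refs, 1);
}

static inline bool ref_release(void)
{
  /* atomic_fetch_sub() returns the value before the subtraction, so
   * the caller that observed 1 dropped the last reference.
   */

  return atomic_fetch_sub(&g_refs, 1) == 1;
}

Because the __nx_* implementations are declared weak_function, a chip port
with native atomic instructions can provide strong definitions of the same
symbols and the spinlock fallback drops out at link time.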