From 02c53cbd77120f46a118ef62c781b4bc1cc71719 Mon Sep 17 00:00:00 2001 From: chao an Date: Sun, 13 Oct 2024 01:15:11 +0800 Subject: [PATCH] libc/atomic: decoupling atomic and spinlock to avoid recursion 1. use irq save in AMP mode 2. use mutex lock in SMP mode Signed-off-by: chao an --- libs/libc/machine/arch_atomic.c | 104 ++++++++++++++++++++------------ 1 file changed, 67 insertions(+), 37 deletions(-) diff --git a/libs/libc/machine/arch_atomic.c b/libs/libc/machine/arch_atomic.c index 5da80f452c424..93545fcf7d061 100644 --- a/libs/libc/machine/arch_atomic.c +++ b/libs/libc/machine/arch_atomic.c @@ -28,7 +28,37 @@ #include <stdbool.h> #include <stdint.h> -#include <nuttx/spinlock.h> +#include <nuttx/irq.h> +#include <nuttx/mutex.h> + +/**************************************************************************** + * Private Data + ****************************************************************************/ + +#ifdef CONFIG_SMP +static mutex_t g_atomic_lock = NXMUTEX_INITIALIZER; + +static inline_function irqstate_t atomic_lock(void) +{ + return nxmutex_lock(&g_atomic_lock); +} + +static inline_function void atomic_unlock(irqstate_t flags) +{ + UNUSED(flags); + nxmutex_unlock(&g_atomic_lock); +} +#else +static inline_function irqstate_t atomic_lock(void) +{ + return up_irq_save(); +} + +static inline_function void atomic_unlock(irqstate_t flags) +{ + up_irq_restore(flags); +} +#endif /**************************************************************************** * Pre-processor Definitions ****************************************************************************/ @@ -39,11 +69,11 @@ void weak_function __atomic_store_##n (FAR volatile void *ptr, \ type value, int memorder) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ \ *(FAR type *)ptr = value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ } #define LOAD(n, type) \ type weak_function __atomic_load_##n (FAR const volatile void *ptr, \ int memorder) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ \ type ret =
*(FAR type *)ptr; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return ret; \ } @@ -64,13 +94,13 @@ type weak_function __atomic_exchange_##n (FAR volatile void *ptr, \ type value, int memorder) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ \ type ret = *tmp; \ *tmp = value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return ret; \ } @@ -82,7 +112,7 @@ int success, int failure) \ { \ bool ret = false; \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmpmem = (FAR type *)mem; \ FAR type *tmpexp = (FAR type *)expect; \ \ @@ -96,7 +126,7 @@ *tmpexp = *tmpmem; \ } \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return ret; \ } @@ -105,13 +135,13 @@ type weak_function __atomic_flags_test_and_set##n (FAR volatile void *ptr, \ int memorder) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ type ret = *tmp; \ \ *(FAR type *)ptr = 1; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return ret; \ } @@ -120,13 +150,13 @@ type weak_function __atomic_fetch_add_##n (FAR volatile void *ptr, \ type value, int memorder) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ type ret = *tmp; \ \ *tmp = *tmp + value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return ret; \ } @@ -135,13 +165,13 @@ type weak_function __atomic_fetch_sub_##n (FAR volatile void *ptr, \ type value, int memorder) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ type ret = *tmp; \ \ *tmp = *tmp - value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); 
\ return ret; \ } @@ -150,13 +180,13 @@ type weak_function __atomic_fetch_and_##n (FAR volatile void *ptr, \ type value, int memorder) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ type ret = *tmp; \ \ *tmp = *tmp & value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return ret; \ } @@ -165,13 +195,13 @@ type weak_function __atomic_fetch_or_##n (FAR volatile void *ptr, \ type value, int memorder) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ type ret = *tmp; \ \ *tmp = *tmp | value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return ret; \ } @@ -180,13 +210,13 @@ type weak_function __atomic_fetch_xor_##n (FAR volatile void *ptr, \ type value, int memorder) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ type ret = *tmp; \ \ *tmp = *tmp ^ value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return ret; \ } @@ -195,12 +225,12 @@ type weak_function __sync_add_and_fetch_##n (FAR volatile void *ptr, \ type value) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ \ *tmp = *tmp + value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return *tmp; \ } @@ -209,12 +239,12 @@ type weak_function __sync_sub_and_fetch_##n (FAR volatile void *ptr, \ type value) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ \ *tmp = *tmp - value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return *tmp; \ } @@ -223,12 +253,12 @@ type weak_function __sync_or_and_fetch_##n (FAR volatile void *ptr, \ type value) \ { \ - 
irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ \ *tmp = *tmp | value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return *tmp; \ } @@ -237,12 +267,12 @@ type weak_function __sync_and_and_fetch_##n (FAR volatile void *ptr, \ type value) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ \ *tmp = *tmp & value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return *tmp; \ } @@ -251,12 +281,12 @@ type weak_function __sync_xor_and_fetch_##n (FAR volatile void *ptr, \ type value) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ \ *tmp = *tmp ^ value; \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return *tmp; \ } @@ -265,12 +295,12 @@ type weak_function __sync_nand_and_fetch_##n (FAR volatile void *ptr, \ type value) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ \ *tmp = ~(*tmp & value); \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return *tmp; \ } @@ -281,7 +311,7 @@ type newvalue) \ { \ bool ret = false; \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ \ if (*tmp == oldvalue) \ @@ -290,7 +320,7 @@ *tmp = newvalue; \ } \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return ret; \ } @@ -300,7 +330,7 @@ type oldvalue, \ type newvalue) \ { \ - irqstate_t irqstate = spin_lock_irqsave(NULL); \ + irqstate_t irqstate = atomic_lock(); \ FAR type *tmp = (FAR type *)ptr; \ type ret = *tmp; \ \ @@ -309,7 +339,7 @@ *tmp = newvalue; \ } \ \ - spin_unlock_irqrestore(NULL, irqstate); \ + atomic_unlock(irqstate); \ return ret; \ }