| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195 | /* * intrinsics.h *  * Compiler intrinsics for ARMv7-M core. *  * Written & released by Keir Fraser <keir.xen@gmail.com> *  * This is free and unencumbered software released into the public domain. * See the file COPYING for more details, or visit <http://unlicense.org>. */struct exception_frame {    uint32_t r0, r1, r2, r3, r12, lr, pc, psr;};#define _STR(x) #x#define STR(x) _STR(x)/* Force a compilation error if condition is true */#define BUILD_BUG_ON(cond) ({ _Static_assert(!(cond), "!(" #cond ")"); })#define aligned(x) __attribute__((aligned(x)))#define packed __attribute((packed))#define always_inline __inline__ __attribute__((always_inline))#define noinline __attribute__((noinline))#define likely(x)     __builtin_expect(!!(x),1)#define unlikely(x)   __builtin_expect(!!(x),0)#define illegal() asm volatile (".short 0xde00");#define barrier() asm volatile ("" ::: "memory")#define cpu_sync() asm volatile("dsb; isb" ::: "memory")#define cpu_relax() asm volatile ("nop" ::: "memory")#define sv_call(imm) asm volatile ( "svc %0" : : "i" (imm) )#define read_special(reg) ({                        \    uint32_t __x;                                   \    asm volatile ("mrs %0,"#reg : "=r" (__x) ::);   \    __x;                                            \})#define write_special(reg,val) ({                   \    uint32_t __x = (uint32_t)(val);                 \    asm volatile ("msr "#reg",%0" :: "r" (__x) :);  \})/* CONTROL[1] == 0 => running on Master Stack (Exception Handler mode). 
*/#define CONTROL_SPSEL 2#define in_exception() (!(read_special(control) & CONTROL_SPSEL))#define global_disable_exceptions() \    asm volatile ("cpsid f; cpsid i" ::: "memory")#define global_enable_exceptions() \    asm volatile ("cpsie f; cpsie i" ::: "memory")/* NB. IRQ disable via CPSID/MSR is self-synchronising. No barrier needed. */#define IRQ_global_disable() asm volatile ("cpsid i" ::: "memory")#define IRQ_global_enable() asm volatile ("cpsie i" ::: "memory")#define IRQ_global_save(flags) ({               \    (flags) = read_special(primask) & 1;        \    IRQ_global_disable(); })#define IRQ_global_restore(flags) ({            \    if (flags == 0) IRQ_global_enable(); })/* Save/restore IRQ priority levels.  * NB. IRQ disable via MSR is self-synchronising. I have confirmed this on  * Cortex-M3: any pending IRQs are handled before they are disabled by  * a BASEPRI update. Hence no barrier is needed here. */#define IRQ_save(newpri) ({                         \        uint8_t __newpri = (newpri)<<4;             \        uint8_t __oldpri = read_special(basepri);   \        if (!__oldpri || (__oldpri > __newpri))     \            write_special(basepri, __newpri);       \        __oldpri; })/* NB. Same as CPSIE, any pending IRQ enabled by this BASEPRI update may  * execute a couple of instructions after the MSR instruction. This has been * confirmed on Cortex-M3. 
 */
#define IRQ_restore(oldpri) write_special(basepri, (oldpri))

/* Byte-swap within each 16-bit halfword (REV16 instruction). */
static inline uint16_t _rev16(uint16_t x)
{
    uint16_t result;
    asm volatile ("rev16 %0,%1" : "=r" (result) : "r" (x));
    return result;
}

/* Byte-swap a 32-bit word (REV instruction). */
static inline uint32_t _rev32(uint32_t x)
{
    uint32_t result;
    asm volatile ("rev %0,%1" : "=r" (result) : "r" (x));
    return result;
}

/* Bit-reverse a 32-bit word (RBIT instruction). */
static inline uint32_t _rbit32(uint32_t x)
{
    uint32_t result;
    asm volatile ("rbit %0,%1" : "=r" (result) : "r" (x));
    return result;
}

/* Called for an unsupported operand size; presumably left undefined so the
 * call is diagnosed at link time — confirm against its definition site. */
extern void __bad_cmpxchg(volatile void *ptr, int size);

/* Atomic compare-and-exchange on a 1-, 2- or 4-byte object at @ptr using
 * LDREX/STREX exclusive monitors: if *ptr == @old, store @new. The STREX
 * result (@res) is retried until the exclusive store is not interrupted.
 * Returns the value observed at *ptr (== @old iff the swap succeeded). */
static always_inline unsigned long __cmpxchg(
    volatile void *ptr, unsigned long old, unsigned long new, int size)
{
    unsigned long oldval, res;

    switch (size) {
    case 1:
        do {
            asm volatile("    ldrexb %1,[%2]      \n"
                         "    movs   %0,#0        \n"
                         "    cmp    %1,%3        \n"
                         "    it     eq           \n"
                         "    strexbeq %0,%4,[%2] \n"
                         : "=&r" (res), "=&r" (oldval)
                         : "r" (ptr), "Ir" (old), "r" (new)
                         : "memory", "cc");
        } while (res);
        break;
    case 2:
        do {
            asm volatile("    ldrexh %1,[%2]      \n"
                         "    movs   %0,#0        \n"
                         "    cmp    %1,%3        \n"
                         "    it     eq           \n"
                         "    strexheq %0,%4,[%2] \n"
                         : "=&r" (res), "=&r" (oldval)
                         : "r" (ptr), "Ir" (old), "r" (new)
                         : "memory", "cc");
        } while (res);
        break;
    case 4:
        do {
            asm volatile("    ldrex  %1,[%2]      \n"
                         "    movs   %0,#0        \n"
                         "    cmp    %1,%3        \n"
                         "    it     eq           \n"
                         "    strexeq %0,%4,[%2]  \n"
                         : "=&r" (res), "=&r" (oldval)
                         : "r" (ptr), "Ir" (old), "r" (new)
                         : "memory", "cc");
        } while (res);
        break;
    default:
        __bad_cmpxchg(ptr, size);
        oldval = 0;
    }

    return oldval;
}

/* Type-safe front end: picks the operand width from sizeof(*ptr) and casts
 * the result back to the pointed-to type. */
#define cmpxchg(ptr,o,n)                                \
    ((__typeof__(*(ptr)))__cmpxchg((ptr),               \
                                   (unsigned long)(o),  \
                                   (unsigned long)(n),  \
                                   sizeof(*(ptr))))

/* Cortex initialisation */
void cortex_init(void);

#if defined(CORTEX_M7)

/* Cache operations */
void icache_invalidate_all(void);
void icache_enable(void);
void dcache_invalidate_all(void);
void dcache_clear_and_invalidate_all(void);
void dcache_enable(void);
void dcache_disable(void);

#elif defined(CORTEX_M3)

/* No caches in Cortex M3: cache ops compile to nothing. */
#define icache_invalidate_all() ((void)0)
#define icache_enable() ((void)0)
#define dcache_invalidate_all() ((void)0)
#define dcache_clear_and_invalidate_all() ((void)0)
#define dcache_enable() ((void)0)
#define dcache_disable() ((void)0)

#endif

/*
 * Local variables:
 * mode: C
 * c-file-style: "Linux"
 * c-basic-offset: 4
 * tab-width: 4
 * indent-tabs-mode: nil
 * End:
 */