  1. /*
  2. * intrinsics.h
  3. *
  4. * Compiler intrinsics for ARMv7-M core.
  5. *
  6. * Written & released by Keir Fraser <keir.xen@gmail.com>
  7. *
  8. * This is free and unencumbered software released into the public domain.
  9. * See the file COPYING for more details, or visit <http://unlicense.org>.
  10. */
/* Exception stack frame: the eight registers the ARMv7-M core pushes
 * automatically on exception entry, in stacking order (r0 at the lowest
 * address, xPSR at the highest). */
struct exception_frame {
    uint32_t r0, r1, r2, r3, r12, lr, pc, psr;
};
  14. #define _STR(x) #x
  15. #define STR(x) _STR(x)
  16. /* Force a compilation error if condition is true */
  17. #define BUILD_BUG_ON(cond) ({ _Static_assert(!(cond), "!(" #cond ")"); })
  18. #define aligned(x) __attribute__((aligned(x)))
  19. #define packed __attribute((packed))
  20. #define always_inline __inline__ __attribute__((always_inline))
  21. #define noinline __attribute__((noinline))
  22. #define likely(x) __builtin_expect(!!(x),1)
  23. #define unlikely(x) __builtin_expect(!!(x),0)
  24. #define illegal() asm volatile (".short 0xde00");
  25. #define barrier() asm volatile ("" ::: "memory")
  26. #define cpu_sync() asm volatile("dsb; isb" ::: "memory")
  27. #define cpu_relax() asm volatile ("nop" ::: "memory")
  28. #define sv_call(imm) asm volatile ( "svc %0" : : "i" (imm) )
/* Read a named ARMv7-M special register (primask, basepri, control, ...)
 * via MRS. Evaluates to the register value as a uint32_t. */
#define read_special(reg) ({ \
    uint32_t __x; \
    asm volatile ("mrs %0,"#reg : "=r" (__x) ::); \
    __x; \
})

/* Write a named ARMv7-M special register via MSR. */
#define write_special(reg,val) ({ \
    uint32_t __x = (uint32_t)(val); \
    asm volatile ("msr "#reg",%0" :: "r" (__x) :); \
})
/* CONTROL[1] == 0 => running on Master Stack (Exception Handler mode). */
#define CONTROL_SPSEL 2 /* CONTROL bit 1: stack-pointer select */
#define in_exception() (!(read_special(control) & CONTROL_SPSEL))

/* Mask/unmask everything: 'cpsid f' sets FAULTMASK (all exceptions incl.
 * HardFault), 'cpsid i' sets PRIMASK (all configurable-priority IRQs). */
#define global_disable_exceptions() \
    asm volatile ("cpsid f; cpsid i" ::: "memory")
#define global_enable_exceptions() \
    asm volatile ("cpsie f; cpsie i" ::: "memory")
  45. /* NB. IRQ disable via CPSID/MSR is self-synchronising. No barrier needed. */
  46. #define IRQ_global_disable() asm volatile ("cpsid i" ::: "memory")
  47. #define IRQ_global_enable() asm volatile ("cpsie i" ::: "memory")
  48. #define IRQ_global_save(flags) ({ \
  49. (flags) = read_special(primask) & 1; \
  50. IRQ_global_disable(); })
  51. #define IRQ_global_restore(flags) ({ \
  52. if (flags == 0) IRQ_global_enable(); })
/* Save/restore IRQ priority levels.
 * NB. IRQ disable via MSR is self-synchronising. I have confirmed this on
 * Cortex-M3: any pending IRQs are handled before they are disabled by
 * a BASEPRI update. Hence no barrier is needed here. */
#define IRQ_save(newpri) ({ \
    /* <<4: priority appears to occupy BASEPRI[7:4] (16 levels) -- \
     * NOTE(review): confirm against the NVIC priority-bit config. */ \
    uint8_t __newpri = (newpri)<<4; \
    uint8_t __oldpri = read_special(basepri); \
    /* Only ever raise the masking level (numerically lower = higher \
     * priority); BASEPRI==0 means "no masking", so always overwrite it. */ \
    if (!__oldpri || (__oldpri > __newpri)) \
        write_special(basepri, __newpri); \
    __oldpri; }) /* returns previous BASEPRI, for nested IRQ_restore() */

/* NB. Same as CPSIE, any pending IRQ enabled by this BASEPRI update may
 * execute a couple of instructions after the MSR instruction. This has been
 * confirmed on Cortex-M3. */
#define IRQ_restore(oldpri) write_special(basepri, (oldpri))
  67. static inline uint16_t _rev16(uint16_t x)
  68. {
  69. uint16_t result;
  70. asm volatile ("rev16 %0,%1" : "=r" (result) : "r" (x));
  71. return result;
  72. }
  73. static inline uint32_t _rev32(uint32_t x)
  74. {
  75. uint32_t result;
  76. asm volatile ("rev %0,%1" : "=r" (result) : "r" (x));
  77. return result;
  78. }
  79. static inline uint32_t _rbit32(uint32_t x)
  80. {
  81. uint32_t result;
  82. asm volatile ("rbit %0,%1" : "=r" (result) : "r" (x));
  83. return result;
  84. }
/* Deliberately never defined: referencing it for an unsupported operand
 * size turns into a link-time error, so a bad cmpxchg() use cannot build. */
extern void __bad_cmpxchg(volatile void *ptr, int size);

/*
 * Atomic compare-and-exchange via LDREX/STREX exclusive monitors.
 * If *ptr == @old, atomically stores @new. Returns the value observed at
 * *ptr: equal to @old on success, the conflicting value on failure. The
 * loop retries only while the STREX itself fails (exclusive monitor lost);
 * a value mismatch exits immediately with res == 0.
 * NOTE(review): no DMB is issued here -- assumes callers that need
 * acquire/release ordering add barriers themselves; confirm at call sites.
 */
static always_inline unsigned long __cmpxchg(
    volatile void *ptr, unsigned long old, unsigned long new, int size)
{
    unsigned long oldval, res;
    switch (size) {
    case 1: /* 8-bit operand */
        do {
            /* res=0; if (oldval == old) res = STREX status (0=stored) */
            asm volatile(" ldrexb %1,[%2] \n"
                         " movs %0,#0 \n"
                         " cmp %1,%3 \n"
                         " it eq \n"
                         " strexbeq %0,%4,[%2] \n"
                         : "=&r" (res), "=&r" (oldval)
                         : "r" (ptr), "Ir" (old), "r" (new)
                         : "memory", "cc");
        } while (res);
        break;
    case 2: /* 16-bit operand */
        do {
            asm volatile(" ldrexh %1,[%2] \n"
                         " movs %0,#0 \n"
                         " cmp %1,%3 \n"
                         " it eq \n"
                         " strexheq %0,%4,[%2] \n"
                         : "=&r" (res), "=&r" (oldval)
                         : "r" (ptr), "Ir" (old), "r" (new)
                         : "memory", "cc");
        } while (res);
        break;
    case 4: /* 32-bit operand */
        do {
            asm volatile(" ldrex %1,[%2] \n"
                         " movs %0,#0 \n"
                         " cmp %1,%3 \n"
                         " it eq \n"
                         " strexeq %0,%4,[%2] \n"
                         : "=&r" (res), "=&r" (oldval)
                         : "r" (ptr), "Ir" (old), "r" (new)
                         : "memory", "cc");
        } while (res);
        break;
    default:
        /* Unsupported size: force a link error (see __bad_cmpxchg). */
        __bad_cmpxchg(ptr, size);
        oldval = 0;
    }
    return oldval;
}

/* Type-generic wrapper: operand size is taken from the pointee type, and
 * the previous value of *ptr is returned cast back to that type. */
#define cmpxchg(ptr,o,n) \
    ((__typeof__(*(ptr)))__cmpxchg((ptr), \
                                   (unsigned long)(o), \
                                   (unsigned long)(n), \
                                   sizeof(*(ptr))))
/* Cortex initialisation */
void cortex_init(void);

#if defined(CORTEX_M7)

/* Cache operations (Cortex-M7: implementations provided elsewhere). */
void icache_invalidate_all(void);
void icache_enable(void);
void dcache_invalidate_all(void);
void dcache_clear_and_invalidate_all(void);
void dcache_enable(void);
void dcache_disable(void);

#elif defined(CORTEX_M3)

/* No caches in Cortex M3: every cache operation compiles to a no-op, so
 * callers need no conditional code of their own. */
#define icache_invalidate_all() ((void)0)
#define icache_enable() ((void)0)
#define dcache_invalidate_all() ((void)0)
#define dcache_clear_and_invalidate_all() ((void)0)
#define dcache_enable() ((void)0)
#define dcache_disable() ((void)0)

#endif
  157. /*
  158. * Local variables:
  159. * mode: C
  160. * c-file-style: "Linux"
  161. * c-basic-offset: 4
  162. * tab-width: 4
  163. * indent-tabs-mode: nil
  164. * End:
  165. */