/* picorv32.h */
#ifndef PICORV32_H
#define PICORV32_H

#ifndef __ASSEMBLY__
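
/* Return from an interrupt handler (picorv32 custom retirq instruction). */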
static inline void p_retirq(void)
{
	asm volatile(".insn r 0x0b, 0, 2, zero, zero, zero");
	__builtin_unreachable();
}

/*
 * hpa: the keepmask is a local addition.
 *
 * oldmask = irq_mask;
 * irq_mask = ((irq_mask & ~keepmask) ^ newmask) | MASKED;
 *
 * ... where MASKED represents IRQs permanently masked
 * in the hardware.
 */
static inline unsigned int
p_maskirq(unsigned int newmask, unsigned int keepmask)
{
	unsigned int oldmask;

	asm volatile(".insn r 0x0b, 0, 3, %0, %1, %2"
		     : "=r" (oldmask)
		     : "r" (newmask), "r" (keepmask));
	return oldmask;
}
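
/*
 * Usage sketch (illustrative, not part of the original interface):
 * following the update rule above, p_maskirq(m, ~0U) sets the mask
 * to m outright, p_maskirq(m, 0) toggles the bits in m, and a single
 * IRQ can be masked without disturbing the others with, e.g.:
 *
 *	// hypothetical helper
 *	static inline unsigned int p_mask_one(unsigned int irq)
 *	{
 *		return p_maskirq(1U << irq, 1U << irq);
 *	}
 */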

/* Wait for an interrupt; returns the bitmask of pending IRQs. */
static inline unsigned int p_waitirq(void)
{
	unsigned int pending_mask;

	asm volatile(".insn r 0x0b, 0, 4, %0, zero, zero"
		     : "=r" (pending_mask));
	return pending_mask;
}

/* Load a new timer value and return the previous one. */
static inline unsigned int p_timer(unsigned int newval)
{
	unsigned int oldval;

	asm volatile(".insn r 0x0b, 0, 5, %0, %1, zero"
		     : "=r" (oldval) : "r" (newval));
	return oldval;
}
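
/*
 * Example (illustrative; CLK_HZ is a hypothetical clock-rate
 * constant): on picorv32 the timer counts down one step per clock
 * cycle, raises the timer IRQ on expiry, and is disabled by writing
 * zero.
 *
 *	unsigned int prev = p_timer(CLK_HZ / 100);	// ~10 ms tick
 */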

#else /* __ASSEMBLY__ */

/* Let the q (IRQ shadow) registers be named in .insn operand fields */
#define q0 x0
#define q1 x1
#define q2 x2
#define q3 x3

/* addqxi qd, rs, imm: qd <- rs + imm (x register into q register) */
.macro addqxi qd, rs, imm
	.insn i 0x0b, 0x02, \qd, \rs, \imm
.endm

/* addxqi rd, qs, imm: rd <- qs + imm (q register into x register) */
.macro addxqi rd, qs, imm
	.insn i 0x0b, 0x03, \rd, \qs, \imm
.endm

/* Return from an interrupt handler */
.macro retirq
	.insn r 0x0b, 0, 2, zero, zero, zero
.endm

/* rd <- old IRQ mask; new mask from rs1 (newmask) and rs2 (keepmask) */
.macro maskirq rd, rs1, rs2
	.insn r 0x0b, 0, 3, \rd, \rs1, \rs2
.endm

/* Wait for an interrupt; rd <- bitmask of pending IRQs */
.macro waitirq rd
	.insn r 0x0b, 0, 4, \rd, zero, zero
.endm

/* rd <- old timer value; timer loaded from rs */
.macro timer rd, rs
	.insn r 0x0b, 0, 5, \rd, \rs, zero
.endm
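
/*
 * Sketch of an IRQ entry stub built on these macros (illustrative,
 * not part of the original file; handle_irq is a hypothetical
 * handler).  On picorv32, q0 holds the return address and q1 the
 * bitmask of IRQs to handle when the vector is entered:
 *
 * irq_vector:
 *	addxqi	a0, q1, 0	// a0 = bitmask of pending IRQs
 *	jal	handle_irq	// dispatch (register save omitted)
 *	retirq			// restore PC from q0 and resume
 */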

#endif /* __ASSEMBLY__ */

#endif /* PICORV32_H */