- #ifndef PICORV32_H
- #define PICORV32_H
- #ifndef __ASSEMBLY__
/*
 * Return from an interrupt handler via the picorv32 "retirq" custom
 * instruction (opcode 0x0b, funct7 = 2; same encoding as the retirq
 * assembler macro below).  Control does not come back here, hence
 * __builtin_unreachable() to tell the compiler so.
 */
static inline void p_retirq(void)
{
	asm volatile(".insn r 0x0b, 0, 2, zero, zero, zero");
	__builtin_unreachable();
}
- /*
- * hpa: the keepmask is a local addition.
- *
- * oldmask = irq_mask;
- * irq_mask = ((irq_mask & ~keepmask) ^ newmask) | MASKED
- *
- * ... where MASKED represents IRQs permanently masked
- * in the hardware.
- */
- static inline unsigned int
- p_maskirq(unsigned int newmask, unsigned int keepmask)
- {
- unsigned int oldmask;
- asm volatile(".insn r 0x0b, 0, 3, %0, %1, %2"
- : "=r" (oldmask)
- : "r" (newmask), "r" (keepmask));
- return oldmask;
- }
/*
 * Wait for an interrupt via the picorv32 "waitirq" custom instruction
 * (opcode 0x0b, funct7 = 4) and return the mask of pending IRQs that
 * the instruction writes to its destination register.
 */
static inline unsigned int p_waitirq(void)
{
	unsigned int pending_mask;
	asm volatile(".insn r 0x0b, 0, 4, %0, zero, zero"
		     : "=r" (pending_mask));
	return pending_mask;
}
- static inline unsigned int p_timer(unsigned int newval)
- {
- unsigned int oldval;
- asm volatile(".insn 0x0b, 0, 5, %0, %1, zero"
- : "=r" (oldval) : "r" (newval));
- }
- #else /* __ASSEMBLY__ */
/*
 * Spell the picorv32 q0..q3 IRQ shadow registers using the x0..x3
 * register encodings so they can appear as .insn operands.
 * NOTE(review): this relies on the q-register field reusing the
 * low x-register numbers — confirm against the core's decoder.
 */
#define q0 x0
#define q1 x1
#define q2 x2
#define q3 x3

/* getq-style move: qd <- rs + imm (custom I-type, funct3 = 2). */
.macro addqxi qd, rs, imm
	.insn i 0x0b, 0x02, \qd, \rs, \imm
.endm

/* setq-style move: rd <- qs + imm (custom I-type, funct3 = 3). */
.macro addxqi rd, qs, imm
	.insn i 0x0b, 0x03, \rd, \qs, \imm
.endm

/* Return from interrupt; same encoding as p_retirq() above. */
.macro retirq
	.insn r 0x0b, 0, 2, zero, zero, zero
.endm

/* rd <- old IRQ mask; mask updated from rs1/rs2 (cf. p_maskirq()). */
.macro maskirq rd, rs1, rs2
	.insn r 0x0b, 0, 3, \rd, \rs1, \rs2
.endm

/* Wait for an interrupt; rd <- pending IRQ mask (cf. p_waitirq()). */
.macro waitirq rd
	.insn r 0x0b, 0, 4, \rd, zero, zero
.endm

/* rd <- old timer value; timer loaded from rs (cf. p_timer()). */
.macro timer rd, rs
	.insn r 0x0b, 0, 5, \rd, \rs, zero
.endm
- #endif /* __ASSEMBLY__ */
- #endif /* PICORV32_H */