#ifndef PICORV32_H
#define PICORV32_H

#ifndef __ASSEMBLY__

#include "compiler.h"

static inline void p_retirq(void)
{
    asm volatile(".insn r 0x0b, 0, 2, zero, zero, zero");
    __builtin_unreachable();
}

/*
 * hpa: the keepmask is a local addition.
 *
 * oldmask  = irq_mask;
 * irq_mask = ((irq_mask & ~keepmask) ^ newmask) | MASKED
 *
 * ... where MASKED represents IRQs permanently masked
 * in the hardware.
 */
static inline unsigned int
p_maskirq(unsigned int newmask, unsigned int keepmask)
{
    unsigned int oldmask;

    asm volatile(".insn r 0x0b, 0, 3, %0, %z1, %z2"
		 : "=r" (oldmask)
		 : "Jr" (newmask), "Jr" (keepmask));

    return oldmask;
}

static inline unsigned int
p_waitirq(unsigned int andmask, unsigned int ormask)
{
    unsigned int pending_mask;

    asm volatile(".insn r 0x0b, 0, 4, %0, %z1, %z2"
		 : "=r" (pending_mask)
		 : "Jr" (andmask), "Jr" (ormask));

    return pending_mask;
}

static inline unsigned int p_timer(unsigned int newval)
{
    unsigned int oldval;

    asm volatile(".insn r 0x0b, 0, 5, %0, %z1, %z2"
		 : "=r" (oldval)
		 : "Jr" (newval), "Jr" (0));

    return oldval;
}

#else  /* __ASSEMBLY__ */

#define q0 x0
#define q1 x1
#define q2 x2
#define q3 x3

.macro addqxi qd, rs, imm
	.insn i 0x0b, 0x02, \qd, \rs, \imm
.endm

.macro addxqi rd, qs, imm
	.insn i 0x0b, 0x03, \rd, \qs, \imm
.endm

.macro retirq
	.insn r 0x0b, 0, 2, zero, zero, zero
.endm

.macro maskirq rd, rs1, rs2
	.insn r 0x0b, 0, 3, \rd, \rs1, \rs2
.endm

.macro waitirq rd, andmask, ormask
	.insn r 0x0b, 0, 4, \rd, \andmask, \ormask
.endm

.macro timer rd, rs
	.insn r 0x0b, 0, 5, \rd, \rs, zero
.endm

#endif /* __ASSEMBLY__ */

#endif /* PICORV32_H */
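
/*
 * Illustrative usage sketch, not part of the original header.  It shows how
 * the p_maskirq() wrapper above can be used as a save/restore pair, following
 * the update rule documented in its comment: with keepmask = ~0 the old mask
 * is discarded, so the IRQ mask becomes exactly newmask (plus the
 * hardware-fixed MASKED bits).  PICORV32_MASKIRQ_EXAMPLE and the two helper
 * names are hypothetical, introduced only for this example.
 */
#if defined(PICORV32_MASKIRQ_EXAMPLE) && !defined(__ASSEMBLY__)

/* Mask every IRQ and return the previous mask so it can be restored later. */
static inline unsigned int p_irq_save_all(void)
{
    return p_maskirq(~0U, ~0U);
}

/* Restore a mask previously returned by p_irq_save_all() or p_maskirq(). */
static inline void p_irq_restore(unsigned int oldmask)
{
    p_maskirq(oldmask, ~0U);
}

#endif /* PICORV32_MASKIRQ_EXAMPLE && !__ASSEMBLY__ */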