#ifndef PICORV32_H
#define PICORV32_H

#ifndef __ASSEMBLY__

#include "compiler.h"

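/*
 * Return from the interrupt handler and re-enable interrupts; only
 * meaningful inside an IRQ handler.
 */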
static inline void p_retirq(void)
{
    asm volatile(".insn r 0x0b, 0, 2, zero, zero, zero");
    __builtin_unreachable();
}

/*
 * hpa: the keepmask is a local addition.
 *
 * oldmask  = irq_mask;
 * irq_mask = ((irq_mask & ~keepmask) ^ newmask) | MASKED
 *
 * ... where MASKED represents IRQs permanently masked
 * in the hardware.
 */
static inline unsigned int
p_maskirq(unsigned int newmask, unsigned int keepmask)
{
    unsigned int oldmask;
    asm volatile(".insn r 0x0b, 0, 3, %0, %z1, %z2"
		 : "=r" (oldmask)
		 : "Jr" (newmask), "Jr" (keepmask));
    return oldmask;
}
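/*
 * Some useful special cases of the formula above:
 *
 *   p_maskirq(0, 0)          read the current mask without changing it
 *   p_maskirq(m, m)          set (mask) the IRQs in m
 *   p_maskirq(0, m)          clear (unmask) the IRQs in m
 *   p_maskirq(newmask, ~0U)  replace the whole mask with newmask
 */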

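/*
 * Wait for an interrupt and return the mask of pending IRQs.  The
 * andmask/ormask operands appear to be a local extension (cf. the
 * keepmask note above).
 */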
static inline unsigned int
p_waitirq(unsigned int andmask, unsigned int ormask)
{
    unsigned int pending_mask;
    asm volatile(".insn r 0x0b, 0, 4, %0, %z1, %z2"
		 : "=r" (pending_mask)
		 : "Jr" (andmask), "Jr" (ormask));
    return pending_mask;
}

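/*
 * Load the timer with newval and return the previous count.  The
 * timer counts down one tick per clock cycle and raises the timer
 * IRQ when it reaches zero; writing 0 disables it.
 */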
static inline unsigned int p_timer(unsigned int newval)
{
    unsigned int oldval;
    asm volatile(".insn 0x0b, 0, 5, %0, %z1, %z2"
		 : "=r" (oldval)
		 : "Jr" (newval), "Jr" (0));
    return oldval;
}

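/*
 * Poll for pending IRQs without waiting; like the keepmask above,
 * this instruction (funct7 6) appears to be a local addition.
 */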
static inline unsigned int
p_pollirq(unsigned int mask, unsigned int already)
{
    unsigned int irqs;
    asm volatile(".insn 0x0b, 0, 6, %0, %z1, %z2"
		 : "=r" (irqs)
		 : "Jr" (mask), "Jr" (already));
    return irqs;
}

#else  /* __ASSEMBLY__ */

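/*
 * The IRQ state registers q0..q3 are encoded in the ordinary
 * x-register fields of the custom instructions, so alias them to
 * x0..x3 for use in the .insn macros below.
 */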
#define q0 x0
#define q1 x1
#define q2 x2
#define q3 x3

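/*
 * Move between x and q registers with an immediate add: judging by
 * the names, presumably qd = rs + imm and rd = qs + imm.  These
 * I-type encodings look like another local extension.
 */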
.macro addqxi qd, rs, imm
	.insn i 0x0b, 0x02, \qd, \rs, \imm
.endm
.macro addxqi rd, qs, imm
	.insn i 0x0b, 0x03, \rd, \qs, \imm
.endm
.macro retirq
	.insn r 0x0b, 0, 2, zero, zero, zero
.endm

.macro maskirq rd, rs1, rs2
	.insn r 0x0b, 0, 3, \rd, \rs1, \rs2
.endm

.macro waitirq rd, andmask, ormask
	.insn r 0x0b, 0, 4, \rd, \andmask, \ormask
.endm

.macro timer rd, rs
	.insn r 0x0b, 0, 5, \rd, \rs, zero
.endm

.macro pollirq rd, mask, already
	.insn r 0x0b, 0, 6, \rd, \mask, \already
.endm

#endif /* __ASSEMBLY__ */

#endif /* PICORV32_H */