#ifndef __SBI_FENCE_H__
#define __SBI_FENCE_H__
+
/** Invalidate Stage2 TLBs for given VMID and guest physical address */
-void __sbi_hfence_gvma_vmid_gpa(unsigned long vmid, unsigned long gpa);
+void __sbi_hfence_gvma_vmid_gpa(unsigned long gpa, unsigned long vmid);
/** Invalidate Stage2 TLBs for given VMID */
void __sbi_hfence_gvma_vmid(unsigned long vmid);
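+/** Invalidate all possible Stage2 TLBs */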
void __sbi_hfence_gvma_all(void);
/** Invalidate unified TLB entries for given asid and guest virtual address */
-void __sbi_hfence_vvma_asid_va(unsigned long asid, unsigned long va);
+void __sbi_hfence_vvma_asid_va(unsigned long va, unsigned long asid);
/** Invalidate unified TLB entries for given ASID for a guest */
void __sbi_hfence_vvma_asid(unsigned long asid);
/** Invalidate all possible unified TLB entries for a guest */
void __sbi_hfence_vvma_all(void);
+
#endif
/*
- * Instruction encoding of hfence.gvma is:
+ * HFENCE.GVMA rs1, rs2
+ * HFENCE.GVMA zero, rs2
+ * HFENCE.GVMA rs1
+ * HFENCE.GVMA
+ *
+ * rs1!=zero and rs2!=zero ==> HFENCE.GVMA rs1, rs2
+ * rs1==zero and rs2!=zero ==> HFENCE.GVMA zero, rs2
+ * rs1!=zero and rs2==zero ==> HFENCE.GVMA rs1
+ * rs1==zero and rs2==zero ==> HFENCE.GVMA
+ *
+ * Instruction encoding of HFENCE.GVMA is:
* 0110001 rs2(5) rs1(5) 000 00000 1110011
*/
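+/*
+ * For reference, the .word values below follow:
+ *   insn = (0x31 << 25) | (rs2 << 20) | (rs1 << 15) | 0x73
+ * with a0 = x10 and a1 = x11.
+ */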
.align 3
.global __sbi_hfence_gvma_vmid_gpa
__sbi_hfence_gvma_vmid_gpa:
- /* hfence.gvma a1, a0 */
- .word 0x62a60073
+ /*
+ * rs1 = a0 (GPA)
+ * rs2 = a1 (VMID)
+ * HFENCE.GVMA a0, a1
+ * 0110001 01011 01010 000 00000 1110011
+ */
+ .word 0x62b50073
ret
.align 3
.global __sbi_hfence_gvma_vmid
__sbi_hfence_gvma_vmid:
- /* hfence.gvma zero, a0 */
+ /*
+ * rs1 = zero
+ * rs2 = a0 (VMID)
+ * HFENCE.GVMA zero, a0
+ * 0110001 01010 00000 000 00000 1110011
+ */
.word 0x62a00073
ret
.align 3
.global __sbi_hfence_gvma_gpa
__sbi_hfence_gvma_gpa:
- /* hfence.gvma a0 */
+ /*
+ * rs1 = a0 (GPA)
+ * rs2 = zero
+ * HFENCE.GVMA a0
+ * 0110001 00000 01010 000 00000 1110011
+ */
.word 0x62050073
ret
.align 3
.global __sbi_hfence_gvma_all
__sbi_hfence_gvma_all:
- /* hfence.gvma */
+ /*
+ * rs1 = zero
+ * rs2 = zero
+ * HFENCE.GVMA
+ * 0110001 00000 00000 000 00000 1110011
+ */
.word 0x62000073
ret
/*
- * Instruction encoding of hfence.bvma is:
+ * HFENCE.VVMA rs1, rs2
+ * HFENCE.VVMA zero, rs2
+ * HFENCE.VVMA rs1
+ * HFENCE.VVMA
+ *
+ * rs1!=zero and rs2!=zero ==> HFENCE.VVMA rs1, rs2
+ * rs1==zero and rs2!=zero ==> HFENCE.VVMA zero, rs2
+ * rs1!=zero and rs2==zero ==> HFENCE.VVMA rs1
+ * rs1==zero and rs2==zero ==> HFENCE.VVMA
+ *
+ * Instruction encoding of HFENCE.VVMA is:
* 0010001 rs2(5) rs1(5) 000 00000 1110011
*/
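+/*
+ * For reference, the .word values below follow:
+ *   insn = (0x11 << 25) | (rs2 << 20) | (rs1 << 15) | 0x73
+ * with a0 = x10 and a1 = x11.
+ */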
.align 3
.global __sbi_hfence_vvma_asid_va
__sbi_hfence_vvma_asid_va:
- /* hfence.bvma a1, a0 */
- .word 0x22a60073
+ /*
+ * rs1 = a0 (VA)
+ * rs2 = a1 (ASID)
+ * HFENCE.VVMA a0, a1
+ * 0010001 01011 01010 000 00000 1110011
+ */
+ .word 0x22b50073
ret
.align 3
.global __sbi_hfence_vvma_asid
__sbi_hfence_vvma_asid:
- /* hfence.bvma zero, a0 */
+ /*
+ * rs1 = zero
+ * rs2 = a0 (ASID)
+ * HFENCE.VVMA zero, a0
+ * 0010001 01010 00000 000 00000 1110011
+ */
.word 0x22a00073
ret
.align 3
.global __sbi_hfence_vvma_va
__sbi_hfence_vvma_va:
- /* hfence.bvma a0 */
+ /*
+ * rs1 = a0 (VA)
+ * rs2 = zero
+ * HFENCE.VVMA a0
+ * 0010001 00000 01010 000 00000 1110011
+ */
.word 0x22050073
ret
.align 3
.global __sbi_hfence_vvma_all
__sbi_hfence_vvma_all:
- /* hfence.bvma */
+ /*
+ * rs1 = zero
+ * rs2 = zero
+ * HFENCE.VVMA
+ * 0010001 00000 00000 000 00000 1110011
+ */
.word 0x22000073
ret
}
for (i = 0; i < size; i += PAGE_SIZE) {
- __sbi_hfence_vvma_asid_va(asid, start + i);
+ __sbi_hfence_vvma_asid_va(start + i, asid);
}
done:
}
for (i = 0; i < size; i += PAGE_SIZE) {
- __sbi_hfence_gvma_vmid_gpa(vmid, start+i);
+ __sbi_hfence_gvma_vmid_gpa(start + i, vmid);
}
}
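/*
 * Illustrative sketch, not part of the patch above: a minimal host-side C
 * program (with a hypothetical helper, hfence_encode) that recomputes the
 * .word constants used in the assembly, handy for re-checking them whenever
 * the operand registers change.
 */
#include <stdio.h>

/* HFENCE.GVMA / HFENCE.VVMA layout:
 * funct7 | rs2(5) | rs1(5) | funct3=000 | rd=00000 | opcode=1110011 */
static unsigned int hfence_encode(unsigned int funct7, unsigned int rs2,
				  unsigned int rs1)
{
	return (funct7 << 25) | (rs2 << 20) | (rs1 << 15) | 0x73;
}

int main(void)
{
	/* a0 = x10, a1 = x11 in the RISC-V calling convention */
	printf("hfence.gvma a0, a1   = 0x%08x\n", hfence_encode(0x31, 11, 10));
	printf("hfence.gvma zero, a0 = 0x%08x\n", hfence_encode(0x31, 10, 0));
	printf("hfence.gvma a0       = 0x%08x\n", hfence_encode(0x31, 0, 10));
	printf("hfence.gvma          = 0x%08x\n", hfence_encode(0x31, 0, 0));
	printf("hfence.vvma a0, a1   = 0x%08x\n", hfence_encode(0x11, 11, 10));
	printf("hfence.vvma zero, a0 = 0x%08x\n", hfence_encode(0x11, 10, 0));
	printf("hfence.vvma a0       = 0x%08x\n", hfence_encode(0x11, 0, 10));
	printf("hfence.vvma          = 0x%08x\n", hfence_encode(0x11, 0, 0));
	return 0;
}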