lib: Fix __sbi_hfence_gvma_vmid_gpa() and __sbi_hfence_vvma_asid_va()
author Anup Patel <anup.patel@wdc.com>
Sat, 6 Jun 2020 12:03:48 +0000 (17:33 +0530)
committer Anup Patel <anup@brainfault.org>
Mon, 15 Jun 2020 03:54:27 +0000 (09:24 +0530)
The arguments of the __sbi_hfence_gvma_vmid_gpa() and
__sbi_hfence_vvma_asid_va() functions are swapped, so we fix them.

So far, we have not faced any issues because QEMU does a full
TLB flush for all HFENCE instructions.

We also improve the documentation of the HFENCE.GVMA and
HFENCE.VVMA instruction encodings.
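
For reference, the fixed .word constants can be cross-checked
against the encoding "funct7 rs2(5) rs1(5) 000 00000 1110011"
documented in the patch. Below is a minimal sketch in C; the
hfence_encode() helper is hypothetical and not part of OpenSBI,
and a0 = x10, a1 = x11 per the RISC-V calling convention:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical helper: packs the fields of the HFENCE
     * encoding described in this patch. */
    static uint32_t hfence_encode(uint32_t funct7, uint32_t rs1,
                                  uint32_t rs2)
    {
            return (funct7 << 25) | (rs2 << 20) | (rs1 << 15) |
                   (0x0 << 12) |  /* funct3 = 000 */
                   (0x0 << 7)  |  /* rd = 00000 */
                   0x73;          /* opcode = 1110011 (SYSTEM) */
    }

    int main(void)
    {
            const uint32_t a0 = 10, a1 = 11;

            /* HFENCE.GVMA a0, a1 (funct7 = 0110001): 0x62b50073 */
            printf("0x%08x\n", hfence_encode(0x31, a0, a1));
            /* HFENCE.VVMA a0, a1 (funct7 = 0010001): 0x22b50073 */
            printf("0x%08x\n", hfence_encode(0x11, a0, a1));
            return 0;
    }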

Signed-off-by: Anup Patel <anup.patel@wdc.com>
Reviewed-by: Atish Patra <atish.patra@wdc.com>
include/sbi/sbi_hfence.h
lib/sbi/sbi_hfence.S
lib/sbi/sbi_tlb.c

diff --git a/include/sbi/sbi_hfence.h b/include/sbi/sbi_hfence.h
index 824a8d6..4420f27 100644
--- a/include/sbi/sbi_hfence.h
+++ b/include/sbi/sbi_hfence.h
@@ -10,8 +10,9 @@
 
 #ifndef __SBI_FENCE_H__
 #define __SBI_FENCE_H__
+
 /** Invalidate Stage2 TLBs for given VMID and guest physical address */
-void __sbi_hfence_gvma_vmid_gpa(unsigned long vmid, unsigned long gpa);
+void __sbi_hfence_gvma_vmid_gpa(unsigned long gpa, unsigned long vmid);
 
 /** Invalidate Stage2 TLBs for given VMID */
 void __sbi_hfence_gvma_vmid(unsigned long vmid);
@@ -23,7 +24,7 @@ void __sbi_hfence_gvma_gpa(unsigned long gpa);
 void __sbi_hfence_gvma_all(void);
 
 /** Invalidate unified TLB entries for given ASID and guest virtual address */
-void __sbi_hfence_vvma_asid_va(unsigned long asid, unsigned long va);
+void __sbi_hfence_vvma_asid_va(unsigned long va, unsigned long asid);
 
 /** Invalidate unified TLB entries for given ASID for a guest */
 void __sbi_hfence_vvma_asid(unsigned long asid);
@@ -33,4 +34,5 @@ void __sbi_hfence_vvma_va(unsigned long va);
 
 /** Invalidate all possible unified TLB entries for a guest */
 void __sbi_hfence_vvma_all(void);
+
 #endif
diff --git a/lib/sbi/sbi_hfence.S b/lib/sbi/sbi_hfence.S
index 30a6e9f..d05becb 100644
--- a/lib/sbi/sbi_hfence.S
+++ b/lib/sbi/sbi_hfence.S
  */
 
        /*
-        * Instruction encoding of hfence.gvma is:
+        * HFENCE.GVMA rs1, rs2
+        * HFENCE.GVMA zero, rs2
+        * HFENCE.GVMA rs1
+        * HFENCE.GVMA
+        *
+        * rs1!=zero and rs2!=zero ==> HFENCE.GVMA rs1, rs2
+        * rs1==zero and rs2!=zero ==> HFENCE.GVMA zero, rs2
+        * rs1!=zero and rs2==zero ==> HFENCE.GVMA rs1
+        * rs1==zero and rs2==zero ==> HFENCE.GVMA
+        *
+        * Instruction encoding of HFENCE.GVMA is:
         * 0110001 rs2(5) rs1(5) 000 00000 1110011
         */
 
        .align 3
        .global __sbi_hfence_gvma_vmid_gpa
 __sbi_hfence_gvma_vmid_gpa:
-       /* hfence.gvma a1, a0 */
-       .word 0x62a60073
+       /*
+        * rs1 = a0 (GPA)
+        * rs2 = a1 (VMID)
+        * HFENCE.GVMA a0, a1
+        * 0110001 01011 01010 000 00000 1110011
+        */
+       .word 0x62b50073
        ret
 
        .align 3
        .global __sbi_hfence_gvma_vmid
 __sbi_hfence_gvma_vmid:
-       /* hfence.gvma zero, a0 */
+       /*
+        * rs1 = zero
+        * rs2 = a0 (VMID)
+        * HFENCE.GVMA zero, a0
+        * 0110001 01010 00000 000 00000 1110011
+        */
        .word 0x62a00073
        ret
 
        .align 3
        .global __sbi_hfence_gvma_gpa
 __sbi_hfence_gvma_gpa:
-       /* hfence.gvma a0 */
+       /*
+        * rs1 = a0 (GPA)
+        * rs2 = zero
+        * HFENCE.GVMA a0
+        * 0110001 00000 01010 000 00000 1110011
+        */
        .word 0x62050073
        ret
 
        .align 3
        .global __sbi_hfence_gvma_all
 __sbi_hfence_gvma_all:
-       /* hfence.gvma */
+       /*
+        * rs1 = zero
+        * rs2 = zero
+        * HFENCE.GVMA
+        * 0110001 00000 00000 000 00000 1110011
+        */
        .word 0x62000073
        ret
 
        /*
-        * Instruction encoding of hfence.bvma is:
+        * HFENCE.VVMA rs1, rs2
+        * HFENCE.VVMA zero, rs2
+        * HFENCE.VVMA rs1
+        * HFENCE.VVMA
+        *
+        * rs1!=zero and rs2!=zero ==> HFENCE.VVMA rs1, rs2
+        * rs1==zero and rs2!=zero ==> HFENCE.VVMA zero, rs2
+        * rs1!=zero and rs2==zero ==> HFENCE.VVMA rs1
+        * rs1==zero and rs2==zero ==> HFENCE.VVMA
+        *
+        * Instruction encoding of HFENCE.VVMA is:
         * 0010001 rs2(5) rs1(5) 000 00000 1110011
         */
 
        .align 3
        .global __sbi_hfence_vvma_asid_va
 __sbi_hfence_vvma_asid_va:
-       /* hfence.bvma a1, a0 */
-       .word 0x22a60073
+       /*
+        * rs1 = a0 (VA)
+        * rs2 = a1 (ASID)
+        * HFENCE.VVMA a0, a1
+        * 0010001 01011 01010 000 00000 1110011
+        */
+       .word 0x22b50073
        ret
 
        .align 3
        .global __sbi_hfence_vvma_asid
 __sbi_hfence_vvma_asid:
-       /* hfence.bvma zero, a0 */
+       /*
+        * rs1 = zero
+        * rs2 = a0 (ASID)
+        * HFENCE.VVMA zero, a0
+        * 0010001 01010 00000 000 00000 1110011
+        */
        .word 0x22a00073
        ret
 
        .align 3
        .global __sbi_hfence_vvma_va
 __sbi_hfence_vvma_va:
-       /* hfence.bvma a0 */
+       /*
+        * rs1 = a0 (VA)
+        * rs2 = zero
+        * HFENCE.VVMA a0
+        * 0010001 00000 01010 000 00000 1110011
+        */
        .word 0x22050073
        ret
 
        .align 3
        .global __sbi_hfence_vvma_all
 __sbi_hfence_vvma_all:
-       /* hfence.bvma */
+       /*
+        * rs1 = zero
+        * rs2 = zero
+        * HFENCE.VVMA
+        * 0010001 00000 00000 000 00000 1110011
+        */
        .word 0x22000073
        ret
diff --git a/lib/sbi/sbi_tlb.c b/lib/sbi/sbi_tlb.c
index c6ca7b1..c8e62cd 100644
--- a/lib/sbi/sbi_tlb.c
+++ b/lib/sbi/sbi_tlb.c
@@ -112,7 +112,7 @@ static void sbi_tlb_hfence_vvma_asid(struct sbi_tlb_info *tinfo)
        }
 
        for (i = 0; i < size; i += PAGE_SIZE) {
-               __sbi_hfence_vvma_asid_va(asid, start + i);
+               __sbi_hfence_vvma_asid_va(start + i, asid);
        }
 
 done:
@@ -137,7 +137,7 @@ static void sbi_tlb_hfence_gvma_vmid(struct sbi_tlb_info *tinfo)
        }
 
        for (i = 0; i < size; i += PAGE_SIZE) {
-               __sbi_hfence_gvma_vmid_gpa(vmid, start+i);
+               __sbi_hfence_gvma_vmid_gpa(start + i, vmid);
        }
 }
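
As an aside, the hand-encoded .word values (and the old
"hfence.bvma" comments being removed) presumably predate
assembler support for the ratified mnemonics. With a toolchain
that does understand the hypervisor extension, the same fences
could also be issued from C via inline assembly. A minimal
sketch under that assumption; these wrappers are hypothetical,
and OpenSBI itself keeps the .word forms for older binutils:

    /* Hypothetical wrappers, equivalent to the .word encodings
     * above when the assembler accepts hfence.* mnemonics. */
    static inline void hfence_gvma_vmid_gpa(unsigned long gpa,
                                            unsigned long vmid)
    {
            /* rs1 = GPA, rs2 = VMID */
            asm volatile("hfence.gvma %0, %1"
                         : : "r"(gpa), "r"(vmid) : "memory");
    }

    static inline void hfence_vvma_asid_va(unsigned long va,
                                           unsigned long asid)
    {
            /* rs1 = VA, rs2 = ASID */
            asm volatile("hfence.vvma %0, %1"
                         : : "r"(va), "r"(asid) : "memory");
    }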