RISC-V: add zbb support to string functions
platform/kernel/linux-starfive.git: arch/riscv/include/asm/errata_list.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2021 Sifive.
 */
#ifndef ASM_ERRATA_LIST_H
#define ASM_ERRATA_LIST_H

#include <asm/alternative.h>
#include <asm/csr.h>
#include <asm/vendorid_list.h>

#ifdef CONFIG_ERRATA_SIFIVE
#define ERRATA_SIFIVE_CIP_453 0
#define ERRATA_SIFIVE_CIP_1200 1
#define ERRATA_SIFIVE_NUMBER 2
#endif

#ifdef CONFIG_ERRATA_THEAD
#define ERRATA_THEAD_PBMT 0
#define ERRATA_THEAD_CMO 1
#define ERRATA_THEAD_PMU 2
#define ERRATA_THEAD_NUMBER 3
#endif

#define CPUFEATURE_SVPBMT 0
#define CPUFEATURE_ZICBOM 1
#define CPUFEATURE_ZBB 2
#define CPUFEATURE_NUMBER 3

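/*
 * How these IDs are consumed (a minimal sketch; the exact plumbing lives
 * in <asm/alternative.h>): each errata ID selects one patch within its
 * vendor's namespace, so a patch site is keyed by the triple
 * (vendor ID, errata ID, CONFIG option), e.g.
 *
 *	ALTERNATIVE("old insn", "new insn", SIFIVE_VENDOR_ID,
 *		    ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200)
 *
 * The vendor-independent CPUFEATURE_* IDs are instead used with a vendor
 * ID of 0, as ALT_SVPBMT() below does for CPUFEATURE_SVPBMT.
 */
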
#ifdef __ASSEMBLY__

#define ALT_INSN_FAULT(x)                                               \
ALTERNATIVE(__stringify(RISCV_PTR do_trap_insn_fault),                  \
            __stringify(RISCV_PTR sifive_cip_453_insn_fault_trp),       \
            SIFIVE_VENDOR_ID, ERRATA_SIFIVE_CIP_453,                    \
            CONFIG_ERRATA_SIFIVE_CIP_453)

#define ALT_PAGE_FAULT(x)                                               \
ALTERNATIVE(__stringify(RISCV_PTR do_page_fault),                       \
            __stringify(RISCV_PTR sifive_cip_453_page_fault_trp),       \
            SIFIVE_VENDOR_ID, ERRATA_SIFIVE_CIP_453,                    \
            CONFIG_ERRATA_SIFIVE_CIP_453)
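
/*
 * A hedged usage sketch: these are meant for the exception vector table
 * in entry.S, where the default handler pointer is emitted unless the
 * CIP-453 patch redirects the trap to the SiFive fixup entry points, e.g.
 *
 *	ALT_INSN_FAULT(RISCV_PTR do_trap_insn_fault)
 *	ALT_PAGE_FAULT(RISCV_PTR do_page_fault)
 *
 * Note the macro argument is unused; the default pointer is hardcoded in
 * the expansion, so the argument only documents the call site.
 */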
#else /* !__ASSEMBLY__ */

#define ALT_FLUSH_TLB_PAGE(x)                                           \
asm(ALTERNATIVE("sfence.vma %0", "sfence.vma", SIFIVE_VENDOR_ID,        \
                ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200)  \
                : : "r" (addr) : "memory")

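/*
 * A minimal usage sketch, assuming a wrapper along the lines of
 * local_flush_tlb_page() in <asm/tlbflush.h>.  The expansion names
 * `addr` directly (the `x` parameter, carrying the caller's default asm,
 * is discarded), so `addr` must be in scope at the call site:
 *
 *	static inline void local_flush_tlb_page(unsigned long addr)
 *	{
 *		ALT_FLUSH_TLB_PAGE(__asm__ __volatile__ ("sfence.vma %0"
 *				: : "r" (addr) : "memory"));
 *	}
 */
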
/*
 * _val is marked as "will be overwritten", so it needs to be set to 0
 * in the default case.
 */
#define ALT_SVPBMT_SHIFT 61
#define ALT_THEAD_PBMT_SHIFT 59
#define ALT_SVPBMT(_val, prot)                                          \
asm(ALTERNATIVE_2("li %0, 0\t\nnop",                                    \
                  "li %0, %1\t\nslli %0,%0,%3", 0,                      \
                        CPUFEATURE_SVPBMT, CONFIG_RISCV_ISA_SVPBMT,     \
                  "li %0, %2\t\nslli %0,%0,%4", THEAD_VENDOR_ID,        \
                        ERRATA_THEAD_PBMT, CONFIG_ERRATA_THEAD_PBMT)    \
                : "=r"(_val)                                            \
                : "I"(prot##_SVPBMT >> ALT_SVPBMT_SHIFT),               \
                  "I"(prot##_THEAD >> ALT_THEAD_PBMT_SHIFT),            \
                  "I"(ALT_SVPBMT_SHIFT),                                \
                  "I"(ALT_THEAD_PBMT_SHIFT))

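/*
 * A minimal usage sketch, modeled on the riscv_page_*() helpers in
 * <asm/pgtable-64.h> (assumed shape): the macro pastes _SVPBMT / _THEAD
 * onto its `prot` argument, so passing _PAGE_IO selects the per-flavor
 * _PAGE_IO_SVPBMT or _PAGE_IO_THEAD encoding:
 *
 *	static inline u64 riscv_page_io(void)
 *	{
 *		u64 val;
 *
 *		ALT_SVPBMT(val, _PAGE_IO);
 *		return val;
 *	}
 */
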
#ifdef CONFIG_ERRATA_THEAD_PBMT
/*
 * IO/NOCACHE memory types are handled together with svpbmt,
 * so on T-Head chips, check if no other memory type is set,
 * and set the non-0 PMA type if applicable.
 */
#define ALT_THEAD_PMA(_val)                                             \
asm volatile(ALTERNATIVE(                                               \
        __nops(7),                                                      \
        "li      t3, %1\n\t"                                            \
        "slli    t3, t3, %3\n\t"                                        \
        "and     t3, %0, t3\n\t"                                        \
        "bne     t3, zero, 2f\n\t"                                      \
        "li      t3, %2\n\t"                                            \
        "slli    t3, t3, %3\n\t"                                        \
        "or      %0, %0, t3\n\t"                                        \
        "2:",  THEAD_VENDOR_ID,                                         \
                ERRATA_THEAD_PBMT, CONFIG_ERRATA_THEAD_PBMT)            \
        : "+r"(_val)                                                    \
        : "I"(_PAGE_MTMASK_THEAD >> ALT_THEAD_PBMT_SHIFT),              \
          "I"(_PAGE_PMA_THEAD >> ALT_THEAD_PBMT_SHIFT),                 \
          "I"(ALT_THEAD_PBMT_SHIFT)                                     \
        : "t3")
#else
#define ALT_THEAD_PMA(_val)
#endif

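/*
 * A hedged usage sketch: ALT_THEAD_PMA() is applied to a raw pgprot value
 * before a pte is composed, as in a pfn_pte()-style helper (assumed shape,
 * modeled on <asm/pgtable.h>):
 *
 *	static inline pte_t pfn_pte(unsigned long pfn, pgprot_t prot)
 *	{
 *		unsigned long prot_val = pgprot_val(prot);
 *
 *		ALT_THEAD_PMA(prot_val);
 *
 *		return __pte((pfn << _PAGE_PFN_SHIFT) | prot_val);
 *	}
 */
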
/*
 * dcache.ipa rs1 (invalidate, physical address)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000001    01010      rs1       000      00000  0001011
 * dcache.iva rs1 (invalidate, virtual address)
 *   0000001    00110      rs1       000      00000  0001011
 *
 * dcache.cpa rs1 (clean, physical address)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000001    01001      rs1       000      00000  0001011
 * dcache.cva rs1 (clean, virtual address)
 *   0000001    00100      rs1       000      00000  0001011
 *
 * dcache.cipa rs1 (clean then invalidate, physical address)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000001    01011      rs1       000      00000  0001011
 * dcache.civa rs1 (clean then invalidate, virtual address)
 *   0000001    00111      rs1       000      00000  0001011
 *
 * sync.s (make sure all cache operations finished)
 * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
 *   0000000    11001     00000      000      00000  0001011
 */
#define THEAD_inval_A0  ".long 0x0265000b"
#define THEAD_clean_A0  ".long 0x0245000b"
#define THEAD_flush_A0  ".long 0x0275000b"
#define THEAD_SYNC_S    ".long 0x0190000b"

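/*
 * Worked decode of THEAD_inval_A0 against the table above:
 *
 *	0x0265000b = 0000001 00110 01010 000 00000 0001011
 *
 * i.e. dcache.iva with rs1 = 01010 = x10 (a0), matching the
 * virtual-address rows; the ALT_CMO_OP() loop below therefore keeps
 * the current address in a0.
 */
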
#define ALT_CMO_OP(_op, _start, _size, _cachesize)                      \
asm volatile(ALTERNATIVE_2(                                             \
        __nops(6),                                                      \
        "mv a0, %1\n\t"                                                 \
        "j 2f\n\t"                                                      \
        "3:\n\t"                                                        \
        "cbo." __stringify(_op) " (a0)\n\t"                             \
        "add a0, a0, %0\n\t"                                            \
        "2:\n\t"                                                        \
        "bltu a0, %2, 3b\n\t"                                           \
        "nop", 0, CPUFEATURE_ZICBOM, CONFIG_RISCV_ISA_ZICBOM,           \
        "mv a0, %1\n\t"                                                 \
        "j 2f\n\t"                                                      \
        "3:\n\t"                                                        \
        THEAD_##_op##_A0 "\n\t"                                         \
        "add a0, a0, %0\n\t"                                            \
        "2:\n\t"                                                        \
        "bltu a0, %2, 3b\n\t"                                           \
        THEAD_SYNC_S, THEAD_VENDOR_ID,                                  \
                        ERRATA_THEAD_CMO, CONFIG_ERRATA_THEAD_CMO)      \
        : : "r"(_cachesize),                                            \
            "r"((unsigned long)(_start) & ~((_cachesize) - 1UL)),       \
            "r"((unsigned long)(_start) + (_size))                      \
        : "a0")

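/*
 * A minimal usage sketch, assuming riscv_cbom_block_size as the cache
 * block size, roughly as the non-coherent DMA sync path uses it.  `_op`
 * must be one of inval/clean/flush so that both the Zicbom "cbo."
 * mnemonic and the matching THEAD_*_A0 define above exist:
 *
 *	void arch_sync_dma_for_device(phys_addr_t paddr, size_t size,
 *				      enum dma_data_direction dir)
 *	{
 *		void *vaddr = phys_to_virt(paddr);
 *
 *		ALT_CMO_OP(clean, vaddr, size, riscv_cbom_block_size);
 *	}
 */
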
#define THEAD_C9XX_RV_IRQ_PMU                   17
#define THEAD_C9XX_CSR_SCOUNTEROF               0x5c5

#define ALT_SBI_PMU_OVERFLOW(__ovl)                                     \
asm volatile(ALTERNATIVE(                                               \
        "csrr %0, " __stringify(CSR_SSCOUNTOVF),                        \
        "csrr %0, " __stringify(THEAD_C9XX_CSR_SCOUNTEROF),             \
                THEAD_VENDOR_ID, ERRATA_THEAD_PMU,                      \
                CONFIG_ERRATA_THEAD_PMU)                                \
        : "=r" (__ovl) :                                                \
        : "memory")

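/*
 * A hedged usage sketch, as in an SBI PMU overflow interrupt handler:
 * standard Sscofpmf hardware reads CSR_SSCOUNTOVF, while T-Head C9xx is
 * patched to read the vendor SCOUNTEROF CSR instead:
 *
 *	unsigned long overflow;
 *
 *	ALT_SBI_PMU_OVERFLOW(overflow);
 */
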
#endif /* __ASSEMBLY__ */

#endif