[FIX] using functions set_memory_[ro|rw]()
[kernel/swap-modules.git] / uprobe / arch / asm-arm / swap_uprobes.c
1 /**
2  * uprobe/arch/asm-arm/swap_uprobes.c
3  * @author Alexey Gerenkov <a.gerenkov@samsung.com> User-Space Probes initial
4  * implementation; Support x86/ARM/MIPS for both user and kernel spaces.
5  * @author Ekaterina Gorelkina <e.gorelkina@samsung.com>: redesign module for
6  * separating core and arch parts
7  *
8  * @section LICENSE
9  *
10  * This program is free software; you can redistribute it and/or modify
11  * it under the terms of the GNU General Public License as published by
12  * the Free Software Foundation; either version 2 of the License, or
13  * (at your option) any later version.
14  *
15  * This program is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
18  * GNU General Public License for more details.
19  *
20  * You should have received a copy of the GNU General Public License
21  * along with this program; if not, write to the Free Software
22  * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
23  *
24  * @section COPYRIGHT
25  *
26  * Copyright (C) Samsung Electronics, 2006-2010
27  *
28  * @section DESCRIPTION
29  *
30  * Arch-dependent uprobe interface implementation for ARM.
31  */
32
33 #include <kprobe/swap_kprobes.h>
34 #include <kprobe/arch/asm/swap_kprobes.h>
35 #include <kprobe/arch/asm/trampoline_arm.h>
36 #include <asm/traps.h>
37 #include <uprobe/swap_uprobes.h>
38 #include <uprobe/arch/asm/swap_uprobes.h>
39 #include <kprobe/swap_slots.h>
40 #include <kprobe/swap_kprobes_deps.h>
41 #include "trampoline_thumb.h"
42
43 // FIXME:
44 #include <kprobe/swap_kdebug.h>
45
/**
 * @def flush_insns
 * @brief Flushes instructions.
 *
 * Thin wrapper around flush_icache_range() covering the byte range
 * [addr, addr + size), used after writing probe trampolines so the
 * CPU fetches the new instructions.
 * NOTE: 'addr' is evaluated twice -- do not pass expressions with
 * side effects.
 */
#define flush_insns(addr, size)                                 \
        flush_icache_range((unsigned long)(addr),               \
                           (unsigned long)(addr) + (size))
53
54 static inline long branch_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
55 {
56         long offset = insn & 0x3ff;
57         offset -= insn & 0x400;
58         return (insn_addr + 4 + offset * 2);
59 }
60
61 static inline long branch_cond_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
62 {
63         long offset = insn & 0x7f;
64         offset -= insn & 0x80;
65         return (insn_addr + 4 + offset * 2);
66 }
67
68 static inline long branch_t32_dest(kprobe_opcode_t insn, unsigned int insn_addr)
69 {
70         unsigned int poff = insn & 0x3ff;
71         unsigned int offset = (insn & 0x07fe0000) >> 17;
72
73         poff -= (insn & 0x400);
74
75         if (insn & (1 << 12))
76                 return ((insn_addr + 4 + (poff << 12) + offset * 4));
77         else
78         return ((insn_addr + 4 + (poff << 12) + offset * 4) & ~3);
79 }
80
81 static inline long cbz_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
82 {
83         unsigned int i = (insn & 0x200) >> 3;
84         unsigned int offset = (insn & 0xf8) >> 2;
85         return insn_addr + 4 + i + offset;
86 }
87
88 /* is instruction Thumb2 and NOT a branch, etc... */
89 static int is_thumb2(kprobe_opcode_t insn)
90 {
91         return ((insn & 0xf800) == 0xe800 ||
92                 (insn & 0xf800) == 0xf000 ||
93                 (insn & 0xf800) == 0xf800);
94 }
95
96 static int arch_copy_trampoline_arm_uprobe(struct uprobe *up)
97 {
98         int ret;
99         struct kprobe *p = up2kp(up);
100         unsigned long insn = p->opcode;
101         unsigned long vaddr = (unsigned long)p->addr;
102         unsigned long *tramp = up->atramp.tramp_arm;
103
104         ret = arch_make_trampoline_arm(vaddr, insn, tramp);
105         p->safe_arm = !!ret;
106
107         return ret;
108 }
109
/**
 * @brief Checks whether a Thumb instruction may be probed.
 *
 * Matches the instruction against the classes listed below (branches,
 * exception-generating instructions, and loads/stores/data-processing
 * that read or write PC). NOTE(review): presumably these are the forms
 * that cannot be executed out of line in the trampoline -- confirm
 * against the trampoline preparation code.
 *
 * @param insn Thumb/Thumb2 instruction word to examine.
 * @return 0 if the instruction is acceptable, -EFAULT if it matches
 *         one of the rejected classes.
 */
static int arch_check_insn_thumb(unsigned long insn)
{
        int ret = 0;

        /* check instructions that can change PC */
        if (THUMB_INSN_MATCH(UNDEF, insn) ||
            THUMB_INSN_MATCH(SWI, insn) ||
            THUMB_INSN_MATCH(BREAK, insn) ||
            THUMB2_INSN_MATCH(B1, insn) ||
            THUMB2_INSN_MATCH(B2, insn) ||
            THUMB2_INSN_MATCH(BXJ, insn) ||
            (THUMB2_INSN_MATCH(ADR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRHW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRWL, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
            THUMB2_INSN_MATCH(LDMIA, insn) ||
            THUMB2_INSN_MATCH(LDMDB, insn) ||
            (THUMB2_INSN_MATCH(DP, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(RORW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(ROR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(LSLW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(LSLW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(LSRW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(LSRW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            /* skip PC, #-imm12 -> SP, #-imm8 and Tegra-hanging instructions */
            (THUMB2_INSN_MATCH(STRW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(STRBW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(STRHW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(STRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(STRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            /* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
            (THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(STRD, insn))) {
                ret = -EFAULT;
        }

        return ret;
}
153
154 static int prep_pc_dep_insn_execbuf_thumb(kprobe_opcode_t * insns, kprobe_opcode_t insn, int uregs)
155 {
156         unsigned char mreg = 0;
157         unsigned char reg = 0;
158
159         if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
160                 reg = ((insn & 0xffff) & uregs) >> 8;
161         } else {
162                 if (THUMB_INSN_MATCH(MOV3, insn)) {
163                         if (((((unsigned char) insn) & 0xff) >> 3) == 15) {
164                                 reg = (insn & 0xffff) & uregs;
165                         } else {
166                                 return 0;
167                         }
168                 } else {
169                         if (THUMB2_INSN_MATCH(ADR, insn)) {
170                                 reg = ((insn >> 16) & uregs) >> 8;
171                                 if (reg == 15) {
172                                         return 0;
173                                 }
174                         } else {
175                                 if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
176                                     THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
177                                     THUMB2_INSN_MATCH(LDRWL, insn)) {
178                                         reg = ((insn >> 16) & uregs) >> 12;
179                                         if (reg == 15) {
180                                                 return 0;
181                                         }
182                                 } else {
183                                         // LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
184                                         if (THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
185                                             THUMB2_INSN_MATCH(LDREX, insn)) {
186                                                 reg = ((insn >> 16) & uregs) >> 12;
187                                         } else {
188                                                 if (THUMB2_INSN_MATCH(DP, insn)) {
189                                                         reg = ((insn >> 16) & uregs) >> 12;
190                                                         if (reg == 15) {
191                                                                 return 0;
192                                                         }
193                                                 } else {
194                                                         if (THUMB2_INSN_MATCH(RSBW, insn)) {
195                                                                 reg = ((insn >> 12) & uregs) >> 8;
196                                                                 if (reg == 15){
197                                                                         return 0;
198                                                                 }
199                                                         } else {
200                                                                 if (THUMB2_INSN_MATCH(RORW, insn)) {
201                                                                         reg = ((insn >> 12) & uregs) >> 8;
202                                                                         if (reg == 15) {
203                                                                                 return 0;
204                                                                         }
205                                                                 } else {
206                                                                         if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW1, insn) ||
207                                                                             THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW1, insn) ||
208                                                                             THUMB2_INSN_MATCH(LSRW2, insn)) {
209                                                                                 reg = ((insn >> 12) & uregs) >> 8;
210                                                                                 if (reg == 15) {
211                                                                                         return 0;
212                                                                                 }
213                                                                         } else {
214                                                                                 if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
215                                                                                         reg = 15;
216                                                                                 } else {
217                                                                                         if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
218                                                                                                 reg = THUMB2_INSN_REG_RM(insn);
219                                                                                         }
220                                                                                 }
221                                                                         }
222                                                                 }
223                                                         }
224                                                 }
225                                         }
226                                 }
227                         }
228                 }
229         }
230
231         if ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
232              THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
233              THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn) ||
234              THUMB2_INSN_MATCH(STRHW, insn)) && THUMB2_INSN_REG_RT(insn) == 15) {
235                 reg = THUMB2_INSN_REG_RT(insn);
236         }
237
238         if (reg == 6 || reg == 7) {
239                 *((unsigned short*)insns + 0) = (*((unsigned short*)insns + 0) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
240                 *((unsigned short*)insns + 1) = (*((unsigned short*)insns + 1) & 0xf8ff) | (mreg << 8);
241                 *((unsigned short*)insns + 2) = (*((unsigned short*)insns + 2) & 0xfff8) | (mreg + 1);
242                 *((unsigned short*)insns + 3) = (*((unsigned short*)insns + 3) & 0xffc7) | (mreg << 3);
243                 *((unsigned short*)insns + 7) = (*((unsigned short*)insns + 7) & 0xf8ff) | (mreg << 8);
244                 *((unsigned short*)insns + 8) = (*((unsigned short*)insns + 8) & 0xffc7) | (mreg << 3);
245                 *((unsigned short*)insns + 9) = (*((unsigned short*)insns + 9) & 0xffc7) | ((mreg + 1) << 3);
246                 *((unsigned short*)insns + 10) = (*((unsigned short*)insns + 10) & 0x00ff) | (( 1 << mreg) | (1 << (mreg + 1)));
247         }
248
249         if (THUMB_INSN_MATCH(APC, insn)) {
250                 // ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4
251                 *((unsigned short*)insns + 4) = ((insn & 0xffff) | 0x800);                              // ADD Rd, SP, #immed_8*4
252         } else {
253                 if (THUMB_INSN_MATCH(LRO3, insn)) {
254                         // LDR Rd, [PC, #immed_8*4] -> LDR Rd, [SP, #immed_8*4]
255                         *((unsigned short*)insns + 4) = ((insn & 0xffff) + 0x5000);                     // LDR Rd, [SP, #immed_8*4]
256                 } else {
257                         if (THUMB_INSN_MATCH(MOV3, insn)) {
258                                 // MOV Rd, PC -> MOV Rd, SP
259                                 *((unsigned short*)insns + 4) = ((insn & 0xffff) ^ 0x10);               // MOV Rd, SP
260                         } else {
261                                 if (THUMB2_INSN_MATCH(ADR, insn)) {
262                                         // ADDW Rd, PC, #imm -> ADDW Rd, SP, #imm
263                                         insns[2] = (insn & 0xfffffff0) | 0x0d;                          // ADDW Rd, SP, #imm
264                                 } else {
265                                         if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRBW, insn) ||
266                                             THUMB2_INSN_MATCH(LDRHW, insn)) {
267                                                 // LDR.W Rt, [PC, #-<imm_12>] -> LDR.W Rt, [SP, #-<imm_8>]
268                                                 // !!!!!!!!!!!!!!!!!!!!!!!!
269                                                 // !!! imm_12 vs. imm_8 !!!
270                                                 // !!!!!!!!!!!!!!!!!!!!!!!!
271                                                 insns[2] = (insn & 0xf0fffff0) | 0x0c00000d;            // LDR.W Rt, [SP, #-<imm_8>]
272                                         } else {
273                                                 if (THUMB2_INSN_MATCH(LDRW1, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
274                                                     THUMB2_INSN_MATCH(LDRHW1, insn) || THUMB2_INSN_MATCH(LDRD, insn) ||
275                                                     THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(LDREX, insn)) {
276                                                         // LDRx.W Rt, [PC, #+<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
277                                                         insns[2] = (insn & 0xfffffff0) | 0xd;                                                                                                   // LDRx.W Rt, [SP, #+<imm_12>]
278                                                 } else {
279                                                         if (THUMB2_INSN_MATCH(MUL, insn)) {
280                                                                 insns[2] = (insn & 0xfff0ffff) | 0x000d0000;                                                                                    // MUL Rd, Rn, SP
281                                                         } else {
282                                                                 if (THUMB2_INSN_MATCH(DP, insn)) {
283                                                                         if (THUMB2_INSN_REG_RM(insn) == 15) {
284                                                                                 insns[2] = (insn & 0xfff0ffff) | 0x000d0000;                                                                    // DP Rd, Rn, PC
285                                                                         } else if (THUMB2_INSN_REG_RN(insn) == 15) {
286                                                                                 insns[2] = (insn & 0xfffffff0) | 0xd;                                                                           // DP Rd, PC, Rm
287                                                                         }
288                                                                 } else {
289                                                                         if (THUMB2_INSN_MATCH(LDRWL, insn)) {
290                                                                                 // LDRx.W Rt, [PC, #<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
291                                                                                 insns[2] = (insn & 0xfffffff0) | 0xd;                                                                           // LDRx.W Rt, [SP, #+<imm_12>]
292                                                                         } else {
293                                                                                 if (THUMB2_INSN_MATCH(RSBW, insn)) {
294                                                                                         insns[2] = (insn & 0xfffffff0) | 0xd;                                                                   // RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const>
295                                                                                 } else {
296                                                                                         if (THUMB2_INSN_MATCH(RORW, insn) || THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) {
297                                                                                                 if ((THUMB2_INSN_REG_RM(insn) == 15) && (THUMB2_INSN_REG_RN(insn) == 15)) {
298                                                                                                         insns[2] = (insn & 0xfffdfffd);                                                         // ROR.W Rd, PC, PC
299                                                                                                 } else if (THUMB2_INSN_REG_RM(insn) == 15) {
300                                                                                                         insns[2] = (insn & 0xfff0ffff) | 0xd0000;                                               // ROR.W Rd, Rn, PC
301                                                                                                 } else if (THUMB2_INSN_REG_RN(insn) == 15) {
302                                                                                                         insns[2] = (insn & 0xfffffff0) | 0xd;                                                   // ROR.W Rd, PC, Rm
303                                                                                                 }
304                                                                                         } else {
305                                                                                                 if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) {
306                                                                                                         insns[2] = (insn & 0xfff0ffff) | 0xd0000;                                               // ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const>
307                                                                                                 }
308                                                                                         }
309                                                                                 }
310                                                                         }
311                                                                 }
312                                                         }
313                                                 }
314                                         }
315                                 }
316                         }
317                 }
318         }
319
320         if (THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn)) {
321                 insns[2] = (insn & 0xfff0ffff) | 0x000d0000;                                                            // STRx.W Rt, [Rn, SP]
322         } else {
323                 if (THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
324                     THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) {
325                         if (THUMB2_INSN_REG_RN(insn) == 15) {
326                                 insns[2] = (insn & 0xfffffff0) | 0xd;                                                   // STRD/T/HT{.W} Rt, [SP, ...]
327                         } else {
328                                 insns[2] = insn;
329                         }
330                 } else {
331                         if (THUMB2_INSN_MATCH(STRHW, insn) && (THUMB2_INSN_REG_RN(insn) == 15)) {
332                                 if (THUMB2_INSN_REG_RN(insn) == 15) {
333                                         insns[2] = (insn & 0xf0fffff0) | 0x0c00000d;                                    // STRH.W Rt, [SP, #-<imm_8>]
334                                 } else {
335                                         insns[2] = insn;
336                                 }
337                         }
338                 }
339         }
340
341         // STRx PC, xxx
342         if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn)   ||
343                             THUMB2_INSN_MATCH(STRBW, insn)  ||
344                             THUMB2_INSN_MATCH(STRD, insn)   ||
345                             THUMB2_INSN_MATCH(STRHT, insn)  ||
346                             THUMB2_INSN_MATCH(STRT, insn)   ||
347                             THUMB2_INSN_MATCH(STRHW1, insn) ||
348                             THUMB2_INSN_MATCH(STRHW, insn) )) {
349                 insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;
350         }
351
352         if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
353                 insns[2] = (insn & 0xfffffff0) | 0xd;                                                                   // TEQ SP, #<const>
354         } else {
355                 if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
356                         if ((THUMB2_INSN_REG_RN(insn) == 15) && (THUMB2_INSN_REG_RM(insn) == 15)) {
357                                 insns[2] = (insn & 0xfffdfffd);                                                         // TEQ/TST PC, PC
358                         } else if (THUMB2_INSN_REG_RM(insn) == 15) {
359                                 insns[2] = (insn & 0xfff0ffff) | 0xd0000;                                               // TEQ/TST Rn, PC
360                         } else if (THUMB2_INSN_REG_RN(insn) == 15) {
361                                 insns[2] = (insn & 0xfffffff0) | 0xd;                                                   // TEQ/TST PC, Rm
362                         }
363                 }
364         }
365
366         return 0;
367 }
368
369 static int arch_copy_trampoline_thumb_uprobe(struct uprobe *up)
370 {
371         int uregs, pc_dep;
372         struct kprobe *p = up2kp(up);
373         unsigned int addr;
374         unsigned long vaddr = (unsigned long)p->addr;
375         unsigned long insn = p->opcode;
376         unsigned long *tramp = up->atramp.tramp_thumb;
377         enum { tramp_len = sizeof(up->atramp.tramp_thumb) };
378
379         p->safe_thumb = 1;
380         if (vaddr & 0x01) {
381                 printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);
382                 return -EINVAL;
383         }
384
385         if (!arch_check_insn_thumb(insn)) {
386                 p->safe_thumb = 0;
387         }
388
389         uregs = 0;
390         pc_dep = 0;
391
392         if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
393                 uregs = 0x0700;         /* 8-10 */
394                 pc_dep = 1;
395         } else if (THUMB_INSN_MATCH(MOV3, insn) && (((((unsigned char)insn) & 0xff) >> 3) == 15)) {
396                 /* MOV Rd, PC */
397                 uregs = 0x07;
398                 pc_dep = 1;
399         } else if THUMB2_INSN_MATCH(ADR, insn) {
400                 uregs = 0x0f00;         /* Rd 8-11 */
401                 pc_dep = 1;
402         } else if (((THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
403                      THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
404                      THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
405                      THUMB2_INSN_MATCH(LDRWL, insn)) && THUMB2_INSN_REG_RN(insn) == 15) ||
406                      THUMB2_INSN_MATCH(LDREX, insn) ||
407                      ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
408                        THUMB2_INSN_MATCH(STRHW, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) &&
409                       (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15)) ||
410                      ((THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHT, insn)) &&
411                        (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15))) {
412                 uregs = 0xf000;         /* Rt 12-15 */
413                 pc_dep = 1;
414         } else if ((THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn)) && (THUMB2_INSN_REG_RN(insn) == 15)) {
415                 uregs = 0xff00;         /* Rt 12-15, Rt2 8-11 */
416                 pc_dep = 1;
417         } else if (THUMB2_INSN_MATCH(MUL, insn) && THUMB2_INSN_REG_RM(insn) == 15) {
418                 uregs = 0xf;
419                 pc_dep = 1;
420         } else if (THUMB2_INSN_MATCH(DP, insn) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
421                 uregs = 0xf000;         /* Rd 12-15 */
422                 pc_dep = 1;
423         } else if (THUMB2_INSN_MATCH(STRD, insn) && ((THUMB2_INSN_REG_RN(insn) == 15) || (THUMB2_INSN_REG_RT(insn) == 15) || THUMB2_INSN_REG_RT2(insn) == 15)) {
424                 uregs = 0xff00;         /* Rt 12-15, Rt2 8-11 */
425                 pc_dep = 1;
426         } else if (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) {
427                 uregs = 0x0f00;         /* Rd 8-11 */
428                 pc_dep = 1;
429         } else if (THUMB2_INSN_MATCH (RORW, insn) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
430                 uregs = 0x0f00;
431                 pc_dep = 1;
432         } else if ((THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) && THUMB2_INSN_REG_RM(insn) == 15) {
433                 uregs = 0x0f00;         /* Rd 8-11 */
434                 pc_dep = 1;
435         } else if ((THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
436                 uregs = 0x0f00;         /* Rd 8-11 */
437                 pc_dep = 1;
438         } else if ((THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) && THUMB2_INSN_REG_RN(insn) == 15) {
439                 uregs = 0xf0000;        /* Rn 0-3 (16-19) */
440                 pc_dep = 1;
441         } else if ((THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) &&
442                    (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
443                 uregs = 0xf0000;        /* Rn 0-3 (16-19) */
444                 pc_dep = 1;
445         }
446
447         if (unlikely(uregs && pc_dep)) {
448                 memcpy(tramp, pc_dep_insn_execbuf_thumb, tramp_len);
449                 if (prep_pc_dep_insn_execbuf_thumb(tramp, insn, uregs) != 0) {
450                         printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!",
451                                __FILE__, __LINE__, insn);
452                         p->safe_thumb = 1;
453                 }
454
455                 addr = vaddr + 4;
456                 *((unsigned short*)tramp + 13) = 0xdeff;
457                 *((unsigned short*)tramp + 14) = addr & 0x0000ffff;
458                 *((unsigned short*)tramp + 15) = addr >> 16;
459                 if (!is_thumb2(insn)) {
460                         addr = vaddr + 2;
461                         *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
462                         *((unsigned short*)tramp + 17) = addr >> 16;
463                 } else {
464                         addr = vaddr + 4;
465                         *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
466                         *((unsigned short*)tramp + 17) = addr >> 16;
467                 }
468         } else {
469                 memcpy(tramp, gen_insn_execbuf_thumb, tramp_len);
470                 *((unsigned short*)tramp + 13) = 0xdeff;
471                 if (!is_thumb2(insn)) {
472                         addr = vaddr + 2;
473                         *((unsigned short*)tramp + 2) = insn;
474                         *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
475                         *((unsigned short*)tramp + 17) = addr >> 16;
476                 } else {
477                         addr = vaddr + 4;
478                         tramp[1] = insn;
479                         *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
480                         *((unsigned short*)tramp + 17) = addr >> 16;
481                 }
482         }
483
484         if (THUMB_INSN_MATCH(B2, insn)) {
485                 memcpy(tramp, b_off_insn_execbuf_thumb, tramp_len);
486                 *((unsigned short*)tramp + 13) = 0xdeff;
487                 addr = branch_t16_dest(insn, vaddr);
488                 *((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
489                 *((unsigned short*)tramp + 15) = addr >> 16;
490                 *((unsigned short*)tramp + 16) = 0;
491                 *((unsigned short*)tramp + 17) = 0;
492
493         } else if (THUMB_INSN_MATCH(B1, insn)) {
494                 memcpy(tramp, b_cond_insn_execbuf_thumb, tramp_len);
495                 *((unsigned short*)tramp + 13) = 0xdeff;
496                 *((unsigned short*)tramp + 0) |= (insn & 0xf00);
497                 addr = branch_cond_t16_dest(insn, vaddr);
498                 *((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
499                 *((unsigned short*)tramp + 15) = addr >> 16;
500                 addr = vaddr + 2;
501                 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
502                 *((unsigned short*)tramp + 17) = addr >> 16;
503
504         } else if (THUMB_INSN_MATCH(BLX2, insn) ||
505                    THUMB_INSN_MATCH(BX, insn)) {
506                 memcpy(tramp, b_r_insn_execbuf_thumb, tramp_len);
507                 *((unsigned short*)tramp + 13) = 0xdeff;
508                 *((unsigned short*)tramp + 4) = insn;
509                 addr = vaddr + 2;
510                 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
511                 *((unsigned short*)tramp + 17) = addr >> 16;
512
513         } else if (THUMB2_INSN_MATCH(BLX1, insn) ||
514                    THUMB2_INSN_MATCH(BL, insn)) {
515                 memcpy(tramp, blx_off_insn_execbuf_thumb, tramp_len);
516                 *((unsigned short*)tramp + 13) = 0xdeff;
517                 addr = branch_t32_dest(insn, vaddr);
518                 *((unsigned short*)tramp + 14) = (addr & 0x0000ffff);
519                 *((unsigned short*)tramp + 15) = addr >> 16;
520                 addr = vaddr + 4;
521                 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
522                 *((unsigned short*)tramp + 17) = addr >> 16;
523
524         } else if (THUMB_INSN_MATCH(CBZ, insn)) {
525                 memcpy(tramp, cbz_insn_execbuf_thumb, tramp_len);
526                 *((unsigned short*)tramp + 13) = 0xdeff;
527                 /* zero out original branch displacement (imm5 = 0; i = 0) */
528                 *((unsigned short*)tramp + 0) = insn & (~0x2f8);
529                 /* replace it with 8 bytes offset in execbuf (imm5 = 0b00010) */
530                 *((unsigned short*)tramp + 0) |= 0x20;
531                 addr = cbz_t16_dest(insn, vaddr);
532                 *((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
533                 *((unsigned short*)tramp + 15) = addr >> 16;
534                 addr = vaddr + 2;
535                 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
536                 *((unsigned short*)tramp + 17) = addr >> 16;
537         }
538
539         return 0;
540 }
541
542 /**
543  * @brief Prepares uprobe for ARM.
544  *
545  * @param up Pointer to the uprobe.
546  * @return 0 on success,\n
547  * negative error code on error.
548  */
549 int arch_prepare_uprobe(struct uprobe *up)
550 {
551         struct kprobe *p = up2kp(up);
552         struct task_struct *task = up->task;
553         unsigned long vaddr = (unsigned long)p->addr;
554         unsigned long insn;
555
556         if (vaddr & 0x01) {
557                 printk("Error in %s at %d: attempt to register uprobe "
558                        "at an unaligned address\n", __FILE__, __LINE__);
559                 return -EINVAL;
560         }
561
562         if (!read_proc_vm_atomic(task, vaddr, &insn, sizeof(insn)))
563                 panic("failed to read memory %lx!\n", vaddr);
564
565         p->opcode = insn;
566
567         arch_copy_trampoline_arm_uprobe(up);
568         arch_copy_trampoline_thumb_uprobe(up);
569
570         if ((p->safe_arm) && (p->safe_thumb)) {
571                 printk("Error in %s at %d: failed "
572                        "arch_copy_trampoline_*_uprobe() (both) "
573                        "[tgid=%u, addr=%lx, data=%lx]\n",
574                        __FILE__, __LINE__, task->tgid, vaddr, insn);
575                 return -EFAULT;
576         }
577
578         up->atramp.utramp = swap_slot_alloc(up->sm);
579         if (up->atramp.utramp == NULL) {
580                 printk("Error: swap_slot_alloc failed (%08lx)\n", vaddr);
581                 return -ENOMEM;
582         }
583
584         return 0;
585 }
586
587 /**
588  * @brief Analysis opcodes.
589  *
590  * @param rp Pointer to the uretprobe.
591  * @return Void.
592  */
593 void arch_opcode_analysis_uretprobe(struct uretprobe *rp)
594 {
595         /* Remove retprobe if first insn overwrites lr */
596         rp->thumb_noret = !!(THUMB2_INSN_MATCH(BL, rp->up.kp.opcode) ||
597                              THUMB2_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
598                              THUMB_INSN_MATCH(BLX2, rp->up.kp.opcode));
599
600         rp->arm_noret = !!(ARM_INSN_MATCH(BL, rp->up.kp.opcode) ||
601                            ARM_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
602                            ARM_INSN_MATCH(BLX2, rp->up.kp.opcode));
603 }
604
605 /**
606  * @brief Prepates uretprobe for ARM.
607  *
608  * @param ri Pointer to the uretprobe instance.
609  * @param regs Pointer to CPU register data.
610  * @return Void.
611  */
612 void arch_prepare_uretprobe(struct uretprobe_instance *ri,
613                             struct pt_regs *regs)
614 {
615         ri->ret_addr = (kprobe_opcode_t *)regs->ARM_lr;
616         ri->sp = (kprobe_opcode_t *)regs->ARM_sp;
617
618         /* Set flag of current mode */
619         ri->sp = (kprobe_opcode_t *)((long)ri->sp | !!thumb_mode(regs));
620
621         if (thumb_mode(regs)) {
622                 regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn) + 0x1b;
623         } else {
624                 regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
625         }
626 }
627
628 /**
629  * @brief Disarms uretprobe instance.
630  *
631  * @param ri Pointer to the uretprobe instance
632  * @param task Pointer to the task for which the uretprobe instance
633  * @return 0 on success,\n
634  * negative error code on error.
635  */
636 int arch_disarm_urp_inst(struct uretprobe_instance *ri,
637                          struct task_struct *task)
638 {
639         struct pt_regs *uregs = task_pt_regs(ri->task);
640         unsigned long ra = swap_get_ret_addr(uregs);
641         unsigned long *tramp;
642         unsigned long *sp = (unsigned long *)((long)ri->sp & ~1);
643         unsigned long *stack = sp - RETPROBE_STACK_DEPTH + 1;
644         unsigned long *found = NULL;
645         unsigned long *buf[RETPROBE_STACK_DEPTH];
646         int i, retval;
647
648         /* Understand function mode */
649         if ((long)ri->sp & 1) {
650                 tramp = (unsigned long *)
651                         ((unsigned long)ri->rp->up.kp.ainsn.insn + 0x1b);
652         } else {
653                 tramp = (unsigned long *)(ri->rp->up.kp.ainsn.insn +
654                                           UPROBES_TRAMP_RET_BREAK_IDX);
655         }
656
657         /* check stack */
658         retval = read_proc_vm_atomic(task, (unsigned long)stack,
659                                      buf, sizeof(buf));
660         if (retval != sizeof(buf)) {
661                 printk("---> %s (%d/%d): failed to read stack from %08lx\n",
662                        task->comm, task->tgid, task->pid,
663                        (unsigned long)stack);
664                 retval = -EFAULT;
665                 goto check_lr;
666         }
667
668         /* search the stack from the bottom */
669         for (i = RETPROBE_STACK_DEPTH - 1; i >= 0; i--) {
670                 if (buf[i] == tramp) {
671                         found = stack + i;
672                         break;
673                 }
674         }
675
676         if (!found) {
677                 retval = -ESRCH;
678                 goto check_lr;
679         }
680
681         printk("---> %s (%d/%d): trampoline found at "
682                "%08lx (%08lx /%+d) - %p\n",
683                task->comm, task->tgid, task->pid,
684                (unsigned long)found, (unsigned long)sp,
685                found - sp, ri->rp->up.kp.addr);
686         retval = write_proc_vm_atomic(task, (unsigned long)found,
687                                       &ri->ret_addr,
688                                       sizeof(ri->ret_addr));
689         if (retval != sizeof(ri->ret_addr)) {
690                 printk("---> %s (%d/%d): failed to write value to %08lx",
691                        task->comm, task->tgid, task->pid, (unsigned long)found);
692                 retval = -EFAULT;
693         } else {
694                 retval = 0;
695         }
696
697 check_lr: /* check lr anyway */
698         if (ra == (unsigned long)tramp) {
699                 printk("---> %s (%d/%d): trampoline found at "
700                        "lr = %08lx - %p\n",
701                        task->comm, task->tgid, task->pid,
702                        ra, ri->rp->up.kp.addr);
703                 swap_set_ret_addr(uregs, (unsigned long)ri->ret_addr);
704                 retval = 0;
705         } else if (retval) {
706                 printk("---> %s (%d/%d): trampoline NOT found at "
707                        "sp = %08lx, lr = %08lx - %p\n",
708                        task->comm, task->tgid, task->pid,
709                        (unsigned long)sp, ra, ri->rp->up.kp.addr);
710         }
711
712         return retval;
713 }
714
715 /**
716  * @brief Jump pre-handler.
717  *
718  * @param p Pointer to the kprobe.
719  * @param regs Pointer to CPU register data.
720  * @return 0.
721  */
722 int setjmp_upre_handler(struct kprobe *p, struct pt_regs *regs)
723 {
724         struct uprobe *up = container_of(p, struct uprobe, kp);
725         struct ujprobe *jp = container_of(up, struct ujprobe, up);
726
727         kprobe_pre_entry_handler_t pre_entry = (kprobe_pre_entry_handler_t)jp->pre_entry;
728         entry_point_t entry = (entry_point_t)jp->entry;
729
730         if (pre_entry) {
731                 p->ss_addr[smp_processor_id()] = (kprobe_opcode_t *)
732                                                  pre_entry(jp->priv_arg, regs);
733         }
734
735         if (entry) {
736                 entry(regs->ARM_r0, regs->ARM_r1, regs->ARM_r2,
737                       regs->ARM_r3, regs->ARM_r4, regs->ARM_r5);
738         } else {
739                 arch_ujprobe_return();
740         }
741
742         return 0;
743 }
744
745 /**
746  * @brief Gets trampoline address.
747  *
748  * @param p Pointer to the kprobe.
749  * @param regs Pointer to CPU register data.
750  * @return Trampoline address.
751  */
752 unsigned long arch_get_trampoline_addr(struct kprobe *p, struct pt_regs *regs)
753 {
754         return thumb_mode(regs) ?
755                         (unsigned long)(p->ainsn.insn) + 0x1b :
756                         (unsigned long)(p->ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
757 }
758
759 /**
760  * @brief Restores return address.
761  *
762  * @param orig_ret_addr Original return address.
763  * @param regs Pointer to CPU register data.
764  * @return Void.
765  */
766 void arch_set_orig_ret_addr(unsigned long orig_ret_addr, struct pt_regs *regs)
767 {
768         regs->ARM_lr = orig_ret_addr;
769         regs->ARM_pc = orig_ret_addr & ~0x1;
770
771         if (regs->ARM_lr & 0x1)
772                 regs->ARM_cpsr |= PSR_T_BIT;
773         else
774                 regs->ARM_cpsr &= ~PSR_T_BIT;
775 }
776
777 /**
778  * @brief Removes uprobe.
779  *
780  * @param up Pointer to the uprobe.
781  * @return Void.
782  */
783 void arch_remove_uprobe(struct uprobe *up)
784 {
785         swap_slot_free(up->sm, up->atramp.utramp);
786 }
787
788 static void restore_opcode_for_thumb(struct kprobe *p, struct pt_regs *regs)
789 {
790         if (thumb_mode(regs) && !is_thumb2(p->opcode)) {
791                 u16 tmp = p->opcode >> 16;
792                 write_proc_vm_atomic(current,
793                                 (unsigned long)((u16*)p->addr + 1), &tmp, 2);
794                 flush_insns(p->addr, 4);
795         }
796 }
797
/*
 * Build and install the execution buffer (trampoline) for @up into the
 * probed task's slot page, picking the ARM or Thumb variant from the
 * current CPU mode and the per-mode safety flags.
 * Returns 0 on success, 1 when no prepared trampoline matches the
 * current mode (the probe is disarmed in that case).
 */
static int make_trampoline(struct uprobe *up, struct pt_regs *regs)
{
	unsigned long *tramp, *utramp;
	struct kprobe *p = up2kp(up);
	int sw;

	/*
	 * Mode/support selector:
	 * 0 bit - thumb mode           (0 - arm, 1 - thumb)
	 * 1 bit - arm mode support     (0 - off, 1 - on)
	 * 2 bit - thumb mode support   (0 - off, 1 - on)
	 */
	sw = (!!thumb_mode(regs)) |
	     (int)!p->safe_arm << 1 |
	     (int)!p->safe_thumb << 2;

	switch (sw) {
	/* ARM: cpu in ARM mode and the ARM trampoline was prepared */
	case 0b110:
	case 0b010:
		tramp = up->atramp.tramp_arm;
		break;
	/* THUMB: cpu in Thumb mode and the Thumb trampoline was prepared */
	case 0b111:
	case 0b101:
		restore_opcode_for_thumb(p, regs);
		tramp = up->atramp.tramp_thumb;
		break;
	default:
		/* current mode has no usable trampoline: give up on probe */
		printk("Error in %s at %d: we are in arm mode "
		       "(!) and check instruction was fail "
		       "(%0lX instruction at %p address)!\n",
		       __FILE__, __LINE__, p->opcode, p->addr);

		disarm_uprobe(p, up->task);

		return 1;
	}

	utramp = up->atramp.utramp;

	/* copy the chosen buffer into the target process and sync icache */
	/* NOTE(review): panic() on a user-memory write failure is very
	 * harsh for a module; consider downgrading to an error return. */
	if (!write_proc_vm_atomic(up->task, (unsigned long)utramp, tramp,
				  UPROBES_TRAMP_LEN * sizeof(*tramp)))
		panic("failed to write memory %p!\n", utramp);
	flush_insns(utramp, UPROBES_TRAMP_LEN * sizeof(*tramp));

	p->ainsn.insn = utramp;

	return 0;
}
847
/*
 * Main dispatcher for a user-space breakpoint hit: figures out whether
 * pc is at a registered probe address or inside a trampoline's return
 * break, and runs the matching handler.
 * Returns 0 when the event was handled, 1 to let the kernel handle it.
 */
static int uprobe_handler(struct pt_regs *regs)
{
	kprobe_opcode_t *addr = (kprobe_opcode_t *)(regs->ARM_pc);
	struct task_struct *task = current;
	pid_t tgid = task->tgid;
	struct kprobe *p;

	p = get_ukprobe(addr, tgid);
	if (p == NULL) {
		/* not a probe address: maybe pc sits at the return-break
		 * inside a trampoline; step back to the slot start */
		unsigned long offset_bp = thumb_mode(regs) ?
					  0x1a :
					  4 * UPROBES_TRAMP_RET_BREAK_IDX;
		void *tramp_addr = (void *)addr - offset_bp;

		p = get_ukprobe_by_insn_slot(tramp_addr, tgid, regs);
		if (p == NULL) {
			printk("no_uprobe: Not one of ours: let "
			       "kernel handle it %p\n", addr);
			return 1;
		}

		trampoline_uprobe_handler(p, regs);
	} else {
		/* lazily build the trampoline on the first hit */
		if (p->ainsn.insn == NULL) {
			struct uprobe *up = kp2up(p);

			if (make_trampoline(up, regs)) {
				printk("no_uprobe live\n");
				return 0;
			}

			/* for uretprobe */
			add_uprobe_table(p);
		}

		/* run the pre-handler; unless it claims the event,
		 * single-step the original instruction in the buffer */
		if (!p->pre_handler || !p->pre_handler(p, regs)) {
			prepare_singlestep(p, regs);
		}
	}

	return 0;
}
890
891 /**
892  * @brief Breakpoint instruction handler.
893  *
894  * @param regs Pointer to CPU register data.
895  * @param instr Instruction.
896  * @return uprobe_handler results.
897  */
898 int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
899 {
900         int ret;
901         unsigned long flags;
902         local_irq_save(flags);
903
904         preempt_disable();
905         ret = uprobe_handler(regs);
906         preempt_enable_no_resched();
907
908         local_irq_restore(flags);
909         return ret;
910 }
911
/* userspace probes hook (arm): catch the full 32-bit breakpoint
 * instruction in user mode and route it to uprobe_trap_handler() */
static struct undef_hook undef_hook_for_us_arm = {
	.instr_mask	= 0xffffffff,
	.instr_val	= BREAKPOINT_INSTRUCTION,
	.cpsr_mask	= MODE_MASK,
	.cpsr_val	= USR_MODE,
	.fn		= uprobe_trap_handler
};

/* userspace probes hook (thumb): same handler, but match only the
 * low halfword of the breakpoint encoding */
static struct undef_hook undef_hook_for_us_thumb = {
	.instr_mask	= 0xffffffff,
	.instr_val	= BREAKPOINT_INSTRUCTION & 0x0000ffff,
	.cpsr_mask	= MODE_MASK,
	.cpsr_val	= USR_MODE,
	.fn		= uprobe_trap_handler
};
929
930 /**
931  * @brief Installs breakpoint hooks.
932  *
933  * @return 0.
934  */
935 int swap_arch_init_uprobes(void)
936 {
937         swap_register_undef_hook(&undef_hook_for_us_arm);
938         swap_register_undef_hook(&undef_hook_for_us_thumb);
939
940         return 0;
941 }
942
943 /**
944  * @brief Uninstalls breakpoint hooks.
945  *
946  * @return Void.
947  */
948 void swap_arch_exit_uprobes(void)
949 {
950         swap_unregister_undef_hook(&undef_hook_for_us_thumb);
951         swap_unregister_undef_hook(&undef_hook_for_us_arm);
952 }