/**
 * uprobe/arch/arm/swap-asm/swap_uprobes.c
 * @author Alexey Gerenkov <a.gerenkov@samsung.com>: initial User-Space Probes
 * implementation; x86/ARM/MIPS support for both user and kernel spaces.
 * @author Ekaterina Gorelkina <e.gorelkina@samsung.com>: redesigned the module
 * to separate the core and arch-dependent parts
 *
 * @section LICENSE
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 *
 * @section COPYRIGHT
 *
 * Copyright (C) Samsung Electronics, 2006-2010
 *
 * @section DESCRIPTION
 *
 * Arch-dependent uprobe interface implementation for ARM.
 */


#include <linux/init.h>                 /* needed for asm/traps.h */
#include <linux/sched.h>                /* needed for asm/traps.h */

#include <asm/ptrace.h>                 /* needed for asm/traps.h */
#include <asm/traps.h>

#include <kprobe/swap_slots.h>
#include <kprobe/swap_kprobes.h>
#include <kprobe/swap_kprobes_deps.h>
#include <uprobe/swap_uprobes.h>

#include <swap-asm/swap_kprobes.h>
#include <swap-asm/trampoline_arm.h>

#include "swap_uprobes.h"
#include "trampoline_thumb.h"


/**
 * @def flush_insns
 * @brief Flushes the instruction cache for the given address range.
 */
#define flush_insns(addr, size)                                 \
        flush_icache_range((unsigned long)(addr),               \
                           (unsigned long)(addr) + (size))

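/*
 * The helpers below decode branch destinations straight from the Thumb
 * encodings so the trampolines can be seeded with a precomputed target.
 * Illustration for branch_t16_dest(): the 16-bit unconditional branch keeps
 * a signed 11-bit halfword offset in bits [10:0]; e.g. insn = 0xe7fe
 * ("b ." , offset = -2 halfwords) yields insn_addr + 4 - 4 = insn_addr.
 */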
/* destination of a 16-bit unconditional branch: signed imm11 halfword offset */
static inline long branch_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
{
        long offset = insn & 0x3ff;
        offset -= insn & 0x400;
        return (insn_addr + 4 + offset * 2);
}

/* destination of a 16-bit conditional branch: signed imm8 halfword offset */
static inline long branch_cond_t16_dest(kprobe_opcode_t insn,
                                        unsigned int insn_addr)
{
        long offset = insn & 0x7f;
        offset -= insn & 0x80;
        return (insn_addr + 4 + offset * 2);
}

/*
 * destination of a 32-bit BL/BLX; the target is word-aligned when bit 12
 * is clear (the BLX-to-ARM form)
 */
static inline long branch_t32_dest(kprobe_opcode_t insn, unsigned int insn_addr)
{
        unsigned int poff = insn & 0x3ff;
        unsigned int offset = (insn & 0x07fe0000) >> 17;

        poff -= (insn & 0x400);

        if (insn & (1 << 12))
                return ((insn_addr + 4 + (poff << 12) + offset * 4));
        else
                return ((insn_addr + 4 + (poff << 12) + offset * 4) & ~3);
}

/* destination of a 16-bit CBZ/CBNZ: i:imm5 halfword offset, forward only */
static inline long cbz_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
{
        unsigned int i = (insn & 0x200) >> 3;
        unsigned int offset = (insn & 0xf8) >> 2;
        return insn_addr + 4 + i + offset;
}

/* does this halfword start a 32-bit Thumb-2 encoding? */
static int is_thumb2(kprobe_opcode_t insn)
{
        return ((insn & 0xf800) == 0xe800 ||
                (insn & 0xf800) == 0xf000 ||
                (insn & 0xf800) == 0xf800);
}

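/*
 * Build the ARM-mode trampoline for the probed instruction and remember
 * whether it could be handled: a non-zero result from
 * arch_make_trampoline_arm() marks the probe as unsafe for ARM mode
 * (p->safe_arm), which make_trampoline() later uses when picking a mode.
 */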
static int arch_copy_trampoline_arm_uprobe(struct uprobe *up)
{
        int ret;
        struct kprobe *p = up2kp(up);
        unsigned long insn = p->opcode;
        unsigned long vaddr = (unsigned long)p->addr;
        unsigned long *tramp = up->atramp.tramp_arm;

        ret = arch_make_trampoline_arm(vaddr, insn, tramp);
        p->safe_arm = !!ret;

        return ret;
}

static int arch_check_insn_thumb(unsigned long insn)
{
        int ret = 0;

        /* check instructions that can change PC */
        if (THUMB_INSN_MATCH(UNDEF, insn) ||
            THUMB_INSN_MATCH(SWI, insn) ||
            THUMB_INSN_MATCH(BREAK, insn) ||
            THUMB2_INSN_MATCH(B1, insn) ||
            THUMB2_INSN_MATCH(B2, insn) ||
            THUMB2_INSN_MATCH(BXJ, insn) ||
            (THUMB2_INSN_MATCH(ADR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRHW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRWL, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
            THUMB2_INSN_MATCH(LDMIA, insn) ||
            THUMB2_INSN_MATCH(LDMDB, insn) ||
            (THUMB2_INSN_MATCH(DP, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(RORW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(ROR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(LSLW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(LSLW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(LSRW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            (THUMB2_INSN_MATCH(LSRW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
            /* skip PC, #-imm12 -> SP, #-imm8 and Tegra-hanging instructions */
            (THUMB2_INSN_MATCH(STRW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(STRBW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(STRHW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(STRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(STRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            (THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
            /* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
            (THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn) ||
             THUMB2_INSN_MATCH(STRD, insn))) {
                ret = -EFAULT;
        }

        return ret;
}

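/*
 * Patch a PC-dependent Thumb/Thumb-2 instruction for out-of-line execution
 * in pc_dep_insn_execbuf_thumb: the PC (r15) reference selected by @uregs is
 * rewritten to SP (r13), the trampoline being expected to present the
 * original PC value through SP while the patched instruction runs, so the
 * instruction is executed out of line rather than at its probed address.
 * When the instruction already uses r6/r7, the trampoline's scratch
 * registers are apparently remapped (mreg) so they do not clash.
 */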
static int prep_pc_dep_insn_execbuf_thumb(kprobe_opcode_t *insns,
                                          kprobe_opcode_t insn, int uregs)
{
        unsigned char mreg = 0;
        unsigned char reg = 0;

        if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
                reg = ((insn & 0xffff) & uregs) >> 8;
        } else if (THUMB_INSN_MATCH(MOV3, insn)) {
                if (((((unsigned char) insn) & 0xff) >> 3) == 15)
                        reg = (insn & 0xffff) & uregs;
                else
                        return 0;
        } else if (THUMB2_INSN_MATCH(ADR, insn)) {
                reg = ((insn >> 16) & uregs) >> 8;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
                   THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
                   THUMB2_INSN_MATCH(LDRWL, insn)) {
                reg = ((insn >> 16) & uregs) >> 12;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
                   THUMB2_INSN_MATCH(LDREX, insn)) {
                // LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
                reg = ((insn >> 16) & uregs) >> 12;
        } else if (THUMB2_INSN_MATCH(DP, insn)) {
                reg = ((insn >> 16) & uregs) >> 12;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(RSBW, insn)) {
                reg = ((insn >> 12) & uregs) >> 8;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(RORW, insn)) {
                reg = ((insn >> 12) & uregs) >> 8;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW1, insn) ||
                   THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW1, insn) ||
                   THUMB2_INSN_MATCH(LSRW2, insn)) {
                reg = ((insn >> 12) & uregs) >> 8;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
                reg = 15;
        } else if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
                reg = THUMB2_INSN_REG_RM(insn);
        }

        if ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
             THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
             THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn) ||
             THUMB2_INSN_MATCH(STRHW, insn)) && THUMB2_INSN_REG_RT(insn) == 15) {
                reg = THUMB2_INSN_REG_RT(insn);
        }

        if (reg == 6 || reg == 7) {
                *((unsigned short*)insns + 0) = (*((unsigned short*)insns + 0) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
                *((unsigned short*)insns + 1) = (*((unsigned short*)insns + 1) & 0xf8ff) | (mreg << 8);
                *((unsigned short*)insns + 2) = (*((unsigned short*)insns + 2) & 0xfff8) | (mreg + 1);
                *((unsigned short*)insns + 3) = (*((unsigned short*)insns + 3) & 0xffc7) | (mreg << 3);
                *((unsigned short*)insns + 7) = (*((unsigned short*)insns + 7) & 0xf8ff) | (mreg << 8);
                *((unsigned short*)insns + 8) = (*((unsigned short*)insns + 8) & 0xffc7) | (mreg << 3);
                *((unsigned short*)insns + 9) = (*((unsigned short*)insns + 9) & 0xffc7) | ((mreg + 1) << 3);
                *((unsigned short*)insns + 10) = (*((unsigned short*)insns + 10) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
        }

        if (THUMB_INSN_MATCH(APC, insn)) {
                // ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4
                *((unsigned short*)insns + 4) = ((insn & 0xffff) | 0x800);
        } else if (THUMB_INSN_MATCH(LRO3, insn)) {
                // LDR Rd, [PC, #immed_8*4] -> LDR Rd, [SP, #immed_8*4]
                *((unsigned short*)insns + 4) = ((insn & 0xffff) + 0x5000);
        } else if (THUMB_INSN_MATCH(MOV3, insn)) {
                // MOV Rd, PC -> MOV Rd, SP
                *((unsigned short*)insns + 4) = ((insn & 0xffff) ^ 0x10);
        } else if (THUMB2_INSN_MATCH(ADR, insn)) {
                // ADDW Rd, PC, #imm -> ADDW Rd, SP, #imm
                insns[2] = (insn & 0xfffffff0) | 0x0d;
        } else if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRBW, insn) ||
                   THUMB2_INSN_MATCH(LDRHW, insn)) {
                // LDR.W Rt, [PC, #-<imm_12>] -> LDR.W Rt, [SP, #-<imm_8>]
                // NOTE: the 12-bit immediate is narrowed to the 8-bit form here
                insns[2] = (insn & 0xf0fffff0) | 0x0c00000d;
        } else if (THUMB2_INSN_MATCH(LDRW1, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
                   THUMB2_INSN_MATCH(LDRHW1, insn) || THUMB2_INSN_MATCH(LDRD, insn) ||
                   THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(LDREX, insn)) {
                // LDRx.W Rt, [PC, #+<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>]
                // (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>])
                insns[2] = (insn & 0xfffffff0) | 0xd;
        } else if (THUMB2_INSN_MATCH(MUL, insn)) {
                insns[2] = (insn & 0xfff0ffff) | 0x000d0000;            // MUL Rd, Rn, SP
        } else if (THUMB2_INSN_MATCH(DP, insn)) {
                if (THUMB2_INSN_REG_RM(insn) == 15)
                        insns[2] = (insn & 0xfff0ffff) | 0x000d0000;    // DP Rd, Rn, PC
                else if (THUMB2_INSN_REG_RN(insn) == 15)
                        insns[2] = (insn & 0xfffffff0) | 0xd;           // DP Rd, PC, Rm
        } else if (THUMB2_INSN_MATCH(LDRWL, insn)) {
                // LDRx.W Rt, [PC, #<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>]
                insns[2] = (insn & 0xfffffff0) | 0xd;
        } else if (THUMB2_INSN_MATCH(RSBW, insn)) {
                // RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const>
                insns[2] = (insn & 0xfffffff0) | 0xd;
        } else if (THUMB2_INSN_MATCH(RORW, insn) || THUMB2_INSN_MATCH(LSLW1, insn) ||
                   THUMB2_INSN_MATCH(LSRW1, insn)) {
                if ((THUMB2_INSN_REG_RM(insn) == 15) && (THUMB2_INSN_REG_RN(insn) == 15))
                        insns[2] = (insn & 0xfffdfffd);                 // ROR.W Rd, PC, PC
                else if (THUMB2_INSN_REG_RM(insn) == 15)
                        insns[2] = (insn & 0xfff0ffff) | 0xd0000;       // ROR.W Rd, Rn, PC
                else if (THUMB2_INSN_REG_RN(insn) == 15)
                        insns[2] = (insn & 0xfffffff0) | 0xd;           // ROR.W Rd, PC, Rm
        } else if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) ||
                   THUMB2_INSN_MATCH(LSRW2, insn)) {
                // ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const>
                insns[2] = (insn & 0xfff0ffff) | 0xd0000;
        }

        if (THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn)) {
                insns[2] = (insn & 0xfff0ffff) | 0x000d0000;            // STRx.W Rt, [Rn, SP]
        } else if (THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
                   THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) {
                if (THUMB2_INSN_REG_RN(insn) == 15)
                        insns[2] = (insn & 0xfffffff0) | 0xd;           // STRD/T/HT{.W} Rt, [SP, ...]
                else
                        insns[2] = insn;
        } else if (THUMB2_INSN_MATCH(STRHW, insn) && (THUMB2_INSN_REG_RN(insn) == 15)) {
                insns[2] = (insn & 0xf0fffff0) | 0x0c00000d;            // STRH.W Rt, [SP, #-<imm_8>]
        }

        // STRx PC, xxx
        if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn)   ||
                            THUMB2_INSN_MATCH(STRBW, insn)  ||
                            THUMB2_INSN_MATCH(STRD, insn)   ||
                            THUMB2_INSN_MATCH(STRHT, insn)  ||
                            THUMB2_INSN_MATCH(STRT, insn)   ||
                            THUMB2_INSN_MATCH(STRHW1, insn) ||
                            THUMB2_INSN_MATCH(STRHW, insn))) {
                insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;
        }

        if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
                insns[2] = (insn & 0xfffffff0) | 0xd;                   // TEQ SP, #<const>
        } else if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
                if ((THUMB2_INSN_REG_RN(insn) == 15) && (THUMB2_INSN_REG_RM(insn) == 15))
                        insns[2] = (insn & 0xfffdfffd);                 // TEQ/TST PC, PC
                else if (THUMB2_INSN_REG_RM(insn) == 15)
                        insns[2] = (insn & 0xfff0ffff) | 0xd0000;       // TEQ/TST Rn, PC
                else if (THUMB2_INSN_REG_RN(insn) == 15)
                        insns[2] = (insn & 0xfffffff0) | 0xd;           // TEQ/TST PC, Rm
        }

        return 0;
}

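/*
 * Build the Thumb-mode trampoline: classify the probed instruction, choose
 * the matching execution buffer (generic, PC-dependent, or one of the branch
 * buffers), patch it with the instruction, the precomputed branch target and
 * the return address, and mark the probe unsafe for Thumb (p->safe_thumb)
 * when the instruction cannot be single-stepped out of line.
 */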
static int arch_copy_trampoline_thumb_uprobe(struct uprobe *up)
{
        int uregs, pc_dep;
        struct kprobe *p = up2kp(up);
        unsigned int addr;
        unsigned long vaddr = (unsigned long)p->addr;
        unsigned long insn = p->opcode;
        unsigned long *tramp = up->atramp.tramp_thumb;
        enum { tramp_len = sizeof(up->atramp.tramp_thumb) };

        p->safe_thumb = 1;
        if (vaddr & 0x01) {
                printk("Error in %s at %d: attempt to register uprobe "
                       "at an unaligned address\n", __FILE__, __LINE__);
                return -EINVAL;
        }

        if (!arch_check_insn_thumb(insn)) {
                p->safe_thumb = 0;
        }

        uregs = 0;
        pc_dep = 0;

        if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
                uregs = 0x0700;         /* 8-10 */
                pc_dep = 1;
        } else if (THUMB_INSN_MATCH(MOV3, insn) &&
                   (((((unsigned char)insn) & 0xff) >> 3) == 15)) {
                /* MOV Rd, PC */
                uregs = 0x07;
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(ADR, insn)) {
                uregs = 0x0f00;         /* Rd 8-11 */
                pc_dep = 1;
        } else if (((THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
                     THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
                     THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
                     THUMB2_INSN_MATCH(LDRWL, insn)) && THUMB2_INSN_REG_RN(insn) == 15) ||
                   THUMB2_INSN_MATCH(LDREX, insn) ||
                   ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
                     THUMB2_INSN_MATCH(STRHW, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) &&
                    (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15)) ||
                   ((THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHT, insn)) &&
                    (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15))) {
                uregs = 0xf000;         /* Rt 12-15 */
                pc_dep = 1;
        } else if ((THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn)) &&
                   (THUMB2_INSN_REG_RN(insn) == 15)) {
                uregs = 0xff00;         /* Rt 12-15, Rt2 8-11 */
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(MUL, insn) && THUMB2_INSN_REG_RM(insn) == 15) {
                uregs = 0xf;
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(DP, insn) &&
                   (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
                uregs = 0xf000;         /* Rd 12-15 */
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(STRD, insn) &&
                   ((THUMB2_INSN_REG_RN(insn) == 15) || (THUMB2_INSN_REG_RT(insn) == 15) ||
                    THUMB2_INSN_REG_RT2(insn) == 15)) {
                uregs = 0xff00;         /* Rt 12-15, Rt2 8-11 */
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) {
                uregs = 0x0f00;         /* Rd 8-11 */
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(RORW, insn) &&
                   (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
                uregs = 0x0f00;
                pc_dep = 1;
        } else if ((THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) ||
                    THUMB2_INSN_MATCH(LSRW2, insn)) && THUMB2_INSN_REG_RM(insn) == 15) {
                uregs = 0x0f00;         /* Rd 8-11 */
                pc_dep = 1;
        } else if ((THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) &&
                   (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
                uregs = 0x0f00;         /* Rd 8-11 */
                pc_dep = 1;
        } else if ((THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) &&
                   THUMB2_INSN_REG_RN(insn) == 15) {
                uregs = 0xf0000;        /* Rn 0-3 (16-19) */
                pc_dep = 1;
        } else if ((THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) &&
                   (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
                uregs = 0xf0000;        /* Rn 0-3 (16-19) */
                pc_dep = 1;
        }

        if (unlikely(uregs && pc_dep)) {
                memcpy(tramp, pc_dep_insn_execbuf_thumb, tramp_len);
                if (prep_pc_dep_insn_execbuf_thumb(tramp, insn, uregs) != 0) {
                        printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!\n",
                               __FILE__, __LINE__, insn);
                        p->safe_thumb = 1;
                }

                addr = vaddr + 4;
                *((unsigned short*)tramp + 13) = 0xdeff;
                *((unsigned short*)tramp + 14) = addr & 0x0000ffff;
                *((unsigned short*)tramp + 15) = addr >> 16;
                if (!is_thumb2(insn)) {
                        addr = vaddr + 2;
                        *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                        *((unsigned short*)tramp + 17) = addr >> 16;
                } else {
                        addr = vaddr + 4;
                        *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                        *((unsigned short*)tramp + 17) = addr >> 16;
                }
        } else {
                memcpy(tramp, gen_insn_execbuf_thumb, tramp_len);
                *((unsigned short*)tramp + 13) = 0xdeff;
                if (!is_thumb2(insn)) {
                        addr = vaddr + 2;
                        *((unsigned short*)tramp + 2) = insn;
                        *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                        *((unsigned short*)tramp + 17) = addr >> 16;
                } else {
                        addr = vaddr + 4;
                        tramp[1] = insn;
                        *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                        *((unsigned short*)tramp + 17) = addr >> 16;
                }
        }

        if (THUMB_INSN_MATCH(B2, insn)) {
                memcpy(tramp, b_off_insn_execbuf_thumb, tramp_len);
                *((unsigned short*)tramp + 13) = 0xdeff;
                addr = branch_t16_dest(insn, vaddr);
                *((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short*)tramp + 15) = addr >> 16;
                *((unsigned short*)tramp + 16) = 0;
                *((unsigned short*)tramp + 17) = 0;

        } else if (THUMB_INSN_MATCH(B1, insn)) {
                memcpy(tramp, b_cond_insn_execbuf_thumb, tramp_len);
                *((unsigned short*)tramp + 13) = 0xdeff;
                *((unsigned short*)tramp + 0) |= (insn & 0xf00);
                addr = branch_cond_t16_dest(insn, vaddr);
                *((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short*)tramp + 15) = addr >> 16;
                addr = vaddr + 2;
                *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short*)tramp + 17) = addr >> 16;

        } else if (THUMB_INSN_MATCH(BLX2, insn) ||
                   THUMB_INSN_MATCH(BX, insn)) {
                memcpy(tramp, b_r_insn_execbuf_thumb, tramp_len);
                *((unsigned short*)tramp + 13) = 0xdeff;
                *((unsigned short*)tramp + 4) = insn;
                addr = vaddr + 2;
                *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short*)tramp + 17) = addr >> 16;

        } else if (THUMB2_INSN_MATCH(BLX1, insn) ||
                   THUMB2_INSN_MATCH(BL, insn)) {
                memcpy(tramp, blx_off_insn_execbuf_thumb, tramp_len);
                *((unsigned short*)tramp + 13) = 0xdeff;
                addr = branch_t32_dest(insn, vaddr);
                *((unsigned short*)tramp + 14) = (addr & 0x0000ffff);
                *((unsigned short*)tramp + 15) = addr >> 16;
                addr = vaddr + 4;
                *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short*)tramp + 17) = addr >> 16;

        } else if (THUMB_INSN_MATCH(CBZ, insn)) {
                memcpy(tramp, cbz_insn_execbuf_thumb, tramp_len);
                *((unsigned short*)tramp + 13) = 0xdeff;
                /* zero out original branch displacement (imm5 = 0; i = 0) */
                *((unsigned short*)tramp + 0) = insn & (~0x2f8);
                /* replace it with 8 bytes offset in execbuf (imm5 = 0b00010) */
                *((unsigned short*)tramp + 0) |= 0x20;
                addr = cbz_t16_dest(insn, vaddr);
                *((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short*)tramp + 15) = addr >> 16;
                addr = vaddr + 2;
                *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short*)tramp + 17) = addr >> 16;
        }

        return 0;
}

/**
 * @brief Prepares uprobe for ARM.
 *
 * @param up Pointer to the uprobe.
 * @return 0 on success,\n
 * negative error code on error.
 */
int arch_prepare_uprobe(struct uprobe *up)
{
        struct kprobe *p = up2kp(up);
        struct task_struct *task = up->task;
        unsigned long vaddr = (unsigned long)p->addr;
        unsigned long insn;

        if (vaddr & 0x01) {
                printk("Error in %s at %d: attempt to register uprobe "
                       "at an unaligned address\n", __FILE__, __LINE__);
                return -EINVAL;
        }

        if (!read_proc_vm_atomic(task, vaddr, &insn, sizeof(insn)))
                panic("failed to read memory %lx!\n", vaddr);

        p->opcode = insn;

        arch_copy_trampoline_arm_uprobe(up);
        arch_copy_trampoline_thumb_uprobe(up);

        if ((p->safe_arm) && (p->safe_thumb)) {
                printk("Error in %s at %d: failed "
                       "arch_copy_trampoline_*_uprobe() (both) "
                       "[tgid=%u, addr=%lx, data=%lx]\n",
                       __FILE__, __LINE__, task->tgid, vaddr, insn);
                return -EFAULT;
        }

        up->atramp.utramp = swap_slot_alloc(up->sm);
        if (up->atramp.utramp == NULL) {
                printk("Error: swap_slot_alloc failed (%08lx)\n", vaddr);
                return -ENOMEM;
        }

        return 0;
}

/**
 * @brief Analyzes the probed opcode for a uretprobe.
 *
 * @param rp Pointer to the uretprobe.
 * @return Void.
 */
void arch_opcode_analysis_uretprobe(struct uretprobe *rp)
{
        /* Remove the retprobe if the first insn overwrites LR */
        rp->thumb_noret = !!(THUMB2_INSN_MATCH(BL, rp->up.kp.opcode) ||
                             THUMB2_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
                             THUMB_INSN_MATCH(BLX2, rp->up.kp.opcode));

        rp->arm_noret = !!(ARM_INSN_MATCH(BL, rp->up.kp.opcode) ||
                           ARM_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
                           ARM_INSN_MATCH(BLX2, rp->up.kp.opcode));
}

/**
 * @brief Prepares uretprobe for ARM.
 *
 * @param ri Pointer to the uretprobe instance.
 * @param regs Pointer to CPU register data.
 * @return Void.
 */
void arch_prepare_uretprobe(struct uretprobe_instance *ri,
                            struct pt_regs *regs)
{
        ri->ret_addr = (kprobe_opcode_t *)regs->ARM_lr;
        ri->sp = (kprobe_opcode_t *)regs->ARM_sp;

        /* Remember the current mode in bit 0 of the saved SP */
        ri->sp = (kprobe_opcode_t *)((long)ri->sp | !!thumb_mode(regs));

        if (thumb_mode(regs)) {
                regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn) + 0x1b;
        } else {
                regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn +
                                               UPROBES_TRAMP_RET_BREAK_IDX);
        }
}

/**
 * @brief Disarms uretprobe instance.
 *
 * @param ri Pointer to the uretprobe instance.
 * @param task Pointer to the task the uretprobe instance was planted for.
 * @return 0 on success,\n
 * negative error code on error.
 */
int arch_disarm_urp_inst(struct uretprobe_instance *ri,
                         struct task_struct *task)
{
        struct pt_regs *uregs = task_pt_regs(ri->task);
        unsigned long ra = swap_get_ret_addr(uregs);
        unsigned long *tramp;
        unsigned long *sp = (unsigned long *)((long)ri->sp & ~1);
        unsigned long *stack = sp - RETPROBE_STACK_DEPTH + 1;
        unsigned long *found = NULL;
        unsigned long *buf[RETPROBE_STACK_DEPTH];
        int i, retval;

        /* Understand function mode */
        if ((long)ri->sp & 1) {
                tramp = (unsigned long *)
                        ((unsigned long)ri->rp->up.kp.ainsn.insn + 0x1b);
        } else {
                tramp = (unsigned long *)(ri->rp->up.kp.ainsn.insn +
                                          UPROBES_TRAMP_RET_BREAK_IDX);
        }

        /* check stack */
        retval = read_proc_vm_atomic(task, (unsigned long)stack,
                                     buf, sizeof(buf));
        if (retval != sizeof(buf)) {
                printk("---> %s (%d/%d): failed to read stack from %08lx\n",
                       task->comm, task->tgid, task->pid,
                       (unsigned long)stack);
                retval = -EFAULT;
                goto check_lr;
        }

        /* search the stack from the bottom */
        for (i = RETPROBE_STACK_DEPTH - 1; i >= 0; i--) {
                if (buf[i] == tramp) {
                        found = stack + i;
                        break;
                }
        }

        if (!found) {
                retval = -ESRCH;
                goto check_lr;
        }

        printk("---> %s (%d/%d): trampoline found at "
               "%08lx (%08lx /%+d) - %p\n",
               task->comm, task->tgid, task->pid,
               (unsigned long)found, (unsigned long)sp,
               found - sp, ri->rp->up.kp.addr);
        retval = write_proc_vm_atomic(task, (unsigned long)found,
                                      &ri->ret_addr,
                                      sizeof(ri->ret_addr));
        if (retval != sizeof(ri->ret_addr)) {
                printk("---> %s (%d/%d): failed to write value to %08lx\n",
                       task->comm, task->tgid, task->pid, (unsigned long)found);
                retval = -EFAULT;
        } else {
                retval = 0;
        }

check_lr: /* check lr anyway */
        if (ra == (unsigned long)tramp) {
                printk("---> %s (%d/%d): trampoline found at "
                       "lr = %08lx - %p\n",
                       task->comm, task->tgid, task->pid,
                       ra, ri->rp->up.kp.addr);
                swap_set_ret_addr(uregs, (unsigned long)ri->ret_addr);
                retval = 0;
        } else if (retval) {
                printk("---> %s (%d/%d): trampoline NOT found at "
                       "sp = %08lx, lr = %08lx - %p\n",
                       task->comm, task->tgid, task->pid,
                       (unsigned long)sp, ra, ri->rp->up.kp.addr);
        }

        return retval;
}

/**
 * @brief Jump pre-handler.
 *
 * @param p Pointer to the kprobe.
 * @param regs Pointer to CPU register data.
 * @return 0.
 */
int setjmp_upre_handler(struct kprobe *p, struct pt_regs *regs)
{
        struct uprobe *up = container_of(p, struct uprobe, kp);
        struct ujprobe *jp = container_of(up, struct ujprobe, up);

        kprobe_pre_entry_handler_t pre_entry =
                (kprobe_pre_entry_handler_t)jp->pre_entry;
        entry_point_t entry = (entry_point_t)jp->entry;

        if (pre_entry) {
                p->ss_addr[smp_processor_id()] = (kprobe_opcode_t *)
                                                 pre_entry(jp->priv_arg, regs);
        }

        if (entry) {
                entry(regs->ARM_r0, regs->ARM_r1, regs->ARM_r2,
                      regs->ARM_r3, regs->ARM_r4, regs->ARM_r5);
        } else {
                arch_ujprobe_return();
        }

        return 0;
}

/**
 * @brief Gets trampoline address.
 *
 * @param p Pointer to the kprobe.
 * @param regs Pointer to CPU register data.
 * @return Trampoline address.
 */
unsigned long arch_get_trampoline_addr(struct kprobe *p, struct pt_regs *regs)
{
        return thumb_mode(regs) ?
                        (unsigned long)(p->ainsn.insn) + 0x1b :
                        (unsigned long)(p->ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
}

/**
 * @brief Restores return address.
 *
 * @param orig_ret_addr Original return address.
 * @param regs Pointer to CPU register data.
 * @return Void.
 */
void arch_set_orig_ret_addr(unsigned long orig_ret_addr, struct pt_regs *regs)
{
        regs->ARM_lr = orig_ret_addr;
        regs->ARM_pc = orig_ret_addr & ~0x1;

        if (regs->ARM_lr & 0x1)
                regs->ARM_cpsr |= PSR_T_BIT;
        else
                regs->ARM_cpsr &= ~PSR_T_BIT;
}

/**
 * @brief Removes uprobe.
 *
 * @param up Pointer to the uprobe.
 * @return Void.
 */
void arch_remove_uprobe(struct uprobe *up)
{
        swap_slot_free(up->sm, up->atramp.utramp);
}

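/*
 * For a 16-bit Thumb instruction the saved opcode also contains the
 * following halfword; writing the breakpoint apparently clobbers it, so put
 * the original second halfword back and flush the icache for that word.
 */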
static void restore_opcode_for_thumb(struct kprobe *p, struct pt_regs *regs)
{
        if (thumb_mode(regs) && !is_thumb2(p->opcode)) {
                u16 tmp = p->opcode >> 16;
                write_proc_vm_atomic(current,
                                (unsigned long)((u16*)p->addr + 1), &tmp, 2);
                flush_insns(p->addr, 4);
        }
}

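/*
 * Lazily create the user-space trampoline the first time the probe is hit:
 * pick the ARM or Thumb execution buffer according to the CPU mode and the
 * safe_arm/safe_thumb flags, copy it into the probe's slot in the target
 * process and point p->ainsn.insn at it.  Returns non-zero (and disarms the
 * probe) when no usable buffer exists for the current mode.
 */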
static int make_trampoline(struct uprobe *up, struct pt_regs *regs)
{
        unsigned long *tramp, *utramp;
        struct kprobe *p = up2kp(up);
        int sw;

        /*
         * bit 0 - thumb mode           (0 - arm, 1 - thumb)
         * bit 1 - arm mode support     (0 - off, 1 - on)
         * bit 2 - thumb mode support   (0 - off, 1 - on)
         */
        sw = (!!thumb_mode(regs)) |
             (int)!p->safe_arm << 1 |
             (int)!p->safe_thumb << 2;

        switch (sw) {
        /* ARM */
        case 0b110:
        case 0b010:
                tramp = up->atramp.tramp_arm;
                break;
        /* THUMB */
        case 0b111:
        case 0b101:
                restore_opcode_for_thumb(p, regs);
                tramp = up->atramp.tramp_thumb;
                break;
        default:
                printk("Error in %s at %d: no trampoline for the current "
                       "mode, the instruction check failed "
                       "(%0lX instruction at %p address)!\n",
                       __FILE__, __LINE__, p->opcode, p->addr);

                disarm_uprobe(p, up->task);

                return 1;
        }

        utramp = up->atramp.utramp;

        if (!write_proc_vm_atomic(up->task, (unsigned long)utramp, tramp,
                                  UPROBES_TRAMP_LEN * sizeof(*tramp)))
                panic("failed to write memory %p!\n", utramp);
        flush_insns(utramp, UPROBES_TRAMP_LEN * sizeof(*tramp));

        p->ainsn.insn = utramp;

        return 0;
}

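/*
 * Breakpoint dispatcher: find the probe registered for the faulting address
 * (or, failing that, the probe whose trampoline return break was hit),
 * build the trampoline on first hit, run the pre-handler and set the task
 * up to single-step through the out-of-line copy.
 */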
static int uprobe_handler(struct pt_regs *regs)
{
        kprobe_opcode_t *addr = (kprobe_opcode_t *)(regs->ARM_pc);
        struct task_struct *task = current;
        pid_t tgid = task->tgid;
        struct kprobe *p;

        p = get_ukprobe(addr, tgid);
        if (p == NULL) {
                unsigned long offset_bp = thumb_mode(regs) ?
                                          0x1a :
                                          4 * UPROBES_TRAMP_RET_BREAK_IDX;
                void *tramp_addr = (void *)addr - offset_bp;

                p = get_ukprobe_by_insn_slot(tramp_addr, tgid, regs);
                if (p == NULL) {
                        printk("no_uprobe: Not one of ours: let "
                               "kernel handle it %p\n", addr);
                        return 1;
                }

                trampoline_uprobe_handler(p, regs);
        } else {
                if (p->ainsn.insn == NULL) {
                        struct uprobe *up = kp2up(p);

                        if (make_trampoline(up, regs)) {
                                printk("no_uprobe live\n");
                                return 0;
                        }

                        /* for uretprobe */
                        add_uprobe_table(p);
                }

                if (!p->pre_handler || !p->pre_handler(p, regs)) {
                        prepare_singlestep(p, regs);
                }
        }

        return 0;
}

/**
 * @brief Breakpoint instruction handler.
 *
 * @param regs Pointer to CPU register data.
 * @param instr Instruction.
 * @return uprobe_handler() result.
 */
int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
{
        int ret;
        unsigned long flags;

        local_irq_save(flags);

        preempt_disable();
        ret = uprobe_handler(regs);
        preempt_enable_no_resched();

        local_irq_restore(flags);
        return ret;
}

/* userspace probes hook (arm) */
static struct undef_hook undef_hook_for_us_arm = {
        .instr_mask     = 0xffffffff,
        .instr_val      = BREAKPOINT_INSTRUCTION,
        .cpsr_mask      = MODE_MASK,
        .cpsr_val       = USR_MODE,
        .fn             = uprobe_trap_handler
};

/* userspace probes hook (thumb) */
static struct undef_hook undef_hook_for_us_thumb = {
        .instr_mask     = 0xffffffff,
        .instr_val      = BREAKPOINT_INSTRUCTION & 0x0000ffff,
        .cpsr_mask      = MODE_MASK,
        .cpsr_val       = USR_MODE,
        .fn             = uprobe_trap_handler
};

/**
 * @brief Installs breakpoint hooks.
 *
 * @return 0.
 */
int swap_arch_init_uprobes(void)
{
        swap_register_undef_hook(&undef_hook_for_us_arm);
        swap_register_undef_hook(&undef_hook_for_us_thumb);

        return 0;
}

/**
 * @brief Uninstalls breakpoint hooks.
 *
 * @return Void.
 */
void swap_arch_exit_uprobes(void)
{
        swap_unregister_undef_hook(&undef_hook_for_us_thumb);
        swap_unregister_undef_hook(&undef_hook_for_us_arm);
}