Merge branch 'tizen_2.4' into tizen_2.4_dev
[kernel/swap-modules.git] / uprobe / arch / arm / swap-asm / swap_uprobes.c
1 /**
2  * uprobe/arch/asm-arm/swap_uprobes.c
3  * @author Alexey Gerenkov <a.gerenkov@samsung.com> User-Space Probes initial
4  * implementation; Support x86/ARM/MIPS for both user and kernel spaces.
5  * @author Ekaterina Gorelkina <e.gorelkina@samsung.com>: redesign module for
6  * separating core and arch parts
7  *
8  * @section LICENSE
9  *
10  * This program is free software; you can redistribute it and/or modify
11  * it under the terms of the GNU General Public License as published by
12  * the Free Software Foundation; either version 2 of the License, or
13  * (at your option) any later version.
14  *
15  * This program is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
18  * GNU General Public License for more details.
19  *
20  * You should have received a copy of the GNU General Public License
21  * along with this program; if not, write to the Free Software
22  * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
23  *
24  * @section COPYRIGHT
25  *
26  * Copyright (C) Samsung Electronics, 2006-2010
27  *
28  * @section DESCRIPTION
29  *
30  * Arch-dependent uprobe interface implementation for ARM.
31  */
32
33
34 #include <linux/init.h>                 /* need for asm/traps.h */
35 #include <linux/sched.h>                /* need for asm/traps.h */
36
37 #include <linux/ptrace.h>               /* need for asm/traps.h */
38 #include <asm/traps.h>
39
40 #include <kprobe/swap_slots.h>
41 #include <kprobe/swap_kprobes.h>
42 #include <kprobe/swap_kprobes_deps.h>
43 #include <uprobe/swap_uprobes.h>
44
45 #include <swap-asm/swap_kprobes.h>
46 #include <swap-asm/trampoline_arm.h>
47
48 #include "swap_uprobes.h"
49 #include "trampoline_thumb.h"
50
51
/* User-space breakpoint encodings: the full breakpoint word for ARM mode
 * and its low halfword for Thumb mode (not referenced in this chunk --
 * presumably used by the arming code elsewhere; TODO confirm). */
#define UBP_ARM         (BREAKPOINT_INSTRUCTION)
#define UBP_THUMB       (BREAKPOINT_INSTRUCTION & 0xffff)

/**
 * @def flush_insns
 * @brief Flushes instructions.
 *
 * Flushes the instruction cache over [addr, addr + size) so trampoline
 * code just written to memory becomes visible to instruction fetch.
 */
#define flush_insns(addr, size)                                 \
        flush_icache_range((unsigned long)(addr),               \
                           (unsigned long)(addr) + (size))
62
63 static inline long branch_t16_dest(uprobe_opcode_t insn, unsigned int insn_addr)
64 {
65         long offset = insn & 0x3ff;
66         offset -= insn & 0x400;
67         return insn_addr + 4 + offset * 2;
68 }
69
70 static inline long branch_cond_t16_dest(uprobe_opcode_t insn,
71                                         unsigned int insn_addr)
72 {
73         long offset = insn & 0x7f;
74         offset -= insn & 0x80;
75         return insn_addr + 4 + offset * 2;
76 }
77
78 static inline long branch_t32_dest(uprobe_opcode_t insn, unsigned int insn_addr)
79 {
80         unsigned int poff = insn & 0x3ff;
81         unsigned int offset = (insn & 0x07fe0000) >> 17;
82
83         poff -= (insn & 0x400);
84
85         if (insn & (1 << 12))
86                 return insn_addr + 4 + (poff << 12) + offset * 4;
87         else
88         return (insn_addr + 4 + (poff << 12) + offset * 4) & ~3;
89 }
90
91 static inline long cbz_t16_dest(uprobe_opcode_t insn, unsigned int insn_addr)
92 {
93         unsigned int i = (insn & 0x200) >> 3;
94         unsigned int offset = (insn & 0xf8) >> 2;
95         return insn_addr + 4 + i + offset;
96 }
97
98 /* is instruction Thumb2 and NOT a branch, etc... */
99 static int is_thumb2(uprobe_opcode_t insn)
100 {
101         return ((insn & 0xf800) == 0xe800 ||
102                 (insn & 0xf800) == 0xf000 ||
103                 (insn & 0xf800) == 0xf800);
104 }
105
106 static int arch_check_insn_thumb(unsigned long insn)
107 {
108         int ret = 0;
109
110         /* check instructions that can change PC */
111         if (THUMB_INSN_MATCH(UNDEF, insn) ||
112             THUMB_INSN_MATCH(SWI, insn) ||
113             THUMB_INSN_MATCH(BREAK, insn) ||
114             THUMB2_INSN_MATCH(B1, insn) ||
115             THUMB2_INSN_MATCH(B2, insn) ||
116             THUMB2_INSN_MATCH(BXJ, insn) ||
117             (THUMB2_INSN_MATCH(ADR, insn) &&
118              THUMB2_INSN_REG_RD(insn) == 15) ||
119             (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
120             (THUMB2_INSN_MATCH(LDRW1, insn) &&
121              THUMB2_INSN_REG_RT(insn) == 15) ||
122             (THUMB2_INSN_MATCH(LDRHW, insn) &&
123              THUMB2_INSN_REG_RT(insn) == 15) ||
124             (THUMB2_INSN_MATCH(LDRHW1, insn) &&
125              THUMB2_INSN_REG_RT(insn) == 15) ||
126             (THUMB2_INSN_MATCH(LDRWL, insn) &&
127              THUMB2_INSN_REG_RT(insn) == 15) ||
128             THUMB2_INSN_MATCH(LDMIA, insn) ||
129             THUMB2_INSN_MATCH(LDMDB, insn) ||
130             (THUMB2_INSN_MATCH(DP, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
131             (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
132             (THUMB2_INSN_MATCH(RORW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
133             (THUMB2_INSN_MATCH(ROR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
134             (THUMB2_INSN_MATCH(LSLW1, insn) &&
135              THUMB2_INSN_REG_RD(insn) == 15) ||
136             (THUMB2_INSN_MATCH(LSLW2, insn) &&
137              THUMB2_INSN_REG_RD(insn) == 15) ||
138             (THUMB2_INSN_MATCH(LSRW1, insn) &&
139              THUMB2_INSN_REG_RD(insn) == 15) ||
140             (THUMB2_INSN_MATCH(LSRW2, insn) &&
141              THUMB2_INSN_REG_RD(insn) == 15) ||
142             /* skip PC, #-imm12 -> SP, #-imm8 and Tegra-hanging instructions */
143             (THUMB2_INSN_MATCH(STRW1, insn) &&
144              THUMB2_INSN_REG_RN(insn) == 15) ||
145             (THUMB2_INSN_MATCH(STRBW1, insn) &&
146              THUMB2_INSN_REG_RN(insn) == 15) ||
147             (THUMB2_INSN_MATCH(STRHW1, insn) &&
148              THUMB2_INSN_REG_RN(insn) == 15) ||
149             (THUMB2_INSN_MATCH(STRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
150             (THUMB2_INSN_MATCH(STRHW, insn) &&
151              THUMB2_INSN_REG_RN(insn) == 15) ||
152             (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
153             (THUMB2_INSN_MATCH(LDRBW, insn) &&
154              THUMB2_INSN_REG_RN(insn) == 15) ||
155             (THUMB2_INSN_MATCH(LDRHW, insn) &&
156              THUMB2_INSN_REG_RN(insn) == 15) ||
157             /* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
158             (THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn) ||
159              THUMB2_INSN_MATCH(STRD, insn))) {
160                 ret = -EFAULT;
161         }
162
163         return ret;
164 }
165
/*
 * Patch a PC-dependent Thumb/Thumb2 instruction into the execution
 * buffer @insns for out-of-line execution: the surrounding trampoline
 * loads SP (r13) with the original PC value, so every PC reference in
 * the instruction is rewritten here to reference SP instead.
 *
 * @insns  execution buffer (already a copy of pc_dep_insn_execbuf_thumb)
 * @insn   the original probed instruction
 * @uregs  mask selecting which register field(s) of the encoding to
 *         inspect (chosen per-encoding by arch_make_trampoline_thumb)
 *
 * Always returns 0 (early returns mean "no rewrite needed").
 *
 * NOTE(review): @mreg is never changed from its initial 0 before it is
 * used in the reg == 6/7 fixup below -- presumably the buffer's scratch
 * register pair is always r0/r1; confirm against the layout of
 * pc_dep_insn_execbuf_thumb.
 */
static int prep_pc_dep_insn_execbuf_thumb(uprobe_opcode_t *insns,
                                          uprobe_opcode_t insn, int uregs)
{
        unsigned char mreg = 0;
        unsigned char reg = 0;

        /* extract the register the instruction reads/writes via @uregs;
         * bail out early (return 0) when the field is PC itself and the
         * encoding cannot be rewritten */
        if (THUMB_INSN_MATCH(APC, insn) ||
            THUMB_INSN_MATCH(LRO3, insn)) {
                reg = ((insn & 0xffff) & uregs) >> 8;
        } else if (THUMB_INSN_MATCH(MOV3, insn)) {
                if (((((unsigned char)insn) & 0xff) >> 3) == 15)
                        reg = (insn & 0xffff) & uregs;
                else
                        return 0;
        } else if (THUMB2_INSN_MATCH(ADR, insn)) {
                reg = ((insn >> 16) & uregs) >> 8;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(LDRW, insn) ||
                   THUMB2_INSN_MATCH(LDRW1, insn) ||
                   THUMB2_INSN_MATCH(LDRHW, insn) ||
                   THUMB2_INSN_MATCH(LDRHW1, insn) ||
                   THUMB2_INSN_MATCH(LDRWL, insn)) {
                reg = ((insn >> 16) & uregs) >> 12;
                if (reg == 15)
                        return 0;
        /*
         * LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
         */
        } else if (THUMB2_INSN_MATCH(LDRBW, insn) ||
                   THUMB2_INSN_MATCH(LDRBW1, insn) ||
                   THUMB2_INSN_MATCH(LDREX, insn)) {
                reg = ((insn >> 16) & uregs) >> 12;
        } else if (THUMB2_INSN_MATCH(DP, insn)) {
                reg = ((insn >> 16) & uregs) >> 12;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(RSBW, insn)) {
                reg = ((insn >> 12) & uregs) >> 8;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(RORW, insn)) {
                reg = ((insn >> 12) & uregs) >> 8;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(ROR, insn) ||
                   THUMB2_INSN_MATCH(LSLW1, insn) ||
                   THUMB2_INSN_MATCH(LSLW2, insn) ||
                   THUMB2_INSN_MATCH(LSRW1, insn) ||
                   THUMB2_INSN_MATCH(LSRW2, insn)) {
                reg = ((insn >> 12) & uregs) >> 8;
                if (reg == 15)
                        return 0;
        } else if (THUMB2_INSN_MATCH(TEQ1, insn) ||
                   THUMB2_INSN_MATCH(TST1, insn)) {
                reg = 15;
        } else if (THUMB2_INSN_MATCH(TEQ2, insn) ||
                   THUMB2_INSN_MATCH(TST2, insn)) {
                reg = THUMB2_INSN_REG_RM(insn);
        }

        /* stores of the PC itself use Rt as the interesting register */
        if ((THUMB2_INSN_MATCH(STRW, insn) ||
             THUMB2_INSN_MATCH(STRBW, insn) ||
             THUMB2_INSN_MATCH(STRD, insn) ||
             THUMB2_INSN_MATCH(STRHT, insn) ||
             THUMB2_INSN_MATCH(STRT, insn) ||
             THUMB2_INSN_MATCH(STRHW1, insn) ||
             THUMB2_INSN_MATCH(STRHW, insn)) &&
            THUMB2_INSN_REG_RT(insn) == 15) {
                reg = THUMB2_INSN_REG_RT(insn);
        }

        /* If the instruction already uses r6/r7 (the execbuf's default
         * working registers -- see NOTE above), re-patch the buffer's
         * push/pop, moves and shift slots to use (mreg, mreg+1) instead. */
        if (reg == 6 || reg == 7) {
                *((unsigned short *)insns + 0) =
                        (*((unsigned short *)insns + 0) & 0x00ff) |
                        ((1 << mreg) | (1 << (mreg + 1)));
                *((unsigned short *)insns + 1) =
                        (*((unsigned short *)insns + 1) & 0xf8ff) | (mreg << 8);
                *((unsigned short *)insns + 2) =
                        (*((unsigned short *)insns + 2) & 0xfff8) | (mreg + 1);
                *((unsigned short *)insns + 3) =
                        (*((unsigned short *)insns + 3) & 0xffc7) | (mreg << 3);
                *((unsigned short *)insns + 7) =
                        (*((unsigned short *)insns + 7) & 0xf8ff) | (mreg << 8);
                *((unsigned short *)insns + 8) =
                        (*((unsigned short *)insns + 8) & 0xffc7) | (mreg << 3);
                *((unsigned short *)insns + 9) =
                        (*((unsigned short *)insns + 9) & 0xffc7) |
                        ((mreg + 1) << 3);
                *((unsigned short *)insns + 10) =
                        (*((unsigned short *)insns + 10) & 0x00ff) |
                        ((1 << mreg) | (1 << (mreg + 1)));
        }

        /* Rewrite the instruction itself, substituting SP (0xd) for PC
         * in the relevant register field, and store it into the buffer
         * (16-bit forms at halfword slot 4, 32-bit forms at word slot 2). */
        if (THUMB_INSN_MATCH(APC, insn)) {
                /* ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4 */
                *((unsigned short *)insns + 4) = ((insn & 0xffff) | 0x800);
        } else if (THUMB_INSN_MATCH(LRO3, insn)) {
                /* LDR Rd, [PC, #immed_8*4] ->
                 * LDR Rd, [SP, #immed_8*4] */
                *((unsigned short *)insns + 4) =
                        ((insn & 0xffff) + 0x5000);
        } else if (THUMB_INSN_MATCH(MOV3, insn)) {
                /* MOV Rd, PC -> MOV Rd, SP */
                *((unsigned short *)insns + 4) =
                        ((insn & 0xffff) ^ 0x10);
        } else if (THUMB2_INSN_MATCH(ADR, insn)) {
                /* ADDW Rd,PC,#imm -> ADDW Rd,SP,#imm */
                insns[2] = (insn & 0xfffffff0) | 0x0d;
        } else if (THUMB2_INSN_MATCH(LDRW, insn) ||
                   THUMB2_INSN_MATCH(LDRBW, insn) ||
                   THUMB2_INSN_MATCH(LDRHW, insn)) {
                /* LDR.W Rt, [PC, #-<imm_12>] ->
                 * LDR.W Rt, [SP, #-<imm_8>]
                 * !!!!!!!!!!!!!!!!!!!!!!!!
                 * !!! imm_12 vs. imm_8 !!!
                 * !!!!!!!!!!!!!!!!!!!!!!!! */
                insns[2] = (insn & 0xf0fffff0) | 0x0c00000d;
        } else if (THUMB2_INSN_MATCH(LDRW1, insn) ||
                   THUMB2_INSN_MATCH(LDRBW1, insn) ||
                   THUMB2_INSN_MATCH(LDRHW1, insn) ||
                   THUMB2_INSN_MATCH(LDRD, insn) ||
                   THUMB2_INSN_MATCH(LDRD1, insn) ||
                   THUMB2_INSN_MATCH(LDREX, insn)) {
                /* LDRx.W Rt, [PC, #+<imm_12>] ->
                 * LDRx.W Rt, [SP, #+<imm_12>]
                 (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>] */
                insns[2] = (insn & 0xfffffff0) | 0xd;
        } else if (THUMB2_INSN_MATCH(MUL, insn)) {
                /* MUL Rd, Rn, SP */
                insns[2] = (insn & 0xfff0ffff) | 0x000d0000;
        } else if (THUMB2_INSN_MATCH(DP, insn)) {
                if (THUMB2_INSN_REG_RM(insn) == 15)
                        /* DP Rd, Rn, PC */
                        insns[2] = (insn & 0xfff0ffff) | 0x000d0000;
                else if (THUMB2_INSN_REG_RN(insn) == 15)
                        /* DP Rd, PC, Rm */
                        insns[2] = (insn & 0xfffffff0) | 0xd;
        } else if (THUMB2_INSN_MATCH(LDRWL, insn)) {
                /* LDRx.W Rt, [PC, #<imm_12>] ->
                 * LDRx.W Rt, [SP, #+<imm_12>]
                 * (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>] */
                insns[2] = (insn & 0xfffffff0) | 0xd;
        } else if (THUMB2_INSN_MATCH(RSBW, insn)) {
                /*  RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const> */
                insns[2] = (insn & 0xfffffff0) | 0xd;
        } else if (THUMB2_INSN_MATCH(RORW, insn) ||
                   THUMB2_INSN_MATCH(LSLW1, insn) ||
                   THUMB2_INSN_MATCH(LSRW1, insn)) {
                if ((THUMB2_INSN_REG_RM(insn) == 15) &&
                    (THUMB2_INSN_REG_RN(insn) == 15))
                        /*  ROR.W Rd, PC, PC */
                        insns[2] = (insn & 0xfffdfffd);
                else if (THUMB2_INSN_REG_RM(insn) == 15)
                        /*  ROR.W Rd, Rn, PC */
                        insns[2] = (insn & 0xfff0ffff) | 0xd0000;
                else if (THUMB2_INSN_REG_RN(insn) == 15)
                        /*  ROR.W Rd, PC, Rm */
                        insns[2] = (insn & 0xfffffff0) | 0xd;
        } else if (THUMB2_INSN_MATCH(ROR, insn) ||
                   THUMB2_INSN_MATCH(LSLW2, insn) ||
                   THUMB2_INSN_MATCH(LSRW2, insn)) {
                /*  ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const> */
                insns[2] = (insn & 0xfff0ffff) | 0xd0000;
        }

        if (THUMB2_INSN_MATCH(STRW, insn) ||
            THUMB2_INSN_MATCH(STRBW, insn)) {
                /*  STRx.W Rt, [Rn, SP] */
                insns[2] = (insn & 0xfff0ffff) | 0x000d0000;
        } else if (THUMB2_INSN_MATCH(STRD, insn) ||
                   THUMB2_INSN_MATCH(STRHT, insn) ||
                   THUMB2_INSN_MATCH(STRT, insn) ||
                   THUMB2_INSN_MATCH(STRHW1, insn)) {
                if (THUMB2_INSN_REG_RN(insn) == 15)
                        /*  STRD/T/HT{.W} Rt, [SP, ...] */
                        insns[2] = (insn & 0xfffffff0) | 0xd;
                else
                        insns[2] = insn;
        } else if (THUMB2_INSN_MATCH(STRHW, insn) &&
                   (THUMB2_INSN_REG_RN(insn) == 15)) {
                /* NOTE(review): the inner RN == 15 check repeats the
                 * branch condition, so the else arm is dead code */
                if (THUMB2_INSN_REG_RN(insn) == 15)
                        /*  STRH.W Rt, [SP, #-<imm_8>] */
                        insns[2] = (insn & 0xf0fffff0) | 0x0c00000d;
                else
                        insns[2] = insn;
        }

        /*  STRx PC, xxx */
        if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn)   ||
                            THUMB2_INSN_MATCH(STRBW, insn)  ||
                            THUMB2_INSN_MATCH(STRD, insn)   ||
                            THUMB2_INSN_MATCH(STRHT, insn)  ||
                            THUMB2_INSN_MATCH(STRT, insn)   ||
                            THUMB2_INSN_MATCH(STRHW1, insn) ||
                            THUMB2_INSN_MATCH(STRHW, insn))) {
                insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;
        }

        if (THUMB2_INSN_MATCH(TEQ1, insn) ||
            THUMB2_INSN_MATCH(TST1, insn)) {
                /*  TEQ SP, #<const> */
                insns[2] = (insn & 0xfffffff0) | 0xd;
        } else if (THUMB2_INSN_MATCH(TEQ2, insn) ||
                   THUMB2_INSN_MATCH(TST2, insn)) {
                if ((THUMB2_INSN_REG_RN(insn) == 15) &&
                    (THUMB2_INSN_REG_RM(insn) == 15))
                        /*  TEQ/TST PC, PC */
                        insns[2] = (insn & 0xfffdfffd);
                else if (THUMB2_INSN_REG_RM(insn) == 15)
                        /*  TEQ/TST Rn, PC */
                        insns[2] = (insn & 0xfff0ffff) | 0xd0000;
                else if (THUMB2_INSN_REG_RN(insn) == 15)
                        /*  TEQ/TST PC, Rm */
                        insns[2] = (insn & 0xfffffff0) | 0xd;
        }

        return 0;
}
385
/*
 * Build the out-of-line execution buffer (trampoline) for a Thumb/Thumb2
 * instruction at user address @vaddr.
 *
 * Steps: (1) reject unsupported encodings via arch_check_insn_thumb;
 * (2) classify PC-dependent instructions, recording which register
 * field matters in @uregs; (3) copy the matching template execbuf into
 * @tramp and patch in the instruction and the return addresses.
 * Branch-type instructions (B, B<cond>, BX/BLX, BL, CBZ) get their own
 * templates in the final if/else chain, overwriting the buffer prepared
 * above.
 *
 * @tramp is measured in units of unsigned long; halfword slots 13-17
 * hold the break (0xdeff -- presumably the Thumb undefined/trap
 * encoding used as the probe break; TODO confirm) and the resume
 * addresses (bit 0 set = resume in Thumb state).
 *
 * Returns 0 on success, negative error code otherwise.
 */
static int arch_make_trampoline_thumb(unsigned long vaddr, unsigned long insn,
                                      unsigned long *tramp, size_t tramp_len)
{
        int ret;
        int uregs = 0;
        int pc_dep = 0;
        unsigned int addr;

        ret = arch_check_insn_thumb(insn);
        if (ret) {
                pr_err("THUMB inst isn't support vaddr=%lx insn=%08lx\n",
                       vaddr, insn);
                return ret;
        }

        if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
                uregs = 0x0700;         /* 8-10 */
                pc_dep = 1;
        } else if (THUMB_INSN_MATCH(MOV3, insn) &&
                   (((((unsigned char)insn) & 0xff) >> 3) == 15)) {
                /* MOV Rd, PC */
                uregs = 0x07;
                pc_dep = 1;
        /* NOTE(review): condition below is not parenthesized; it
         * compiles only because THUMB2_INSN_MATCH expands to a
         * parenthesized expression */
        } else if THUMB2_INSN_MATCH(ADR, insn) {
                uregs = 0x0f00;         /* Rd 8-11 */
                pc_dep = 1;
        } else if (((THUMB2_INSN_MATCH(LDRW, insn) ||
                     THUMB2_INSN_MATCH(LDRW1, insn) ||
                     THUMB2_INSN_MATCH(LDRBW, insn) ||
                     THUMB2_INSN_MATCH(LDRBW1, insn) ||
                     THUMB2_INSN_MATCH(LDRHW, insn) ||
                     THUMB2_INSN_MATCH(LDRHW1, insn) ||
                     THUMB2_INSN_MATCH(LDRWL, insn)) &&
                    THUMB2_INSN_REG_RN(insn) == 15) ||
                     THUMB2_INSN_MATCH(LDREX, insn) ||
                     ((THUMB2_INSN_MATCH(STRW, insn) ||
                       THUMB2_INSN_MATCH(STRBW, insn) ||
                       THUMB2_INSN_MATCH(STRHW, insn) ||
                       THUMB2_INSN_MATCH(STRHW1, insn)) &&
                      (THUMB2_INSN_REG_RN(insn) == 15 ||
                       THUMB2_INSN_REG_RT(insn) == 15)) ||
                     ((THUMB2_INSN_MATCH(STRT, insn) ||
                       THUMB2_INSN_MATCH(STRHT, insn)) &&
                       (THUMB2_INSN_REG_RN(insn) == 15 ||
                        THUMB2_INSN_REG_RT(insn) == 15))) {
                uregs = 0xf000;         /* Rt 12-15 */
                pc_dep = 1;
        } else if ((THUMB2_INSN_MATCH(LDRD, insn) ||
                    THUMB2_INSN_MATCH(LDRD1, insn)) &&
                   (THUMB2_INSN_REG_RN(insn) == 15)) {
                uregs = 0xff00;         /* Rt 12-15, Rt2 8-11 */
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(MUL, insn) &&
                   THUMB2_INSN_REG_RM(insn) == 15) {
                uregs = 0xf;
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(DP, insn) &&
                   (THUMB2_INSN_REG_RN(insn) == 15 ||
                    THUMB2_INSN_REG_RM(insn) == 15)) {
                uregs = 0xf000;         /* Rd 12-15 */
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(STRD, insn) &&
                   ((THUMB2_INSN_REG_RN(insn) == 15) ||
                    (THUMB2_INSN_REG_RT(insn) == 15) ||
                    THUMB2_INSN_REG_RT2(insn) == 15)) {
                uregs = 0xff00;         /* Rt 12-15, Rt2 8-11 */
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(RSBW, insn) &&
                   THUMB2_INSN_REG_RN(insn) == 15) {
                uregs = 0x0f00;         /* Rd 8-11 */
                pc_dep = 1;
        } else if (THUMB2_INSN_MATCH(RORW, insn) &&
                   (THUMB2_INSN_REG_RN(insn) == 15 ||
                    THUMB2_INSN_REG_RM(insn) == 15)) {
                uregs = 0x0f00;
                pc_dep = 1;
        } else if ((THUMB2_INSN_MATCH(ROR, insn) ||
                    THUMB2_INSN_MATCH(LSLW2, insn) ||
                    THUMB2_INSN_MATCH(LSRW2, insn)) &&
                   THUMB2_INSN_REG_RM(insn) == 15) {
                uregs = 0x0f00;         /* Rd 8-11 */
                pc_dep = 1;
        } else if ((THUMB2_INSN_MATCH(LSLW1, insn) ||
                    THUMB2_INSN_MATCH(LSRW1, insn)) &&
                   (THUMB2_INSN_REG_RN(insn) == 15 ||
                    THUMB2_INSN_REG_RM(insn) == 15)) {
                uregs = 0x0f00;         /* Rd 8-11 */
                pc_dep = 1;
        } else if ((THUMB2_INSN_MATCH(TEQ1, insn) ||
                    THUMB2_INSN_MATCH(TST1, insn)) &&
                   THUMB2_INSN_REG_RN(insn) == 15) {
                uregs = 0xf0000;        /* Rn 0-3 (16-19) */
                pc_dep = 1;
        } else if ((THUMB2_INSN_MATCH(TEQ2, insn) ||
                    THUMB2_INSN_MATCH(TST2, insn)) &&
                   (THUMB2_INSN_REG_RN(insn) == 15 ||
                    THUMB2_INSN_REG_RM(insn) == 15)) {
                uregs = 0xf0000;        /* Rn 0-3 (16-19) */
                pc_dep = 1;
        }

        if (unlikely(uregs && pc_dep)) {
                /* PC-dependent: use the SP-substitution template and let
                 * prep_pc_dep_insn_execbuf_thumb patch the instruction in */
                memcpy(tramp, pc_dep_insn_execbuf_thumb, tramp_len);
                prep_pc_dep_insn_execbuf_thumb(tramp, insn, uregs);

                addr = vaddr + 4;
                *((unsigned short *)tramp + 13) = 0xdeff;
                *((unsigned short *)tramp + 14) = addr & 0x0000ffff;
                *((unsigned short *)tramp + 15) = addr >> 16;
                if (!is_thumb2(insn)) {
                        /* 16-bit insn: resume after 2 bytes, Thumb bit set */
                        addr = vaddr + 2;
                        *((unsigned short *)tramp + 16) =
                                (addr & 0x0000ffff) | 0x1;
                        *((unsigned short *)tramp + 17) = addr >> 16;
                } else {
                        /* 32-bit insn: resume after 4 bytes */
                        addr = vaddr + 4;
                        *((unsigned short *)tramp + 16) =
                                (addr & 0x0000ffff) | 0x1;
                        *((unsigned short *)tramp + 17) = addr >> 16;
                }
        } else {
                /* not PC-dependent: generic template, instruction copied
                 * verbatim into the buffer */
                memcpy(tramp, gen_insn_execbuf_thumb, tramp_len);
                *((unsigned short *)tramp + 13) = 0xdeff;
                if (!is_thumb2(insn)) {
                        addr = vaddr + 2;
                        *((unsigned short *)tramp + 2) = insn;
                        *((unsigned short *)tramp + 16) =
                                (addr & 0x0000ffff) | 0x1;
                        *((unsigned short *)tramp + 17) = addr >> 16;
                } else {
                        addr = vaddr + 4;
                        tramp[1] = insn;
                        *((unsigned short *)tramp + 16) =
                                (addr & 0x0000ffff) | 0x1;
                        *((unsigned short *)tramp + 17) = addr >> 16;
                }
        }

        /* branch instructions replace the buffer built above with a
         * branch-specific template and the precomputed destination */
        if (THUMB_INSN_MATCH(B2, insn)) {
                memcpy(tramp, b_off_insn_execbuf_thumb, tramp_len);
                *((unsigned short *)tramp + 13) = 0xdeff;
                addr = branch_t16_dest(insn, vaddr);
                *((unsigned short *)tramp + 14) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short *)tramp + 15) = addr >> 16;
                *((unsigned short *)tramp + 16) = 0;
                *((unsigned short *)tramp + 17) = 0;

        } else if (THUMB_INSN_MATCH(B1, insn)) {
                memcpy(tramp, b_cond_insn_execbuf_thumb, tramp_len);
                *((unsigned short *)tramp + 13) = 0xdeff;
                *((unsigned short *)tramp + 0) |= (insn & 0xf00);
                addr = branch_cond_t16_dest(insn, vaddr);
                *((unsigned short *)tramp + 14) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short *)tramp + 15) = addr >> 16;
                addr = vaddr + 2;
                *((unsigned short *)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short *)tramp + 17) = addr >> 16;

        } else if (THUMB_INSN_MATCH(BLX2, insn) ||
                   THUMB_INSN_MATCH(BX, insn)) {
                memcpy(tramp, b_r_insn_execbuf_thumb, tramp_len);
                *((unsigned short *)tramp + 13) = 0xdeff;
                *((unsigned short *)tramp + 4) = insn;
                addr = vaddr + 2;
                *((unsigned short *)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short *)tramp + 17) = addr >> 16;

        } else if (THUMB2_INSN_MATCH(BLX1, insn) ||
                   THUMB2_INSN_MATCH(BL, insn)) {
                memcpy(tramp, blx_off_insn_execbuf_thumb, tramp_len);
                *((unsigned short *)tramp + 13) = 0xdeff;
                addr = branch_t32_dest(insn, vaddr);
                *((unsigned short *)tramp + 14) = (addr & 0x0000ffff);
                *((unsigned short *)tramp + 15) = addr >> 16;
                addr = vaddr + 4;
                *((unsigned short *)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short *)tramp + 17) = addr >> 16;

        } else if (THUMB_INSN_MATCH(CBZ, insn)) {
                memcpy(tramp, cbz_insn_execbuf_thumb, tramp_len);
                *((unsigned short *)tramp + 13) = 0xdeff;
                /* zero out original branch displacement (imm5 = 0; i = 0) */
                *((unsigned short *)tramp + 0) = insn & (~0x2f8);
                /* replace it with 8 bytes offset in execbuf (imm5 = 0b00010) */
                *((unsigned short *)tramp + 0) |= 0x20;
                addr = cbz_t16_dest(insn, vaddr);
                *((unsigned short *)tramp + 14) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short *)tramp + 15) = addr >> 16;
                addr = vaddr + 2;
                *((unsigned short *)tramp + 16) = (addr & 0x0000ffff) | 0x1;
                *((unsigned short *)tramp + 17) = addr >> 16;
        }

        return 0;
}
581
582 /**
583  * @brief Prepares uprobe for ARM.
584  *
585  * @param up Pointer to the uprobe.
586  * @return 0 on success,\n
587  * negative error code on error.
588  */
589 int arch_prepare_uprobe(struct uprobe *p)
590 {
591         int ret;
592         struct task_struct *task = p->task;
593         unsigned long vaddr = (unsigned long)p->addr & ~((unsigned long)1);
594         unsigned long insn;
595         int thumb_mode = (unsigned long)p->addr & 1;
596         unsigned long tramp[UPROBES_TRAMP_LEN];
597         unsigned long __user *utramp;
598         enum { tramp_len = sizeof(tramp) };
599
600         if (!read_proc_vm_atomic(task, vaddr, &insn, sizeof(insn))) {
601                 printk(KERN_ERR "failed to read memory %lx!\n", vaddr);
602                 return -EINVAL;
603         }
604
605         ret = thumb_mode ?
606                         arch_make_trampoline_thumb(vaddr, insn,
607                                                    tramp, tramp_len) :
608                         arch_make_trampoline_arm(vaddr, insn, tramp);
609         if (ret) {
610                 pr_err("failed to make tramp, addr=%p\n", p->addr);
611                 return ret;
612         }
613
614         utramp = swap_slot_alloc(p->sm);
615         if (utramp == NULL) {
616                 printk(KERN_INFO "Error: swap_slot_alloc failed (%08lx)\n",
617                        vaddr);
618                 return -ENOMEM;
619         }
620
621         if (!write_proc_vm_atomic(p->task, (unsigned long)utramp, tramp,
622                                   tramp_len)) {
623                 pr_err("failed to write memory tramp=%p!\n", utramp);
624                 swap_slot_free(p->sm, utramp);
625                 return -EINVAL;
626         }
627
628         flush_insns(utramp, tramp_len);
629         p->ainsn.insn = utramp;
630         p->opcode = insn;
631
632         /* for uretprobe */
633         add_uprobe_table(p);
634
635         return 0;
636 }
637
638 /**
639  * @brief Analysis opcodes.
640  *
641  * @param rp Pointer to the uretprobe.
642  * @return Void.
643  */
644 void arch_opcode_analysis_uretprobe(struct uretprobe *rp)
645 {
646         /* Remove retprobe if first insn overwrites lr */
647         rp->thumb_noret = !!(THUMB2_INSN_MATCH(BL, rp->up.opcode) ||
648                              THUMB2_INSN_MATCH(BLX1, rp->up.opcode) ||
649                              THUMB_INSN_MATCH(BLX2, rp->up.opcode));
650
651         rp->arm_noret = !!(ARM_INSN_MATCH(BL, rp->up.opcode) ||
652                            ARM_INSN_MATCH(BLX1, rp->up.opcode) ||
653                            ARM_INSN_MATCH(BLX2, rp->up.opcode));
654 }
655
656 /**
657  * @brief Prepates uretprobe for ARM.
658  *
659  * @param ri Pointer to the uretprobe instance.
660  * @param regs Pointer to CPU register data.
661  * @return Error code.
662  */
663 int arch_prepare_uretprobe(struct uretprobe_instance *ri, struct pt_regs *regs)
664 {
665         ri->ret_addr = (uprobe_opcode_t *)regs->ARM_lr;
666         ri->sp = (uprobe_opcode_t *)regs->ARM_sp;
667
668         /* Set flag of current mode */
669         ri->sp = (uprobe_opcode_t *)((long)ri->sp | !!thumb_mode(regs));
670
671         if (ri->preload_thumb) {
672                 regs->ARM_lr = (unsigned long)(ri->rp->up.ainsn.insn) + 0x1b;
673         } else {
674                 if (thumb_mode(regs))
675                         regs->ARM_lr = (unsigned long)(ri->rp->up.ainsn.insn) + 0x1b;
676                 else
677                         regs->ARM_lr = (unsigned long)(ri->rp->up.ainsn.insn +
678                                                        UPROBES_TRAMP_RET_BREAK_IDX);
679         }
680
681         return 0;
682 }
683
684 unsigned long arch_tramp_by_ri(struct uretprobe_instance *ri)
685 {
686         /* Understand function mode */
687         return ((unsigned long)ri->sp & 1) ?
688                         ((unsigned long)ri->rp->up.ainsn.insn + 0x1b) :
689                         (unsigned long)(ri->rp->up.ainsn.insn +
690                                         UPROBES_TRAMP_RET_BREAK_IDX);
691 }
692
693 /**
694  * @brief Disarms uretprobe instance.
695  *
696  * @param ri Pointer to the uretprobe instance
697  * @param task Pointer to the task for which the uretprobe instance
698  * @return 0 on success,\n
699  * negative error code on error.
700  */
701 int arch_disarm_urp_inst(struct uretprobe_instance *ri,
702                          struct task_struct *task, unsigned long tr)
703 {
704         struct pt_regs *uregs = task_pt_regs(ri->task);
705         unsigned long ra = swap_get_ret_addr(uregs);
706         unsigned long *tramp;
707         unsigned long *sp = (unsigned long *)((long)ri->sp & ~1);
708         unsigned long *stack = sp - RETPROBE_STACK_DEPTH + 1;
709         unsigned long *found = NULL;
710         unsigned long *buf[RETPROBE_STACK_DEPTH];
711         unsigned long vaddr;
712         int i, retval;
713
714         if (tr == 0) {
715                 vaddr = (unsigned long)ri->rp->up.addr;
716                 tramp = (unsigned long *)arch_tramp_by_ri(ri);
717         } else {
718                 /* ri - invalid */
719                 vaddr = 0;
720                 tramp = (unsigned long *)tr;
721         }
722
723         /* check stack */
724         retval = read_proc_vm_atomic(task, (unsigned long)stack,
725                                      buf, sizeof(buf));
726         if (retval != sizeof(buf)) {
727                 printk(KERN_INFO "---> %s (%d/%d): failed to read "
728                        "stack from %08lx\n", task->comm, task->tgid, task->pid,
729                        (unsigned long)stack);
730                 retval = -EFAULT;
731                 goto check_lr;
732         }
733
734         /* search the stack from the bottom */
735         for (i = RETPROBE_STACK_DEPTH - 1; i >= 0; i--) {
736                 if (buf[i] == tramp) {
737                         found = stack + i;
738                         break;
739                 }
740         }
741
742         if (!found) {
743                 retval = -ESRCH;
744                 goto check_lr;
745         }
746
747         printk(KERN_INFO "---> %s (%d/%d): trampoline found at "
748                "%08lx (%08lx /%+d) - %lx, set ret_addr=%p\n",
749                task->comm, task->tgid, task->pid,
750                (unsigned long)found, (unsigned long)sp,
751                found - sp, vaddr, ri->ret_addr);
752         retval = write_proc_vm_atomic(task, (unsigned long)found,
753                                       &ri->ret_addr,
754                                       sizeof(ri->ret_addr));
755         if (retval != sizeof(ri->ret_addr)) {
756                 printk(KERN_INFO "---> %s (%d/%d): "
757                        "failed to write value to %08lx",
758                        task->comm, task->tgid, task->pid, (unsigned long)found);
759                 retval = -EFAULT;
760         } else {
761                 retval = 0;
762         }
763
764 check_lr: /* check lr anyway */
765         if (ra == (unsigned long)tramp) {
766                 printk(KERN_INFO "---> %s (%d/%d): trampoline found at "
767                        "lr = %08lx - %lx, set ret_addr=%p\n",
768                        task->comm, task->tgid, task->pid, ra, vaddr, ri->ret_addr);
769
770                 swap_set_ret_addr(uregs, (unsigned long)ri->ret_addr);
771                 retval = 0;
772         } else if (retval) {
773                 printk(KERN_INFO "---> %s (%d/%d): trampoline NOT found at "
774                        "sp = %08lx, lr = %08lx - %lx, ret_addr=%p\n",
775                        task->comm, task->tgid, task->pid,
776                        (unsigned long)sp, ra, vaddr, ri->ret_addr);
777         }
778
779         return retval;
780 }
781
782 /**
783  * @brief Jump pre-handler.
784  *
785  * @param p Pointer to the uprobe.
786  * @param regs Pointer to CPU register data.
787  * @return 0.
788  */
789 int setjmp_upre_handler(struct uprobe *p, struct pt_regs *regs)
790 {
791         struct ujprobe *jp = container_of(p, struct ujprobe, up);
792
793         uprobe_pre_entry_handler_t pre_entry =
794                 (uprobe_pre_entry_handler_t)jp->pre_entry;
795         entry_point_t entry = (entry_point_t)jp->entry;
796
797         if (pre_entry) {
798                 p->ss_addr[smp_processor_id()] = (uprobe_opcode_t *)
799                                                  pre_entry(jp->priv_arg, regs);
800         }
801
802         if (entry) {
803                 entry(regs->ARM_r0, regs->ARM_r1, regs->ARM_r2,
804                       regs->ARM_r3, regs->ARM_r4, regs->ARM_r5);
805         } else {
806                 arch_ujprobe_return();
807         }
808
809         return 0;
810 }
811
812 /**
813  * @brief Gets trampoline address.
814  *
815  * @param p Pointer to the uprobe.
816  * @param regs Pointer to CPU register data.
817  * @return Trampoline address.
818  */
819 unsigned long arch_get_trampoline_addr(struct uprobe *p, struct pt_regs *regs)
820 {
821         return thumb_mode(regs) ?
822                         (unsigned long)(p->ainsn.insn) + 0x1b :
823                         (unsigned long)(p->ainsn.insn +
824                                         UPROBES_TRAMP_RET_BREAK_IDX);
825 }
826
827 /**
828  * @brief Restores return address.
829  *
830  * @param orig_ret_addr Original return address.
831  * @param regs Pointer to CPU register data.
832  * @return Void.
833  */
834 void arch_set_orig_ret_addr(unsigned long orig_ret_addr, struct pt_regs *regs)
835 {
836         regs->ARM_lr = orig_ret_addr;
837         regs->ARM_pc = orig_ret_addr & ~0x1;
838
839         if (regs->ARM_lr & 0x1)
840                 regs->ARM_cpsr |= PSR_T_BIT;
841         else
842                 regs->ARM_cpsr &= ~PSR_T_BIT;
843 }
844
845 /**
846  * @brief Removes uprobe.
847  *
848  * @param up Pointer to the uprobe.
849  * @return Void.
850  */
851 void arch_remove_uprobe(struct uprobe *up)
852 {
853         swap_slot_free(up->sm, up->ainsn.insn);
854 }
855
856 int arch_arm_uprobe(struct uprobe *p)
857 {
858         int ret;
859         unsigned long vaddr = (unsigned long)p->addr & ~((unsigned long)1);
860         int thumb_mode = (unsigned long)p->addr & 1;
861         int len = 4 >> thumb_mode;      /* if thumb_mode then len = 2 */
862         unsigned long insn = thumb_mode ? UBP_THUMB : UBP_ARM;
863
864         ret = write_proc_vm_atomic(p->task, vaddr, &insn, len);
865         if (!ret) {
866                 pr_err("arch_arm_uprobe: failed to write memory tgid=%u addr=%08lx len=%d\n",
867                        p->task->tgid, vaddr, len);
868
869                 return -EACCES;
870         } else {
871                 flush_insns(vaddr, len);
872         }
873
874         return 0;
875 }
876
877 void arch_disarm_uprobe(struct uprobe *p, struct task_struct *task)
878 {
879         int ret;
880
881         unsigned long vaddr = (unsigned long)p->addr & ~((unsigned long)1);
882         int thumb_mode = (unsigned long)p->addr & 1;
883         int len = 4 >> thumb_mode;      /* if thumb_mode then len = 2 */
884
885         ret = write_proc_vm_atomic(task, vaddr, &p->opcode, len);
886         if (!ret) {
887                 pr_err("arch_disarm_uprobe: failed to write memory tgid=%u addr=%08lx len=%d\n",
888                        task->tgid, vaddr, len);
889         } else {
890                 flush_insns(vaddr, len);
891         }
892 }
893
894 static int urp_handler(struct pt_regs *regs, pid_t tgid)
895 {
896         struct uprobe *p;
897         unsigned long vaddr = regs->ARM_pc;
898         unsigned long offset_bp = thumb_mode(regs) ?
899                                   0x1a :
900                                   4 * UPROBES_TRAMP_RET_BREAK_IDX;
901         unsigned long tramp_addr = vaddr - offset_bp;
902
903         p = get_uprobe_by_insn_slot((void *)tramp_addr, tgid, regs);
904         if (p == NULL) {
905                 printk(KERN_INFO
906                        "no_uprobe: Not one of ours: let kernel handle it %lx\n",
907                        vaddr);
908                 return 1;
909         }
910
911         trampoline_uprobe_handler(p, regs);
912
913         return 0;
914 }
915 /**
916  * @brief Prepares singlestep for current CPU.
917  *
918  * @param p Pointer to kprobe.
919  * @param regs Pointer to CPU registers data.
920  * @return Void.
921  */
922 static void arch_prepare_singlestep(struct uprobe *p, struct pt_regs *regs)
923 {
924         int cpu = smp_processor_id();
925
926         if (p->ss_addr[cpu]) {
927                 regs->ARM_pc = (unsigned long)p->ss_addr[cpu];
928                 p->ss_addr[cpu] = NULL;
929         } else {
930                 regs->ARM_pc = (unsigned long)p->ainsn.insn;
931         }
932 }
933
934 /**
935  * @brief Breakpoint instruction handler.
936  *
937  * @param regs Pointer to CPU register data.
938  * @param instr Instruction.
939  * @return uprobe_handler results.
940  */
941 int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
942 {
943         int ret = 0;
944         struct uprobe *p;
945         unsigned long flags;
946         unsigned long vaddr = regs->ARM_pc | !!thumb_mode(regs);
947         pid_t tgid = current->tgid;
948
949         local_irq_save(flags);
950         preempt_disable();
951
952         p = get_uprobe((uprobe_opcode_t *)vaddr, tgid);
953         if (p) {
954                 bool prepare = false;
955
956                 if (p->atomic_ctx) {
957                         if (!p->pre_handler || !p->pre_handler(p, regs))
958                                 prepare = true;
959                 } else {
960                         swap_preempt_enable_no_resched();
961                         local_irq_restore(flags);
962
963                         if (!p->pre_handler || !p->pre_handler(p, regs))
964                                 prepare = true;
965
966                         local_irq_save(flags);
967                         preempt_disable();
968                 }
969
970                 if (prepare)
971                         arch_prepare_singlestep(p, regs);
972         } else {
973                 ret = urp_handler(regs, tgid);
974
975                 /* check ARM/THUMB mode on correct */
976                 if (ret) {
977                         vaddr ^= 1;
978                         p = get_uprobe((uprobe_opcode_t *)vaddr, tgid);
979                         if (p) {
980                                 pr_err("invalid mode: thumb=%d addr=%p insn=%08lx\n",
981                                        !!thumb_mode(regs), p->addr, p->opcode);
982                                 ret = 0;
983
984                                 swap_preempt_enable_no_resched();
985                                 local_irq_restore(flags);
986
987                                 disarm_uprobe(p, current);
988
989                                 local_irq_save(flags);
990                                 preempt_disable();
991                         }
992                 }
993         }
994
995         swap_preempt_enable_no_resched();
996         local_irq_restore(flags);
997
998         return ret;
999 }
1000
/* userspace probes hook (arm): trap the UBP_ARM breakpoint pattern,
 * but only when it fires in user mode */
static struct undef_hook undef_hook_for_us_arm = {
	.instr_mask	= 0xffffffff,
	.instr_val	= UBP_ARM,
	.cpsr_mask	= MODE_MASK,
	.cpsr_val	= USR_MODE,
	.fn		= uprobe_trap_handler
};
1009
/* userspace probes hook (thumb): trap the UBP_THUMB breakpoint pattern,
 * but only when it fires in user mode */
static struct undef_hook undef_hook_for_us_thumb = {
	.instr_mask	= 0xffffffff,
	.instr_val	= UBP_THUMB,
	.cpsr_mask	= MODE_MASK,
	.cpsr_val	= USR_MODE,
	.fn		= uprobe_trap_handler
};
1018
1019 /**
1020  * @brief Installs breakpoint hooks.
1021  *
1022  * @return 0.
1023  */
1024 int swap_arch_init_uprobes(void)
1025 {
1026         swap_register_undef_hook(&undef_hook_for_us_arm);
1027         swap_register_undef_hook(&undef_hook_for_us_thumb);
1028
1029         return 0;
1030 }
1031
1032 /**
1033  * @brief Uninstalls breakpoint hooks.
1034  *
1035  * @return Void.
1036  */
1037 void swap_arch_exit_uprobes(void)
1038 {
1039         swap_unregister_undef_hook(&undef_hook_for_us_thumb);
1040         swap_unregister_undef_hook(&undef_hook_for_us_arm);
1041 }