2 * Dynamic Binary Instrumentation Module based on KProbes
3 * modules/uprobe/arch/asm-arm/swap_uprobes.h
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
19 * Copyright (C) Samsung Electronics, 2006-2010
21 * 2008-2009 Alexey Gerenkov <a.gerenkov@samsung.com> User-Space
22 * Probes initial implementation; Support x86/ARM/MIPS for both user and kernel spaces.
23 * 2010 Ekaterina Gorelkina <e.gorelkina@samsung.com>: redesign module for separating core and arch parts
27 #include <kprobe/dbi_kprobes.h>
28 #include <kprobe/arch/asm/dbi_kprobes.h>
29 #include <kprobe/arch/asm/trampoline_arm.h>
30 #include <asm/traps.h>
31 #include <uprobe/swap_uprobes.h>
32 #include <uprobe/arch/asm/swap_uprobes.h>
33 #include <kprobe/dbi_insn_slots.h>
34 #include <kprobe/dbi_kprobes_deps.h>
35 #include "trampoline_thumb.h"
38 #include <kprobe/dbi_kdebug.h>
/*
 * Synchronize the I-cache with the D-cache after patching instructions
 * in [addr, addr + size): without this the CPU may keep executing the
 * stale pre-patch code.
 */
#define flush_insns(addr, size) \
	flush_icache_range((unsigned long)(addr), \
			   (unsigned long)(addr) + (size))
45 static inline long branch_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
47 long offset = insn & 0x3ff;
48 offset -= insn & 0x400;
49 return (insn_addr + 4 + offset * 2);
52 static inline long branch_cond_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
54 long offset = insn & 0x7f;
55 offset -= insn & 0x80;
56 return (insn_addr + 4 + offset * 2);
/*
 * Destination of a 32-bit Thumb BL/BLX: the 11-bit high part "poff"
 * (sign-extended via bit 10) supplies the displacement's bits 12 and
 * up, and the 11-bit low part (bits 17-27 of the combined opcode)
 * counts words.
 *
 * NOTE(review): the two consecutive return statements indicate that a
 * conditional distinguishing BL from BLX was lost in this copy of the
 * file -- BLX targets ARM code, hence the word-aligned "& ~3" variant.
 * Confirm against the original source.
 */
static inline long branch_t32_dest(kprobe_opcode_t insn, unsigned int insn_addr)
	unsigned int poff = insn & 0x3ff;
	unsigned int offset = (insn & 0x07fe0000) >> 17;

	poff -= (insn & 0x400);

	return ((insn_addr + 4 + (poff << 12) + offset * 4));
	return ((insn_addr + 4 + (poff << 12) + offset * 4) & ~3);
72 static inline long cbz_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
74 unsigned int i = (insn & 0x200) >> 3;
75 unsigned int offset = (insn & 0xf8) >> 2;
76 return insn_addr + 4 + i + offset;
79 /* is instruction Thumb2 and NOT a branch, etc... */
80 static int is_thumb2(kprobe_opcode_t insn)
82 return ((insn & 0xf800) == 0xe800 ||
83 (insn & 0xf800) == 0xf000 ||
84 (insn & 0xf800) == 0xf800);
/*
 * Build the ARM-mode execution trampoline for @up: feed the saved
 * original opcode and its virtual address to arch_make_trampoline_arm(),
 * which fills up->atramp.tramp_arm.
 *
 * NOTE(review): the function braces, the declaration of "ret" and the
 * return path were lost in this copy of the file.
 */
static int arch_copy_trampoline_arm_uprobe(struct uprobe *up)
	struct kprobe *p = up2kp(up);
	unsigned long insn = p->opcode;
	unsigned long vaddr = (unsigned long)p->addr;
	unsigned long *tramp = up->atramp.tramp_arm;

	ret = arch_make_trampoline_arm(vaddr, insn, tramp);
/*
 * Reject Thumb/Thumb2 instructions that cannot safely be executed out
 * of line from a trampoline: explicit PC writers (branches, BXJ, SWI,
 * breakpoints, LDM/LDR with Rt == PC, data-processing with Rd == PC),
 * PC-relative stores, and the LDRD/STRD forms that are skipped outright.
 *
 * NOTE(review): the opening brace, the result bookkeeping and the
 * return statement(s) were lost in this copy of the file; only the
 * rejection condition survives.
 */
static int arch_check_insn_thumb(unsigned long insn)
	/* check instructions that can change PC */
	if (THUMB_INSN_MATCH(UNDEF, insn) ||
	    THUMB_INSN_MATCH(SWI, insn) ||
	    THUMB_INSN_MATCH(BREAK, insn) ||
	    THUMB2_INSN_MATCH(B1, insn) ||
	    THUMB2_INSN_MATCH(B2, insn) ||
	    THUMB2_INSN_MATCH(BXJ, insn) ||
	    (THUMB2_INSN_MATCH(ADR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRWL, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    THUMB2_INSN_MATCH(LDMIA, insn) ||
	    THUMB2_INSN_MATCH(LDMDB, insn) ||
	    (THUMB2_INSN_MATCH(DP, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(RORW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(ROR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSLW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSLW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSRW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSRW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    /* skip PC, #-imm12 -> SP, #-imm8 and Tegra-hanging instructions */
	    (THUMB2_INSN_MATCH(STRW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRBW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRHW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    /* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
	    (THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(STRD, insn))) {
/*
 * Patch a copy of pc_dep_insn_execbuf_thumb so it emulates @insn, a
 * Thumb/Thumb2 instruction that reads PC.  The trampoline saves a
 * scratch register pair (@mreg, @mreg + 1), loads the original PC
 * value into SP, runs a rewritten copy of @insn that uses SP in place
 * of PC, and restores everything.  @uregs is the caller-supplied mask
 * locating the destination register field inside @insn.
 *
 * NOTE(review): this copy of the file lost many lines -- most of the
 * "mreg = ..." assignments, the else/closing-brace lines and the
 * return statement -- so only the surviving skeleton is annotated.
 */
static int prep_pc_dep_insn_execbuf_thumb(kprobe_opcode_t * insns, kprobe_opcode_t insn, int uregs)
	unsigned char mreg = 0;
	unsigned char reg = 0;

	/* Extract the destination register number per encoding class. */
	if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
		reg = ((insn & 0xffff) & uregs) >> 8;
	if (THUMB_INSN_MATCH(MOV3, insn)) {
		/* only MOV Rd, PC (source register field == 15) matters */
		if (((((unsigned char) insn) & 0xff) >> 3) == 15) {
			reg = (insn & 0xffff) & uregs;
	if (THUMB2_INSN_MATCH(ADR, insn)) {
		reg = ((insn >> 16) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
	    THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
	    THUMB2_INSN_MATCH(LDRWL, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	// LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
	if (THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
	    THUMB2_INSN_MATCH(LDREX, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	if (THUMB2_INSN_MATCH(DP, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	if (THUMB2_INSN_MATCH(RSBW, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(RORW, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW1, insn) ||
	    THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW1, insn) ||
	    THUMB2_INSN_MATCH(LSRW2, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
	if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
		reg = THUMB2_INSN_REG_RM(insn);
	if ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
	     THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
	     THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn) ||
	     THUMB2_INSN_MATCH(STRHW, insn)) && THUMB2_INSN_REG_RT(insn) == 15) {
		reg = THUMB2_INSN_REG_RT(insn);

	/*
	 * Wire the chosen scratch register pair mreg/mreg+1 into the
	 * PUSH/MOV/LDR/POP skeleton of the trampoline (16-bit halfword
	 * slots 0-3 and 7-10 of the execbuf).
	 */
	if (reg == 6 || reg == 7) {
	*((unsigned short*)insns + 0) = (*((unsigned short*)insns + 0) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
	*((unsigned short*)insns + 1) = (*((unsigned short*)insns + 1) & 0xf8ff) | (mreg << 8);
	*((unsigned short*)insns + 2) = (*((unsigned short*)insns + 2) & 0xfff8) | (mreg + 1);
	*((unsigned short*)insns + 3) = (*((unsigned short*)insns + 3) & 0xffc7) | (mreg << 3);
	*((unsigned short*)insns + 7) = (*((unsigned short*)insns + 7) & 0xf8ff) | (mreg << 8);
	*((unsigned short*)insns + 8) = (*((unsigned short*)insns + 8) & 0xffc7) | (mreg << 3);
	*((unsigned short*)insns + 9) = (*((unsigned short*)insns + 9) & 0xffc7) | ((mreg + 1) << 3);
	*((unsigned short*)insns + 10) = (*((unsigned short*)insns + 10) & 0x00ff) | (( 1 << mreg) | (1 << (mreg + 1)));

	/*
	 * Rewrite the probed instruction so it reads SP (r13, which by
	 * now holds the original PC value) instead of PC.
	 */
	if (THUMB_INSN_MATCH(APC, insn)) {
		// ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4
		*((unsigned short*)insns + 4) = ((insn & 0xffff) | 0x800); // ADD Rd, SP, #immed_8*4
	if (THUMB_INSN_MATCH(LRO3, insn)) {
		// LDR Rd, [PC, #immed_8*4] -> LDR Rd, [SP, #immed_8*4]
		*((unsigned short*)insns + 4) = ((insn & 0xffff) + 0x5000); // LDR Rd, [SP, #immed_8*4]
	if (THUMB_INSN_MATCH(MOV3, insn)) {
		// MOV Rd, PC -> MOV Rd, SP
		*((unsigned short*)insns + 4) = ((insn & 0xffff) ^ 0x10); // MOV Rd, SP
	if (THUMB2_INSN_MATCH(ADR, insn)) {
		// ADDW Rd, PC, #imm -> ADDW Rd, SP, #imm
		insns[2] = (insn & 0xfffffff0) | 0x0d; // ADDW Rd, SP, #imm
	if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRBW, insn) ||
	    THUMB2_INSN_MATCH(LDRHW, insn)) {
		// LDR.W Rt, [PC, #-<imm_12>] -> LDR.W Rt, [SP, #-<imm_8>]
		// !!!!!!!!!!!!!!!!!!!!!!!!
		// !!! imm_12 vs. imm_8 !!!
		// !!!!!!!!!!!!!!!!!!!!!!!!
		insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // LDR.W Rt, [SP, #-<imm_8>]
	if (THUMB2_INSN_MATCH(LDRW1, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
	    THUMB2_INSN_MATCH(LDRHW1, insn) || THUMB2_INSN_MATCH(LDRD, insn) ||
	    THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(LDREX, insn)) {
		// LDRx.W Rt, [PC, #+<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
		insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]
	if (THUMB2_INSN_MATCH(MUL, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // MUL Rd, Rn, SP
	if (THUMB2_INSN_MATCH(DP, insn)) {
		if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // DP Rd, Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // DP Rd, PC, Rm
	if (THUMB2_INSN_MATCH(LDRWL, insn)) {
		// LDRx.W Rt, [PC, #<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
		insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]
	if (THUMB2_INSN_MATCH(RSBW, insn)) {
		insns[2] = (insn & 0xfffffff0) | 0xd; // RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const>
	if (THUMB2_INSN_MATCH(RORW, insn) || THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) {
		if ((THUMB2_INSN_REG_RM(insn) == 15) && (THUMB2_INSN_REG_RN(insn) == 15)) {
			insns[2] = (insn & 0xfffdfffd); // ROR.W Rd, PC, PC
		} else if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR.W Rd, Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // ROR.W Rd, PC, Rm
	if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const>
	if (THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // STRx.W Rt, [Rn, SP]
	if (THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
	    THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) {
		if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // STRD/T/HT{.W} Rt, [SP, ...]
	/* NOTE(review): the inner "if" duplicates the outer Rn == 15 test */
	if (THUMB2_INSN_MATCH(STRHW, insn) && (THUMB2_INSN_REG_RN(insn) == 15)) {
		if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // STRH.W Rt, [SP, #-<imm_8>]
	/* storing PC itself: substitute SP as the stored register (Rt) */
	if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn) ||
			    THUMB2_INSN_MATCH(STRBW, insn) ||
			    THUMB2_INSN_MATCH(STRD, insn) ||
			    THUMB2_INSN_MATCH(STRHT, insn) ||
			    THUMB2_INSN_MATCH(STRT, insn) ||
			    THUMB2_INSN_MATCH(STRHW1, insn) ||
			    THUMB2_INSN_MATCH(STRHW, insn) )) {
		insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;

	if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
		insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ SP, #<const>
	if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
		if ((THUMB2_INSN_REG_RN(insn) == 15) && (THUMB2_INSN_REG_RM(insn) == 15)) {
			insns[2] = (insn & 0xfffdfffd); // TEQ/TST PC, PC
		} else if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0xd0000; // TEQ/TST Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ/TST PC, Rm
/*
 * Build the Thumb-mode trampoline for @up: classify p->opcode, choose
 * an execbuf template (generic, PC-dependent, or one of the branch
 * emulators), copy it into up->atramp.tramp_thumb and patch in the
 * instruction, the return addresses (bit 0 set selects Thumb) and the
 * 0xdeff break halfword.
 *
 * NOTE(review): many lines were lost in this copy of the file -- the
 * declarations of uregs/pc_dep/addr, most else-branches, braces and
 * the return statements.  Only the surviving skeleton is annotated.
 */
static int arch_copy_trampoline_thumb_uprobe(struct uprobe *up)
	struct kprobe *p = up2kp(up);
	unsigned long vaddr = (unsigned long)p->addr;
	unsigned long insn = p->opcode;
	unsigned long *tramp = up->atramp.tramp_thumb;
	enum { tramp_len = sizeof(up->atramp.tramp_thumb) };

	printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);

	if (!arch_check_insn_thumb(insn)) {

	/*
	 * Classify PC-dependent instructions and record, in uregs, which
	 * bit field of the encoding holds the destination register.
	 */
	if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
		uregs = 0x0700; /* 8-10 */
	} else if (THUMB_INSN_MATCH(MOV3, insn) && (((((unsigned char)insn) & 0xff) >> 3) == 15)) {
	/* NOTE(review): condition below legal only because the macro
	 * expansion parenthesizes itself -- presumably "(...)" was intended */
	} else if THUMB2_INSN_MATCH(ADR, insn) {
		uregs = 0x0f00; /* Rd 8-11 */
	} else if (((THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
		     THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
		     THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
		     THUMB2_INSN_MATCH(LDRWL, insn)) && THUMB2_INSN_REG_RN(insn) == 15) ||
		    THUMB2_INSN_MATCH(LDREX, insn) ||
		    ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
		      THUMB2_INSN_MATCH(STRHW, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) &&
		     (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15)) ||
		    ((THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHT, insn)) &&
		     (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15))) {
		uregs = 0xf000; /* Rt 12-15 */
	} else if ((THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn)) && (THUMB2_INSN_REG_RN(insn) == 15)) {
		uregs = 0xff00; /* Rt 12-15, Rt2 8-11 */
	} else if (THUMB2_INSN_MATCH(MUL, insn) && THUMB2_INSN_REG_RM(insn) == 15) {
	} else if (THUMB2_INSN_MATCH(DP, insn) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
		uregs = 0xf000; /* Rd 12-15 */
	} else if (THUMB2_INSN_MATCH(STRD, insn) && ((THUMB2_INSN_REG_RN(insn) == 15) || (THUMB2_INSN_REG_RT(insn) == 15) || THUMB2_INSN_REG_RT2(insn) == 15)) {
		uregs = 0xff00; /* Rt 12-15, Rt2 8-11 */
	} else if (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) {
		uregs = 0x0f00; /* Rd 8-11 */
	} else if (THUMB2_INSN_MATCH (RORW, insn) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
	} else if ((THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) && THUMB2_INSN_REG_RM(insn) == 15) {
		uregs = 0x0f00; /* Rd 8-11 */
	} else if ((THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
		uregs = 0x0f00; /* Rd 8-11 */
	} else if ((THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) && THUMB2_INSN_REG_RN(insn) == 15) {
		uregs = 0xf0000; /* Rn 0-3 (16-19) */
	} else if ((THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) &&
		   (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
		uregs = 0xf0000; /* Rn 0-3 (16-19) */

	/* PC-dependent instruction: use the dedicated execbuf template */
	if (unlikely(uregs && pc_dep)) {
		memcpy(tramp, pc_dep_insn_execbuf_thumb, tramp_len);
		if (prep_pc_dep_insn_execbuf_thumb(tramp, insn, uregs) != 0) {
			printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!",
			       __FILE__, __LINE__, insn);
		/* break halfword + resume address (bit 0 set = Thumb) */
		*((unsigned short*)tramp + 13) = 0xdeff;
		*((unsigned short*)tramp + 14) = addr & 0x0000ffff;
		*((unsigned short*)tramp + 15) = addr >> 16;
		if (!is_thumb2(insn)) {
			*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short*)tramp + 17) = addr >> 16;
			*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short*)tramp + 17) = addr >> 16;
		/* non-PC-dependent: copy the instruction into the generic buf */
		memcpy(tramp, gen_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		if (!is_thumb2(insn)) {
			*((unsigned short*)tramp + 2) = insn;
			*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short*)tramp + 17) = addr >> 16;
			*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short*)tramp + 17) = addr >> 16;

	/* branches are emulated with dedicated execbufs instead */
	if (THUMB_INSN_MATCH(B2, insn)) {
		memcpy(tramp, b_off_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		addr = branch_t16_dest(insn, vaddr);
		*((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 15) = addr >> 16;
		*((unsigned short*)tramp + 16) = 0;
		*((unsigned short*)tramp + 17) = 0;
	} else if (THUMB_INSN_MATCH(B1, insn)) {
		memcpy(tramp, b_cond_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		*((unsigned short*)tramp + 0) |= (insn & 0xf00); /* keep cond */
		addr = branch_cond_t16_dest(insn, vaddr);
		*((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 15) = addr >> 16;

		*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 17) = addr >> 16;
	} else if (THUMB_INSN_MATCH(BLX2, insn) ||
		   THUMB_INSN_MATCH(BX, insn)) {
		memcpy(tramp, b_r_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		*((unsigned short*)tramp + 4) = insn;

		*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 17) = addr >> 16;
	} else if (THUMB2_INSN_MATCH(BLX1, insn) ||
		   THUMB2_INSN_MATCH(BL, insn)) {
		memcpy(tramp, blx_off_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		addr = branch_t32_dest(insn, vaddr);
		*((unsigned short*)tramp + 14) = (addr & 0x0000ffff);
		*((unsigned short*)tramp + 15) = addr >> 16;

		*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 17) = addr >> 16;
	} else if (THUMB_INSN_MATCH(CBZ, insn)) {
		memcpy(tramp, cbz_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		/* zero out original branch displacement (imm5 = 0; i = 0) */
		*((unsigned short*)tramp + 0) = insn & (~0x2f8);
		/* replace it with 8 bytes offset in execbuf (imm5 = 0b00010) */
		*((unsigned short*)tramp + 0) |= 0x20;
		addr = cbz_t16_dest(insn, vaddr);
		*((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 15) = addr >> 16;

		*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 17) = addr >> 16;
/*
 * Prepare a user-space probe: read the original instruction from the
 * target task's memory, build both the ARM and the Thumb trampolines,
 * and allocate the user-space instruction slot that will hold whichever
 * trampoline is used.
 *
 * NOTE(review): braces, the "insn" declaration, the alignment check and
 * the error/return paths were lost in this copy of the file.
 */
int arch_prepare_uprobe(struct uprobe *up)
	struct kprobe *p = up2kp(up);
	struct task_struct *task = up->task;
	unsigned long vaddr = (unsigned long)p->addr;

	printk("Error in %s at %d: attempt to register uprobe "
	       "at an unaligned address\n", __FILE__, __LINE__);

	if (!read_proc_vm_atomic(task, vaddr, &insn, sizeof(insn)))
		panic("failed to read memory %lx!\n", vaddr);

	arch_copy_trampoline_arm_uprobe(up);
	arch_copy_trampoline_thumb_uprobe(up);

	/* both builders flagged the insn unsafe: nothing can single-step it */
	if ((p->safe_arm) && (p->safe_thumb)) {
		printk("Error in %s at %d: failed "
		       "arch_copy_trampoline_*_uprobe() (both) "
		       "[tgid=%u, addr=%lx, data=%lx]\n",
		       __FILE__, __LINE__, task->tgid, vaddr, insn);

	up->atramp.utramp = alloc_insn_slot(up->sm);
	if (up->atramp.utramp == NULL) {
		printk("Error: alloc_insn_slot failed (%08lx)\n", vaddr);
571 void arch_opcode_analysis_uretprobe(struct uretprobe *rp)
573 /* Remove retprobe if first insn overwrites lr */
574 rp->thumb_noret = !!(THUMB2_INSN_MATCH(BL, rp->up.kp.opcode) ||
575 THUMB2_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
576 THUMB_INSN_MATCH(BLX2, rp->up.kp.opcode));
578 rp->arm_noret = !!(ARM_INSN_MATCH(BL, rp->up.kp.opcode) ||
579 ARM_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
580 ARM_INSN_MATCH(BLX2, rp->up.kp.opcode));
583 void arch_prepare_uretprobe(struct uretprobe_instance *ri,
584 struct pt_regs *regs)
586 ri->ret_addr = (kprobe_opcode_t *)regs->ARM_lr;
587 ri->sp = (kprobe_opcode_t *)regs->ARM_sp;
589 /* Set flag of current mode */
590 ri->sp = (kprobe_opcode_t *)((long)ri->sp | !!thumb_mode(regs));
592 if (thumb_mode(regs)) {
593 regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn) + 0x1b;
595 regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
/*
 * Undo an armed return probe instance in @task: locate the trampoline
 * return address we planted -- first by scanning the top
 * RETPROBE_STACK_DEPTH words of the user stack, then by checking the
 * saved LR in the task's pt_regs -- and write the original return
 * address back over it.
 *
 * NOTE(review): several lines were lost in this copy of the file --
 * the declarations of retval/i, else branches, closing braces and the
 * buffer/value arguments of the read/write calls.
 */
int arch_disarm_urp_inst(struct uretprobe_instance *ri,
			 struct task_struct *task)
	unsigned long *tramp;
	unsigned long *sp = (unsigned long *)((long)ri->sp & ~1);
	unsigned long *stack = sp - RETPROBE_STACK_DEPTH + 1;
	unsigned long *found = NULL;
	unsigned long *buf[RETPROBE_STACK_DEPTH];

	/* Understand function mode */
	if ((long)ri->sp & 1) {
		/* bit 0 of saved sp set: Thumb-mode return break (+0x1b) */
		tramp = (unsigned long *)
			((unsigned long)ri->rp->up.kp.ainsn.insn + 0x1b);
		tramp = (unsigned long *)(ri->rp->up.kp.ainsn.insn +
					  UPROBES_TRAMP_RET_BREAK_IDX);

	retval = read_proc_vm_atomic(task, (unsigned long)stack,
	if (retval != sizeof(buf)) {
		printk("---> %s (%d/%d): failed to read stack from %08lx\n",
		       task->comm, task->tgid, task->pid,
		       (unsigned long)stack);

	/* search the stack from the bottom */
	for (i = RETPROBE_STACK_DEPTH - 1; i >= 0; i--) {
		if (buf[i] == tramp) {

	printk("---> %s (%d/%d): trampoline found at "
	       "%08lx (%08lx /%+d) - %p\n",
	       task->comm, task->tgid, task->pid,
	       (unsigned long)found, (unsigned long)sp,
	       found - sp, ri->rp->up.kp.addr);
	retval = write_proc_vm_atomic(task, (unsigned long)found,
				      sizeof(ri->ret_addr));
	if (retval != sizeof(ri->ret_addr)) {
		printk("---> %s (%d/%d): failed to write value "
		       task->comm, task->tgid, task->pid, (unsigned long)found);

	/* not on the stack: maybe still sitting in the saved LR */
	struct pt_regs *uregs = task_pt_regs(ri->task);
	unsigned long ra = dbi_get_ret_addr(uregs);
	if (ra == (unsigned long)tramp) {
		printk("---> %s (%d/%d): trampoline found at "
		       task->comm, task->tgid, task->pid,
		       ra, ri->rp->up.kp.addr);
		dbi_set_ret_addr(uregs, (unsigned long)ri->ret_addr);
	printk("---> %s (%d/%d): trampoline NOT found at "
	       "sp = %08lx, lr = %08lx - %p\n",
	       task->comm, task->tgid, task->pid,
	       (unsigned long)sp, ra, ri->rp->up.kp.addr);
/*
 * Pre-handler for user-space jprobes: record the single-step resume
 * address computed by the probe's pre_entry hook, then invoke the
 * user-supplied entry handler with the first six argument registers
 * (AAPCS passes the first four in r0-r3; r4/r5 are forwarded as well).
 *
 * NOTE(review): braces, the guard conditionals around pre_entry/entry
 * and the return statement were lost in this copy of the file.
 */
int setjmp_upre_handler(struct kprobe *p, struct pt_regs *regs)
	struct uprobe *up = container_of(p, struct uprobe, kp);
	struct ujprobe *jp = container_of(up, struct ujprobe, up);

	kprobe_pre_entry_handler_t pre_entry = (kprobe_pre_entry_handler_t)jp->pre_entry;
	entry_point_t entry = (entry_point_t)jp->entry;

	p->ss_addr[smp_processor_id()] = (kprobe_opcode_t *)
		pre_entry(jp->priv_arg, regs);

	entry(regs->ARM_r0, regs->ARM_r1, regs->ARM_r2,
	      regs->ARM_r3, regs->ARM_r4, regs->ARM_r5);

	arch_ujprobe_return();
699 unsigned long arch_get_trampoline_addr(struct kprobe *p, struct pt_regs *regs)
701 return thumb_mode(regs) ?
702 (unsigned long)(p->ainsn.insn) + 0x1b :
703 (unsigned long)(p->ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
706 void arch_set_orig_ret_addr(unsigned long orig_ret_addr, struct pt_regs *regs)
708 regs->ARM_lr = orig_ret_addr;
709 regs->ARM_pc = orig_ret_addr & ~0x1;
711 if (regs->ARM_lr & 0x1)
712 regs->ARM_cpsr |= PSR_T_BIT;
714 regs->ARM_cpsr &= ~PSR_T_BIT;
717 static void restore_opcode_for_thumb(struct kprobe *p, struct pt_regs *regs)
719 if (thumb_mode(regs) && !is_thumb2(p->opcode)) {
720 u16 tmp = p->opcode >> 16;
721 write_proc_vm_atomic(current,
722 (unsigned long)((u16*)p->addr + 1), &tmp, 2);
723 flush_insns(p->addr, 4);
/*
 * Materialize the right trampoline for the mode the task is executing
 * in: pick tramp_arm or tramp_thumb based on the mode/support bits,
 * copy it into the user-space slot and point p->ainsn.insn at it.  If
 * the needed mode is unsupported the probe is disarmed instead.
 *
 * NOTE(review): the switch/case skeleton over "sw", braces, the "sw"
 * declaration and the return statements were lost in this copy of the
 * file.
 */
static int make_trampoline(struct uprobe *up, struct pt_regs *regs)
	unsigned long *tramp, *utramp;
	struct kprobe *p = up2kp(up);

	/*
	 * 0 bit - thumb mode (0 - arm, 1 - thumb)
	 * 1 bit - arm mode support (0 - off, 1 - on)
	 * 2 bit - thumb mode support (0 - off, 1 - on)
	 */
	sw = (!!thumb_mode(regs)) |
	     (int)!p->safe_arm << 1 |
	     (int)!p->safe_thumb << 2;

	tramp = up->atramp.tramp_arm;

	restore_opcode_for_thumb(p, regs);
	tramp = up->atramp.tramp_thumb;

	printk("Error in %s at %d: we are in arm mode "
	       "(!) and check instruction was fail "
	       "(%0lX instruction at %p address)!\n",
	       __FILE__, __LINE__, p->opcode, p->addr);

	disarm_uprobe(p, up->task);

	utramp = up->atramp.utramp;

	if (!write_proc_vm_atomic(up->task, (unsigned long)utramp, tramp,
				  UPROBES_TRAMP_LEN * sizeof(*tramp)))
		panic("failed to write memory %p!\n", utramp);
	flush_insns(utramp, UPROBES_TRAMP_LEN * sizeof(*tramp));

	p->ainsn.insn = utramp;
/*
 * Breakpoint dispatcher: look the faulting address up among this
 * task's probes.  A miss may still be the return break inside one of
 * our trampolines (address - break offset); otherwise the exception is
 * not ours.  On a hit, lazily build the trampoline on first use, run
 * the pre-handler and enter single-step emulation.
 *
 * NOTE(review): the "struct kprobe *p" declaration, the Thumb branch
 * of the offset_bp ternary, several if/else braces and the return
 * statements were lost in this copy of the file.
 */
static int uprobe_handler(struct pt_regs *regs)
	kprobe_opcode_t *addr = (kprobe_opcode_t *)(regs->ARM_pc);
	struct task_struct *task = current;
	pid_t tgid = task->tgid;

	p = get_ukprobe(addr, tgid);
		unsigned long offset_bp = thumb_mode(regs) ?
			4 * UPROBES_TRAMP_RET_BREAK_IDX;
		void *tramp_addr = (void *)addr - offset_bp;

		p = get_ukprobe_by_insn_slot(tramp_addr, tgid, regs);
			printk("no_uprobe: Not one of ours: let "
			       "kernel handle it %p\n", addr);

		trampoline_uprobe_handler(p, regs);

	/* first hit: the user-space trampoline is built on demand */
	if (p->ainsn.insn == NULL) {
		struct uprobe *up = kp2up(p);

		if (make_trampoline(up, regs)) {
			printk("no_uprobe live\n");

	if (!p->pre_handler || !p->pre_handler(p, regs)) {
		prepare_singlestep(p, regs);
/*
 * Undef-instruction hook entry: run the uprobe breakpoint handler with
 * IRQs disabled, then drop the preemption count raised on the
 * exception path (preempt_enable_no_resched).
 *
 * NOTE(review): the declarations of "flags"/"ret", the matching
 * preempt_disable()/braces and the return statement were lost in this
 * copy of the file.
 */
int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
	local_irq_save(flags);

	ret = uprobe_handler(regs);
	preempt_enable_no_resched();

	local_irq_restore(flags);
/* userspace probes hook (arm): route the full 32-bit ARM breakpoint
 * opcode, taken in user mode, to uprobe_trap_handler() */
static struct undef_hook undef_hook_for_us_arm = {
	.instr_mask = 0xffffffff,
	.instr_val = BREAKPOINT_INSTRUCTION,
	.cpsr_mask = MODE_MASK,
	.cpsr_val = USR_MODE,
	.fn = uprobe_trap_handler
/* NOTE(review): closing "};" of this initializer was lost in this copy */
/* userspace probes hook (thumb): same dispatch for the 16-bit Thumb
 * breakpoint encoding (low halfword of BREAKPOINT_INSTRUCTION) */
static struct undef_hook undef_hook_for_us_thumb = {
	.instr_mask = 0xffffffff,
	.instr_val = BREAKPOINT_INSTRUCTION & 0x0000ffff,
	.cpsr_mask = MODE_MASK,
	.cpsr_val = USR_MODE,
	.fn = uprobe_trap_handler
/* NOTE(review): closing "};" of this initializer was lost in this copy */
/*
 * Register both undef-instruction hooks so user-space breakpoints in
 * ARM and Thumb mode reach uprobe_trap_handler().
 *
 * NOTE(review): the function braces and the return value line were
 * lost in this copy of the file.
 */
int swap_arch_init_uprobes(void)
	swap_register_undef_hook(&undef_hook_for_us_arm);
	swap_register_undef_hook(&undef_hook_for_us_thumb);
/*
 * Unregister the user-space breakpoint hooks, in reverse order of
 * registration.
 *
 * NOTE(review): the function braces were lost in this copy of the file.
 */
void swap_arch_exit_uprobes(void)
	swap_unregister_undef_hook(&undef_hook_for_us_thumb);
	swap_unregister_undef_hook(&undef_hook_for_us_arm);