2 * Dynamic Binary Instrumentation Module based on KProbes
3 * modules/uprobe/arch/asm-arm/swap_uprobes.h
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
19 * Copyright (C) Samsung Electronics, 2006-2010
21 * 2008-2009 Alexey Gerenkov <a.gerenkov@samsung.com> User-Space
22 * Probes initial implementation; Support x86/ARM/MIPS for both user and kernel spaces.
23 * 2010 Ekaterina Gorelkina <e.gorelkina@samsung.com>: redesign module for separating core and arch parts
27 #include <kprobe/dbi_kprobes.h>
28 #include <kprobe/arch/asm/dbi_kprobes.h>
29 #include <kprobe/arch/asm/trampoline_arm.h>
30 #include <asm/traps.h>
31 #include <uprobe/swap_uprobes.h>
32 #include <uprobe/arch/asm/swap_uprobes.h>
33 #include <kprobe/dbi_insn_slots.h>
34 #include <kprobe/dbi_kprobes_deps.h>
35 #include "trampoline_thumb.h"
38 #include <kprobe/dbi_kdebug.h>
/* Flush the instruction cache for [addr, addr + size) after patching code. */
41 #define flush_insns(addr, size) \
42 flush_icache_range((unsigned long)(addr), \
43 (unsigned long)(addr) + (size))
/*
 * Decode the destination of a 16-bit Thumb unconditional branch (B):
 * sign-extend the 11-bit halfword offset and add it, scaled by 2, to the
 * PC value seen at execution time (insn_addr + 4).
 */
45 static inline long branch_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
47 long offset = insn & 0x3ff;
/* bit 10 is the sign bit; subtracting it sign-extends the offset */
48 offset -= insn & 0x400;
49 return (insn_addr + 4 + offset * 2);
/*
 * Decode the destination of a 16-bit Thumb conditional branch (B<cond>):
 * sign-extend the 8-bit offset and add it, scaled by 2, to insn_addr + 4.
 */
52 static inline long branch_cond_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
54 long offset = insn & 0x7f;
/* bit 7 is the sign bit of the 8-bit offset */
55 offset -= insn & 0x80;
56 return (insn_addr + 4 + offset * 2);
/*
 * Decode the destination of a 32-bit Thumb BL/BLX: combine the signed
 * high part (poff, shifted left by 12) with the 11-bit low part
 * (offset, scaled by 4) relative to insn_addr + 4.  Two return forms
 * exist; the "& ~3" one is presumably the BLX (to-ARM) case, which
 * requires a word-aligned target — TODO confirm against the branch
 * condition selecting between them (not visible here).
 */
59 static inline long branch_t32_dest(kprobe_opcode_t insn, unsigned int insn_addr)
61 unsigned int poff = insn & 0x3ff;
62 unsigned int offset = (insn & 0x07fe0000) >> 17;
/* bit 10 of the first halfword is the sign bit of poff */
64 poff -= (insn & 0x400);
67 return ((insn_addr + 4 + (poff << 12) + offset * 4));
/* word-aligned variant: ARM targets must be 4-byte aligned */
69 return ((insn_addr + 4 + (poff << 12) + offset * 4) & ~3);
/*
 * Decode the destination of a Thumb CBZ/CBNZ: the i bit (bit 9) and
 * imm5 field (bits 3-7) form the branch offset; the shifts applied
 * here already include the implicit multiply-by-2.
 */
72 static inline long cbz_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
74 unsigned int i = (insn & 0x200) >> 3;
75 unsigned int offset = (insn & 0xf8) >> 2;
76 return insn_addr + 4 + i + offset;
79 /* is instruction Thumb2 and NOT a branch, etc... */
/*
 * A first halfword of the form 0b11101/0b11110/0b11111 (masked values
 * 0xe800, 0xf000, 0xf800) marks a 32-bit Thumb-2 encoding.
 */
80 static int is_thumb2(kprobe_opcode_t insn)
82 return ((insn & 0xf800) == 0xe800 ||
83 (insn & 0xf800) == 0xf000 ||
84 (insn & 0xf800) == 0xf800);
/*
 * Build the ARM-mode out-of-line execution buffer for @up by handing the
 * probed address and original opcode to arch_make_trampoline_arm(), which
 * fills up->atramp.tramp_arm.
 */
87 static int arch_copy_trampoline_arm_uprobe(struct uprobe *up)
90 struct kprobe *p = up2kp(up);
91 unsigned long insn = p->opcode;
92 unsigned long vaddr = (unsigned long)p->addr;
93 unsigned long *tramp = up->atramp.tramp_arm;
95 ret = arch_make_trampoline_arm(vaddr, insn, tramp);
/*
 * Reject Thumb/Thumb-2 instructions that cannot safely be executed out
 * of line: explicit traps (UNDEF/SWI/BREAK), branches, and any encoding
 * whose Rd/Rt/Rn is PC, plus LDM/LDRD/STRD forms and PC-relative stores
 * noted below.  On a match the bad opcode is logged; the return value
 * convention is set on a hidden path (caller tests the result with "!").
 */
101 static int arch_check_insn_thumb(unsigned long insn)
105 /* check instructions that can change PC */
106 if (THUMB_INSN_MATCH(UNDEF, insn) ||
107 THUMB_INSN_MATCH(SWI, insn) ||
108 THUMB_INSN_MATCH(BREAK, insn) ||
109 THUMB2_INSN_MATCH(B1, insn) ||
110 THUMB2_INSN_MATCH(B2, insn) ||
111 THUMB2_INSN_MATCH(BXJ, insn) ||
112 (THUMB2_INSN_MATCH(ADR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
113 (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
114 (THUMB2_INSN_MATCH(LDRW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
115 (THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
116 (THUMB2_INSN_MATCH(LDRHW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
117 (THUMB2_INSN_MATCH(LDRWL, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
118 THUMB2_INSN_MATCH(LDMIA, insn) ||
119 THUMB2_INSN_MATCH(LDMDB, insn) ||
120 (THUMB2_INSN_MATCH(DP, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
121 (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
122 (THUMB2_INSN_MATCH(RORW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
123 (THUMB2_INSN_MATCH(ROR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
124 (THUMB2_INSN_MATCH(LSLW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
125 (THUMB2_INSN_MATCH(LSLW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
126 (THUMB2_INSN_MATCH(LSRW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
127 (THUMB2_INSN_MATCH(LSRW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
128 /* skip PC, #-imm12 -> SP, #-imm8 and Tegra-hanging instructions */
129 (THUMB2_INSN_MATCH(STRW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
130 (THUMB2_INSN_MATCH(STRBW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
131 (THUMB2_INSN_MATCH(STRHW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
132 (THUMB2_INSN_MATCH(STRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
133 (THUMB2_INSN_MATCH(STRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
134 (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
135 (THUMB2_INSN_MATCH(LDRBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
136 (THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
137 /* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
138 (THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(STRD, insn))) {
139 printk("Bad insn arch_check_insn_thumb: %lx\n", insn);
/*
 * Patch a PC-dependent Thumb instruction into the out-of-line execution
 * buffer @insns.
 *
 * @uregs is a bitmask locating the register field of interest inside the
 * encoding; the first half of the function extracts that register number
 * into @reg for each supported instruction class.  The second half
 * rewrites the instruction copy in the buffer so that every reference to
 * PC (r15) is redirected to SP (r13, the 0xd nibbles below) — the buffer
 * prologue/epilogue, patched via @mreg, presumably loads the original PC
 * value into SP-relative scratch registers first (TODO confirm: the mreg
 * selection logic between the extraction and the reg==6/7 test is not
 * visible in this view).
 */
146 static int prep_pc_dep_insn_execbuf_thumb(kprobe_opcode_t * insns, kprobe_opcode_t insn, int uregs)
148 unsigned char mreg = 0;
149 unsigned char reg = 0;
151 if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
152 reg = ((insn & 0xffff) & uregs) >> 8;
154 if (THUMB_INSN_MATCH(MOV3, insn)) {
155 if (((((unsigned char) insn) & 0xff) >> 3) == 15) {
156 reg = (insn & 0xffff) & uregs;
161 if (THUMB2_INSN_MATCH(ADR, insn)) {
162 reg = ((insn >> 16) & uregs) >> 8;
167 if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
168 THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
169 THUMB2_INSN_MATCH(LDRWL, insn)) {
170 reg = ((insn >> 16) & uregs) >> 12;
175 // LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
176 if (THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
177 THUMB2_INSN_MATCH(LDREX, insn)) {
178 reg = ((insn >> 16) & uregs) >> 12;
180 if (THUMB2_INSN_MATCH(DP, insn)) {
181 reg = ((insn >> 16) & uregs) >> 12;
186 if (THUMB2_INSN_MATCH(RSBW, insn)) {
187 reg = ((insn >> 12) & uregs) >> 8;
192 if (THUMB2_INSN_MATCH(RORW, insn)) {
193 reg = ((insn >> 12) & uregs) >> 8;
198 if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW1, insn) ||
199 THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW1, insn) ||
200 THUMB2_INSN_MATCH(LSRW2, insn)) {
201 reg = ((insn >> 12) & uregs) >> 8;
206 if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
209 if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
210 reg = THUMB2_INSN_REG_RM(insn);
223 if ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
224 THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
225 THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn) ||
226 THUMB2_INSN_MATCH(STRHW, insn)) && THUMB2_INSN_REG_RT(insn) == 15) {
227 reg = THUMB2_INSN_REG_RT(insn);
/* re-patch the buffer's push/mov/pop halfwords to use the chosen scratch
 * register pair (mreg, mreg+1) — presumably to avoid colliding with the
 * instruction's own registers when reg is 6 or 7; TODO confirm */
230 if (reg == 6 || reg == 7) {
231 *((unsigned short*)insns + 0) = (*((unsigned short*)insns + 0) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
232 *((unsigned short*)insns + 1) = (*((unsigned short*)insns + 1) & 0xf8ff) | (mreg << 8);
233 *((unsigned short*)insns + 2) = (*((unsigned short*)insns + 2) & 0xfff8) | (mreg + 1);
234 *((unsigned short*)insns + 3) = (*((unsigned short*)insns + 3) & 0xffc7) | (mreg << 3);
235 *((unsigned short*)insns + 7) = (*((unsigned short*)insns + 7) & 0xf8ff) | (mreg << 8);
236 *((unsigned short*)insns + 8) = (*((unsigned short*)insns + 8) & 0xffc7) | (mreg << 3);
237 *((unsigned short*)insns + 9) = (*((unsigned short*)insns + 9) & 0xffc7) | ((mreg + 1) << 3);
238 *((unsigned short*)insns + 10) = (*((unsigned short*)insns + 10) & 0x00ff) | (( 1 << mreg) | (1 << (mreg + 1)));
/* below: rewrite the instruction copy so PC operands become SP (0xd) */
241 if (THUMB_INSN_MATCH(APC, insn)) {
242 // ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4
243 *((unsigned short*)insns + 4) = ((insn & 0xffff) | 0x800); // ADD Rd, SP, #immed_8*4
245 if (THUMB_INSN_MATCH(LRO3, insn)) {
246 // LDR Rd, [PC, #immed_8*4] -> LDR Rd, [SP, #immed_8*4]
247 *((unsigned short*)insns + 4) = ((insn & 0xffff) + 0x5000); // LDR Rd, [SP, #immed_8*4]
249 if (THUMB_INSN_MATCH(MOV3, insn)) {
250 // MOV Rd, PC -> MOV Rd, SP
251 *((unsigned short*)insns + 4) = ((insn & 0xffff) ^ 0x10); // MOV Rd, SP
253 if (THUMB2_INSN_MATCH(ADR, insn)) {
254 // ADDW Rd, PC, #imm -> ADDW Rd, SP, #imm
255 insns[2] = (insn & 0xfffffff0) | 0x0d; // ADDW Rd, SP, #imm
257 if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRBW, insn) ||
258 THUMB2_INSN_MATCH(LDRHW, insn)) {
259 // LDR.W Rt, [PC, #-<imm_12>] -> LDR.W Rt, [SP, #-<imm_8>]
260 // !!!!!!!!!!!!!!!!!!!!!!!!
261 // !!! imm_12 vs. imm_8 !!!
262 // !!!!!!!!!!!!!!!!!!!!!!!!
263 insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // LDR.W Rt, [SP, #-<imm_8>]
265 if (THUMB2_INSN_MATCH(LDRW1, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
266 THUMB2_INSN_MATCH(LDRHW1, insn) || THUMB2_INSN_MATCH(LDRD, insn) ||
267 THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(LDREX, insn)) {
268 // LDRx.W Rt, [PC, #+<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
269 insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]
271 if (THUMB2_INSN_MATCH(MUL, insn)) {
272 insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // MUL Rd, Rn, SP
274 if (THUMB2_INSN_MATCH(DP, insn)) {
275 if (THUMB2_INSN_REG_RM(insn) == 15) {
276 insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // DP Rd, Rn, PC
277 } else if (THUMB2_INSN_REG_RN(insn) == 15) {
278 insns[2] = (insn & 0xfffffff0) | 0xd; // DP Rd, PC, Rm
281 if (THUMB2_INSN_MATCH(LDRWL, insn)) {
282 // LDRx.W Rt, [PC, #<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
283 insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]
285 if (THUMB2_INSN_MATCH(RSBW, insn)) {
286 insns[2] = (insn & 0xfffffff0) | 0xd; // RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const>
288 if (THUMB2_INSN_MATCH(RORW, insn) || THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) {
289 if ((THUMB2_INSN_REG_RM(insn) == 15) && (THUMB2_INSN_REG_RN(insn) == 15)) {
290 insns[2] = (insn & 0xfffdfffd); // ROR.W Rd, PC, PC
291 } else if (THUMB2_INSN_REG_RM(insn) == 15) {
292 insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR.W Rd, Rn, PC
293 } else if (THUMB2_INSN_REG_RN(insn) == 15) {
294 insns[2] = (insn & 0xfffffff0) | 0xd; // ROR.W Rd, PC, Rm
297 if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) {
298 insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const>
312 if (THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn)) {
313 insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // STRx.W Rt, [Rn, SP]
315 if (THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
316 THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) {
317 if (THUMB2_INSN_REG_RN(insn) == 15) {
318 insns[2] = (insn & 0xfffffff0) | 0xd; // STRD/T/HT{.W} Rt, [SP, ...]
323 if (THUMB2_INSN_MATCH(STRHW, insn) && (THUMB2_INSN_REG_RN(insn) == 15)) {
324 if (THUMB2_INSN_REG_RN(insn) == 15) {
325 insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // STRH.W Rt, [SP, #-<imm_8>]
/* stores with Rt == PC: substitute SP into the Rt field (top nibble) */
334 if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn) ||
335 THUMB2_INSN_MATCH(STRBW, insn) ||
336 THUMB2_INSN_MATCH(STRD, insn) ||
337 THUMB2_INSN_MATCH(STRHT, insn) ||
338 THUMB2_INSN_MATCH(STRT, insn) ||
339 THUMB2_INSN_MATCH(STRHW1, insn) ||
340 THUMB2_INSN_MATCH(STRHW, insn) )) {
341 insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;
344 if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
345 insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ SP, #<const>
347 if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
348 if ((THUMB2_INSN_REG_RN(insn) == 15) && (THUMB2_INSN_REG_RM(insn) == 15)) {
349 insns[2] = (insn & 0xfffdfffd); // TEQ/TST PC, PC
350 } else if (THUMB2_INSN_REG_RM(insn) == 15) {
351 insns[2] = (insn & 0xfff0ffff) | 0xd0000; // TEQ/TST Rn, PC
352 } else if (THUMB2_INSN_REG_RN(insn) == 15) {
353 insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ/TST PC, Rm
/*
 * Build the Thumb-mode out-of-line trampoline for @up in
 * up->atramp.tramp_thumb.
 *
 * Flow: validate the original instruction, classify it (the @uregs mask
 * records which register field can hold PC for PC-dependent encodings),
 * then copy the matching execbuf template and patch it: the original
 * instruction, the break halfword 0xdeff, and return addresses (low bit
 * set to 1 to continue in Thumb state).  Branch-class instructions get
 * dedicated templates with the decoded branch destination patched in.
 */
361 static int arch_copy_trampoline_thumb_uprobe(struct uprobe *up)
364 struct kprobe *p = up2kp(up);
366 unsigned long vaddr = (unsigned long)p->addr;
367 unsigned long insn = p->opcode;
368 unsigned long *tramp = up->atramp.tramp_thumb;
369 enum { tramp_len = sizeof(up->atramp.tramp_thumb) };
373 printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);
377 if (!arch_check_insn_thumb(insn)) {
/* classify the instruction: uregs marks the register field that may
 * reference PC and therefore needs the PC-dependent execbuf */
384 if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
385 uregs = 0x0700; /* 8-10 */
387 } else if (THUMB_INSN_MATCH(MOV3, insn) && (((((unsigned char)insn) & 0xff) >> 3) == 15)) {
391 } else if THUMB2_INSN_MATCH(ADR, insn) {
392 uregs = 0x0f00; /* Rd 8-11 */
394 } else if (((THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
395 THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
396 THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
397 THUMB2_INSN_MATCH(LDRWL, insn)) && THUMB2_INSN_REG_RN(insn) == 15) ||
398 THUMB2_INSN_MATCH(LDREX, insn) ||
399 ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
400 THUMB2_INSN_MATCH(STRHW, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) &&
401 (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15)) ||
402 ((THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHT, insn)) &&
403 (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15))) {
404 uregs = 0xf000; /* Rt 12-15 */
406 } else if ((THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn)) && (THUMB2_INSN_REG_RN(insn) == 15)) {
407 uregs = 0xff00; /* Rt 12-15, Rt2 8-11 */
409 } else if (THUMB2_INSN_MATCH(MUL, insn) && THUMB2_INSN_REG_RM(insn) == 15) {
412 } else if (THUMB2_INSN_MATCH(DP, insn) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
413 uregs = 0xf000; /* Rd 12-15 */
415 } else if (THUMB2_INSN_MATCH(STRD, insn) && ((THUMB2_INSN_REG_RN(insn) == 15) || (THUMB2_INSN_REG_RT(insn) == 15) || THUMB2_INSN_REG_RT2(insn) == 15)) {
416 uregs = 0xff00; /* Rt 12-15, Rt2 8-11 */
418 } else if (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) {
419 uregs = 0x0f00; /* Rd 8-11 */
421 } else if (THUMB2_INSN_MATCH (RORW, insn) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
424 } else if ((THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) && THUMB2_INSN_REG_RM(insn) == 15) {
425 uregs = 0x0f00; /* Rd 8-11 */
427 } else if ((THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
428 uregs = 0x0f00; /* Rd 8-11 */
430 } else if ((THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) && THUMB2_INSN_REG_RN(insn) == 15) {
431 uregs = 0xf0000; /* Rn 0-3 (16-19) */
433 } else if ((THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) &&
434 (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
435 uregs = 0xf0000; /* Rn 0-3 (16-19) */
/* PC-dependent instruction: use the PC-emulating execbuf template */
439 if (unlikely(uregs && pc_dep)) {
440 memcpy(tramp, pc_dep_insn_execbuf_thumb, tramp_len);
441 if (prep_pc_dep_insn_execbuf_thumb(tramp, insn, uregs) != 0) {
442 printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!",
443 __FILE__, __LINE__, insn);
/* 0xdeff is presumably the Thumb breakpoint halfword used by this
 * module; halfwords 14-17 hold the resume addresses — TODO confirm
 * against the execbuf template layout */
448 *((unsigned short*)tramp + 13) = 0xdeff;
449 *((unsigned short*)tramp + 14) = addr & 0x0000ffff;
450 *((unsigned short*)tramp + 15) = addr >> 16;
451 if (!is_thumb2(insn)) {
453 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
454 *((unsigned short*)tramp + 17) = addr >> 16;
457 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
458 *((unsigned short*)tramp + 17) = addr >> 16;
/* generic (non-PC-dependent) instruction: execute it verbatim */
461 memcpy(tramp, gen_insn_execbuf_thumb, tramp_len);
462 *((unsigned short*)tramp + 13) = 0xdeff;
463 if (!is_thumb2(insn)) {
465 *((unsigned short*)tramp + 2) = insn;
466 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
467 *((unsigned short*)tramp + 17) = addr >> 16;
471 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
472 *((unsigned short*)tramp + 17) = addr >> 16;
/* branch instructions: dedicated templates with the decoded target */
476 if (THUMB_INSN_MATCH(B2, insn)) {
477 memcpy(tramp, b_off_insn_execbuf_thumb, tramp_len);
478 *((unsigned short*)tramp + 13) = 0xdeff;
479 addr = branch_t16_dest(insn, vaddr);
480 *((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
481 *((unsigned short*)tramp + 15) = addr >> 16;
482 *((unsigned short*)tramp + 16) = 0;
483 *((unsigned short*)tramp + 17) = 0;
485 } else if (THUMB_INSN_MATCH(B1, insn)) {
486 memcpy(tramp, b_cond_insn_execbuf_thumb, tramp_len);
487 *((unsigned short*)tramp + 13) = 0xdeff;
488 *((unsigned short*)tramp + 0) |= (insn & 0xf00);
489 addr = branch_cond_t16_dest(insn, vaddr);
490 *((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
491 *((unsigned short*)tramp + 15) = addr >> 16;
493 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
494 *((unsigned short*)tramp + 17) = addr >> 16;
496 } else if (THUMB_INSN_MATCH(BLX2, insn) ||
497 THUMB_INSN_MATCH(BX, insn)) {
498 memcpy(tramp, b_r_insn_execbuf_thumb, tramp_len);
499 *((unsigned short*)tramp + 13) = 0xdeff;
500 *((unsigned short*)tramp + 4) = insn;
502 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
503 *((unsigned short*)tramp + 17) = addr >> 16;
505 } else if (THUMB2_INSN_MATCH(BLX1, insn) ||
506 THUMB2_INSN_MATCH(BL, insn)) {
507 memcpy(tramp, blx_off_insn_execbuf_thumb, tramp_len);
508 *((unsigned short*)tramp + 13) = 0xdeff;
509 addr = branch_t32_dest(insn, vaddr);
510 *((unsigned short*)tramp + 14) = (addr & 0x0000ffff);
511 *((unsigned short*)tramp + 15) = addr >> 16;
513 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
514 *((unsigned short*)tramp + 17) = addr >> 16;
516 } else if (THUMB_INSN_MATCH(CBZ, insn)) {
517 memcpy(tramp, cbz_insn_execbuf_thumb, tramp_len);
518 *((unsigned short*)tramp + 13) = 0xdeff;
519 /* zero out original branch displacement (imm5 = 0; i = 0) */
520 *((unsigned short*)tramp + 0) = insn & (~0x2f8);
521 /* replace it with 8 bytes offset in execbuf (imm5 = 0b00010) */
522 *((unsigned short*)tramp + 0) |= 0x20;
523 addr = cbz_t16_dest(insn, vaddr);
524 *((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
525 *((unsigned short*)tramp + 15) = addr >> 16;
527 *((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
528 *((unsigned short*)tramp + 17) = addr >> 16;
/*
 * Prepare a uprobe for arming: read the original instruction from the
 * traced task's memory, build both ARM and Thumb out-of-line
 * trampolines, and allocate a user-space instruction slot.  If both
 * trampoline builders marked the insn unsafe (safe_arm && safe_thumb),
 * the original opcode is written back and the probe is rejected.
 */
534 int arch_prepare_uprobe(struct uprobe *up)
536 struct kprobe *p = up2kp(up);
537 struct task_struct *task = up->task;
538 unsigned long vaddr = (unsigned long)p->addr;
542 printk("Error in %s at %d: attempt to register uprobe "
543 "at an unaligned address\n", __FILE__, __LINE__);
547 if (!read_proc_vm_atomic(task, vaddr, &insn, sizeof(insn)))
548 panic("failed to read memory %lx!\n", vaddr);
552 arch_copy_trampoline_arm_uprobe(up);
553 arch_copy_trampoline_thumb_uprobe(up);
/* both modes rejected the instruction: restore and bail out */
555 if ((p->safe_arm) && (p->safe_thumb)) {
556 printk("Error in %s at %d: failed "
557 "arch_copy_trampoline_*_uprobe() (both) "
558 "[tgid=%u, addr=%lx, data=%lx]\n",
559 __FILE__, __LINE__, task->tgid, vaddr, insn);
561 if (!write_proc_vm_atomic(task, vaddr, &insn, sizeof(insn)))
562 panic("Failed to write memory %p!\n", p->addr);
567 up->atramp.utramp = alloc_insn_slot(up->sm);
568 if (up->atramp.utramp == NULL) {
569 printk("Error: alloc_insn_slot failed (%08lx)\n", vaddr);
/*
 * Decide per mode whether a uretprobe can be placed: a BL/BLX as the
 * first instruction overwrites LR, so the hijacked return address would
 * be lost — mark the corresponding *_noret flag.
 */
576 void arch_opcode_analysis_uretprobe(struct uretprobe *rp)
578 /* Remove retprobe if first insn overwrites lr */
579 rp->thumb_noret = !!(THUMB2_INSN_MATCH(BL, rp->up.kp.opcode) ||
580 THUMB2_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
581 THUMB_INSN_MATCH(BLX2, rp->up.kp.opcode));
583 rp->arm_noret = !!(ARM_INSN_MATCH(BL, rp->up.kp.opcode) ||
584 ARM_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
585 ARM_INSN_MATCH(BLX2, rp->up.kp.opcode));
/*
 * Hijack the return path for a uretprobe instance: save the original LR
 * and SP (SP's low bit is reused to record Thumb mode), then redirect LR
 * to the trampoline's return breakpoint — byte offset 0x1b in Thumb
 * mode, slot index UPROBES_TRAMP_RET_BREAK_IDX in ARM mode.
 */
588 void arch_prepare_uretprobe(struct uretprobe_instance *ri,
589 struct pt_regs *regs)
591 ri->ret_addr = (kprobe_opcode_t *)regs->ARM_lr;
592 ri->sp = (kprobe_opcode_t *)regs->ARM_sp;
594 /* Set flag of current mode */
595 ri->sp = (kprobe_opcode_t *)((long)ri->sp | !!thumb_mode(regs));
597 if (thumb_mode(regs)) {
598 regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn) + 0x1b;
600 regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
/*
 * Undo return-address hijacking for one uretprobe instance in @task:
 * read the top RETPROBE_STACK_DEPTH words of the user stack and scan
 * them bottom-up for the trampoline address; when found, write the
 * saved original return address over it.  If the trampoline is not on
 * the stack, fall back to checking the task's saved return register.
 * The mode flag in bit 0 of ri->sp selects the Thumb (+0x1b) or ARM
 * trampoline break address.
 */
604 int arch_disarm_urp_inst(struct uretprobe_instance *ri,
605 struct task_struct *task)
607 unsigned long *tramp;
608 unsigned long *sp = (unsigned long *)((long)ri->sp & ~1);
609 unsigned long *stack = sp - RETPROBE_STACK_DEPTH + 1;
610 unsigned long *found = NULL;
611 unsigned long *buf[RETPROBE_STACK_DEPTH];
614 /* Understand function mode */
615 if ((long)ri->sp & 1) {
616 tramp = (unsigned long *)
617 ((unsigned long)ri->rp->up.kp.ainsn.insn + 0x1b);
619 tramp = (unsigned long *)(ri->rp->up.kp.ainsn.insn +
620 UPROBES_TRAMP_RET_BREAK_IDX);
623 retval = read_proc_vm_atomic(task, (unsigned long)stack,
625 if (retval != sizeof(buf)) {
626 printk("---> %s (%d/%d): failed to read stack from %08lx\n",
627 task->comm, task->tgid, task->pid,
628 (unsigned long)stack);
633 /* search the stack from the bottom */
634 for (i = RETPROBE_STACK_DEPTH - 1; i >= 0; i--) {
635 if (buf[i] == tramp) {
642 printk("---> %s (%d/%d): trampoline found at "
643 "%08lx (%08lx /%+d) - %p\n",
644 task->comm, task->tgid, task->pid,
645 (unsigned long)found, (unsigned long)sp,
646 found - sp, ri->rp->up.kp.addr);
647 retval = write_proc_vm_atomic(task, (unsigned long)found,
649 sizeof(ri->ret_addr));
650 if (retval != sizeof(ri->ret_addr)) {
651 printk("---> %s (%d/%d): failed to write value "
653 task->comm, task->tgid, task->pid, (unsigned long)found);
/* not on the stack: maybe the return address is still in LR */
659 struct pt_regs *uregs = task_pt_regs(ri->task);
660 unsigned long ra = dbi_get_ret_addr(uregs);
661 if (ra == (unsigned long)tramp) {
662 printk("---> %s (%d/%d): trampoline found at "
664 task->comm, task->tgid, task->pid,
665 ra, ri->rp->up.kp.addr);
666 dbi_set_ret_addr(uregs, (unsigned long)ri->ret_addr);
669 printk("---> %s (%d/%d): trampoline NOT found at "
670 "sp = %08lx, lr = %08lx - %p\n",
671 task->comm, task->tgid, task->pid,
672 (unsigned long)sp, ra, ri->rp->up.kp.addr);
/*
 * Pre-handler for ujprobes (jumper probes): run the optional pre_entry
 * hook to obtain the single-step address, then invoke the user-supplied
 * entry point with the first six argument registers (AAPCS r0-r5), or
 * fall back to arch_ujprobe_return() when no entry is set.
 */
681 int setjmp_upre_handler(struct kprobe *p, struct pt_regs *regs)
683 struct uprobe *up = container_of(p, struct uprobe, kp);
684 struct ujprobe *jp = container_of(up, struct ujprobe, up);
686 kprobe_pre_entry_handler_t pre_entry = (kprobe_pre_entry_handler_t)jp->pre_entry;
687 entry_point_t entry = (entry_point_t)jp->entry;
690 p->ss_addr = (kprobe_opcode_t *)pre_entry(jp->priv_arg, regs);
694 entry(regs->ARM_r0, regs->ARM_r1, regs->ARM_r2,
695 regs->ARM_r3, regs->ARM_r4, regs->ARM_r5);
697 arch_ujprobe_return();
/*
 * Address of the return breakpoint inside the probe's trampoline:
 * byte offset 0x1b in Thumb mode, slot index
 * UPROBES_TRAMP_RET_BREAK_IDX in ARM mode.
 */
703 unsigned long arch_get_trampoline_addr(struct kprobe *p, struct pt_regs *regs)
705 return thumb_mode(regs) ?
706 (unsigned long)(p->ainsn.insn) + 0x1b :
707 (unsigned long)(p->ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
/*
 * Resume at the original return address: set LR, set PC (with the Thumb
 * bit cleared for the actual jump target), and sync the CPSR T bit from
 * bit 0 of the address so the CPU continues in the right state.
 */
710 void arch_set_orig_ret_addr(unsigned long orig_ret_addr, struct pt_regs *regs)
712 regs->ARM_lr = orig_ret_addr;
713 regs->ARM_pc = orig_ret_addr & ~0x1;
715 if (regs->ARM_lr & 0x1)
716 regs->ARM_cpsr |= PSR_T_BIT;
718 regs->ARM_cpsr &= ~PSR_T_BIT;
/*
 * For a 16-bit Thumb instruction the 32-bit breakpoint write clobbered
 * the following halfword: write the saved upper halfword of p->opcode
 * back to addr + 2 and flush the icache for the patched word.
 */
721 static void restore_opcode_for_thumb(struct kprobe *p, struct pt_regs *regs)
723 if (thumb_mode(regs) && !is_thumb2(p->opcode)) {
724 u16 tmp = p->opcode >> 16;
725 write_proc_vm_atomic(current,
726 (unsigned long)((u16*)p->addr + 1), &tmp, 2);
727 flush_insns(p->addr, 4);
/*
 * Select and install the trampoline that matches the current execution
 * mode.  The switch value @sw packs three bits (see comment below); the
 * chosen ARM or Thumb buffer is written into the task's user-space
 * instruction slot (utramp) and recorded in p->ainsn.insn.  If the
 * current mode has no usable trampoline, the probe is disarmed.
 */
731 static int make_trampoline(struct uprobe *up, struct pt_regs *regs)
733 unsigned long *tramp, *utramp;
734 struct kprobe *p = up2kp(up);
738 * 0 bit - thumb mode (0 - arm, 1 - thumb)
739 * 1 bit - arm mode support (0 - off, 1 on)
740 * 2 bit - thumb mode support (0 - off, 1 on)`
742 sw = (!!thumb_mode(regs)) |
743 (int)!p->safe_arm << 1 |
744 (int)!p->safe_thumb << 2;
750 tramp = up->atramp.tramp_arm;
755 restore_opcode_for_thumb(p, regs);
756 tramp = up->atramp.tramp_thumb;
759 printk("Error in %s at %d: we are in arm mode "
760 "(!) and check instruction was fail "
761 "(%0lX instruction at %p address)!\n",
762 __FILE__, __LINE__, p->opcode, p->addr);
/* no usable trampoline for this mode: remove the probe */
764 disarm_uprobe(p, up->task);
769 utramp = up->atramp.utramp;
771 if (!write_proc_vm_atomic(up->task, (unsigned long)utramp, tramp,
772 UPROBES_TRAMP_LEN * sizeof(*tramp)))
773 panic("failed to write memory %p!\n", utramp);
775 p->ainsn.insn = utramp;
/*
 * Breakpoint dispatch for user-space probes.  Look up the faulting PC
 * as a probe for this tgid; if none matches, try to resolve it as a
 * trampoline return break (PC minus the per-mode break offset) and run
 * the uretprobe handler.  For an ordinary probe, lazily build the
 * trampoline on the first hit, then run the pre-handler and start
 * out-of-line single-stepping.
 */
780 static int uprobe_handler(struct pt_regs *regs)
782 kprobe_opcode_t *addr = (kprobe_opcode_t *)(regs->ARM_pc);
783 struct task_struct *task = current;
784 pid_t tgid = task->tgid;
787 p = get_ukprobe(addr, tgid);
789 unsigned long offset_bp = thumb_mode(regs) ?
791 4 * UPROBES_TRAMP_RET_BREAK_IDX;
792 void *tramp_addr = (void *)addr - offset_bp;
794 p = get_ukprobe_by_insn_slot(tramp_addr, tgid, regs);
796 printk("no_uprobe: Not one of ours: let "
797 "kernel handle it %p\n", addr);
801 trampoline_uprobe_handler(p, regs);
/* first hit on this probe: trampoline not built yet */
803 if (p->ainsn.insn == NULL) {
804 struct uprobe *up = kp2up(p);
806 if (make_trampoline(up, regs)) {
807 printk("no_uprobe live\n");
815 if (!p->pre_handler || !p->pre_handler(p, regs)) {
816 prepare_singlestep(p, regs);
/*
 * Undefined-instruction hook entry point: run uprobe_handler() with
 * local IRQs disabled, then re-enable preemption (disabled on a hidden
 * line above) and restore the IRQ state.
 */
823 int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
827 local_irq_save(flags);
830 ret = uprobe_handler(regs);
831 preempt_enable_no_resched();
833 local_irq_restore(flags);
837 /* userspace probes hook (arm) */
/* Matches the full 32-bit ARM breakpoint in user mode only. */
838 static struct undef_hook undef_hook_for_us_arm = {
839 .instr_mask = 0xffffffff,
840 .instr_val = BREAKPOINT_INSTRUCTION,
841 .cpsr_mask = MODE_MASK,
842 .cpsr_val = USR_MODE,
843 .fn = uprobe_trap_handler
846 /* userspace probes hook (thumb) */
/* Matches the low 16 bits of the breakpoint (Thumb halfword), user mode only. */
847 static struct undef_hook undef_hook_for_us_thumb = {
848 .instr_mask = 0xffffffff,
849 .instr_val = BREAKPOINT_INSTRUCTION & 0x0000ffff,
850 .cpsr_mask = MODE_MASK,
851 .cpsr_val = USR_MODE,
852 .fn = uprobe_trap_handler
/*
 * Install both user-space breakpoint hooks (ARM and Thumb) into the
 * undefined-instruction handler chain.
 */
855 int swap_arch_init_uprobes(void)
857 swap_register_undef_hook(&undef_hook_for_us_arm);
858 swap_register_undef_hook(&undef_hook_for_us_thumb);
/*
 * Remove the user-space breakpoint hooks, in reverse order of
 * registration.
 */
863 void swap_arch_exit_uprobes(void)
865 swap_unregister_undef_hook(&undef_hook_for_us_thumb);
866 swap_unregister_undef_hook(&undef_hook_for_us_arm);