2 * uprobe/arch/asm-arm/swap_uprobes.c
3 * @author Alexey Gerenkov <a.gerenkov@samsung.com> User-Space Probes initial
4 * implementation; Support x86/ARM/MIPS for both user and kernel spaces.
5 * @author Ekaterina Gorelkina <e.gorelkina@samsung.com>: redesign module for
6 * separating core and arch parts
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
20 * You should have received a copy of the GNU General Public License
21 * along with this program; if not, write to the Free Software
22 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
26 * Copyright (C) Samsung Electronics, 2006-2010
28 * @section DESCRIPTION
30 * Arch-dependent uprobe interface implementation for ARM.
34 #include <linux/init.h> /* need for asm/traps.h */
35 #include <linux/sched.h> /* need for asm/traps.h */
37 #include <asm/ptrace.h> /* need for asm/traps.h */
38 #include <asm/traps.h>
40 #include <kprobe/swap_slots.h>
41 #include <kprobe/swap_kprobes.h>
42 #include <kprobe/swap_kprobes_deps.h>
43 #include <uprobe/swap_uprobes.h>
45 #include <swap-asm/swap_kprobes.h>
46 #include <swap-asm/trampoline_arm.h>
48 #include "swap_uprobes.h"
49 #include "trampoline_thumb.h"
/**
 * @brief Flushes instructions.
 *
 * Invalidates the icache for [addr, addr + size) after code has been
 * patched, so the CPU does not execute stale instructions.
 */
#define flush_insns(addr, size) \
	flush_icache_range((unsigned long)(addr), \
			   (unsigned long)(addr) + (size))
60 static inline long branch_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
62 long offset = insn & 0x3ff;
63 offset -= insn & 0x400;
64 return (insn_addr + 4 + offset * 2);
67 static inline long branch_cond_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
69 long offset = insn & 0x7f;
70 offset -= insn & 0x80;
71 return (insn_addr + 4 + offset * 2);
/**
 * @brief Computes the destination of a 32-bit Thumb BL/BLX-class branch.
 *
 * @c poff carries the sign-extended upper immediate field (low halfword),
 * @c offset a second immediate field taken from the high halfword.
 */
static inline long branch_t32_dest(kprobe_opcode_t insn, unsigned int insn_addr)
	unsigned int poff = insn & 0x3ff;
	unsigned int offset = (insn & 0x07fe0000) >> 17;

	/* subtracting the sign bit (bit 10) sign-extends the field */
	poff -= (insn & 0x400);

	return ((insn_addr + 4 + (poff << 12) + offset * 4));
	/*
	 * NOTE(review): as shown here the return below is unreachable; the
	 * choice between the raw and the word-aligned (& ~3) target appears
	 * to depend on a condition (BL vs. BLX) elided from this excerpt —
	 * confirm against the full source.
	 */
	return ((insn_addr + 4 + (poff << 12) + offset * 4) & ~3);
87 static inline long cbz_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
89 unsigned int i = (insn & 0x200) >> 3;
90 unsigned int offset = (insn & 0xf8) >> 2;
91 return insn_addr + 4 + i + offset;
94 /* is instruction Thumb2 and NOT a branch, etc... */
95 static int is_thumb2(kprobe_opcode_t insn)
97 return ((insn & 0xf800) == 0xe800 ||
98 (insn & 0xf800) == 0xf000 ||
99 (insn & 0xf800) == 0xf800);
/*
 * Build the ARM-mode trampoline for @up from the original instruction
 * saved at probe-registration time.
 */
static int arch_copy_trampoline_arm_uprobe(struct uprobe *up)
	struct kprobe *p = up2kp(up);
	/* original opcode that the breakpoint replaced */
	unsigned long insn = p->opcode;
	/* user-space address being probed */
	unsigned long vaddr = (unsigned long)p->addr;
	/* per-uprobe buffer the ARM trampoline is encoded into */
	unsigned long *tramp = up->atramp.tramp_arm;

	/* the actual trampoline encoding is shared with the kprobe code */
	ret = arch_make_trampoline_arm(vaddr, insn, tramp);
	/* NOTE(review): declaration and return of `ret` are not visible in
	 * this excerpt — confirm against the full source. */
/*
 * Vet a Thumb/Thumb2 instruction for out-of-line execution: match every
 * form that can change the PC (branches, PC-destination ALU ops,
 * PC-relative loads/stores, LDM) or is otherwise unsafe to execute from
 * the trampoline.
 *
 * NOTE(review): the result bookkeeping (ret) and the body of this if are
 * elided from this excerpt — confirm against the full source.
 */
static int arch_check_insn_thumb(unsigned long insn)
	/* check instructions that can change PC */
	if (THUMB_INSN_MATCH(UNDEF, insn) ||
	    THUMB_INSN_MATCH(SWI, insn) ||
	    THUMB_INSN_MATCH(BREAK, insn) ||
	    THUMB2_INSN_MATCH(B1, insn) ||
	    THUMB2_INSN_MATCH(B2, insn) ||
	    THUMB2_INSN_MATCH(BXJ, insn) ||
	    (THUMB2_INSN_MATCH(ADR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRWL, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    THUMB2_INSN_MATCH(LDMIA, insn) ||
	    THUMB2_INSN_MATCH(LDMDB, insn) ||
	    (THUMB2_INSN_MATCH(DP, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(RORW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(ROR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSLW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSLW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSRW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSRW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    /* skip PC, #-imm12 -> SP, #-imm8 and Tegra-hanging instructions */
	    (THUMB2_INSN_MATCH(STRW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRBW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRHW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    /* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
	    (THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(STRD, insn))) {
/*
 * Patch the PC-dependent execution buffer for a Thumb/Thumb2 instruction
 * that reads the PC: the PC register field (0xf) in the instruction is
 * rewritten to SP (0xd); the surrounding execbuf template apparently
 * arranges for SP to hold the probed instruction's PC value while the
 * rewritten instruction runs — confirm against pc_dep_insn_execbuf_thumb.
 *
 * @insns: execution buffer to patch (viewed as 16-bit halfwords for the
 *         Thumb template slots, and as insns[2] for the 32-bit Thumb2 slot)
 * @insn:  original instruction
 * @uregs: bitmask selecting which field of @insn names the register
 *
 * NOTE(review): several closing braces / else-branches of this function
 * are elided from this excerpt; the layout below mirrors the visible
 * lines only — confirm against the full source.
 */
static int prep_pc_dep_insn_execbuf_thumb(kprobe_opcode_t * insns, kprobe_opcode_t insn, int uregs)
	unsigned char mreg = 0;	/* scratch register index used by the template */
	unsigned char reg = 0;	/* register number extracted from @insn */

	/* --- step 1: extract the affected register, per instruction form --- */
	if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
		reg = ((insn & 0xffff) & uregs) >> 8;
	if (THUMB_INSN_MATCH(MOV3, insn)) {
		if (((((unsigned char) insn) & 0xff) >> 3) == 15) {
			reg = (insn & 0xffff) & uregs;
	if (THUMB2_INSN_MATCH(ADR, insn)) {
		reg = ((insn >> 16) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
	    THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
	    THUMB2_INSN_MATCH(LDRWL, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	// LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
	if (THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
	    THUMB2_INSN_MATCH(LDREX, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	if (THUMB2_INSN_MATCH(DP, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	if (THUMB2_INSN_MATCH(RSBW, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(RORW, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW1, insn) ||
	    THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW1, insn) ||
	    THUMB2_INSN_MATCH(LSRW2, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
	if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
		reg = THUMB2_INSN_REG_RM(insn);
	if ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
	     THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
	     THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn) ||
	     THUMB2_INSN_MATCH(STRHW, insn)) && THUMB2_INSN_REG_RT(insn) == 15) {
		reg = THUMB2_INSN_REG_RT(insn);

	/*
	 * --- step 2: splice mreg / mreg+1 into the template halfwords ---
	 * (the selection of mreg relative to reg is elided in this excerpt)
	 */
	if (reg == 6 || reg == 7) {
	*((unsigned short*)insns + 0) = (*((unsigned short*)insns + 0) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
	*((unsigned short*)insns + 1) = (*((unsigned short*)insns + 1) & 0xf8ff) | (mreg << 8);
	*((unsigned short*)insns + 2) = (*((unsigned short*)insns + 2) & 0xfff8) | (mreg + 1);
	*((unsigned short*)insns + 3) = (*((unsigned short*)insns + 3) & 0xffc7) | (mreg << 3);
	*((unsigned short*)insns + 7) = (*((unsigned short*)insns + 7) & 0xf8ff) | (mreg << 8);
	*((unsigned short*)insns + 8) = (*((unsigned short*)insns + 8) & 0xffc7) | (mreg << 3);
	*((unsigned short*)insns + 9) = (*((unsigned short*)insns + 9) & 0xffc7) | ((mreg + 1) << 3);
	*((unsigned short*)insns + 10) = (*((unsigned short*)insns + 10) & 0x00ff) | (( 1 << mreg) | (1 << (mreg + 1)));

	/* --- step 3: rewrite the instruction itself, PC (0xf) -> SP (0xd) --- */
	if (THUMB_INSN_MATCH(APC, insn)) {
		// ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4
		*((unsigned short*)insns + 4) = ((insn & 0xffff) | 0x800);	// ADD Rd, SP, #immed_8*4
	if (THUMB_INSN_MATCH(LRO3, insn)) {
		// LDR Rd, [PC, #immed_8*4] -> LDR Rd, [SP, #immed_8*4]
		*((unsigned short*)insns + 4) = ((insn & 0xffff) + 0x5000);	// LDR Rd, [SP, #immed_8*4]
	if (THUMB_INSN_MATCH(MOV3, insn)) {
		// MOV Rd, PC -> MOV Rd, SP
		*((unsigned short*)insns + 4) = ((insn & 0xffff) ^ 0x10);	// MOV Rd, SP
	if (THUMB2_INSN_MATCH(ADR, insn)) {
		// ADDW Rd, PC, #imm -> ADDW Rd, SP, #imm
		insns[2] = (insn & 0xfffffff0) | 0x0d;	// ADDW Rd, SP, #imm
	if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRBW, insn) ||
	    THUMB2_INSN_MATCH(LDRHW, insn)) {
		// LDR.W Rt, [PC, #-<imm_12>] -> LDR.W Rt, [SP, #-<imm_8>]
		// !!!!!!!!!!!!!!!!!!!!!!!!
		// !!! imm_12 vs. imm_8 !!!
		// !!!!!!!!!!!!!!!!!!!!!!!!
		insns[2] = (insn & 0xf0fffff0) | 0x0c00000d;	// LDR.W Rt, [SP, #-<imm_8>]
	if (THUMB2_INSN_MATCH(LDRW1, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
	    THUMB2_INSN_MATCH(LDRHW1, insn) || THUMB2_INSN_MATCH(LDRD, insn) ||
	    THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(LDREX, insn)) {
		// LDRx.W Rt, [PC, #+<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
		insns[2] = (insn & 0xfffffff0) | 0xd;	// LDRx.W Rt, [SP, #+<imm_12>]
	if (THUMB2_INSN_MATCH(MUL, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0x000d0000;	// MUL Rd, Rn, SP
	if (THUMB2_INSN_MATCH(DP, insn)) {
		if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0x000d0000;	// DP Rd, Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd;	// DP Rd, PC, Rm
	if (THUMB2_INSN_MATCH(LDRWL, insn)) {
		// LDRx.W Rt, [PC, #<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
		insns[2] = (insn & 0xfffffff0) | 0xd;	// LDRx.W Rt, [SP, #+<imm_12>]
	if (THUMB2_INSN_MATCH(RSBW, insn)) {
		insns[2] = (insn & 0xfffffff0) | 0xd;	// RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const>
	if (THUMB2_INSN_MATCH(RORW, insn) || THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) {
		if ((THUMB2_INSN_REG_RM(insn) == 15) && (THUMB2_INSN_REG_RN(insn) == 15)) {
			insns[2] = (insn & 0xfffdfffd);	// ROR.W Rd, PC, PC
		} else if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0xd0000;	// ROR.W Rd, Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd;	// ROR.W Rd, PC, Rm
	if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0xd0000;	// ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const>
	if (THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0x000d0000;	// STRx.W Rt, [Rn, SP]
	if (THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
	    THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) {
		if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd;	// STRD/T/HT{.W} Rt, [SP, ...]
	/* NOTE(review): the inner RN == 15 check below duplicates the outer one */
	if (THUMB2_INSN_MATCH(STRHW, insn) && (THUMB2_INSN_REG_RN(insn) == 15)) {
		if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xf0fffff0) | 0x0c00000d;	// STRH.W Rt, [SP, #-<imm_8>]
	if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn) ||
	    THUMB2_INSN_MATCH(STRBW, insn) ||
	    THUMB2_INSN_MATCH(STRD, insn) ||
	    THUMB2_INSN_MATCH(STRHT, insn) ||
	    THUMB2_INSN_MATCH(STRT, insn) ||
	    THUMB2_INSN_MATCH(STRHW1, insn) ||
	    THUMB2_INSN_MATCH(STRHW, insn) )) {
		insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;
	if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
		insns[2] = (insn & 0xfffffff0) | 0xd;	// TEQ SP, #<const>
	if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
		if ((THUMB2_INSN_REG_RN(insn) == 15) && (THUMB2_INSN_REG_RM(insn) == 15)) {
			insns[2] = (insn & 0xfffdfffd);	// TEQ/TST PC, PC
		} else if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0xd0000;	// TEQ/TST Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd;	// TEQ/TST PC, Rm
/*
 * Build the Thumb-mode trampoline for @up: vet the original instruction,
 * classify its PC dependence (the uregs mask) and fill
 * up->atramp.tramp_thumb from the matching execbuf template.
 *
 * NOTE(review): declarations (uregs, pc_dep, addr, ...), else-branches
 * and returns are partially elided from this excerpt — confirm against
 * the full source.
 */
static int arch_copy_trampoline_thumb_uprobe(struct uprobe *up)
	struct kprobe *p = up2kp(up);
	unsigned long vaddr = (unsigned long)p->addr;
	unsigned long insn = p->opcode;
	unsigned long *tramp = up->atramp.tramp_thumb;
	enum { tramp_len = sizeof(up->atramp.tramp_thumb) };

	printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);

	if (!arch_check_insn_thumb(insn)) {

	/* --- classify which register field of @insn references the PC --- */
	if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
		uregs = 0x0700;		/* 8-10 */
	} else if (THUMB_INSN_MATCH(MOV3, insn) && (((((unsigned char)insn) & 0xff) >> 3) == 15)) {
	} else if THUMB2_INSN_MATCH(ADR, insn) {
		uregs = 0x0f00;		/* Rd 8-11 */
	} else if (((THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
		     THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
		     THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
		     THUMB2_INSN_MATCH(LDRWL, insn)) && THUMB2_INSN_REG_RN(insn) == 15) ||
		   THUMB2_INSN_MATCH(LDREX, insn) ||
		   ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
		     THUMB2_INSN_MATCH(STRHW, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) &&
		    (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15)) ||
		   ((THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHT, insn)) &&
		    (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15))) {
		uregs = 0xf000;		/* Rt 12-15 */
	} else if ((THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn)) && (THUMB2_INSN_REG_RN(insn) == 15)) {
		uregs = 0xff00;		/* Rt 12-15, Rt2 8-11 */
	} else if (THUMB2_INSN_MATCH(MUL, insn) && THUMB2_INSN_REG_RM(insn) == 15) {
	} else if (THUMB2_INSN_MATCH(DP, insn) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
		uregs = 0xf000;		/* Rd 12-15 */
	} else if (THUMB2_INSN_MATCH(STRD, insn) && ((THUMB2_INSN_REG_RN(insn) == 15) || (THUMB2_INSN_REG_RT(insn) == 15) || THUMB2_INSN_REG_RT2(insn) == 15)) {
		uregs = 0xff00;		/* Rt 12-15, Rt2 8-11 */
	} else if (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) {
		uregs = 0x0f00;		/* Rd 8-11 */
	} else if (THUMB2_INSN_MATCH (RORW, insn) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
	} else if ((THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) && THUMB2_INSN_REG_RM(insn) == 15) {
		uregs = 0x0f00;		/* Rd 8-11 */
	} else if ((THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
		uregs = 0x0f00;		/* Rd 8-11 */
	} else if ((THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) && THUMB2_INSN_REG_RN(insn) == 15) {
		uregs = 0xf0000;	/* Rn 0-3 (16-19) */
	} else if ((THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) &&
		   (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
		uregs = 0xf0000;	/* Rn 0-3 (16-19) */

	/* PC-dependent insn: use the PC-emulation template */
	if (unlikely(uregs && pc_dep)) {
		memcpy(tramp, pc_dep_insn_execbuf_thumb, tramp_len);
		if (prep_pc_dep_insn_execbuf_thumb(tramp, insn, uregs) != 0) {
			printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!",
			       __FILE__, __LINE__, insn);

		/* 0xdeff: trap halfword terminating the slot (presumably the
		 * Thumb breakpoint used to re-enter the handler — confirm) */
		*((unsigned short*)tramp + 13) = 0xdeff;
		*((unsigned short*)tramp + 14) = addr & 0x0000ffff;
		*((unsigned short*)tramp + 15) = addr >> 16;
		if (!is_thumb2(insn)) {
			*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short*)tramp + 17) = addr >> 16;

			*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short*)tramp + 17) = addr >> 16;

		/* PC-independent insn: generic template, insn copied verbatim */
		memcpy(tramp, gen_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		if (!is_thumb2(insn)) {
			*((unsigned short*)tramp + 2) = insn;
			*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short*)tramp + 17) = addr >> 16;

			*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short*)tramp + 17) = addr >> 16;

	/* branches cannot be stepped out of line — use dedicated templates
	 * with a precomputed destination */
	if (THUMB_INSN_MATCH(B2, insn)) {
		memcpy(tramp, b_off_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		addr = branch_t16_dest(insn, vaddr);
		*((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 15) = addr >> 16;
		*((unsigned short*)tramp + 16) = 0;
		*((unsigned short*)tramp + 17) = 0;

	} else if (THUMB_INSN_MATCH(B1, insn)) {
		memcpy(tramp, b_cond_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		/* carry the original condition field (bits 8-11) into the template */
		*((unsigned short*)tramp + 0) |= (insn & 0xf00);
		addr = branch_cond_t16_dest(insn, vaddr);
		*((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 15) = addr >> 16;

		*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 17) = addr >> 16;

	} else if (THUMB_INSN_MATCH(BLX2, insn) ||
		   THUMB_INSN_MATCH(BX, insn)) {
		memcpy(tramp, b_r_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		*((unsigned short*)tramp + 4) = insn;

		*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 17) = addr >> 16;

	} else if (THUMB2_INSN_MATCH(BLX1, insn) ||
		   THUMB2_INSN_MATCH(BL, insn)) {
		memcpy(tramp, blx_off_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		addr = branch_t32_dest(insn, vaddr);
		*((unsigned short*)tramp + 14) = (addr & 0x0000ffff);
		*((unsigned short*)tramp + 15) = addr >> 16;

		*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 17) = addr >> 16;

	} else if (THUMB_INSN_MATCH(CBZ, insn)) {
		memcpy(tramp, cbz_insn_execbuf_thumb, tramp_len);
		*((unsigned short*)tramp + 13) = 0xdeff;
		/* zero out original branch displacement (imm5 = 0; i = 0) */
		*((unsigned short*)tramp + 0) = insn & (~0x2f8);
		/* replace it with 8 bytes offset in execbuf (imm5 = 0b00010) */
		*((unsigned short*)tramp + 0) |= 0x20;
		addr = cbz_t16_dest(insn, vaddr);
		*((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 15) = addr >> 16;

		*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)tramp + 17) = addr >> 16;
/**
 * @brief Prepares uprobe for ARM.
 *
 * Reads the original instruction from the probed task's memory, builds
 * both the ARM and Thumb trampolines and allocates a user-space slot
 * for them.
 *
 * @param up Pointer to the uprobe.
 * @return 0 on success,\n
 * negative error code on error.
 *
 * NOTE(review): the declaration of `insn` and the return paths are
 * elided from this excerpt — confirm against the full source.
 */
int arch_prepare_uprobe(struct uprobe *up)
	struct kprobe *p = up2kp(up);
	struct task_struct *task = up->task;
	unsigned long vaddr = (unsigned long)p->addr;

	printk("Error in %s at %d: attempt to register uprobe "
	       "at an unaligned address\n", __FILE__, __LINE__);

	/* fetch the original instruction from the target process */
	if (!read_proc_vm_atomic(task, vaddr, &insn, sizeof(insn)))
		panic("failed to read memory %lx!\n", vaddr);

	arch_copy_trampoline_arm_uprobe(up);
	arch_copy_trampoline_thumb_uprobe(up);

	/* both builders flagged the instruction unsafe -> cannot probe it */
	if ((p->safe_arm) && (p->safe_thumb)) {
		printk("Error in %s at %d: failed "
		       "arch_copy_trampoline_*_uprobe() (both) "
		       "[tgid=%u, addr=%lx, data=%lx]\n",
		       __FILE__, __LINE__, task->tgid, vaddr, insn);

	up->atramp.utramp = swap_slot_alloc(up->sm);
	if (up->atramp.utramp == NULL) {
		printk("Error: swap_slot_alloc failed (%08lx)\n", vaddr);
594 * @brief Analysis opcodes.
596 * @param rp Pointer to the uretprobe.
599 void arch_opcode_analysis_uretprobe(struct uretprobe *rp)
601 /* Remove retprobe if first insn overwrites lr */
602 rp->thumb_noret = !!(THUMB2_INSN_MATCH(BL, rp->up.kp.opcode) ||
603 THUMB2_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
604 THUMB_INSN_MATCH(BLX2, rp->up.kp.opcode));
606 rp->arm_noret = !!(ARM_INSN_MATCH(BL, rp->up.kp.opcode) ||
607 ARM_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
608 ARM_INSN_MATCH(BLX2, rp->up.kp.opcode));
612 * @brief Prepates uretprobe for ARM.
614 * @param ri Pointer to the uretprobe instance.
615 * @param regs Pointer to CPU register data.
618 void arch_prepare_uretprobe(struct uretprobe_instance *ri,
619 struct pt_regs *regs)
621 ri->ret_addr = (kprobe_opcode_t *)regs->ARM_lr;
622 ri->sp = (kprobe_opcode_t *)regs->ARM_sp;
624 /* Set flag of current mode */
625 ri->sp = (kprobe_opcode_t *)((long)ri->sp | !!thumb_mode(regs));
627 if (thumb_mode(regs)) {
628 regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn) + 0x1b;
630 regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
/**
 * @brief Disarms uretprobe instance.
 *
 * Undoes the return-address hijack made by arch_prepare_uretprobe():
 * scans the top RETPROBE_STACK_DEPTH words of the probed task's stack
 * for the trampoline address and rewrites it with the saved original
 * return address; additionally checks LR itself.
 *
 * @param ri Pointer to the uretprobe instance
 * @param task Pointer to the task for which the uretprobe instance
 * @return 0 on success,\n
 * negative error code on error.
 *
 * NOTE(review): declarations of retval/i, the loop exit, goto targets
 * and returns are partially elided from this excerpt.
 */
int arch_disarm_urp_inst(struct uretprobe_instance *ri,
			 struct task_struct *task)
	struct pt_regs *uregs = task_pt_regs(ri->task);
	unsigned long ra = swap_get_ret_addr(uregs);
	unsigned long *tramp;
	/* bit 0 of the saved sp carries the mode flag - mask it off */
	unsigned long *sp = (unsigned long *)((long)ri->sp & ~1);
	unsigned long *stack = sp - RETPROBE_STACK_DEPTH + 1;
	unsigned long *found = NULL;
	unsigned long *buf[RETPROBE_STACK_DEPTH];

	/* Understand function mode */
	if ((long)ri->sp & 1) {
		/* thumb: ret-break lives at byte offset 0x1b in the slot */
		tramp = (unsigned long *)
			((unsigned long)ri->rp->up.kp.ainsn.insn + 0x1b);
		tramp = (unsigned long *)(ri->rp->up.kp.ainsn.insn +
					  UPROBES_TRAMP_RET_BREAK_IDX);

	/* snapshot the candidate stack region from the target task */
	retval = read_proc_vm_atomic(task, (unsigned long)stack,
	if (retval != sizeof(buf)) {
		printk("---> %s (%d/%d): failed to read stack from %08lx\n",
		       task->comm, task->tgid, task->pid,
		       (unsigned long)stack);

	/* search the stack from the bottom */
	for (i = RETPROBE_STACK_DEPTH - 1; i >= 0; i--) {
		if (buf[i] == tramp) {

	printk("---> %s (%d/%d): trampoline found at "
	       "%08lx (%08lx /%+d) - %p\n",
	       task->comm, task->tgid, task->pid,
	       (unsigned long)found, (unsigned long)sp,
	       found - sp, ri->rp->up.kp.addr);
	/* overwrite the stacked trampoline address with the original one */
	retval = write_proc_vm_atomic(task, (unsigned long)found,
				      sizeof(ri->ret_addr));
	if (retval != sizeof(ri->ret_addr)) {
		printk("---> %s (%d/%d): failed to write value to %08lx",
		       task->comm, task->tgid, task->pid, (unsigned long)found);

check_lr: /* check lr anyway */
	if (ra == (unsigned long)tramp) {
		printk("---> %s (%d/%d): trampoline found at "
		       task->comm, task->tgid, task->pid,
		       ra, ri->rp->up.kp.addr);
		swap_set_ret_addr(uregs, (unsigned long)ri->ret_addr);

	printk("---> %s (%d/%d): trampoline NOT found at "
	       "sp = %08lx, lr = %08lx - %p\n",
	       task->comm, task->tgid, task->pid,
	       (unsigned long)sp, ra, ri->rp->up.kp.addr);
/**
 * @brief Jump pre-handler: dispatches a ujprobe's user-supplied
 * entry handlers when the probe fires.
 *
 * @param p Pointer to the kprobe.
 * @param regs Pointer to CPU register data.
 * @return See full source — the guard conditions and return statement
 * are elided from this excerpt.
 */
int setjmp_upre_handler(struct kprobe *p, struct pt_regs *regs)
	struct uprobe *up = container_of(p, struct uprobe, kp);
	struct ujprobe *jp = container_of(up, struct ujprobe, up);

	/* handlers are stored as opaque pointers on the ujprobe */
	kprobe_pre_entry_handler_t pre_entry = (kprobe_pre_entry_handler_t)jp->pre_entry;
	entry_point_t entry = (entry_point_t)jp->entry;

	/* pre-entry supplies the per-CPU single-step address */
	p->ss_addr[smp_processor_id()] = (kprobe_opcode_t *)
		pre_entry(jp->priv_arg, regs);

	/* call the entry handler with the first six argument registers */
	entry(regs->ARM_r0, regs->ARM_r1, regs->ARM_r2,
	      regs->ARM_r3, regs->ARM_r4, regs->ARM_r5);

	arch_ujprobe_return();
752 * @brief Gets trampoline address.
754 * @param p Pointer to the kprobe.
755 * @param regs Pointer to CPU register data.
756 * @return Trampoline address.
758 unsigned long arch_get_trampoline_addr(struct kprobe *p, struct pt_regs *regs)
760 return thumb_mode(regs) ?
761 (unsigned long)(p->ainsn.insn) + 0x1b :
762 (unsigned long)(p->ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
766 * @brief Restores return address.
768 * @param orig_ret_addr Original return address.
769 * @param regs Pointer to CPU register data.
772 void arch_set_orig_ret_addr(unsigned long orig_ret_addr, struct pt_regs *regs)
774 regs->ARM_lr = orig_ret_addr;
775 regs->ARM_pc = orig_ret_addr & ~0x1;
777 if (regs->ARM_lr & 0x1)
778 regs->ARM_cpsr |= PSR_T_BIT;
780 regs->ARM_cpsr &= ~PSR_T_BIT;
/**
 * @brief Removes uprobe.
 *
 * Releases the user-space trampoline slot allocated in
 * arch_prepare_uprobe().
 *
 * @param up Pointer to the uprobe.
 * @return Void.
 */
void arch_remove_uprobe(struct uprobe *up)
	swap_slot_free(up->sm, up->atramp.utramp);
794 static void restore_opcode_for_thumb(struct kprobe *p, struct pt_regs *regs)
796 if (thumb_mode(regs) && !is_thumb2(p->opcode)) {
797 u16 tmp = p->opcode >> 16;
798 write_proc_vm_atomic(current,
799 (unsigned long)((u16*)p->addr + 1), &tmp, 2);
800 flush_insns(p->addr, 4);
/*
 * Pick the prepared trampoline (ARM or Thumb) matching the mode the
 * probe was hit in, copy it into the task's trampoline slot and point
 * p->ainsn.insn at it.
 *
 * NOTE(review): the declaration of `sw` and the switch over it are
 * elided from this excerpt; only the case bodies are visible below.
 */
static int make_trampoline(struct uprobe *up, struct pt_regs *regs)
	unsigned long *tramp, *utramp;
	struct kprobe *p = up2kp(up);

	/*
	 * 0 bit - thumb mode (0 - arm, 1 - thumb)
	 * 1 bit - arm mode support (0 - off, 1 on)
	 * 2 bit - thumb mode support (0 - off, 1 on)
	 */
	sw = (!!thumb_mode(regs)) |
	     (int)!p->safe_arm << 1 |
	     (int)!p->safe_thumb << 2;

	/* case: ARM mode, ARM trampoline available */
	tramp = up->atramp.tramp_arm;

	/* case: Thumb mode, Thumb trampoline available */
	restore_opcode_for_thumb(p, regs);
	tramp = up->atramp.tramp_thumb;

	/* case: current mode has no usable trampoline */
	printk("Error in %s at %d: we are in arm mode "
	       "(!) and check instruction was fail "
	       "(%0lX instruction at %p address)!\n",
	       __FILE__, __LINE__, p->opcode, p->addr);

	/* failure: remove the breakpoint so the task can run on */
	disarm_uprobe(p, up->task);

	utramp = up->atramp.utramp;

	/* publish the chosen trampoline into the task's address space */
	if (!write_proc_vm_atomic(up->task, (unsigned long)utramp, tramp,
				  UPROBES_TRAMP_LEN * sizeof(*tramp)))
		panic("failed to write memory %p!\n", utramp);
	flush_insns(utramp, UPROBES_TRAMP_LEN * sizeof(*tramp));

	p->ainsn.insn = utramp;
/*
 * Common breakpoint dispatch for user-space probes: resolve the probe
 * for the faulting address (or, failing that, for the trampoline slot
 * the address lies in), lazily build the trampoline on first hit, run
 * the pre-handler and start single-stepping.
 *
 * NOTE(review): declarations (p), else-branches and returns are
 * partially elided from this excerpt.
 */
static int uprobe_handler(struct pt_regs *regs)
	kprobe_opcode_t *addr = (kprobe_opcode_t *)(regs->ARM_pc);
	struct task_struct *task = current;
	pid_t tgid = task->tgid;

	p = get_ukprobe(addr, tgid);
	/* not found by address: maybe we stopped inside a trampoline slot */
	unsigned long offset_bp = thumb_mode(regs) ?
				  4 * UPROBES_TRAMP_RET_BREAK_IDX;
	void *tramp_addr = (void *)addr - offset_bp;

	p = get_ukprobe_by_insn_slot(tramp_addr, tgid, regs);
	printk("no_uprobe: Not one of ours: let "
	       "kernel handle it %p\n", addr);

	/* hit inside a trampoline -> this is a uretprobe return break */
	trampoline_uprobe_handler(p, regs);

	/* first hit in this mode: trampoline not built yet */
	if (p->ainsn.insn == NULL) {
		struct uprobe *up = kp2up(p);

		if (make_trampoline(up, regs)) {
			printk("no_uprobe live\n");

	if (!p->pre_handler || !p->pre_handler(p, regs)) {
		prepare_singlestep(p, regs);
/**
 * @brief Breakpoint instruction handler: entry point invoked by the
 * undef-instruction hooks registered below.
 *
 * Runs uprobe_handler() with IRQs disabled.
 *
 * @param regs Pointer to CPU register data.
 * @param instr Instruction.
 * @return uprobe_handler results.
 *
 * NOTE(review): local declarations (ret, flags), the matching
 * preempt_disable() and the return are elided from this excerpt.
 */
int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
	local_irq_save(flags);

	ret = uprobe_handler(regs);
	preempt_enable_no_resched();

	local_irq_restore(flags);
/* userspace probes hook (arm): match the full 32-bit breakpoint
 * instruction, user mode only */
static struct undef_hook undef_hook_for_us_arm = {
	.instr_mask = 0xffffffff,
	.instr_val = BREAKPOINT_INSTRUCTION,
	.cpsr_mask = MODE_MASK,
	.cpsr_val = USR_MODE,
	.fn = uprobe_trap_handler
/* userspace probes hook (thumb): only the low halfword of the
 * breakpoint is used for the 16-bit Thumb encoding, user mode only */
static struct undef_hook undef_hook_for_us_thumb = {
	.instr_mask = 0xffffffff,
	.instr_val = BREAKPOINT_INSTRUCTION & 0x0000ffff,
	.cpsr_mask = MODE_MASK,
	.cpsr_val = USR_MODE,
	.fn = uprobe_trap_handler
/**
 * @brief Installs breakpoint hooks for both the ARM and Thumb
 * breakpoint encodings.
 *
 * @return 0 (the return statement is not visible in this excerpt).
 */
int swap_arch_init_uprobes(void)
	swap_register_undef_hook(&undef_hook_for_us_arm);
	swap_register_undef_hook(&undef_hook_for_us_thumb);
/**
 * @brief Uninstalls breakpoint hooks, in reverse registration order.
 *
 * @return Void.
 */
void swap_arch_exit_uprobes(void)
	swap_unregister_undef_hook(&undef_hook_for_us_thumb);
	swap_unregister_undef_hook(&undef_hook_for_us_arm);