2 * uprobe/arch/asm-arm/swap_uprobes.c
3 * @author Alexey Gerenkov <a.gerenkov@samsung.com> User-Space Probes initial
4 * implementation; Support x86/ARM/MIPS for both user and kernel spaces.
5 * @author Ekaterina Gorelkina <e.gorelkina@samsung.com>: redesign module for
6 * separating core and arch parts
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
20 * You should have received a copy of the GNU General Public License
21 * along with this program; if not, write to the Free Software
22 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
26 * Copyright (C) Samsung Electronics, 2006-2010
28 * @section DESCRIPTION
30 * Arch-dependent uprobe interface implementation for ARM.
33 #include <kprobe/swap_kprobes.h>
34 #include <kprobe/arch/asm/swap_kprobes.h>
35 #include <kprobe/arch/asm/trampoline_arm.h>
36 #include <asm/traps.h>
37 #include <uprobe/swap_uprobes.h>
38 #include <uprobe/arch/asm/swap_uprobes.h>
39 #include <kprobe/swap_slots.h>
40 #include <kprobe/swap_kprobes_deps.h>
41 #include "trampoline_thumb.h"
44 #include <kprobe/swap_kdebug.h>
* @brief Flushes instructions.
*
* Flushes the instruction cache over [addr, addr + size) so the CPU
* fetches freshly written trampoline/opcode bytes instead of stale
* cache lines.
* NOTE(review): 'addr' is evaluated twice by the macro -- do not pass
* an expression with side effects.
#define flush_insns(addr, size) \
flush_icache_range((unsigned long)(addr), \
(unsigned long)(addr) + (size))
54 static inline long branch_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
56 long offset = insn & 0x3ff;
57 offset -= insn & 0x400;
58 return (insn_addr + 4 + offset * 2);
61 static inline long branch_cond_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
63 long offset = insn & 0x7f;
64 offset -= insn & 0x80;
65 return (insn_addr + 4 + offset * 2);
/*
 * Destination address of a 32-bit Thumb BL/BLX-style branch.
 *
 * poff holds the high immediate (bits 0-9, with bit 10 as the sign,
 * sign-extended by the subtraction below); offset holds the low
 * immediate from bits 17-26.  The target is PC-relative to
 * insn_addr + 4.
 *
 * NOTE(review): a conditional distinguishing the plain return from
 * the 4-byte-aligned return (BL vs. BLX semantics) appears to be
 * elided from this view of the file -- confirm against full source.
 */
static inline long branch_t32_dest(kprobe_opcode_t insn, unsigned int insn_addr)
unsigned int poff = insn & 0x3ff;
unsigned int offset = (insn & 0x07fe0000) >> 17;
/* two's-complement sign extension of the 11-bit high part */
poff -= (insn & 0x400);
return ((insn_addr + 4 + (poff << 12) + offset * 4));
/* aligned variant: force 4-byte alignment of the destination */
return ((insn_addr + 4 + (poff << 12) + offset * 4) & ~3);
81 static inline long cbz_t16_dest(kprobe_opcode_t insn, unsigned int insn_addr)
83 unsigned int i = (insn & 0x200) >> 3;
84 unsigned int offset = (insn & 0xf8) >> 2;
85 return insn_addr + 4 + i + offset;
88 /* is instruction Thumb2 and NOT a branch, etc... */
89 static int is_thumb2(kprobe_opcode_t insn)
91 return ((insn & 0xf800) == 0xe800 ||
92 (insn & 0xf800) == 0xf000 ||
93 (insn & 0xf800) == 0xf800);
/*
 * Build the ARM-mode trampoline image for the probed instruction by
 * delegating to arch_make_trampoline_arm(), which writes the
 * out-of-line copy into up->atramp.tramp_arm.
 *
 * NOTE(review): the declaration of 'ret', the handling of its value
 * (presumably setting p->safe_arm on failure) and the return
 * statement are elided from this view -- code kept byte-identical.
 */
static int arch_copy_trampoline_arm_uprobe(struct uprobe *up)
struct kprobe *p = up2kp(up);
unsigned long insn = p->opcode;
unsigned long vaddr = (unsigned long)p->addr;
unsigned long *tramp = up->atramp.tramp_arm;
ret = arch_make_trampoline_arm(vaddr, insn, tramp);
/*
 * Classify a Thumb/Thumb2 instruction as unsafe for out-of-line
 * single-stepping.  Matches everything that traps (UNDEF/SWI/BREAK),
 * every branch form, every load/ALU operation with PC as destination,
 * the PC-relative stores that hang some SoCs ("Tegra-hanging"), and
 * the LDRD/STRD doubleword forms.
 *
 * NOTE(review): the result variable and return statement fall outside
 * this view; presumably a match here flags the insn as not safely
 * steppable -- confirm against full source.
 */
static int arch_check_insn_thumb(unsigned long insn)
/* check instructions that can change PC */
if (THUMB_INSN_MATCH(UNDEF, insn) ||
THUMB_INSN_MATCH(SWI, insn) ||
THUMB_INSN_MATCH(BREAK, insn) ||
THUMB2_INSN_MATCH(B1, insn) ||
THUMB2_INSN_MATCH(B2, insn) ||
THUMB2_INSN_MATCH(BXJ, insn) ||
/* any operation whose destination register (Rd/Rt) is PC */
(THUMB2_INSN_MATCH(ADR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
(THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
(THUMB2_INSN_MATCH(LDRW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
(THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
(THUMB2_INSN_MATCH(LDRHW1, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
(THUMB2_INSN_MATCH(LDRWL, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
THUMB2_INSN_MATCH(LDMIA, insn) ||
THUMB2_INSN_MATCH(LDMDB, insn) ||
(THUMB2_INSN_MATCH(DP, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
(THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
(THUMB2_INSN_MATCH(RORW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
(THUMB2_INSN_MATCH(ROR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
(THUMB2_INSN_MATCH(LSLW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
(THUMB2_INSN_MATCH(LSLW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
(THUMB2_INSN_MATCH(LSRW1, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
(THUMB2_INSN_MATCH(LSRW2, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
/* skip PC, #-imm12 -> SP, #-imm8 and Tegra-hanging instructions */
(THUMB2_INSN_MATCH(STRW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
(THUMB2_INSN_MATCH(STRBW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
(THUMB2_INSN_MATCH(STRHW1, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
(THUMB2_INSN_MATCH(STRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
(THUMB2_INSN_MATCH(STRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
(THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
(THUMB2_INSN_MATCH(LDRBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
(THUMB2_INSN_MATCH(LDRHW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
/* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
(THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(STRD, insn))) {
/*
 * Patch the pc_dep_insn_execbuf_thumb template for one PC-reading
 * Thumb/Thumb2 instruction.  Such an insn cannot execute out of line
 * as-is because it reads PC; the template stages the original PC
 * value in SP, and the probed insn is rewritten below so every PC
 * (r15) operand becomes SP (r13).
 *
 * @param insns Trampoline buffer, addressed both as 16-bit halfwords
 *              and as 32-bit words (insns[2] holds the Thumb2 insn).
 * @param insn  The original probed instruction.
 * @param uregs Mask selecting which register field of the encoding
 *              carries the register of interest.
 * @return 0 on success -- presumed; the return statements are elided
 *         from this view, code kept byte-identical.
 */
static int prep_pc_dep_insn_execbuf_thumb(kprobe_opcode_t * insns, kprobe_opcode_t insn, int uregs)
unsigned char mreg = 0;
unsigned char reg = 0;
/* Extract the register of interest ('reg') from the field that
 * 'uregs' selects, per instruction class. */
if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
reg = ((insn & 0xffff) & uregs) >> 8;
if (THUMB_INSN_MATCH(MOV3, insn)) {
/* only MOV Rd, PC is of interest (source register == 15) */
if (((((unsigned char) insn) & 0xff) >> 3) == 15) {
reg = (insn & 0xffff) & uregs;
if (THUMB2_INSN_MATCH(ADR, insn)) {
reg = ((insn >> 16) & uregs) >> 8;
if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
THUMB2_INSN_MATCH(LDRWL, insn)) {
reg = ((insn >> 16) & uregs) >> 12;
// LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
if (THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
THUMB2_INSN_MATCH(LDREX, insn)) {
reg = ((insn >> 16) & uregs) >> 12;
if (THUMB2_INSN_MATCH(DP, insn)) {
reg = ((insn >> 16) & uregs) >> 12;
if (THUMB2_INSN_MATCH(RSBW, insn)) {
reg = ((insn >> 12) & uregs) >> 8;
if (THUMB2_INSN_MATCH(RORW, insn)) {
reg = ((insn >> 12) & uregs) >> 8;
if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW1, insn) ||
THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW1, insn) ||
THUMB2_INSN_MATCH(LSRW2, insn)) {
reg = ((insn >> 12) & uregs) >> 8;
if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
reg = THUMB2_INSN_REG_RM(insn);
if ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn) ||
THUMB2_INSN_MATCH(STRHW, insn)) && THUMB2_INSN_REG_RT(insn) == 15) {
reg = THUMB2_INSN_REG_RT(insn);
/* When the probed insn itself uses r6/r7, repatch the template's
 * push/pop/mov halfwords so its scratch register pair (mreg,
 * mreg + 1) does not collide with them.
 * NOTE(review): the assignment of a non-zero 'mreg' is not visible
 * in this view -- confirm against full source. */
if (reg == 6 || reg == 7) {
*((unsigned short*)insns + 0) = (*((unsigned short*)insns + 0) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
*((unsigned short*)insns + 1) = (*((unsigned short*)insns + 1) & 0xf8ff) | (mreg << 8);
*((unsigned short*)insns + 2) = (*((unsigned short*)insns + 2) & 0xfff8) | (mreg + 1);
*((unsigned short*)insns + 3) = (*((unsigned short*)insns + 3) & 0xffc7) | (mreg << 3);
*((unsigned short*)insns + 7) = (*((unsigned short*)insns + 7) & 0xf8ff) | (mreg << 8);
*((unsigned short*)insns + 8) = (*((unsigned short*)insns + 8) & 0xffc7) | (mreg << 3);
*((unsigned short*)insns + 9) = (*((unsigned short*)insns + 9) & 0xffc7) | ((mreg + 1) << 3);
*((unsigned short*)insns + 10) = (*((unsigned short*)insns + 10) & 0x00ff) | (( 1 << mreg) | (1 << (mreg + 1)));
/* Rewrite the probed instruction so it reads SP (r13) wherever the
 * original read PC (r15), and store it into the template slot
 * (halfword 4 for 16-bit insns, word insns[2] for 32-bit). */
if (THUMB_INSN_MATCH(APC, insn)) {
// ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4
*((unsigned short*)insns + 4) = ((insn & 0xffff) | 0x800); // ADD Rd, SP, #immed_8*4
if (THUMB_INSN_MATCH(LRO3, insn)) {
// LDR Rd, [PC, #immed_8*4] -> LDR Rd, [SP, #immed_8*4]
*((unsigned short*)insns + 4) = ((insn & 0xffff) + 0x5000); // LDR Rd, [SP, #immed_8*4]
if (THUMB_INSN_MATCH(MOV3, insn)) {
// MOV Rd, PC -> MOV Rd, SP
*((unsigned short*)insns + 4) = ((insn & 0xffff) ^ 0x10); // MOV Rd, SP
if (THUMB2_INSN_MATCH(ADR, insn)) {
// ADDW Rd, PC, #imm -> ADDW Rd, SP, #imm
insns[2] = (insn & 0xfffffff0) | 0x0d; // ADDW Rd, SP, #imm
if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRBW, insn) ||
THUMB2_INSN_MATCH(LDRHW, insn)) {
// LDR.W Rt, [PC, #-<imm_12>] -> LDR.W Rt, [SP, #-<imm_8>]
// !!!!!!!!!!!!!!!!!!!!!!!!
// !!! imm_12 vs. imm_8 !!!
// !!!!!!!!!!!!!!!!!!!!!!!!
insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // LDR.W Rt, [SP, #-<imm_8>]
if (THUMB2_INSN_MATCH(LDRW1, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
THUMB2_INSN_MATCH(LDRHW1, insn) || THUMB2_INSN_MATCH(LDRD, insn) ||
THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(LDREX, insn)) {
// LDRx.W Rt, [PC, #+<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]
if (THUMB2_INSN_MATCH(MUL, insn)) {
insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // MUL Rd, Rn, SP
if (THUMB2_INSN_MATCH(DP, insn)) {
if (THUMB2_INSN_REG_RM(insn) == 15) {
insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // DP Rd, Rn, PC
} else if (THUMB2_INSN_REG_RN(insn) == 15) {
insns[2] = (insn & 0xfffffff0) | 0xd; // DP Rd, PC, Rm
if (THUMB2_INSN_MATCH(LDRWL, insn)) {
// LDRx.W Rt, [PC, #<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]
if (THUMB2_INSN_MATCH(RSBW, insn)) {
insns[2] = (insn & 0xfffffff0) | 0xd; // RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const>
if (THUMB2_INSN_MATCH(RORW, insn) || THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) {
if ((THUMB2_INSN_REG_RM(insn) == 15) && (THUMB2_INSN_REG_RN(insn) == 15)) {
insns[2] = (insn & 0xfffdfffd); // ROR.W Rd, PC, PC
} else if (THUMB2_INSN_REG_RM(insn) == 15) {
insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR.W Rd, Rn, PC
} else if (THUMB2_INSN_REG_RN(insn) == 15) {
insns[2] = (insn & 0xfffffff0) | 0xd; // ROR.W Rd, PC, Rm
if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) {
insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const>
if (THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn)) {
insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // STRx.W Rt, [Rn, SP]
if (THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) {
if (THUMB2_INSN_REG_RN(insn) == 15) {
insns[2] = (insn & 0xfffffff0) | 0xd; // STRD/T/HT{.W} Rt, [SP, ...]
/* NOTE(review): the inner RN == 15 test below merely repeats the
 * outer condition -- redundant but harmless. */
if (THUMB2_INSN_MATCH(STRHW, insn) && (THUMB2_INSN_REG_RN(insn) == 15)) {
if (THUMB2_INSN_REG_RN(insn) == 15) {
insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // STRH.W Rt, [SP, #-<imm_8>]
/* for stores with Rt == PC, substitute SP into the Rt field too */
if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn) ||
THUMB2_INSN_MATCH(STRBW, insn) ||
THUMB2_INSN_MATCH(STRD, insn) ||
THUMB2_INSN_MATCH(STRHT, insn) ||
THUMB2_INSN_MATCH(STRT, insn) ||
THUMB2_INSN_MATCH(STRHW1, insn) ||
THUMB2_INSN_MATCH(STRHW, insn) )) {
insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;
if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ SP, #<const>
if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
if ((THUMB2_INSN_REG_RN(insn) == 15) && (THUMB2_INSN_REG_RM(insn) == 15)) {
insns[2] = (insn & 0xfffdfffd); // TEQ/TST PC, PC
} else if (THUMB2_INSN_REG_RM(insn) == 15) {
insns[2] = (insn & 0xfff0ffff) | 0xd0000; // TEQ/TST Rn, PC
} else if (THUMB2_INSN_REG_RN(insn) == 15) {
insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ/TST PC, Rm
/*
 * Build the Thumb-mode execution buffer (out-of-line single-step
 * slot) for the probed instruction:
 * - PC-dependent insns get the pc_dep template plus per-insn fixups
 *   (prep_pc_dep_insn_execbuf_thumb); 'uregs' records which register
 *   field of the encoding is involved.
 * - Branch forms (B, Bcond, BX/BLX, BL, CBZ) get dedicated templates
 *   with the computed destination written into halfwords 14-17.
 * - Everything else gets the generic template with the insn copied
 *   in verbatim.
 * Halfword 13 of every template is set to 0xdeff, and return
 * addresses written for Thumb continuation carry bit 0 set (| 0x1).
 *
 * NOTE(review): declarations of 'uregs', 'pc_dep' and 'addr', a
 * number of braces and the return path are elided from this view of
 * the file -- code lines kept byte-identical.
 */
static int arch_copy_trampoline_thumb_uprobe(struct uprobe *up)
struct kprobe *p = up2kp(up);
unsigned long vaddr = (unsigned long)p->addr;
unsigned long insn = p->opcode;
unsigned long *tramp = up->atramp.tramp_thumb;
enum { tramp_len = sizeof(up->atramp.tramp_thumb) };
printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);
/* instructions that cannot be stepped out of line at all */
if (!arch_check_insn_thumb(insn)) {
/* classify PC-dependent insns and pick the register-field mask */
if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
uregs = 0x0700; /* 8-10 */
} else if (THUMB_INSN_MATCH(MOV3, insn) && (((((unsigned char)insn) & 0xff) >> 3) == 15)) {
} else if THUMB2_INSN_MATCH(ADR, insn) {
uregs = 0x0f00; /* Rd 8-11 */
} else if (((THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
THUMB2_INSN_MATCH(LDRWL, insn)) && THUMB2_INSN_REG_RN(insn) == 15) ||
THUMB2_INSN_MATCH(LDREX, insn) ||
((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
THUMB2_INSN_MATCH(STRHW, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) &&
(THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15)) ||
((THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHT, insn)) &&
(THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RT(insn) == 15))) {
uregs = 0xf000; /* Rt 12-15 */
} else if ((THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn)) && (THUMB2_INSN_REG_RN(insn) == 15)) {
uregs = 0xff00; /* Rt 12-15, Rt2 8-11 */
} else if (THUMB2_INSN_MATCH(MUL, insn) && THUMB2_INSN_REG_RM(insn) == 15) {
} else if (THUMB2_INSN_MATCH(DP, insn) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
uregs = 0xf000; /* Rd 12-15 */
} else if (THUMB2_INSN_MATCH(STRD, insn) && ((THUMB2_INSN_REG_RN(insn) == 15) || (THUMB2_INSN_REG_RT(insn) == 15) || THUMB2_INSN_REG_RT2(insn) == 15)) {
uregs = 0xff00; /* Rt 12-15, Rt2 8-11 */
} else if (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RN(insn) == 15) {
uregs = 0x0f00; /* Rd 8-11 */
} else if (THUMB2_INSN_MATCH (RORW, insn) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
} else if ((THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) && THUMB2_INSN_REG_RM(insn) == 15) {
uregs = 0x0f00; /* Rd 8-11 */
} else if ((THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) && (THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
uregs = 0x0f00; /* Rd 8-11 */
} else if ((THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) && THUMB2_INSN_REG_RN(insn) == 15) {
uregs = 0xf0000; /* Rn 0-3 (16-19) */
} else if ((THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) &&
(THUMB2_INSN_REG_RN(insn) == 15 || THUMB2_INSN_REG_RM(insn) == 15)) {
uregs = 0xf0000; /* Rn 0-3 (16-19) */
/* PC-dependent path: copy template, then patch it for this insn */
if (unlikely(uregs && pc_dep)) {
memcpy(tramp, pc_dep_insn_execbuf_thumb, tramp_len);
if (prep_pc_dep_insn_execbuf_thumb(tramp, insn, uregs) != 0) {
printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!",
__FILE__, __LINE__, insn);
*((unsigned short*)tramp + 13) = 0xdeff;
*((unsigned short*)tramp + 14) = addr & 0x0000ffff;
*((unsigned short*)tramp + 15) = addr >> 16;
if (!is_thumb2(insn)) {
*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 17) = addr >> 16;
*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 17) = addr >> 16;
/* generic path: copy insn into the template as-is */
memcpy(tramp, gen_insn_execbuf_thumb, tramp_len);
*((unsigned short*)tramp + 13) = 0xdeff;
if (!is_thumb2(insn)) {
*((unsigned short*)tramp + 2) = insn;
*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 17) = addr >> 16;
*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 17) = addr >> 16;
/* branch forms: dedicated templates with precomputed destination */
if (THUMB_INSN_MATCH(B2, insn)) {
memcpy(tramp, b_off_insn_execbuf_thumb, tramp_len);
*((unsigned short*)tramp + 13) = 0xdeff;
addr = branch_t16_dest(insn, vaddr);
*((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 15) = addr >> 16;
*((unsigned short*)tramp + 16) = 0;
*((unsigned short*)tramp + 17) = 0;
} else if (THUMB_INSN_MATCH(B1, insn)) {
memcpy(tramp, b_cond_insn_execbuf_thumb, tramp_len);
*((unsigned short*)tramp + 13) = 0xdeff;
/* propagate the original condition field into the template */
*((unsigned short*)tramp + 0) |= (insn & 0xf00);
addr = branch_cond_t16_dest(insn, vaddr);
*((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 15) = addr >> 16;
*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 17) = addr >> 16;
} else if (THUMB_INSN_MATCH(BLX2, insn) ||
THUMB_INSN_MATCH(BX, insn)) {
memcpy(tramp, b_r_insn_execbuf_thumb, tramp_len);
*((unsigned short*)tramp + 13) = 0xdeff;
*((unsigned short*)tramp + 4) = insn;
*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 17) = addr >> 16;
} else if (THUMB2_INSN_MATCH(BLX1, insn) ||
THUMB2_INSN_MATCH(BL, insn)) {
memcpy(tramp, blx_off_insn_execbuf_thumb, tramp_len);
*((unsigned short*)tramp + 13) = 0xdeff;
addr = branch_t32_dest(insn, vaddr);
*((unsigned short*)tramp + 14) = (addr & 0x0000ffff);
*((unsigned short*)tramp + 15) = addr >> 16;
*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 17) = addr >> 16;
} else if (THUMB_INSN_MATCH(CBZ, insn)) {
memcpy(tramp, cbz_insn_execbuf_thumb, tramp_len);
*((unsigned short*)tramp + 13) = 0xdeff;
/* zero out original branch displacement (imm5 = 0; i = 0) */
*((unsigned short*)tramp + 0) = insn & (~0x2f8);
/* replace it with 8 bytes offset in execbuf (imm5 = 0b00010) */
*((unsigned short*)tramp + 0) |= 0x20;
addr = cbz_t16_dest(insn, vaddr);
*((unsigned short*)tramp + 14) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 15) = addr >> 16;
*((unsigned short*)tramp + 16) = (addr & 0x0000ffff) | 0x1;
*((unsigned short*)tramp + 17) = addr >> 16;
543 * @brief Prepares uprobe for ARM.
545 * @param up Pointer to the uprobe.
546 * @return 0 on success,\n
547 * negative error code on error.
/*
 * Read the original instruction from the probed task's memory, build
 * both the ARM and the Thumb trampoline images for it, and allocate
 * the user-space slot the chosen image will later be copied into.
 *
 * NOTE(review): the alignment check guarding the first printk, the
 * declaration of 'insn' and the return statements are elided from
 * this view -- code lines kept byte-identical.
 */
int arch_prepare_uprobe(struct uprobe *up)
struct kprobe *p = up2kp(up);
struct task_struct *task = up->task;
unsigned long vaddr = (unsigned long)p->addr;
printk("Error in %s at %d: attempt to register uprobe "
"at an unaligned address\n", __FILE__, __LINE__);
/* fetch the opcode from the target process's address space */
if (!read_proc_vm_atomic(task, vaddr, &insn, sizeof(insn)))
panic("failed to read memory %lx!\n", vaddr);
/* build both mode variants; each sets its own safe_* flag on failure */
arch_copy_trampoline_arm_uprobe(up);
arch_copy_trampoline_thumb_uprobe(up);
/* neither mode produced a usable trampoline */
if ((p->safe_arm) && (p->safe_thumb)) {
printk("Error in %s at %d: failed "
"arch_copy_trampoline_*_uprobe() (both) "
"[tgid=%u, addr=%lx, data=%lx]\n",
__FILE__, __LINE__, task->tgid, vaddr, insn);
/* reserve the user-space slot for the trampoline */
up->atramp.utramp = swap_slot_alloc(up->sm);
if (up->atramp.utramp == NULL) {
printk("Error: swap_slot_alloc failed (%08lx)\n", vaddr);
* @brief Analyzes the probed opcode to configure the uretprobe.
590 * @param rp Pointer to the uretprobe.
593 void arch_opcode_analysis_uretprobe(struct uretprobe *rp)
595 /* Remove retprobe if first insn overwrites lr */
596 rp->thumb_noret = !!(THUMB2_INSN_MATCH(BL, rp->up.kp.opcode) ||
597 THUMB2_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
598 THUMB_INSN_MATCH(BLX2, rp->up.kp.opcode));
600 rp->arm_noret = !!(ARM_INSN_MATCH(BL, rp->up.kp.opcode) ||
601 ARM_INSN_MATCH(BLX1, rp->up.kp.opcode) ||
602 ARM_INSN_MATCH(BLX2, rp->up.kp.opcode));
* @brief Prepares uretprobe for ARM.
608 * @param ri Pointer to the uretprobe instance.
609 * @param regs Pointer to CPU register data.
/*
 * Hijack the return path of the probed function: remember the real
 * return address and SP, then point LR at the trampoline's return
 * breakpoint so the handler fires when the function returns.
 *
 * NOTE(review): the 'else' between the two LR assignments is elided
 * from this view (thumb branch: byte offset 0x1b with bit 0 set for
 * Thumb state; arm branch: word index UPROBES_TRAMP_RET_BREAK_IDX) --
 * code lines kept byte-identical.
 */
void arch_prepare_uretprobe(struct uretprobe_instance *ri,
struct pt_regs *regs)
ri->ret_addr = (kprobe_opcode_t *)regs->ARM_lr;
ri->sp = (kprobe_opcode_t *)regs->ARM_sp;
/* Set flag of current mode */
/* bit 0 of the saved sp encodes thumb(1)/arm(0) */
ri->sp = (kprobe_opcode_t *)((long)ri->sp | !!thumb_mode(regs));
if (thumb_mode(regs)) {
regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn) + 0x1b;
regs->ARM_lr = (unsigned long)(ri->rp->up.kp.ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
629 * @brief Disarms uretprobe instance.
631 * @param ri Pointer to the uretprobe instance
632 * @param task Pointer to the task for which the uretprobe instance
633 * @return 0 on success,\n
634 * negative error code on error.
/*
 * Undo the return-address hijack of one uretprobe instance in a task:
 * find where the trampoline address ended up (on the stack, or still
 * in LR) and write the real return address back there.
 *
 * NOTE(review): the 'else' between the two 'tramp' assignments, the
 * size argument of read_proc_vm_atomic, the source argument of
 * write_proc_vm_atomic, declarations of 'retval'/'i', the 'found'
 * assignment inside the loop and the return statements are elided
 * from this view -- code lines kept byte-identical.
 */
int arch_disarm_urp_inst(struct uretprobe_instance *ri,
struct task_struct *task)
struct pt_regs *uregs = task_pt_regs(ri->task);
unsigned long ra = swap_get_ret_addr(uregs);
unsigned long *tramp;
/* bit 0 of the saved sp is the thumb flag -- mask it off */
unsigned long *sp = (unsigned long *)((long)ri->sp & ~1);
unsigned long *stack = sp - RETPROBE_STACK_DEPTH + 1;
unsigned long *found = NULL;
unsigned long *buf[RETPROBE_STACK_DEPTH];
/* Understand function mode */
if ((long)ri->sp & 1) {
/* thumb: return breakpoint lives at byte offset 0x1b */
tramp = (unsigned long *)
((unsigned long)ri->rp->up.kp.ainsn.insn + 0x1b);
tramp = (unsigned long *)(ri->rp->up.kp.ainsn.insn +
UPROBES_TRAMP_RET_BREAK_IDX);
/* snapshot the top of the task's stack */
retval = read_proc_vm_atomic(task, (unsigned long)stack,
if (retval != sizeof(buf)) {
printk("---> %s (%d/%d): failed to read stack from %08lx\n",
task->comm, task->tgid, task->pid,
(unsigned long)stack);
/* search the stack from the bottom */
for (i = RETPROBE_STACK_DEPTH - 1; i >= 0; i--) {
if (buf[i] == tramp) {
printk("---> %s (%d/%d): trampoline found at "
"%08lx (%08lx /%+d) - %p\n",
task->comm, task->tgid, task->pid,
(unsigned long)found, (unsigned long)sp,
found - sp, ri->rp->up.kp.addr);
/* overwrite the stacked trampoline slot with the real address */
retval = write_proc_vm_atomic(task, (unsigned long)found,
sizeof(ri->ret_addr));
if (retval != sizeof(ri->ret_addr)) {
printk("---> %s (%d/%d): failed to write value to %08lx",
task->comm, task->tgid, task->pid, (unsigned long)found);
check_lr: /* check lr anyway */
if (ra == (unsigned long)tramp) {
printk("---> %s (%d/%d): trampoline found at "
task->comm, task->tgid, task->pid,
ra, ri->rp->up.kp.addr);
/* the hijacked address is still in LR -- restore it there */
swap_set_ret_addr(uregs, (unsigned long)ri->ret_addr);
printk("---> %s (%d/%d): trampoline NOT found at "
"sp = %08lx, lr = %08lx - %p\n",
task->comm, task->tgid, task->pid,
(unsigned long)sp, ra, ri->rp->up.kp.addr);
716 * @brief Jump pre-handler.
718 * @param p Pointer to the kprobe.
719 * @param regs Pointer to CPU register data.
/*
 * Pre-handler for jumper probes (ujprobes): optionally runs the
 * probe's pre_entry hook (whose result becomes the single-step
 * resume address for this CPU), then invokes the user-supplied entry
 * callback with the first six argument registers, and finally asks
 * the core to return control via arch_ujprobe_return().
 *
 * NOTE(review): the guards around the pre_entry/entry calls and the
 * return statement are elided from this view -- code lines kept
 * byte-identical.
 */
int setjmp_upre_handler(struct kprobe *p, struct pt_regs *regs)
struct uprobe *up = container_of(p, struct uprobe, kp);
struct ujprobe *jp = container_of(up, struct ujprobe, up);
kprobe_pre_entry_handler_t pre_entry = (kprobe_pre_entry_handler_t)jp->pre_entry;
entry_point_t entry = (entry_point_t)jp->entry;
p->ss_addr[smp_processor_id()] = (kprobe_opcode_t *)
pre_entry(jp->priv_arg, regs);
/* forward r0-r5 as the callback's arguments (AAPCS + extras) */
entry(regs->ARM_r0, regs->ARM_r1, regs->ARM_r2,
regs->ARM_r3, regs->ARM_r4, regs->ARM_r5);
arch_ujprobe_return();
746 * @brief Gets trampoline address.
748 * @param p Pointer to the kprobe.
749 * @param regs Pointer to CPU register data.
750 * @return Trampoline address.
752 unsigned long arch_get_trampoline_addr(struct kprobe *p, struct pt_regs *regs)
754 return thumb_mode(regs) ?
755 (unsigned long)(p->ainsn.insn) + 0x1b :
756 (unsigned long)(p->ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
760 * @brief Restores return address.
762 * @param orig_ret_addr Original return address.
763 * @param regs Pointer to CPU register data.
766 void arch_set_orig_ret_addr(unsigned long orig_ret_addr, struct pt_regs *regs)
768 regs->ARM_lr = orig_ret_addr;
769 regs->ARM_pc = orig_ret_addr & ~0x1;
771 if (regs->ARM_lr & 0x1)
772 regs->ARM_cpsr |= PSR_T_BIT;
774 regs->ARM_cpsr &= ~PSR_T_BIT;
778 * @brief Removes uprobe.
780 * @param up Pointer to the uprobe.
/*
 * Release the user-space instruction slot that was allocated for this
 * uprobe's trampoline (see arch_prepare_uprobe()).
 */
void arch_remove_uprobe(struct uprobe *up)
swap_slot_free(up->sm, up->atramp.utramp);
788 static void restore_opcode_for_thumb(struct kprobe *p, struct pt_regs *regs)
790 if (thumb_mode(regs) && !is_thumb2(p->opcode)) {
791 u16 tmp = p->opcode >> 16;
792 write_proc_vm_atomic(current,
793 (unsigned long)((u16*)p->addr + 1), &tmp, 2);
794 flush_insns(p->addr, 4);
/*
 * Lazily install the trampoline for a probe at its first hit: select
 * the ARM or Thumb image according to the current execution mode and
 * which images were successfully built, copy it into the task's slot
 * and flush the icache, then publish it via p->ainsn.insn.
 *
 * NOTE(review): the switch/case over 'sw' (and the declaration of
 * 'sw') is elided from this view -- code lines kept byte-identical.
 */
static int make_trampoline(struct uprobe *up, struct pt_regs *regs)
unsigned long *tramp, *utramp;
struct kprobe *p = up2kp(up);
* 0 bit - thumb mode (0 - arm, 1 - thumb)
* 1 bit - arm mode support (0 - off, 1 on)
* 2 bit - thumb mode support (0 - off, 1 on)
sw = (!!thumb_mode(regs)) |
(int)!p->safe_arm << 1 |
(int)!p->safe_thumb << 2;
/* ARM mode with a valid ARM image */
tramp = up->atramp.tramp_arm;
/* Thumb mode with a valid Thumb image */
restore_opcode_for_thumb(p, regs);
tramp = up->atramp.tramp_thumb;
/* no usable image for the current mode */
printk("Error in %s at %d: we are in arm mode "
"(!) and check instruction was fail "
"(%0lX instruction at %p address)!\n",
__FILE__, __LINE__, p->opcode, p->addr);
disarm_uprobe(p, up->task);
utramp = up->atramp.utramp;
/* copy the chosen image into the task's slot and flush icache */
if (!write_proc_vm_atomic(up->task, (unsigned long)utramp, tramp,
UPROBES_TRAMP_LEN * sizeof(*tramp)))
panic("failed to write memory %p!\n", utramp);
flush_insns(utramp, UPROBES_TRAMP_LEN * sizeof(*tramp));
p->ainsn.insn = utramp;
/*
 * Entry point for a user-space breakpoint hit: look up the probe by
 * the faulting PC (or, failing that, by trampoline slot for the
 * return breakpoint), install the trampoline on first hit, run the
 * pre-handler and start single-stepping.
 *
 * NOTE(review): the declaration of 'p', several if/else branches and
 * the return statements are elided from this view -- code lines kept
 * byte-identical.
 */
static int uprobe_handler(struct pt_regs *regs)
kprobe_opcode_t *addr = (kprobe_opcode_t *)(regs->ARM_pc);
struct task_struct *task = current;
pid_t tgid = task->tgid;
p = get_ukprobe(addr, tgid);
/* not at a probed address -- maybe it's a return-trampoline break */
unsigned long offset_bp = thumb_mode(regs) ?
4 * UPROBES_TRAMP_RET_BREAK_IDX;
void *tramp_addr = (void *)addr - offset_bp;
p = get_ukprobe_by_insn_slot(tramp_addr, tgid, regs);
printk("no_uprobe: Not one of ours: let "
"kernel handle it %p\n", addr);
trampoline_uprobe_handler(p, regs);
/* first hit: trampoline not yet built for this probe */
if (p->ainsn.insn == NULL) {
struct uprobe *up = kp2up(p);
if (make_trampoline(up, regs)) {
printk("no_uprobe live\n");
/* run the probe handler, then step over the original insn */
if (!p->pre_handler || !p->pre_handler(p, regs)) {
prepare_singlestep(p, regs);
892 * @brief Breakpoint instruction handler.
894 * @param regs Pointer to CPU register data.
895 * @param instr Instruction.
896 * @return uprobe_handler results.
/*
 * Undefined-instruction hook callback: dispatch the breakpoint to
 * uprobe_handler() with interrupts disabled.
 *
 * NOTE(review): declarations of 'ret'/'flags', the matching
 * preempt_disable() and the return statement are elided from this
 * view -- code lines kept byte-identical.
 */
int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
local_irq_save(flags);
ret = uprobe_handler(regs);
/* pairs with the (elided) preempt_disable on entry */
preempt_enable_no_resched();
local_irq_restore(flags);
/* userspace probes hook (arm) */
/* Matches the full 32-bit ARM breakpoint opcode, user mode only. */
static struct undef_hook undef_hook_for_us_arm = {
.instr_mask = 0xffffffff,
.instr_val = BREAKPOINT_INSTRUCTION,
.cpsr_mask = MODE_MASK,
.cpsr_val = USR_MODE,
.fn = uprobe_trap_handler
/* userspace probes hook (thumb) */
/* Matches the low halfword of the breakpoint opcode, user mode only. */
static struct undef_hook undef_hook_for_us_thumb = {
.instr_mask = 0xffffffff,
.instr_val = BREAKPOINT_INSTRUCTION & 0x0000ffff,
.cpsr_mask = MODE_MASK,
.cpsr_val = USR_MODE,
.fn = uprobe_trap_handler
931 * @brief Installs breakpoint hooks.
/*
 * Register both user-space breakpoint hooks (ARM first, then Thumb)
 * with the kernel's undefined-instruction handler.
 * NOTE(review): the return statement is elided from this view.
 */
int swap_arch_init_uprobes(void)
swap_register_undef_hook(&undef_hook_for_us_arm);
swap_register_undef_hook(&undef_hook_for_us_thumb);
944 * @brief Uninstalls breakpoint hooks.
948 void swap_arch_exit_uprobes(void)
950 swap_unregister_undef_hook(&undef_hook_for_us_thumb);
951 swap_unregister_undef_hook(&undef_hook_for_us_arm);