2 * uprobe/arch/asm-arm/swap_uprobes.c
3 * @author Alexey Gerenkov <a.gerenkov@samsung.com> User-Space Probes initial
4 * implementation; Support x86/ARM/MIPS for both user and kernel spaces.
5 * @author Ekaterina Gorelkina <e.gorelkina@samsung.com>: redesign module for
6 * separating core and arch parts
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
20 * You should have received a copy of the GNU General Public License
21 * along with this program; if not, write to the Free Software
22 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
26 * Copyright (C) Samsung Electronics, 2006-2010
28 * @section DESCRIPTION
30 * Arch-dependent uprobe interface implementation for ARM.
34 #include <linux/init.h> /* need for asm/traps.h */
35 #include <linux/sched.h> /* need for asm/traps.h */
37 #include <linux/ptrace.h> /* need for asm/traps.h */
38 #include <asm/traps.h>
40 #include <kprobe/swap_slots.h>
41 #include <kprobe/swap_kprobes.h>
42 #include <kprobe/swap_kprobes_deps.h>
43 #include <uprobe/swap_uprobes.h>
45 #include <swap-asm/swap_kprobes.h>
46 #include <swap-asm/trampoline_arm.h>
48 #include "swap_uprobes.h"
49 #include "trampoline_thumb.h"
/* Userspace breakpoint opcodes: full word for ARM state, low halfword
 * of the same encoding for Thumb state. */
#define UBP_ARM (BREAKPOINT_INSTRUCTION)
#define UBP_THUMB (BREAKPOINT_INSTRUCTION & 0xffff)
/**
 * @brief Flushes instructions.
 *
 * Synchronizes the I-cache with the D-cache over [addr, addr + size) so
 * freshly written trampoline/breakpoint bytes are visible to instruction
 * fetch.
 */
#define flush_insns(addr, size) \
	flush_icache_range((unsigned long)(addr), \
			   (unsigned long)(addr) + (size))
63 static inline long branch_t16_dest(uprobe_opcode_t insn, unsigned int insn_addr)
65 long offset = insn & 0x3ff;
66 offset -= insn & 0x400;
67 return insn_addr + 4 + offset * 2;
70 static inline long branch_cond_t16_dest(uprobe_opcode_t insn,
71 unsigned int insn_addr)
73 long offset = insn & 0x7f;
74 offset -= insn & 0x80;
75 return insn_addr + 4 + offset * 2;
/*
 * Decode the destination of a 32-bit Thumb BL/BLX.
 *
 * NOTE(review): the lines selecting between the two return paths are not
 * visible in this chunk — presumably a BL vs. BLX check, where the second
 * path (& ~3) word-aligns the target as required for BLX to ARM state.
 * TODO confirm against the full source.
 */
static inline long branch_t32_dest(uprobe_opcode_t insn, unsigned int insn_addr)
	unsigned int poff = insn & 0x3ff;
	unsigned int offset = (insn & 0x07fe0000) >> 17;

	/* sign-extend the upper 10-bit field via its sign bit (bit 10) */
	poff -= (insn & 0x400);

	return insn_addr + 4 + (poff << 12) + offset * 4;

	/* word-aligned variant (BLX exchanges to ARM state) */
	return (insn_addr + 4 + (poff << 12) + offset * 4) & ~3;
91 static inline long cbz_t16_dest(uprobe_opcode_t insn, unsigned int insn_addr)
93 unsigned int i = (insn & 0x200) >> 3;
94 unsigned int offset = (insn & 0xf8) >> 2;
95 return insn_addr + 4 + i + offset;
98 /* is instruction Thumb2 and NOT a branch, etc... */
99 static int is_thumb2(uprobe_opcode_t insn)
101 return ((insn & 0xf800) == 0xe800 ||
102 (insn & 0xf800) == 0xf000 ||
103 (insn & 0xf800) == 0xf800);
/*
 * arch_check_insn_thumb - reject Thumb/Thumb2 instructions that cannot be
 * safely executed out-of-line from the trampoline: anything that reads or
 * writes the PC (branches, exception generators, loads/data-processing
 * with Rd/Rt == PC) plus some PC-relative store forms noted below.
 *
 * NOTE(review): the function's braces, the 'ret' handling inside the taken
 * branch and the return statements are not visible in this chunk.
 */
static int arch_check_insn_thumb(unsigned long insn)

	/* check instructions that can change PC */
	if (THUMB_INSN_MATCH(UNDEF, insn) ||
	    THUMB_INSN_MATCH(SWI, insn) ||
	    THUMB_INSN_MATCH(BREAK, insn) ||
	    THUMB2_INSN_MATCH(B1, insn) ||
	    THUMB2_INSN_MATCH(B2, insn) ||
	    THUMB2_INSN_MATCH(BXJ, insn) ||
	    (THUMB2_INSN_MATCH(ADR, insn) &&
	     THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW1, insn) &&
	     THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW, insn) &&
	     THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW1, insn) &&
	     THUMB2_INSN_REG_RT(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRWL, insn) &&
	     THUMB2_INSN_REG_RT(insn) == 15) ||
	    THUMB2_INSN_MATCH(LDMIA, insn) ||
	    THUMB2_INSN_MATCH(LDMDB, insn) ||
	    (THUMB2_INSN_MATCH(DP, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(RSBW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(RORW, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(ROR, insn) && THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSLW1, insn) &&
	     THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSLW2, insn) &&
	     THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSRW1, insn) &&
	     THUMB2_INSN_REG_RD(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LSRW2, insn) &&
	     THUMB2_INSN_REG_RD(insn) == 15) ||
	    /* skip PC, #-imm12 -> SP, #-imm8 and Tegra-hanging instructions */
	    (THUMB2_INSN_MATCH(STRW1, insn) &&
	     THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRBW1, insn) &&
	     THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRHW1, insn) &&
	     THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(STRHW, insn) &&
	     THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW, insn) && THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRBW, insn) &&
	     THUMB2_INSN_REG_RN(insn) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW, insn) &&
	     THUMB2_INSN_REG_RN(insn) == 15) ||
	    /* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
	    (THUMB2_INSN_MATCH(LDRD, insn) || THUMB2_INSN_MATCH(LDRD1, insn) ||
	     THUMB2_INSN_MATCH(STRD, insn))) {
/*
 * prep_pc_dep_insn_execbuf_thumb - patch a PC-dependent Thumb/Thumb2
 * instruction into the out-of-line execution buffer.
 *
 * The probed instruction reads PC, which would be wrong when executed from
 * the trampoline.  The execbuf template first places the original PC value
 * in SP (saving/restoring the real SP around it); the instruction is then
 * rewritten below so its PC operand becomes SP (register number 13 = 0xd).
 *
 * @param insns execution buffer to patch (addressed as halfwords/words)
 * @param insn  the original probed instruction
 * @param uregs register-field mask chosen by arch_make_trampoline_thumb()
 *
 * NOTE(review): several lines are not visible in this chunk — the braces,
 * the computation of 'mreg' from 'reg' in the reg==6/7 case, and the return
 * statement(s).  The code below is reproduced as-is from the visible lines.
 */
static int prep_pc_dep_insn_execbuf_thumb(uprobe_opcode_t *insns,
					  uprobe_opcode_t insn, int uregs)
	unsigned char mreg = 0;
	unsigned char reg = 0;

	/* Extract the register number selected by the 'uregs' field mask. */
	if (THUMB_INSN_MATCH(APC, insn) ||
	    THUMB_INSN_MATCH(LRO3, insn)) {
		reg = ((insn & 0xffff) & uregs) >> 8;
	} else if (THUMB_INSN_MATCH(MOV3, insn)) {
		if (((((unsigned char)insn) & 0xff) >> 3) == 15)
			reg = (insn & 0xffff) & uregs;
	} else if (THUMB2_INSN_MATCH(ADR, insn)) {
		reg = ((insn >> 16) & uregs) >> 8;
	} else if (THUMB2_INSN_MATCH(LDRW, insn) ||
		   THUMB2_INSN_MATCH(LDRW1, insn) ||
		   THUMB2_INSN_MATCH(LDRHW, insn) ||
		   THUMB2_INSN_MATCH(LDRHW1, insn) ||
		   THUMB2_INSN_MATCH(LDRWL, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	/*
	 * LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
	 */
	} else if (THUMB2_INSN_MATCH(LDRBW, insn) ||
		   THUMB2_INSN_MATCH(LDRBW1, insn) ||
		   THUMB2_INSN_MATCH(LDREX, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	} else if (THUMB2_INSN_MATCH(DP, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	} else if (THUMB2_INSN_MATCH(RSBW, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	} else if (THUMB2_INSN_MATCH(RORW, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	} else if (THUMB2_INSN_MATCH(ROR, insn) ||
		   THUMB2_INSN_MATCH(LSLW1, insn) ||
		   THUMB2_INSN_MATCH(LSLW2, insn) ||
		   THUMB2_INSN_MATCH(LSRW1, insn) ||
		   THUMB2_INSN_MATCH(LSRW2, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	} else if (THUMB2_INSN_MATCH(TEQ1, insn) ||
		   THUMB2_INSN_MATCH(TST1, insn)) {
	} else if (THUMB2_INSN_MATCH(TEQ2, insn) ||
		   THUMB2_INSN_MATCH(TST2, insn)) {
		reg = THUMB2_INSN_REG_RM(insn);

	/* Stores with Rt == PC store the PC value itself. */
	if ((THUMB2_INSN_MATCH(STRW, insn) ||
	     THUMB2_INSN_MATCH(STRBW, insn) ||
	     THUMB2_INSN_MATCH(STRD, insn) ||
	     THUMB2_INSN_MATCH(STRHT, insn) ||
	     THUMB2_INSN_MATCH(STRT, insn) ||
	     THUMB2_INSN_MATCH(STRHW1, insn) ||
	     THUMB2_INSN_MATCH(STRHW, insn)) &&
	    THUMB2_INSN_REG_RT(insn) == 15) {
		reg = THUMB2_INSN_REG_RT(insn);

	/* Re-target the template's scratch register pair when the probed
	 * instruction itself uses r6/r7 (mreg selection not visible here). */
	if (reg == 6 || reg == 7) {
		*((unsigned short *)insns + 0) =
			(*((unsigned short *)insns + 0) & 0x00ff) |
			((1 << mreg) | (1 << (mreg + 1)));
		*((unsigned short *)insns + 1) =
			(*((unsigned short *)insns + 1) & 0xf8ff) | (mreg << 8);
		*((unsigned short *)insns + 2) =
			(*((unsigned short *)insns + 2) & 0xfff8) | (mreg + 1);
		*((unsigned short *)insns + 3) =
			(*((unsigned short *)insns + 3) & 0xffc7) | (mreg << 3);
		*((unsigned short *)insns + 7) =
			(*((unsigned short *)insns + 7) & 0xf8ff) | (mreg << 8);
		*((unsigned short *)insns + 8) =
			(*((unsigned short *)insns + 8) & 0xffc7) | (mreg << 3);
		*((unsigned short *)insns + 9) =
			(*((unsigned short *)insns + 9) & 0xffc7) |
		*((unsigned short *)insns + 10) =
			(*((unsigned short *)insns + 10) & 0x00ff) |
			((1 << mreg) | (1 << (mreg + 1)));

	/* Rewrite the instruction so its PC operand reads SP (0xd) instead. */
	if (THUMB_INSN_MATCH(APC, insn)) {
		/* ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4 */
		*((unsigned short *)insns + 4) = ((insn & 0xffff) | 0x800);
	} else if (THUMB_INSN_MATCH(LRO3, insn)) {
		/* LDR Rd, [PC, #immed_8*4] ->
		 * LDR Rd, [SP, #immed_8*4] */
		*((unsigned short *)insns + 4) =
			((insn & 0xffff) + 0x5000);
	} else if (THUMB_INSN_MATCH(MOV3, insn)) {
		/* MOV Rd, PC -> MOV Rd, SP */
		*((unsigned short *)insns + 4) =
			((insn & 0xffff) ^ 0x10);
	} else if (THUMB2_INSN_MATCH(ADR, insn)) {
		/* ADDW Rd,PC,#imm -> ADDW Rd,SP,#imm */
		insns[2] = (insn & 0xfffffff0) | 0x0d;
	} else if (THUMB2_INSN_MATCH(LDRW, insn) ||
		   THUMB2_INSN_MATCH(LDRBW, insn) ||
		   THUMB2_INSN_MATCH(LDRHW, insn)) {
		/* LDR.W Rt, [PC, #-<imm_12>] ->
		 * LDR.W Rt, [SP, #-<imm_8>]
		 * !!!!!!!!!!!!!!!!!!!!!!!!
		 * !!! imm_12 vs. imm_8 !!!
		 * !!!!!!!!!!!!!!!!!!!!!!!! */
		insns[2] = (insn & 0xf0fffff0) | 0x0c00000d;
	} else if (THUMB2_INSN_MATCH(LDRW1, insn) ||
		   THUMB2_INSN_MATCH(LDRBW1, insn) ||
		   THUMB2_INSN_MATCH(LDRHW1, insn) ||
		   THUMB2_INSN_MATCH(LDRD, insn) ||
		   THUMB2_INSN_MATCH(LDRD1, insn) ||
		   THUMB2_INSN_MATCH(LDREX, insn)) {
		/* LDRx.W Rt, [PC, #+<imm_12>] ->
		 * LDRx.W Rt, [SP, #+<imm_12>]
		 * (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>] */
		insns[2] = (insn & 0xfffffff0) | 0xd;
	} else if (THUMB2_INSN_MATCH(MUL, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0x000d0000;
	} else if (THUMB2_INSN_MATCH(DP, insn)) {
		if (THUMB2_INSN_REG_RM(insn) == 15)
			insns[2] = (insn & 0xfff0ffff) | 0x000d0000;
		else if (THUMB2_INSN_REG_RN(insn) == 15)
			insns[2] = (insn & 0xfffffff0) | 0xd;
	} else if (THUMB2_INSN_MATCH(LDRWL, insn)) {
		/* LDRx.W Rt, [PC, #<imm_12>] ->
		 * LDRx.W Rt, [SP, #+<imm_12>]
		 * (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>] */
		insns[2] = (insn & 0xfffffff0) | 0xd;
	} else if (THUMB2_INSN_MATCH(RSBW, insn)) {
		/* RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const> */
		insns[2] = (insn & 0xfffffff0) | 0xd;
	} else if (THUMB2_INSN_MATCH(RORW, insn) ||
		   THUMB2_INSN_MATCH(LSLW1, insn) ||
		   THUMB2_INSN_MATCH(LSRW1, insn)) {
		if ((THUMB2_INSN_REG_RM(insn) == 15) &&
		    (THUMB2_INSN_REG_RN(insn) == 15))
			/* ROR.W Rd, PC, PC */
			insns[2] = (insn & 0xfffdfffd);
		else if (THUMB2_INSN_REG_RM(insn) == 15)
			/* ROR.W Rd, Rn, PC */
			insns[2] = (insn & 0xfff0ffff) | 0xd0000;
		else if (THUMB2_INSN_REG_RN(insn) == 15)
			/* ROR.W Rd, PC, Rm */
			insns[2] = (insn & 0xfffffff0) | 0xd;
	} else if (THUMB2_INSN_MATCH(ROR, insn) ||
		   THUMB2_INSN_MATCH(LSLW2, insn) ||
		   THUMB2_INSN_MATCH(LSRW2, insn)) {
		/* ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const> */
		insns[2] = (insn & 0xfff0ffff) | 0xd0000;

	if (THUMB2_INSN_MATCH(STRW, insn) ||
	    THUMB2_INSN_MATCH(STRBW, insn)) {
		/* STRx.W Rt, [Rn, SP] */
		insns[2] = (insn & 0xfff0ffff) | 0x000d0000;
	} else if (THUMB2_INSN_MATCH(STRD, insn) ||
		   THUMB2_INSN_MATCH(STRHT, insn) ||
		   THUMB2_INSN_MATCH(STRT, insn) ||
		   THUMB2_INSN_MATCH(STRHW1, insn)) {
		if (THUMB2_INSN_REG_RN(insn) == 15)
			/* STRD/T/HT{.W} Rt, [SP, ...] */
			insns[2] = (insn & 0xfffffff0) | 0xd;
	} else if (THUMB2_INSN_MATCH(STRHW, insn) &&
		   (THUMB2_INSN_REG_RN(insn) == 15)) {
		if (THUMB2_INSN_REG_RN(insn) == 15)
			/* STRH.W Rt, [SP, #-<imm_8>] */
			insns[2] = (insn & 0xf0fffff0) | 0x0c00000d;

	/* Stores of the PC value: the source register becomes SP too. */
	if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn) ||
			    THUMB2_INSN_MATCH(STRBW, insn) ||
			    THUMB2_INSN_MATCH(STRD, insn) ||
			    THUMB2_INSN_MATCH(STRHT, insn) ||
			    THUMB2_INSN_MATCH(STRT, insn) ||
			    THUMB2_INSN_MATCH(STRHW1, insn) ||
			    THUMB2_INSN_MATCH(STRHW, insn))) {
		insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;

	if (THUMB2_INSN_MATCH(TEQ1, insn) ||
	    THUMB2_INSN_MATCH(TST1, insn)) {
		/* TEQ SP, #<const> */
		insns[2] = (insn & 0xfffffff0) | 0xd;
	} else if (THUMB2_INSN_MATCH(TEQ2, insn) ||
		   THUMB2_INSN_MATCH(TST2, insn)) {
		if ((THUMB2_INSN_REG_RN(insn) == 15) &&
		    (THUMB2_INSN_REG_RM(insn) == 15))
			insns[2] = (insn & 0xfffdfffd);
		else if (THUMB2_INSN_REG_RM(insn) == 15)
			insns[2] = (insn & 0xfff0ffff) | 0xd0000;
		else if (THUMB2_INSN_REG_RN(insn) == 15)
			insns[2] = (insn & 0xfffffff0) | 0xd;
/*
 * arch_make_trampoline_thumb - build the out-of-line execution buffer
 * ("trampoline") for a probed Thumb/Thumb2 instruction.
 *
 * Rejects unsupported instructions via arch_check_insn_thumb(), classifies
 * PC-dependent instructions (choosing the register-field mask 'uregs'),
 * then copies the matching execbuf template and patches in the original
 * instruction, the 0xdeff break markers, and the return addresses
 * (| 0x1 keeps Thumb state when returning).
 *
 * NOTE(review): local declarations (ret, uregs, pc_dep, addr), several
 * braces, 'else' lines and the return statements are not visible in this
 * chunk; the code is reproduced as-is from the visible lines.
 */
static int arch_make_trampoline_thumb(unsigned long vaddr, unsigned long insn,
				      unsigned long *tramp, size_t tramp_len)
	ret = arch_check_insn_thumb(insn);
		pr_err("THUMB inst isn't support vaddr=%lx insn=%08lx\n",

	/* Classify PC-dependent instructions and pick the uregs mask. */
	if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
		uregs = 0x0700;		/* 8-10 */
	} else if (THUMB_INSN_MATCH(MOV3, insn) &&
		   (((((unsigned char)insn) & 0xff) >> 3) == 15)) {
	} else if THUMB2_INSN_MATCH(ADR, insn) {
		uregs = 0x0f00;		/* Rd 8-11 */
	} else if (((THUMB2_INSN_MATCH(LDRW, insn) ||
		     THUMB2_INSN_MATCH(LDRW1, insn) ||
		     THUMB2_INSN_MATCH(LDRBW, insn) ||
		     THUMB2_INSN_MATCH(LDRBW1, insn) ||
		     THUMB2_INSN_MATCH(LDRHW, insn) ||
		     THUMB2_INSN_MATCH(LDRHW1, insn) ||
		     THUMB2_INSN_MATCH(LDRWL, insn)) &&
		    THUMB2_INSN_REG_RN(insn) == 15) ||
		   THUMB2_INSN_MATCH(LDREX, insn) ||
		   ((THUMB2_INSN_MATCH(STRW, insn) ||
		     THUMB2_INSN_MATCH(STRBW, insn) ||
		     THUMB2_INSN_MATCH(STRHW, insn) ||
		     THUMB2_INSN_MATCH(STRHW1, insn)) &&
		    (THUMB2_INSN_REG_RN(insn) == 15 ||
		     THUMB2_INSN_REG_RT(insn) == 15)) ||
		   ((THUMB2_INSN_MATCH(STRT, insn) ||
		     THUMB2_INSN_MATCH(STRHT, insn)) &&
		    (THUMB2_INSN_REG_RN(insn) == 15 ||
		     THUMB2_INSN_REG_RT(insn) == 15))) {
		uregs = 0xf000;		/* Rt 12-15 */
	} else if ((THUMB2_INSN_MATCH(LDRD, insn) ||
		    THUMB2_INSN_MATCH(LDRD1, insn)) &&
		   (THUMB2_INSN_REG_RN(insn) == 15)) {
		uregs = 0xff00;		/* Rt 12-15, Rt2 8-11 */
	} else if (THUMB2_INSN_MATCH(MUL, insn) &&
		   THUMB2_INSN_REG_RM(insn) == 15) {
	} else if (THUMB2_INSN_MATCH(DP, insn) &&
		   (THUMB2_INSN_REG_RN(insn) == 15 ||
		    THUMB2_INSN_REG_RM(insn) == 15)) {
		uregs = 0xf000;		/* Rd 12-15 */
	} else if (THUMB2_INSN_MATCH(STRD, insn) &&
		   ((THUMB2_INSN_REG_RN(insn) == 15) ||
		    (THUMB2_INSN_REG_RT(insn) == 15) ||
		    THUMB2_INSN_REG_RT2(insn) == 15)) {
		uregs = 0xff00;		/* Rt 12-15, Rt2 8-11 */
	} else if (THUMB2_INSN_MATCH(RSBW, insn) &&
		   THUMB2_INSN_REG_RN(insn) == 15) {
		uregs = 0x0f00;		/* Rd 8-11 */
	} else if (THUMB2_INSN_MATCH(RORW, insn) &&
		   (THUMB2_INSN_REG_RN(insn) == 15 ||
		    THUMB2_INSN_REG_RM(insn) == 15)) {
	} else if ((THUMB2_INSN_MATCH(ROR, insn) ||
		    THUMB2_INSN_MATCH(LSLW2, insn) ||
		    THUMB2_INSN_MATCH(LSRW2, insn)) &&
		   THUMB2_INSN_REG_RM(insn) == 15) {
		uregs = 0x0f00;		/* Rd 8-11 */
	} else if ((THUMB2_INSN_MATCH(LSLW1, insn) ||
		    THUMB2_INSN_MATCH(LSRW1, insn)) &&
		   (THUMB2_INSN_REG_RN(insn) == 15 ||
		    THUMB2_INSN_REG_RM(insn) == 15)) {
		uregs = 0x0f00;		/* Rd 8-11 */
	} else if ((THUMB2_INSN_MATCH(TEQ1, insn) ||
		    THUMB2_INSN_MATCH(TST1, insn)) &&
		   THUMB2_INSN_REG_RN(insn) == 15) {
		uregs = 0xf0000;	/* Rn 0-3 (16-19) */
	} else if ((THUMB2_INSN_MATCH(TEQ2, insn) ||
		    THUMB2_INSN_MATCH(TST2, insn)) &&
		   (THUMB2_INSN_REG_RN(insn) == 15 ||
		    THUMB2_INSN_REG_RM(insn) == 15)) {
		uregs = 0xf0000;	/* Rn 0-3 (16-19) */

	/* PC-dependent but not a branch: use the PC-dependency template. */
	if (unlikely(uregs && pc_dep)) {
		memcpy(tramp, pc_dep_insn_execbuf_thumb, tramp_len);
		prep_pc_dep_insn_execbuf_thumb(tramp, insn, uregs);

		/* break marker + return address slots */
		*((unsigned short *)tramp + 13) = 0xdeff;
		*((unsigned short *)tramp + 14) = addr & 0x0000ffff;
		*((unsigned short *)tramp + 15) = addr >> 16;
		if (!is_thumb2(insn)) {
			*((unsigned short *)tramp + 16) =
				(addr & 0x0000ffff) | 0x1;
			*((unsigned short *)tramp + 17) = addr >> 16;
			*((unsigned short *)tramp + 16) =
				(addr & 0x0000ffff) | 0x1;
			*((unsigned short *)tramp + 17) = addr >> 16;

		/* generic template: execute the instruction as-is */
		memcpy(tramp, gen_insn_execbuf_thumb, tramp_len);
		*((unsigned short *)tramp + 13) = 0xdeff;
		if (!is_thumb2(insn)) {
			*((unsigned short *)tramp + 2) = insn;
			*((unsigned short *)tramp + 16) =
				(addr & 0x0000ffff) | 0x1;
			*((unsigned short *)tramp + 17) = addr >> 16;
			*((unsigned short *)tramp + 16) =
				(addr & 0x0000ffff) | 0x1;
			*((unsigned short *)tramp + 17) = addr >> 16;

	/* Branches: emulate by computing the destination at probe time. */
	if (THUMB_INSN_MATCH(B2, insn)) {
		memcpy(tramp, b_off_insn_execbuf_thumb, tramp_len);
		*((unsigned short *)tramp + 13) = 0xdeff;
		addr = branch_t16_dest(insn, vaddr);
		*((unsigned short *)tramp + 14) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short *)tramp + 15) = addr >> 16;
		*((unsigned short *)tramp + 16) = 0;
		*((unsigned short *)tramp + 17) = 0;
	} else if (THUMB_INSN_MATCH(B1, insn)) {
		memcpy(tramp, b_cond_insn_execbuf_thumb, tramp_len);
		*((unsigned short *)tramp + 13) = 0xdeff;
		*((unsigned short *)tramp + 0) |= (insn & 0xf00);
		addr = branch_cond_t16_dest(insn, vaddr);
		*((unsigned short *)tramp + 14) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short *)tramp + 15) = addr >> 16;
		*((unsigned short *)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short *)tramp + 17) = addr >> 16;
	} else if (THUMB_INSN_MATCH(BLX2, insn) ||
		   THUMB_INSN_MATCH(BX, insn)) {
		memcpy(tramp, b_r_insn_execbuf_thumb, tramp_len);
		*((unsigned short *)tramp + 13) = 0xdeff;
		*((unsigned short *)tramp + 4) = insn;
		*((unsigned short *)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short *)tramp + 17) = addr >> 16;
	} else if (THUMB2_INSN_MATCH(BLX1, insn) ||
		   THUMB2_INSN_MATCH(BL, insn)) {
		memcpy(tramp, blx_off_insn_execbuf_thumb, tramp_len);
		*((unsigned short *)tramp + 13) = 0xdeff;
		addr = branch_t32_dest(insn, vaddr);
		*((unsigned short *)tramp + 14) = (addr & 0x0000ffff);
		*((unsigned short *)tramp + 15) = addr >> 16;
		*((unsigned short *)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short *)tramp + 17) = addr >> 16;
	} else if (THUMB_INSN_MATCH(CBZ, insn)) {
		memcpy(tramp, cbz_insn_execbuf_thumb, tramp_len);
		*((unsigned short *)tramp + 13) = 0xdeff;
		/* zero out original branch displacement (imm5 = 0; i = 0) */
		*((unsigned short *)tramp + 0) = insn & (~0x2f8);
		/* replace it with 8 bytes offset in execbuf (imm5 = 0b00010) */
		*((unsigned short *)tramp + 0) |= 0x20;
		addr = cbz_t16_dest(insn, vaddr);
		*((unsigned short *)tramp + 14) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short *)tramp + 15) = addr >> 16;
		*((unsigned short *)tramp + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short *)tramp + 17) = addr >> 16;
/**
 * @brief Prepares uprobe for ARM.
 *
 * Reads the original instruction from the target process, builds an
 * out-of-line trampoline for it (ARM or Thumb variant depending on bit 0
 * of the probe address), allocates a user-space slot and writes the
 * trampoline there.
 *
 * @param p Pointer to the uprobe.
 * @return 0 on success,\n
 * negative error code on error.
 *
 * NOTE(review): the declaration of 'insn', the thumb/arm dispatch lines
 * and the error-return statements are not visible in this chunk.
 */
int arch_prepare_uprobe(struct uprobe *p)
	struct task_struct *task = p->task;
	/* bit 0 of the probe address encodes Thumb mode; strip it */
	unsigned long vaddr = (unsigned long)p->addr & ~((unsigned long)1);
	int thumb_mode = (unsigned long)p->addr & 1;
	unsigned long tramp[UPROBES_TRAMP_LEN];
	unsigned long __user *utramp;
	enum { tramp_len = sizeof(tramp) };

	if (!read_proc_vm_atomic(task, vaddr, &insn, sizeof(insn))) {
		printk(KERN_ERR "failed to read memory %lx!\n", vaddr);

		arch_make_trampoline_thumb(vaddr, insn,
		arch_make_trampoline_arm(vaddr, insn, tramp);
		pr_err("failed to make tramp, addr=%p\n", p->addr);

	utramp = swap_slot_alloc(p->sm);
	if (utramp == NULL) {
		printk(KERN_INFO "Error: swap_slot_alloc failed (%08lx)\n",

	if (!write_proc_vm_atomic(p->task, (unsigned long)utramp, tramp,
		pr_err("failed to write memory tramp=%p!\n", utramp);
		swap_slot_free(p->sm, utramp);

	/* make the new trampoline visible to instruction fetch */
	flush_insns(utramp, tramp_len);
	p->ainsn.insn = utramp;
639 * @brief Analysis opcodes.
641 * @param rp Pointer to the uretprobe.
644 void arch_opcode_analysis_uretprobe(struct uretprobe *rp)
646 /* Remove retprobe if first insn overwrites lr */
647 rp->thumb_noret = !!(THUMB2_INSN_MATCH(BL, rp->up.opcode) ||
648 THUMB2_INSN_MATCH(BLX1, rp->up.opcode) ||
649 THUMB_INSN_MATCH(BLX2, rp->up.opcode));
651 rp->arm_noret = !!(ARM_INSN_MATCH(BL, rp->up.opcode) ||
652 ARM_INSN_MATCH(BLX1, rp->up.opcode) ||
653 ARM_INSN_MATCH(BLX2, rp->up.opcode));
/**
 * @brief Prepares uretprobe for ARM.
 *
 * Saves the original return address and stack pointer (encoding the
 * current Thumb/ARM mode in bit 0 of the saved SP), then redirects LR to
 * the return trampoline.
 *
 * @param ri Pointer to the uretprobe instance.
 * @param regs Pointer to CPU register data.
 * @return Error code.
 *
 * NOTE(review): the else branches and the return statement are not
 * visible in this chunk.
 */
int arch_prepare_uretprobe(struct uretprobe_instance *ri, struct pt_regs *regs)
	ri->ret_addr = (uprobe_opcode_t *)regs->ARM_lr;
	ri->sp = (uprobe_opcode_t *)regs->ARM_sp;

	/* Set flag of current mode */
	ri->sp = (uprobe_opcode_t *)((long)ri->sp | !!thumb_mode(regs));

	if (ri->preload_thumb) {
		/* 0x1b: byte offset of the Thumb return-break in the tramp */
		regs->ARM_lr = (unsigned long)(ri->rp->up.ainsn.insn) + 0x1b;
		if (thumb_mode(regs))
			regs->ARM_lr = (unsigned long)(ri->rp->up.ainsn.insn) + 0x1b;
			regs->ARM_lr = (unsigned long)(ri->rp->up.ainsn.insn +
						       UPROBES_TRAMP_RET_BREAK_IDX);
684 unsigned long arch_tramp_by_ri(struct uretprobe_instance *ri)
686 /* Understand function mode */
687 return ((unsigned long)ri->sp & 1) ?
688 ((unsigned long)ri->rp->up.ainsn.insn + 0x1b) :
689 (unsigned long)(ri->rp->up.ainsn.insn +
690 UPROBES_TRAMP_RET_BREAK_IDX);
/**
 * @brief Disarms uretprobe instance.
 *
 * Searches the target task's stack (and LR) for the trampoline address and
 * restores the saved original return address in its place.
 *
 * @param ri Pointer to the uretprobe instance.
 * @param task Pointer to the task for which the uretprobe instance.
 * @param tr Explicit trampoline address, or -1 to derive it from ri.
 * @return 0 on success,\n
 * negative error code on error.
 *
 * NOTE(review): declarations of 'vaddr', 'retval', 'i', the tr == -1
 * dispatch, the loop break and the return statements are not visible in
 * this chunk.
 */
int arch_disarm_urp_inst(struct uretprobe_instance *ri,
			 struct task_struct *task, unsigned long tr)
	struct pt_regs *uregs = task_pt_regs(ri->task);
	unsigned long ra = swap_get_ret_addr(uregs);
	unsigned long *tramp;
	/* bit 0 of the saved SP encodes the mode; mask it off */
	unsigned long *sp = (unsigned long *)((long)ri->sp & ~1);
	unsigned long *stack = sp - RETPROBE_STACK_DEPTH + 1;
	unsigned long *found = NULL;
	unsigned long *buf[RETPROBE_STACK_DEPTH];

		vaddr = (unsigned long)ri->rp->up.addr;
		tramp = (unsigned long *)arch_tramp_by_ri(ri);

		tramp = (unsigned long *)tr;

	retval = read_proc_vm_atomic(task, (unsigned long)stack,
	if (retval != sizeof(buf)) {
		printk(KERN_INFO "---> %s (%d/%d): failed to read "
		       "stack from %08lx\n", task->comm, task->tgid, task->pid,
		       (unsigned long)stack);

	/* search the stack from the bottom */
	for (i = RETPROBE_STACK_DEPTH - 1; i >= 0; i--) {
		if (buf[i] == tramp) {

		printk(KERN_INFO "---> %s (%d/%d): trampoline found at "
		       "%08lx (%08lx /%+d) - %lx, set ret_addr=%p\n",
		       task->comm, task->tgid, task->pid,
		       (unsigned long)found, (unsigned long)sp,
		       found - sp, vaddr, ri->ret_addr);
		retval = write_proc_vm_atomic(task, (unsigned long)found,
					      sizeof(ri->ret_addr));
		if (retval != sizeof(ri->ret_addr)) {
			printk(KERN_INFO "---> %s (%d/%d): "
			       "failed to write value to %08lx",
			       task->comm, task->tgid, task->pid, (unsigned long)found);

check_lr: /* check lr anyway */
	if (ra == (unsigned long)tramp) {
		printk(KERN_INFO "---> %s (%d/%d): trampoline found at "
		       "lr = %08lx - %lx, set ret_addr=%p\n",
		       task->comm, task->tgid, task->pid, ra, vaddr, ri->ret_addr);

		swap_set_ret_addr(uregs, (unsigned long)ri->ret_addr);

		printk(KERN_INFO "---> %s (%d/%d): trampoline NOT found at "
		       "sp = %08lx, lr = %08lx - %lx, ret_addr=%p\n",
		       task->comm, task->tgid, task->pid,
		       (unsigned long)sp, ra, vaddr, ri->ret_addr);
/**
 * @brief Jump pre-handler.
 *
 * Invokes the ujprobe's pre-entry hook (which supplies the single-step
 * address) and then the user-provided entry handler with the first six
 * argument registers.
 *
 * @param p Pointer to the uprobe.
 * @param regs Pointer to CPU register data.
 * @return 0 (handled).
 *
 * NOTE(review): the guards around the pre_entry/entry calls and the return
 * statement are not visible in this chunk.
 */
int setjmp_upre_handler(struct uprobe *p, struct pt_regs *regs)
	struct ujprobe *jp = container_of(p, struct ujprobe, up);

	uprobe_pre_entry_handler_t pre_entry =
		(uprobe_pre_entry_handler_t)jp->pre_entry;
	entry_point_t entry = (entry_point_t)jp->entry;

		p->ss_addr[smp_processor_id()] = (uprobe_opcode_t *)
						 pre_entry(jp->priv_arg, regs);

		/* AAPCS: first six arguments arrive in r0-r5 */
		entry(regs->ARM_r0, regs->ARM_r1, regs->ARM_r2,
		      regs->ARM_r3, regs->ARM_r4, regs->ARM_r5);

		arch_ujprobe_return();
813 * @brief Gets trampoline address.
815 * @param p Pointer to the uprobe.
816 * @param regs Pointer to CPU register data.
817 * @return Trampoline address.
819 unsigned long arch_get_trampoline_addr(struct uprobe *p, struct pt_regs *regs)
821 return thumb_mode(regs) ?
822 (unsigned long)(p->ainsn.insn) + 0x1b :
823 (unsigned long)(p->ainsn.insn +
824 UPROBES_TRAMP_RET_BREAK_IDX);
828 * @brief Restores return address.
830 * @param orig_ret_addr Original return address.
831 * @param regs Pointer to CPU register data.
834 void arch_set_orig_ret_addr(unsigned long orig_ret_addr, struct pt_regs *regs)
836 regs->ARM_lr = orig_ret_addr;
837 regs->ARM_pc = orig_ret_addr & ~0x1;
839 if (regs->ARM_lr & 0x1)
840 regs->ARM_cpsr |= PSR_T_BIT;
842 regs->ARM_cpsr &= ~PSR_T_BIT;
846 * @brief Removes uprobe.
848 * @param up Pointer to the uprobe.
851 void arch_remove_uprobe(struct uprobe *up)
853 swap_slot_free(up->sm, up->ainsn.insn);
/*
 * arch_arm_uprobe - plant the breakpoint opcode at the probe address in
 * the target process (2 bytes for Thumb, 4 for ARM) and flush the caches.
 *
 * NOTE(review): the declaration of 'ret', the error check and the return
 * statements are not visible in this chunk.
 */
int arch_arm_uprobe(struct uprobe *p)
	/* bit 0 of the probe address encodes Thumb mode; strip it */
	unsigned long vaddr = (unsigned long)p->addr & ~((unsigned long)1);
	int thumb_mode = (unsigned long)p->addr & 1;
	int len = 4 >> thumb_mode;	/* if thumb_mode then len = 2 */
	unsigned long insn = thumb_mode ? UBP_THUMB : UBP_ARM;

	ret = write_proc_vm_atomic(p->task, vaddr, &insn, len);
		pr_err("arch_arm_uprobe: failed to write memory tgid=%u addr=%08lx len=%d\n",
		       p->task->tgid, vaddr, len);

	flush_insns(vaddr, len);
/*
 * arch_disarm_uprobe - restore the original opcode at the probe address
 * in the target process and flush the caches.
 *
 * NOTE(review): the declaration of 'ret' and the error check around the
 * pr_err are not visible in this chunk.
 */
void arch_disarm_uprobe(struct uprobe *p, struct task_struct *task)
	/* bit 0 of the probe address encodes Thumb mode; strip it */
	unsigned long vaddr = (unsigned long)p->addr & ~((unsigned long)1);
	int thumb_mode = (unsigned long)p->addr & 1;
	int len = 4 >> thumb_mode;	/* if thumb_mode then len = 2 */

	ret = write_proc_vm_atomic(task, vaddr, &p->opcode, len);
		pr_err("arch_disarm_uprobe: failed to write memory tgid=%u addr=%08lx len=%d\n",
		       task->tgid, vaddr, len);

	flush_insns(vaddr, len);
/*
 * urp_handler - handle a hit on a uretprobe trampoline break: locate the
 * probe owning the trampoline slot the PC is in, and run the trampoline
 * handler.
 *
 * NOTE(review): the declaration of 'p', the Thumb arm of the offset_bp
 * ternary, the "not ours" branch and the return statements are not
 * visible in this chunk.
 */
static int urp_handler(struct pt_regs *regs, pid_t tgid)
	unsigned long vaddr = regs->ARM_pc;
	unsigned long offset_bp = thumb_mode(regs) ?
				  4 * UPROBES_TRAMP_RET_BREAK_IDX;
	unsigned long tramp_addr = vaddr - offset_bp;

	p = get_uprobe_by_insn_slot((void *)tramp_addr, tgid, regs);
		"no_uprobe: Not one of ours: let kernel handle it %lx\n",

	trampoline_uprobe_handler(p, regs);
916 * @brief Prepares singlestep for current CPU.
918 * @param p Pointer to kprobe.
919 * @param regs Pointer to CPU registers data.
922 static void arch_prepare_singlestep(struct uprobe *p, struct pt_regs *regs)
924 int cpu = smp_processor_id();
926 if (p->ss_addr[cpu]) {
927 regs->ARM_pc = (unsigned long)p->ss_addr[cpu];
928 p->ss_addr[cpu] = NULL;
930 regs->ARM_pc = (unsigned long)p->ainsn.insn;
/**
 * @brief Breakpoint instruction handler.
 *
 * Entry point from the undef-instruction hooks: looks up the probe for
 * the faulting address (bit 0 set for Thumb mode), runs its pre-handler
 * and single-step setup, or dispatches to the uretprobe handler.  As a
 * fallback, detects a mode mismatch (e.g. ARM break hit in Thumb code)
 * and disarms the stale probe.
 *
 * @param regs Pointer to CPU register data.
 * @param instr Instruction.
 * @return uprobe_handler results.
 *
 * NOTE(review): declarations (ret, p, flags), preempt_disable pairing,
 * branch structure and return statements are not visible in this chunk;
 * the interrupt state is dropped/retaken around the user pre-handler.
 */
int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
	/* bit 0 marks Thumb mode, matching how probes were registered */
	unsigned long vaddr = regs->ARM_pc | !!thumb_mode(regs);
	pid_t tgid = current->tgid;

	local_irq_save(flags);

	p = get_uprobe((uprobe_opcode_t *)vaddr, tgid);
		bool prepare = false;

			if (!p->pre_handler || !p->pre_handler(p, regs))

			swap_preempt_enable_no_resched();
			local_irq_restore(flags);

			if (!p->pre_handler || !p->pre_handler(p, regs))

			local_irq_save(flags);

			arch_prepare_singlestep(p, regs);

		ret = urp_handler(regs, tgid);

		/* check ARM/THUMB mode on correct */

			p = get_uprobe((uprobe_opcode_t *)vaddr, tgid);
				pr_err("invalid mode: thumb=%d addr=%p insn=%08lx\n",
				       !!thumb_mode(regs), p->addr, p->opcode);

				swap_preempt_enable_no_resched();
				local_irq_restore(flags);

				disarm_uprobe(p, current);

				local_irq_save(flags);

	swap_preempt_enable_no_resched();
	local_irq_restore(flags);
/* userspace probes hook (arm): route UBP_ARM undefined-instruction
 * exceptions taken in user mode to uprobe_trap_handler() */
static struct undef_hook undef_hook_for_us_arm = {
	.instr_mask = 0xffffffff,
	.instr_val = UBP_ARM,
	.cpsr_mask = MODE_MASK,
	.cpsr_val = USR_MODE,
	.fn = uprobe_trap_handler
/* userspace probes hook (thumb): route UBP_THUMB undefined-instruction
 * exceptions taken in user mode to uprobe_trap_handler() */
static struct undef_hook undef_hook_for_us_thumb = {
	.instr_mask = 0xffffffff,
	.instr_val = UBP_THUMB,
	.cpsr_mask = MODE_MASK,
	.cpsr_val = USR_MODE,
	.fn = uprobe_trap_handler
/**
 * @brief Installs breakpoint hooks.
 *
 * Registers the ARM and Thumb undef-instruction hooks.
 *
 * NOTE(review): the return statement is not visible in this chunk —
 * presumably 'return 0;'.
 */
int swap_arch_init_uprobes(void)
	swap_register_undef_hook(&undef_hook_for_us_arm);
	swap_register_undef_hook(&undef_hook_for_us_thumb);
1033 * @brief Uninstalls breakpoint hooks.
1037 void swap_arch_exit_uprobes(void)
1039 swap_unregister_undef_hook(&undef_hook_for_us_thumb);
1040 swap_unregister_undef_hook(&undef_hook_for_us_arm);