#include <dbi_kprobes.h>
#include <asm/dbi_kprobes.h>
#include <swap_uprobes.h>
#include <asm/swap_uprobes.h>
#include <dbi_insn_slots.h>
#include <dbi_kdebug.h>

extern struct hlist_head uprobe_insn_pages;

void pc_dep_insn_execbuf(void);
void gen_insn_execbuf(void);
void gen_insn_execbuf_thumb(void);
void pc_dep_insn_execbuf_thumb(void);
#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))
#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)
static kprobe_opcode_t get_addr_b(kprobe_opcode_t insn, kprobe_opcode_t *addr)
	/* PC reads as the instruction address plus 8 */
	return (kprobe_opcode_t)((long)addr + 8 + branch_displacement(insn));
/* is the instruction Thumb2 (a 32-bit encoding) and NOT a 16-bit branch, etc.? */
static int is_thumb2(kprobe_opcode_t insn)
	return ((insn & 0xf800) == 0xe800 ||
		(insn & 0xf800) == 0xf000 ||
		(insn & 0xf800) == 0xf800);
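
/*
 * Build the ARM trampoline for a user probe: copy the original instruction
 * into an execution buffer, detect encodings that read or write PC (and so
 * need the PC-dependent buffer instead of the generic one), and write the
 * finished buffer into the task's instruction slot.
 */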
static int arch_copy_trampoline_arm_uprobe(struct kprobe *p, struct task_struct *task, int atomic)
	kprobe_opcode_t insns[UPROBES_TRAMP_LEN];
	kprobe_opcode_t insn[MAX_INSN_SIZE];
	struct arch_specific_insn ainsn;

	if ((unsigned long)p->addr & 0x01) {
		printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);

	ainsn.insn_arm = insn;
	if (!arch_check_insn_arm(&ainsn)) {

	if (ARM_INSN_MATCH(DPIS, insn[0]) || ARM_INSN_MATCH(LRO, insn[0]) ||
	    ARM_INSN_MATCH(SRO, insn[0])) {
		if ((ARM_INSN_REG_RN(insn[0]) == 15) || (ARM_INSN_REG_RM(insn[0]) == 15) ||
		    (ARM_INSN_MATCH(SRO, insn[0]) && (ARM_INSN_REG_RD(insn[0]) == 15))) {
			DBPRINTF("Unboostable insn %lx, DPIS/LRO/SRO\n", insn[0]);

	} else if (ARM_INSN_MATCH(DPI, insn[0]) || ARM_INSN_MATCH(LIO, insn[0]) ||
		   ARM_INSN_MATCH(SIO, insn[0])) {
		if ((ARM_INSN_REG_RN(insn[0]) == 15) || (ARM_INSN_MATCH(SIO, insn[0]) &&
		    (ARM_INSN_REG_RD(insn[0]) == 15))) {
			DBPRINTF("Unboostable insn %lx/%p, DPI/LIO/SIO\n", insn[0], p);

	} else if (ARM_INSN_MATCH(DPRS, insn[0])) {
		if ((ARM_INSN_REG_RN(insn[0]) == 15) || (ARM_INSN_REG_RM(insn[0]) == 15) ||
		    (ARM_INSN_REG_RS(insn[0]) == 15)) {
			DBPRINTF("Unboostable insn %lx, DPRS\n", insn[0]);

	} else if (ARM_INSN_MATCH(SM, insn[0])) {
		if (ARM_INSN_REG_MR(insn[0], 15))
			DBPRINTF("Unboostable insn %lx, SM\n", insn[0]);
	// check instructions that can write their result to SP and use PC
	if (pc_dep && (ARM_INSN_REG_RD(ainsn.insn_arm[0]) == 13)) {
		printk("Error in %s at %d: instruction check failed (arm)\n", __FILE__, __LINE__);
		// TODO: move free to later phase
		//free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm, 0);
	if (unlikely(uregs && pc_dep)) {
		memcpy(insns, pc_dep_insn_execbuf, sizeof(insns));
		if (prep_pc_dep_insn_execbuf(insns, insn[0], uregs) != 0) {
			printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!\n",
			       __FILE__, __LINE__, insn[0]);
			// TODO: move free to later phase
			//free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm, 0);

		insns[6] = (kprobe_opcode_t)(p->addr + 2);

		memcpy(insns, gen_insn_execbuf, sizeof(insns));
		insns[UPROBES_TRAMP_INSN_IDX] = insn[0];

	insns[UPROBES_TRAMP_RET_BREAK_IDX] = BREAKPOINT_INSTRUCTION;
	insns[7] = (kprobe_opcode_t)(p->addr + 1);
	if (ARM_INSN_MATCH(B, ainsn.insn_arm[0])) {
		memcpy(insns, pc_dep_insn_execbuf, sizeof(insns));
		insns[UPROBES_TRAMP_RET_BREAK_IDX] = BREAKPOINT_INSTRUCTION;
		insns[6] = (kprobe_opcode_t)(p->addr + 2);
		insns[7] = get_addr_b(p->opcode, p->addr);
	DBPRINTF("arch_prepare_uprobe: to %p - %lx %lx %lx %lx %lx %lx %lx %lx %lx",
		 p->ainsn.insn_arm, insns[0], insns[1], insns[2], insns[3], insns[4],
		 insns[5], insns[6], insns[7], insns[8]);
	if (!write_proc_vm_atomic(task, (unsigned long)p->ainsn.insn_arm, insns, sizeof(insns))) {
		panic("failed to write memory %p!\n", p->ainsn.insn_arm);
		// Mr_Nobody: do we really have to panic here?..
		//free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm, 0);
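
/*
 * Flag Thumb/Thumb2 instructions that cannot be safely executed out of
 * line: anything that changes PC (branches, PC-destination loads and ALU
 * ops) plus a few PC-relative store forms that are skipped below.
 */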
static int arch_check_insn_thumb(struct arch_specific_insn *ainsn)
	// check instructions that can change PC
	if (THUMB_INSN_MATCH(UNDEF, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(SWI, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(BREAK, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(BL, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(B1, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(B2, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(CBZ, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(B1, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(B2, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(BLX1, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(BLX2, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(BX, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(BXJ, ainsn->insn_thumb[0]) ||
	    (THUMB2_INSN_MATCH(ADR, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRWL, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
	    THUMB2_INSN_MATCH(LDMIA, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(LDMDB, ainsn->insn_thumb[0]) ||
	    (THUMB2_INSN_MATCH(DP, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(RSBW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(RORW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(ROR, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LSLW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LSLW2, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LSRW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LSRW2, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    /* skip [PC, #-imm12] -> [SP, #-imm8] conversions and instructions that hang on Tegra */
	    (THUMB2_INSN_MATCH(STRW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(STRBW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(STRHW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(STRW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(STRHW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRBW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    /* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
	    (THUMB2_INSN_MATCH(LDRD, ainsn->insn_thumb[0]) || THUMB2_INSN_MATCH(LDRD1, ainsn->insn_thumb[0]) || THUMB2_INSN_MATCH(STRD, ainsn->insn_thumb[0]))) {
		DBPRINTF("Bad insn arch_check_insn_thumb: %lx\n", ainsn->insn_thumb[0]);
static int prep_pc_dep_insn_execbuf_thumb(kprobe_opcode_t *insns, kprobe_opcode_t insn, int uregs)
	unsigned char mreg = 0;
	unsigned char reg = 0;

	if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
		reg = ((insn & 0xffff) & uregs) >> 8;

	if (THUMB_INSN_MATCH(MOV3, insn)) {
		if (((((unsigned char)insn) & 0xff) >> 3) == 15) {
			reg = (insn & 0xffff) & uregs;

	if (THUMB2_INSN_MATCH(ADR, insn)) {
		reg = ((insn >> 16) & uregs) >> 8;

	if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
	    THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
	    THUMB2_INSN_MATCH(LDRWL, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;

	// LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
	if (THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
	    THUMB2_INSN_MATCH(LDREX, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;

	if (THUMB2_INSN_MATCH(DP, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;

	if (THUMB2_INSN_MATCH(RSBW, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;

	if (THUMB2_INSN_MATCH(RORW, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;

	if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW1, insn) ||
	    THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW1, insn) ||
	    THUMB2_INSN_MATCH(LSRW2, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;

	if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {

	if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
		reg = THUMB2_INSN_REG_RM(insn);

	if ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
	     THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
	     THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn) ||
	     THUMB2_INSN_MATCH(STRHW, insn)) && THUMB2_INSN_REG_RT(insn) == 15) {
		reg = THUMB2_INSN_REG_RT(insn);

	if (reg == 6 || reg == 7) {
		*((unsigned short *)insns + 0) = (*((unsigned short *)insns + 0) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
		*((unsigned short *)insns + 1) = (*((unsigned short *)insns + 1) & 0xf8ff) | (mreg << 8);
		*((unsigned short *)insns + 2) = (*((unsigned short *)insns + 2) & 0xfff8) | (mreg + 1);
		*((unsigned short *)insns + 3) = (*((unsigned short *)insns + 3) & 0xffc7) | (mreg << 3);
		*((unsigned short *)insns + 7) = (*((unsigned short *)insns + 7) & 0xf8ff) | (mreg << 8);
		*((unsigned short *)insns + 8) = (*((unsigned short *)insns + 8) & 0xffc7) | (mreg << 3);
		*((unsigned short *)insns + 9) = (*((unsigned short *)insns + 9) & 0xffc7) | ((mreg + 1) << 3);
		*((unsigned short *)insns + 10) = (*((unsigned short *)insns + 10) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
	if (THUMB_INSN_MATCH(APC, insn)) {
		// ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4
		*((unsigned short *)insns + 4) = ((insn & 0xffff) | 0x800); // ADD Rd, SP, #immed_8*4

	if (THUMB_INSN_MATCH(LRO3, insn)) {
		// LDR Rd, [PC, #immed_8*4] -> LDR Rd, [SP, #immed_8*4]
		*((unsigned short *)insns + 4) = ((insn & 0xffff) + 0x5000); // LDR Rd, [SP, #immed_8*4]

	if (THUMB_INSN_MATCH(MOV3, insn)) {
		// MOV Rd, PC -> MOV Rd, SP
		*((unsigned short *)insns + 4) = ((insn & 0xffff) ^ 0x10); // MOV Rd, SP

	if (THUMB2_INSN_MATCH(ADR, insn)) {
		// ADDW Rd, PC, #imm -> ADDW Rd, SP, #imm
		insns[2] = (insn & 0xfffffff0) | 0x0d; // ADDW Rd, SP, #imm

	if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRBW, insn) ||
	    THUMB2_INSN_MATCH(LDRHW, insn)) {
		// LDR.W Rt, [PC, #-<imm_12>] -> LDR.W Rt, [SP, #-<imm_8>]
		// NOTE: the 12-bit immediate is narrowed to an 8-bit one here
		insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // LDR.W Rt, [SP, #-<imm_8>]

	if (THUMB2_INSN_MATCH(LDRW1, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
	    THUMB2_INSN_MATCH(LDRHW1, insn) || THUMB2_INSN_MATCH(LDRD, insn) ||
	    THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(LDREX, insn)) {
		// LDRx.W Rt, [PC, #+<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>]
		// (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>])
		insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]

	if (THUMB2_INSN_MATCH(MUL, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // MUL Rd, Rn, SP

	if (THUMB2_INSN_MATCH(DP, insn)) {
		if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // DP Rd, Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // DP Rd, PC, Rm

	if (THUMB2_INSN_MATCH(LDRWL, insn)) {
		// LDRx.W Rt, [PC, #<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>]
		// (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>])
		insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]

	if (THUMB2_INSN_MATCH(RSBW, insn)) {
		insns[2] = (insn & 0xfffffff0) | 0xd; // RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const>

	if (THUMB2_INSN_MATCH(RORW, insn) || THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) {
		if ((THUMB2_INSN_REG_RM(insn) == 15) && (THUMB2_INSN_REG_RN(insn) == 15)) {
			insns[2] = (insn & 0xfffdfffd); // ROR.W Rd, PC, PC
		} else if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR.W Rd, Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // ROR.W Rd, PC, Rm

	if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const>

	if (THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // STRx.W Rt, [Rn, SP]

	if (THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
	    THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) {
		if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // STRD/T/HT{.W} Rt, [SP, ...]

	if (THUMB2_INSN_MATCH(STRHW, insn) && (THUMB2_INSN_REG_RN(insn) == 15)) {
		if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // STRH.W Rt, [SP, #-<imm_8>]

	if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn) ||
			    THUMB2_INSN_MATCH(STRBW, insn) ||
			    THUMB2_INSN_MATCH(STRD, insn) ||
			    THUMB2_INSN_MATCH(STRHT, insn) ||
			    THUMB2_INSN_MATCH(STRT, insn) ||
			    THUMB2_INSN_MATCH(STRHW1, insn) ||
			    THUMB2_INSN_MATCH(STRHW, insn))) {
		insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;

	if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
		insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ SP, #<const>

	if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
		if ((THUMB2_INSN_REG_RN(insn) == 15) && (THUMB2_INSN_REG_RM(insn) == 15)) {
			insns[2] = (insn & 0xfffdfffd); // TEQ/TST PC, PC
		} else if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0xd0000; // TEQ/TST Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ/TST PC, Rm
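
/*
 * Thumb counterpart of the ARM routine above: classify the probed
 * instruction, choose the register mask (uregs) that marks which fields may
 * reference PC, prepare either the generic or the PC-dependent Thumb
 * execution buffer, fill in the return-break halfword and return addresses,
 * and write the buffer into the task's instruction slot.
 */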
static int arch_copy_trampoline_thumb_uprobe(struct kprobe *p, struct task_struct *task, int atomic)
	kprobe_opcode_t insn[MAX_INSN_SIZE];
	struct arch_specific_insn ainsn;
	kprobe_opcode_t insns[UPROBES_TRAMP_LEN * 2];

	if ((unsigned long)p->addr & 0x01) {
		printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);

	ainsn.insn_thumb = insn;
	if (!arch_check_insn_thumb(&ainsn)) {

	if (THUMB_INSN_MATCH(APC, insn[0]) || THUMB_INSN_MATCH(LRO3, insn[0])) {
		uregs = 0x0700; // 8-10

	} else if (THUMB_INSN_MATCH(MOV3, insn[0]) && (((((unsigned char)insn[0]) & 0xff) >> 3) == 15)) {
	} else if (THUMB2_INSN_MATCH(ADR, insn[0])) {
		uregs = 0x0f00; // Rd 8-11
	} else if (((THUMB2_INSN_MATCH(LDRW, insn[0]) || THUMB2_INSN_MATCH(LDRW1, insn[0]) ||
		     THUMB2_INSN_MATCH(LDRBW, insn[0]) || THUMB2_INSN_MATCH(LDRBW1, insn[0]) ||
		     THUMB2_INSN_MATCH(LDRHW, insn[0]) || THUMB2_INSN_MATCH(LDRHW1, insn[0]) ||
		     THUMB2_INSN_MATCH(LDRWL, insn[0])) && THUMB2_INSN_REG_RN(insn[0]) == 15) ||
		   THUMB2_INSN_MATCH(LDREX, insn[0]) ||
		   ((THUMB2_INSN_MATCH(STRW, insn[0]) || THUMB2_INSN_MATCH(STRBW, insn[0]) ||
		     THUMB2_INSN_MATCH(STRHW, insn[0]) || THUMB2_INSN_MATCH(STRHW1, insn[0])) &&
		    (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RT(insn[0]) == 15)) ||
		   ((THUMB2_INSN_MATCH(STRT, insn[0]) || THUMB2_INSN_MATCH(STRHT, insn[0])) &&
		    (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RT(insn[0]) == 15))) {
		uregs = 0xf000; // Rt 12-15

	} else if ((THUMB2_INSN_MATCH(LDRD, insn[0]) || THUMB2_INSN_MATCH(LDRD1, insn[0])) && (THUMB2_INSN_REG_RN(insn[0]) == 15)) {
		uregs = 0xff00; // Rt 12-15, Rt2 8-11

	} else if (THUMB2_INSN_MATCH(MUL, insn[0]) && THUMB2_INSN_REG_RM(insn[0]) == 15) {

	} else if (THUMB2_INSN_MATCH(DP, insn[0]) && (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
		uregs = 0xf000; // Rd 12-15

	} else if (THUMB2_INSN_MATCH(STRD, insn[0]) && ((THUMB2_INSN_REG_RN(insn[0]) == 15) || (THUMB2_INSN_REG_RT(insn[0]) == 15) || THUMB2_INSN_REG_RT2(insn[0]) == 15)) {
		uregs = 0xff00; // Rt 12-15, Rt2 8-11

	} else if (THUMB2_INSN_MATCH(RSBW, insn[0]) && THUMB2_INSN_REG_RN(insn[0]) == 15) {
		uregs = 0x0f00; // Rd 8-11

	} else if (THUMB2_INSN_MATCH(RORW, insn[0]) && (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {

	} else if ((THUMB2_INSN_MATCH(ROR, insn[0]) || THUMB2_INSN_MATCH(LSLW2, insn[0]) || THUMB2_INSN_MATCH(LSRW2, insn[0])) && THUMB2_INSN_REG_RM(insn[0]) == 15) {
		uregs = 0x0f00; // Rd 8-11

	} else if ((THUMB2_INSN_MATCH(LSLW1, insn[0]) || THUMB2_INSN_MATCH(LSRW1, insn[0])) && (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
		uregs = 0x0f00; // Rd 8-11

	} else if ((THUMB2_INSN_MATCH(TEQ1, insn[0]) || THUMB2_INSN_MATCH(TST1, insn[0])) && THUMB2_INSN_REG_RN(insn[0]) == 15) {
		uregs = 0xf0000; // Rn 0-3 (16-19)

	} else if ((THUMB2_INSN_MATCH(TEQ2, insn[0]) || THUMB2_INSN_MATCH(TST2, insn[0])) &&
		   (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
		uregs = 0xf0000; // Rn 0-3 (16-19)
	if (unlikely(uregs && pc_dep)) {
		memcpy(insns, pc_dep_insn_execbuf_thumb, 18 * 2);
		if (prep_pc_dep_insn_execbuf_thumb(insns, insn[0], uregs) != 0) {
			printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!\n",
			       __FILE__, __LINE__, insn[0]);
			//free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_thumb, 0);

		addr = ((unsigned int)p->addr) + 4;
		*((unsigned short *)insns + 13) = 0xdeff;
		*((unsigned short *)insns + 14) = addr & 0x0000ffff;
		*((unsigned short *)insns + 15) = addr >> 16;
		if (!is_thumb2(insn[0])) {
			addr = ((unsigned int)p->addr) + 2;
			*((unsigned short *)insns + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short *)insns + 17) = addr >> 16;

			addr = ((unsigned int)p->addr) + 4;
			*((unsigned short *)insns + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short *)insns + 17) = addr >> 16;
		memcpy(insns, gen_insn_execbuf_thumb, 18 * 2);
		*((unsigned short *)insns + 13) = 0xdeff;
		if (!is_thumb2(insn[0])) {
			addr = ((unsigned int)p->addr) + 2;
			*((unsigned short *)insns + 2) = insn[0];
			*((unsigned short *)insns + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short *)insns + 17) = addr >> 16;

			addr = ((unsigned int)p->addr) + 4;

			*((unsigned short *)insns + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short *)insns + 17) = addr >> 16;
	if (!write_proc_vm_atomic(task, (unsigned long)p->ainsn.insn_thumb, insns, 18 * 2)) {
		panic("failed to write memory %p!\n", p->ainsn.insn_thumb);
		// Mr_Nobody: do we really have to panic here?..
		//free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_thumb, 0);
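
/*
 * Prepare a user-space probe: read the original instruction from the
 * target task, allocate instruction slots and build both the ARM and the
 * Thumb trampolines. If neither trampoline could be built, restore the
 * original opcode and release the slots.
 */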
int arch_prepare_uprobe(struct kprobe *p, struct task_struct *task, int atomic)
	kprobe_opcode_t insn[MAX_INSN_SIZE];

	if ((unsigned long)p->addr & 0x01) {
		printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);

	if (!read_proc_vm_atomic(task, (unsigned long)p->addr, &insn, MAX_INSN_SIZE * sizeof(kprobe_opcode_t))) {
		panic("Failed to read memory task[tgid=%u, comm=%s] %p!\n", task->tgid, task->comm, p->addr);

	p->ainsn.insn_arm = get_insn_slot(task, &uprobe_insn_pages, atomic);
	if (!p->ainsn.insn_arm) {
		printk("Error in %s at %d: kprobe slot allocation error (arm)\n", __FILE__, __LINE__);

	ret = arch_copy_trampoline_arm_uprobe(p, task, 1);
		free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm);

	p->ainsn.insn_thumb = get_insn_slot(task, &uprobe_insn_pages, atomic);
	if (!p->ainsn.insn_thumb) {
		printk("Error in %s at %d: kprobe slot allocation error (thumb)\n", __FILE__, __LINE__);

	ret = arch_copy_trampoline_thumb_uprobe(p, task, 1);
		free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm);
		free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_thumb);

	if ((p->safe_arm == -1) && (p->safe_thumb == -1)) {
		printk("Error in %s at %d: failed arch_copy_trampoline_*_uprobe() (both) [tgid=%u, addr=%lx, data=%lx]\n",
		       __FILE__, __LINE__, task->tgid, (unsigned long)p->addr, (unsigned long)p->opcode);
		if (!write_proc_vm_atomic(task, (unsigned long)p->addr, &p->opcode, sizeof(p->opcode))) {
			panic("Failed to write memory %p!\n", p->addr);

		free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm);
		free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_thumb);
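
/*
 * Set up a return probe instance: remember the original return address and
 * stack pointer (tagging the saved SP with the Thumb-mode bit) and point LR
 * at the trampoline's return breakpoint.
 */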
void arch_prepare_uretprobe_hl(struct uretprobe_instance *ri,
			       struct pt_regs *regs)
	ri->ret_addr = (kprobe_opcode_t *)regs->ARM_lr;
	ri->sp = (kprobe_opcode_t *)regs->ARM_sp;

	/* Tag the saved SP with the current mode flag (Thumb bit) */
	ri->sp = (kprobe_opcode_t *)((long)ri->sp | !!thumb_mode(regs));

	if (thumb_mode(regs)) {
		regs->ARM_lr = (unsigned long)(ri->rp->kp.ainsn.insn) + 0x1b;

		regs->ARM_lr = (unsigned long)(ri->rp->kp.ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
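
/*
 * Jprobe pre-handler for user probes: run the optional pre-entry callback,
 * call the jprobe entry with the first six argument registers and return
 * via dbi_arch_uprobe_return().
 */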
int setjmp_upre_handler(struct kprobe *p, struct pt_regs *regs)
	struct jprobe *jp = container_of(p, struct jprobe, kp);
	kprobe_pre_entry_handler_t pre_entry = (kprobe_pre_entry_handler_t)jp->pre_entry;
	entry_point_t entry = (entry_point_t)jp->entry;

	p->ss_addr = (kprobe_opcode_t *)pre_entry(jp->priv_arg, regs);

	entry(regs->ARM_r0, regs->ARM_r1, regs->ARM_r2,
	      regs->ARM_r3, regs->ARM_r4, regs->ARM_r5);

	dbi_arch_uprobe_return();
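
/* Address of the return-breakpoint inside the trampoline (ARM or Thumb). */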
unsigned long arch_get_trampoline_addr(struct kprobe *p, struct pt_regs *regs)
	return thumb_mode(regs) ?
		(unsigned long)(p->ainsn.insn) + 0x1b :
		(unsigned long)(p->ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
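
/*
 * Restore the original return address and switch the CPSR Thumb bit to
 * match bit 0 of that address.
 */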
void arch_set_orig_ret_addr(unsigned long orig_ret_addr, struct pt_regs *regs)
	regs->ARM_lr = orig_ret_addr;
	regs->ARM_pc = orig_ret_addr;

	if (thumb_mode(regs) && !(regs->ARM_lr & 0x01)) {
		regs->ARM_cpsr &= 0xFFFFFFDF;
	} else if (user_mode(regs) && (regs->ARM_lr & 0x01)) {
		regs->ARM_cpsr |= 0x20;
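
/*
 * Select the trampoline that matches the current execution mode (ARM or
 * Thumb). If no usable trampoline exists for that mode, the probe is
 * disarmed.
 */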
static int check_validity_insn(struct kprobe *p, struct pt_regs *regs, struct task_struct *task)
	if (unlikely(thumb_mode(regs))) {
		if (p->safe_thumb != -1) {
			p->ainsn.insn = p->ainsn.insn_thumb;
			list_for_each_entry_rcu(kp, &p->list, list) {
				kp->ainsn.insn = p->ainsn.insn_thumb;

			printk("Error in %s at %d: we are in thumb mode and the instruction check failed "
			       "(instruction %0lX at address %p)!\n", __FILE__, __LINE__, p->opcode, p->addr);
			// This can happen when we attach to an already running application
			disarm_uprobe(p, task);

		if (p->safe_arm != -1) {
			p->ainsn.insn = p->ainsn.insn_arm;
			list_for_each_entry_rcu(kp, &p->list, list) {
				kp->ainsn.insn = p->ainsn.insn_arm;

			printk("Error in %s at %d: we are in arm mode and the instruction check failed "
			       "(instruction %0lX at address %p)!\n", __FILE__, __LINE__, p->opcode, p->addr);
			// This can happen when we attach to an already running application
			disarm_uprobe(p, task);
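
/*
 * Breakpoint handler for user probes: look up the probe by address and
 * tgid (or by instruction slot for the return trampoline), restore the
 * displaced halfword for 16-bit Thumb opcodes, then run the pre-handler
 * and start single-stepping.
 */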
static int uprobe_handler(struct pt_regs *regs)
	kprobe_opcode_t *addr = (kprobe_opcode_t *)(regs->ARM_pc);
	struct task_struct *task = current;
	pid_t tgid = task->tgid;

	p = get_uprobe(addr, tgid);

	if (p && (check_validity_insn(p, regs, task) != 0)) {
		printk("no_uprobe live\n");

	p = get_kprobe_by_insn_slot(addr, tgid, regs);

		printk("no_uprobe\n");

	trampoline_uprobe_handler(p, regs);

	/* restore opcode for thumb app */
	if (thumb_mode(regs)) {
		if (!is_thumb2(p->opcode)) {
			unsigned long tmp = p->opcode >> 16;
			write_proc_vm_atomic(task, (unsigned long)((unsigned short *)p->addr + 1), &tmp, 2);

		// NOTE: "2 * sizeof(kprobe_opcode_t)" looks wrong; probably should be "sizeof(kprobe_opcode_t)", needs testing
		flush_icache_range((unsigned int)p->addr, ((unsigned int)p->addr) + (2 * sizeof(kprobe_opcode_t)));

	if (!p->pre_handler || !p->pre_handler(p, regs)) {
		prepare_singlestep(p, regs);
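
/* Undefined-instruction trap entry: call uprobe_handler() with interrupts disabled. */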
int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
	local_irq_save(flags);

	ret = uprobe_handler(regs);
	preempt_enable_no_resched();

	local_irq_restore(flags);
/* userspace probes hook (arm) */
static struct undef_hook undef_hook_for_us_arm = {
	.instr_mask = 0xffffffff,
	.instr_val = BREAKPOINT_INSTRUCTION,
	.cpsr_mask = MODE_MASK,
	.cpsr_val = USR_MODE,
	.fn = uprobe_trap_handler

/* userspace probes hook (thumb) */
static struct undef_hook undef_hook_for_us_thumb = {
	.instr_mask = 0xffffffff,
	.instr_val = BREAKPOINT_INSTRUCTION & 0x0000ffff,
	.cpsr_mask = MODE_MASK,
	.cpsr_val = USR_MODE,
	.fn = uprobe_trap_handler
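
/* Register the ARM and Thumb undefined-instruction hooks for user-space probes. */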
int swap_arch_init_uprobes(void)
	swap_register_undef_hook(&undef_hook_for_us_arm);
	swap_register_undef_hook(&undef_hook_for_us_thumb);
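
/* Unregister the undefined-instruction hooks. */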
void swap_arch_exit_uprobes(void)
	swap_unregister_undef_hook(&undef_hook_for_us_thumb);
	swap_unregister_undef_hook(&undef_hook_for_us_arm);