1 #include <dbi_kprobes.h>
2 #include <asm/dbi_kprobes.h>
6 #include <dbi_kdebug.h>
7 extern struct hlist_head uprobe_insn_pages;
8 kprobe_opcode_t *get_insn_slot(struct task_struct *task, struct hlist_head *page_list, int atomic);
9 int arch_check_insn_arm(struct arch_specific_insn *ainsn);
10 int prep_pc_dep_insn_execbuf(kprobe_opcode_t *insns, kprobe_opcode_t insn, int uregs);
11 void free_insn_slot(struct hlist_head *page_list, struct task_struct *task, kprobe_opcode_t *slot);
12 int isThumb2(kprobe_opcode_t insn);
13 void pc_dep_insn_execbuf(void);
14 void gen_insn_execbuf(void);
15 void gen_insn_execbuf_thumb(void);
16 void pc_dep_insn_execbuf_thumb(void);
17 int kprobe_trap_handler(struct pt_regs *regs, unsigned int instr);
/*
 * Sign-extend x whose sign bit sits at bit position 'signbit':
 * if that bit is set, OR in all-ones from 'signbit' upward
 * (0 - bit yields exactly that mask); otherwise x is unchanged.
 */
#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))
/*
 * Decode the signed byte displacement of an ARM B/BL instruction:
 * the 24-bit immediate is scaled by 4 (word offset -> byte offset)
 * and then sign-extended from bit 25.
 */
#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)
/*
 * Compute the absolute target address of an ARM B/BL instruction 'insn'
 * located at 'addr'.  On ARM the PC value seen by a branch is the
 * instruction's own address plus 8, hence the +8 correction below.
 */
static kprobe_opcode_t get_addr_b(kprobe_opcode_t insn, kprobe_opcode_t *addr)
	// the branch is taken relative to PC, which is 8 bytes past 'addr'
	return (kprobe_opcode_t)((long)addr + 8 + branch_displacement(insn));
/*
 * Build the ARM-mode out-of-line execution buffer (trampoline) for
 * uprobe 'p' and write it into the target task's instruction slot
 * (p->ainsn.insn_arm).
 *
 * The probed instruction is classified first: encodings that read or
 * write PC (r15) cannot simply be single-stepped out of line, so they
 * are routed through the PC-dependent template (pc_dep_insn_execbuf);
 * everything else uses the generic template (gen_insn_execbuf).
 *
 * NOTE(review): several lines of this function (locals such as 'uregs'
 * and 'pc_dep', return statements, some closing braces) are not visible
 * in this chunk; comments below describe only what is shown.
 */
static int arch_copy_trampoline_arm_uprobe(struct kprobe *p, struct task_struct *task, int atomic)
	kprobe_opcode_t insns[UPROBES_TRAMP_LEN];	/* trampoline image, built locally then copied into the task */
	kprobe_opcode_t insn[MAX_INSN_SIZE];	/* copy of the probed instruction */
	struct arch_specific_insn ainsn;
	/* ARM instructions are word-aligned; a set bit 0 means a bogus address */
	if ((unsigned long)p->addr & 0x01) {
		printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);
	ainsn.insn_arm = insn;
	if (!arch_check_insn_arm(&ainsn)) {
	/*
	 * Classify the instruction by encoding family and detect any use of
	 * PC (register 15) in its Rn/Rm/Rs/Rd fields; such instructions are
	 * "unboostable" and need the PC-dependent trampoline.
	 */
	if (ARM_INSN_MATCH(DPIS, insn[0]) || ARM_INSN_MATCH(LRO, insn[0]) ||
	    ARM_INSN_MATCH(SRO, insn[0])) {
		/* data-processing (imm shift) / load/store register offset */
		if ((ARM_INSN_REG_RN(insn[0]) == 15) || (ARM_INSN_REG_RM(insn[0]) == 15) ||
		    (ARM_INSN_MATCH(SRO, insn[0]) && (ARM_INSN_REG_RD(insn[0]) == 15))) {
			DBPRINTF("Unboostable insn %lx, DPIS/LRO/SRO\n", insn[0]);
	} else if (ARM_INSN_MATCH(DPI, insn[0]) || ARM_INSN_MATCH(LIO, insn[0]) ||
	           ARM_INSN_MATCH (SIO, insn[0])) {
		/* data-processing (immediate) / load/store immediate offset */
		if ((ARM_INSN_REG_RN(insn[0]) == 15) || (ARM_INSN_MATCH(SIO, insn[0]) &&
		    (ARM_INSN_REG_RD(insn[0]) == 15))) {
			DBPRINTF("Unboostable insn %lx/%p, DPI/LIO/SIO\n", insn[0], p);
	} else if (ARM_INSN_MATCH(DPRS, insn[0])) {
		/* data-processing, register shift: also check the shift register Rs */
		if ((ARM_INSN_REG_RN(insn[0]) == 15) || (ARM_INSN_REG_RM(insn[0]) == 15) ||
		    (ARM_INSN_REG_RS(insn[0]) == 15)) {
			DBPRINTF("Unboostable insn %lx, DPRS\n", insn[0]);
	} else if (ARM_INSN_MATCH(SM, insn[0])) {
		/* store-multiple: unboostable if PC appears in the register list */
		if (ARM_INSN_REG_MR (insn[0], 15))
			DBPRINTF ("Unboostable insn %lx, SM\n", insn[0]);
	// reject instructions that write their result to SP and use PC
	if (pc_dep && (ARM_INSN_REG_RD (ainsn.insn_arm[0]) == 13)) {
		printk("Error in %s at %d: instruction check failed (arm)\n", __FILE__, __LINE__);
		// TODO: move free to later phase
		//free_insn_slot (&uprobe_insn_pages, task, p->ainsn.insn_arm, 0);
	/* PC-dependent path: patch the pc_dep template for this instruction */
	if (unlikely(uregs && pc_dep)) {
		memcpy(insns, pc_dep_insn_execbuf, sizeof(insns));
		if (prep_pc_dep_insn_execbuf(insns, insn[0], uregs) != 0) {
			printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!",
			       __FILE__, __LINE__, insn[0]);
			// TODO: move free to later phase
			//free_insn_slot (&uprobe_insn_pages, task, p->ainsn.insn_arm, 0);
		/* slot word 6: resume address (p->addr + 2 words = +8 bytes) */
		insns[6] = (kprobe_opcode_t) (p->addr + 2);
	/* generic path: template plus the original instruction in place */
	memcpy(insns, gen_insn_execbuf, sizeof(insns));
	insns[UPROBES_TRAMP_INSN_IDX] = insn[0];
	insns[UPROBES_TRAMP_RET_BREAK_IDX] = BREAKPOINT_INSTRUCTION;
	/* slot word 7: resume address (p->addr + 1 word = +4 bytes) */
	insns[7] = (kprobe_opcode_t) (p->addr + 1);
	/* direct branch: resolve its absolute target now via get_addr_b() */
	if(ARM_INSN_MATCH(B, ainsn.insn_arm[0])) {
		memcpy(insns, pc_dep_insn_execbuf, sizeof(insns));
		insns[UPROBES_TRAMP_RET_BREAK_IDX] = BREAKPOINT_INSTRUCTION;
		insns[6] = (kprobe_opcode_t)(p->addr + 2);
		insns[7] = get_addr_b(p->opcode, p->addr);
	DBPRINTF("arch_prepare_uprobe: to %p - %lx %lx %lx %lx %lx %lx %lx %lx %lx",
	         p->ainsn.insn_arm, insns[0], insns[1], insns[2], insns[3], insns[4],
	         insns[5], insns[6], insns[7], insns[8]);
	/* copy the finished trampoline into the probed task's address space */
	if (!write_proc_vm_atomic(task, (unsigned long)p->ainsn.insn_arm, insns, sizeof(insns))) {
		panic("failed to write memory %p!\n", p->ainsn.insn_arm);
		// Mr_Nobody: we have to panic, really??...
		//free_insn_slot (&uprobe_insn_pages, task, p->ainsn.insn_arm, 0);
/*
 * Reject Thumb/Thumb2 instructions that cannot safely be executed out
 * of line.  The single large match below covers:
 *  - control transfers and traps: B/BL/BLX/BX/BXJ, CBZ, SWI, BREAK, UNDEF;
 *  - instructions whose destination (Rd/Rt) is PC: ADR, PC-relative
 *    loads, data-processing, RSB/ROR/LSL/LSR wide forms;
 *  - multi-register loads (LDMIA/LDMDB) which may pop PC;
 *  - PC-based (Rn == 15) stores/loads flagged as problematic (see the
 *    Tegra note below);
 *  - all LDRD/LDRD1/STRD forms.
 * A match is logged via DBPRINTF; the return statements for both the
 * reject and accept paths are on lines elided from this chunk.
 */
static int arch_check_insn_thumb(struct arch_specific_insn *ainsn)
	// check instructions that can change PC
	if (THUMB_INSN_MATCH(UNDEF, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(SWI, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(BREAK, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(BL, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(B1, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(B2, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(CBZ, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(B1, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(B2, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(BLX1, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(BLX2, ainsn->insn_thumb[0]) ||
	    THUMB_INSN_MATCH(BX, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(BXJ, ainsn->insn_thumb[0]) ||
	    (THUMB2_INSN_MATCH(ADR, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRWL, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
	    THUMB2_INSN_MATCH(LDMIA, ainsn->insn_thumb[0]) ||
	    THUMB2_INSN_MATCH(LDMDB, ainsn->insn_thumb[0]) ||
	    (THUMB2_INSN_MATCH(DP, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(RSBW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(RORW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(ROR, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LSLW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LSLW2, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LSRW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LSRW2, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
	    /* skip PC, #-imm12 -> SP, #-imm8 and Tegra-hanging instructions */
	    (THUMB2_INSN_MATCH(STRW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(STRBW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(STRHW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(STRW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(STRHW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRBW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    (THUMB2_INSN_MATCH(LDRHW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
	    /* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
	    (THUMB2_INSN_MATCH(LDRD, ainsn->insn_thumb[0]) || THUMB2_INSN_MATCH(LDRD1, ainsn->insn_thumb[0]) || THUMB2_INSN_MATCH(STRD, ainsn->insn_thumb[0])) ) {
		DBPRINTF("Bad insn arch_check_insn_thumb: %lx\n", ainsn->insn_thumb[0]);
/*
 * Patch the Thumb PC-dependent trampoline template in 'insns' for the
 * probed instruction 'insn'.
 *
 * 'uregs' is the field mask set up by arch_copy_trampoline_thumb_uprobe;
 * 'reg' is the register number extracted from the matching field of
 * 'insn'.  The template's halfwords are then rewritten so that a scratch
 * register pair (mreg, mreg+1) stands in for the PC-using register, and
 * the instruction itself is rewritten with SP (r13, encoded as 0xd) in
 * place of PC — the trampoline evidently provides the PC value through
 * that register (NOTE(review): the SP set-up happens in the execbuf
 * template, which is outside this view; confirm against it).
 *
 * Return statements are on lines elided from this chunk.
 */
static int prep_pc_dep_insn_execbuf_thumb(kprobe_opcode_t * insns, kprobe_opcode_t insn, int uregs)
	unsigned char mreg = 0;	/* scratch register chosen for the rewrite */
	unsigned char reg = 0;	/* register extracted from the PC-using field */
	/* --- extract 'reg' from whichever field 'uregs' selects --- */
	if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
		/* 16-bit ADD Rd,PC / LDR Rd,[PC]: Rd lives in bits 8-10 */
		reg = ((insn & 0xffff) & uregs) >> 8;
	if (THUMB_INSN_MATCH(MOV3, insn)) {
		/* 16-bit MOV Rd, Rm: only relevant when Rm (bits 3-6) is PC */
		if (((((unsigned char) insn) & 0xff) >> 3) == 15) {
			reg = (insn & 0xffff) & uregs;
	if (THUMB2_INSN_MATCH(ADR, insn)) {
		/* Thumb2 ADR: Rd in bits 8-11 of the second halfword */
		reg = ((insn >> 16) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
	    THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
	    THUMB2_INSN_MATCH(LDRWL, insn)) {
		/* wide loads: Rt in bits 12-15 of the second halfword */
		reg = ((insn >> 16) & uregs) >> 12;
	// LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
	if (THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
	    THUMB2_INSN_MATCH(LDREX, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	if (THUMB2_INSN_MATCH(DP, insn)) {
		reg = ((insn >> 16) & uregs) >> 12;
	if (THUMB2_INSN_MATCH(RSBW, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(RORW, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW1, insn) ||
	    THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW1, insn) ||
	    THUMB2_INSN_MATCH(LSRW2, insn)) {
		reg = ((insn >> 12) & uregs) >> 8;
	if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
	if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
		reg = THUMB2_INSN_REG_RM(insn);
	if ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
	     THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
	     THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn) ||
	     THUMB2_INSN_MATCH(STRHW, insn)) && THUMB2_INSN_REG_RT(insn) == 15) {
		reg = THUMB2_INSN_REG_RT(insn);
	/*
	 * Patch the template's PUSH/POP register lists and MOV register
	 * fields so the scratch pair (mreg, mreg+1) is saved, used and
	 * restored around the rewritten instruction.
	 */
	if (reg == 6 || reg == 7) {
		*((unsigned short*)insns + 0) = (*((unsigned short*)insns + 0) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
		*((unsigned short*)insns + 1) = (*((unsigned short*)insns + 1) & 0xf8ff) | (mreg << 8);
		*((unsigned short*)insns + 2) = (*((unsigned short*)insns + 2) & 0xfff8) | (mreg + 1);
		*((unsigned short*)insns + 3) = (*((unsigned short*)insns + 3) & 0xffc7) | (mreg << 3);
		*((unsigned short*)insns + 7) = (*((unsigned short*)insns + 7) & 0xf8ff) | (mreg << 8);
		*((unsigned short*)insns + 8) = (*((unsigned short*)insns + 8) & 0xffc7) | (mreg << 3);
		*((unsigned short*)insns + 9) = (*((unsigned short*)insns + 9) & 0xffc7) | ((mreg + 1) << 3);
		*((unsigned short*)insns + 10) = (*((unsigned short*)insns + 10) & 0x00ff) | (( 1 << mreg) | (1 << (mreg + 1)));
	/* --- rewrite the instruction itself, substituting SP (0xd) for PC --- */
	if (THUMB_INSN_MATCH(APC, insn)) {
		// ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4
		*((unsigned short*)insns + 4) = ((insn & 0xffff) | 0x800); // ADD Rd, SP, #immed_8*4
	if (THUMB_INSN_MATCH(LRO3, insn)) {
		// LDR Rd, [PC, #immed_8*4] -> LDR Rd, [SP, #immed_8*4]
		*((unsigned short*)insns + 4) = ((insn & 0xffff) + 0x5000); // LDR Rd, [SP, #immed_8*4]
	if (THUMB_INSN_MATCH(MOV3, insn)) {
		// MOV Rd, PC -> MOV Rd, SP
		*((unsigned short*)insns + 4) = ((insn & 0xffff) ^ 0x10); // MOV Rd, SP
	if (THUMB2_INSN_MATCH(ADR, insn)) {
		// ADDW Rd, PC, #imm -> ADDW Rd, SP, #imm
		insns[2] = (insn & 0xfffffff0) | 0x0d; // ADDW Rd, SP, #imm
	if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRBW, insn) ||
	    THUMB2_INSN_MATCH(LDRHW, insn)) {
		// LDR.W Rt, [PC, #-<imm_12>] -> LDR.W Rt, [SP, #-<imm_8>]
		// !!!!!!!!!!!!!!!!!!!!!!!!
		// !!! imm_12 vs. imm_8 !!!
		// !!!!!!!!!!!!!!!!!!!!!!!!
		insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // LDR.W Rt, [SP, #-<imm_8>]
	if (THUMB2_INSN_MATCH(LDRW1, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
	    THUMB2_INSN_MATCH(LDRHW1, insn) || THUMB2_INSN_MATCH(LDRD, insn) ||
	    THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(LDREX, insn)) {
		// LDRx.W Rt, [PC, #+<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
		insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]
	if (THUMB2_INSN_MATCH(MUL, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // MUL Rd, Rn, SP
	if (THUMB2_INSN_MATCH(DP, insn)) {
		/* substitute SP for whichever operand (Rm or Rn) was PC */
		if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // DP Rd, Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // DP Rd, PC, Rm
	if (THUMB2_INSN_MATCH(LDRWL, insn)) {
		// LDRx.W Rt, [PC, #<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
		insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]
	if (THUMB2_INSN_MATCH(RSBW, insn)) {
		insns[2] = (insn & 0xfffffff0) | 0xd; // RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const>
	if (THUMB2_INSN_MATCH(RORW, insn) || THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) {
		if ((THUMB2_INSN_REG_RM(insn) == 15) && (THUMB2_INSN_REG_RN(insn) == 15)) {
			insns[2] = (insn & 0xfffdfffd); // ROR.W Rd, PC, PC
		} else if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR.W Rd, Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // ROR.W Rd, PC, Rm
	if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const>
	if (THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn)) {
		insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // STRx.W Rt, [Rn, SP]
	if (THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
	    THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) {
		if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // STRD/T/HT{.W} Rt, [SP, ...]
	/* NOTE(review): the inner RN == 15 check duplicates the outer one */
	if (THUMB2_INSN_MATCH(STRHW, insn) && (THUMB2_INSN_REG_RN(insn) == 15)) {
		if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // STRH.W Rt, [SP, #-<imm_8>]
	/* stores whose source register (Rt) was PC: point Rt at SP instead */
	if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn) ||
	                    THUMB2_INSN_MATCH(STRBW, insn) ||
	                    THUMB2_INSN_MATCH(STRD, insn) ||
	                    THUMB2_INSN_MATCH(STRHT, insn) ||
	                    THUMB2_INSN_MATCH(STRT, insn) ||
	                    THUMB2_INSN_MATCH(STRHW1, insn) ||
	                    THUMB2_INSN_MATCH(STRHW, insn) )) {
		insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;
	if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
		insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ SP, #<const>
	if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
		if ((THUMB2_INSN_REG_RN(insn) == 15) && (THUMB2_INSN_REG_RM(insn) == 15)) {
			insns[2] = (insn & 0xfffdfffd); // TEQ/TST PC, PC
		} else if (THUMB2_INSN_REG_RM(insn) == 15) {
			insns[2] = (insn & 0xfff0ffff) | 0xd0000; // TEQ/TST Rn, PC
		} else if (THUMB2_INSN_REG_RN(insn) == 15) {
			insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ/TST PC, Rm
/*
 * Build the Thumb-mode out-of-line execution buffer for uprobe 'p' and
 * write it into p->ainsn.insn_thumb in the target task.
 *
 * First the probed instruction is classified and 'uregs' is set to a
 * bitmask naming the instruction field that references PC (Rd, Rt,
 * Rt/Rt2 or Rn — see the per-case comments).  PC-dependent instructions
 * get the pc_dep_insn_execbuf_thumb template patched via
 * prep_pc_dep_insn_execbuf_thumb(); everything else uses
 * gen_insn_execbuf_thumb.  The tail halfwords of the 18-halfword buffer
 * hold 0xdeff (presumably the Thumb breakpoint encoding — compare
 * BREAKPOINT_INSTRUCTION & 0xffff below; TODO confirm) and the resume
 * address (+2 for 16-bit, +4 for 32-bit insns, bit 0 set for Thumb).
 *
 * NOTE(review): locals such as 'uregs', 'pc_dep' and 'addr', plus return
 * paths and closing braces, are on lines elided from this chunk.
 */
static int arch_copy_trampoline_thumb_uprobe(struct kprobe *p, struct task_struct *task, int atomic)
	kprobe_opcode_t insn[MAX_INSN_SIZE];	/* copy of the probed instruction */
	struct arch_specific_insn ainsn;
	kprobe_opcode_t insns[UPROBES_TRAMP_LEN * 2];	/* trampoline image (built as 18 halfwords) */
	/* Thumb probe addresses must still be halfword-aligned here */
	if ((unsigned long)p->addr & 0x01) {
		printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);
	ainsn.insn_thumb = insn;
	if (!arch_check_insn_thumb(&ainsn)) {
	/* --- classify the instruction and pick the PC-field mask --- */
	if (THUMB_INSN_MATCH(APC, insn[0]) || THUMB_INSN_MATCH(LRO3, insn[0])) {
		uregs = 0x0700; // 8-10
	} else if (THUMB_INSN_MATCH(MOV3, insn[0]) && (((((unsigned char)insn[0]) & 0xff) >> 3) == 15)) {
	} else if THUMB2_INSN_MATCH(ADR, insn[0]) {
		uregs = 0x0f00; // Rd 8-11
	} else if (((THUMB2_INSN_MATCH(LDRW, insn[0]) || THUMB2_INSN_MATCH(LDRW1, insn[0]) ||
	             THUMB2_INSN_MATCH(LDRBW, insn[0]) || THUMB2_INSN_MATCH(LDRBW1, insn[0]) ||
	             THUMB2_INSN_MATCH(LDRHW, insn[0]) || THUMB2_INSN_MATCH(LDRHW1, insn[0]) ||
	             THUMB2_INSN_MATCH(LDRWL, insn[0])) && THUMB2_INSN_REG_RN(insn[0]) == 15) ||
	            THUMB2_INSN_MATCH(LDREX, insn[0]) ||
	            ((THUMB2_INSN_MATCH(STRW, insn[0]) || THUMB2_INSN_MATCH(STRBW, insn[0]) ||
	              THUMB2_INSN_MATCH(STRHW, insn[0]) || THUMB2_INSN_MATCH(STRHW1, insn[0])) &&
	             (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RT(insn[0]) == 15)) ||
	            ((THUMB2_INSN_MATCH(STRT, insn[0]) || THUMB2_INSN_MATCH(STRHT, insn[0])) &&
	             (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RT(insn[0]) == 15))) {
		uregs = 0xf000; // Rt 12-15
	} else if ((THUMB2_INSN_MATCH(LDRD, insn[0]) || THUMB2_INSN_MATCH(LDRD1, insn[0])) && (THUMB2_INSN_REG_RN(insn[0]) == 15)) {
		uregs = 0xff00; // Rt 12-15, Rt2 8-11
	} else if (THUMB2_INSN_MATCH(MUL, insn[0]) && THUMB2_INSN_REG_RM(insn[0]) == 15) {
	} else if (THUMB2_INSN_MATCH(DP, insn[0]) && (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
		uregs = 0xf000; // Rd 12-15
	} else if (THUMB2_INSN_MATCH(STRD, insn[0]) && ((THUMB2_INSN_REG_RN(insn[0]) == 15) || (THUMB2_INSN_REG_RT(insn[0]) == 15) || THUMB2_INSN_REG_RT2(insn[0]) == 15)) {
		uregs = 0xff00; // Rt 12-15, Rt2 8-11
	} else if (THUMB2_INSN_MATCH(RSBW, insn[0]) && THUMB2_INSN_REG_RN(insn[0]) == 15) {
		uregs = 0x0f00; // Rd 8-11
	} else if (THUMB2_INSN_MATCH (RORW, insn[0]) && (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
	} else if ((THUMB2_INSN_MATCH(ROR, insn[0]) || THUMB2_INSN_MATCH(LSLW2, insn[0]) || THUMB2_INSN_MATCH(LSRW2, insn[0])) && THUMB2_INSN_REG_RM(insn[0]) == 15) {
		uregs = 0x0f00; // Rd 8-11
	} else if ((THUMB2_INSN_MATCH(LSLW1, insn[0]) || THUMB2_INSN_MATCH(LSRW1, insn[0])) && (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
		uregs = 0x0f00; // Rd 8-11
	} else if ((THUMB2_INSN_MATCH(TEQ1, insn[0]) || THUMB2_INSN_MATCH(TST1, insn[0])) && THUMB2_INSN_REG_RN(insn[0]) == 15) {
		uregs = 0xf0000; //Rn 0-3 (16-19)
	} else if ((THUMB2_INSN_MATCH(TEQ2, insn[0]) || THUMB2_INSN_MATCH(TST2, insn[0])) &&
	           (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
		uregs = 0xf0000; //Rn 0-3 (16-19)
	/* --- PC-dependent path: copy and patch the pc_dep template --- */
	if (unlikely(uregs && pc_dep)) {
		memcpy(insns, pc_dep_insn_execbuf_thumb, 18 * 2);
		if (prep_pc_dep_insn_execbuf_thumb(insns, insn[0], uregs) != 0) {
			printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!",
			       __FILE__, __LINE__, insn[0]);
			//free_insn_slot (&uprobe_insn_pages, task, p->ainsn.insn_thumb, 0);
		addr = ((unsigned int)p->addr) + 4;
		/* halfword 13: break marker; 14-15: literal with the return address */
		*((unsigned short*)insns + 13) = 0xdeff;
		*((unsigned short*)insns + 14) = addr & 0x0000ffff;
		*((unsigned short*)insns + 15) = addr >> 16;
		if (!isThumb2(insn[0])) {
			/* 16-bit insn: resume at +2; bit 0 set to stay in Thumb state */
			addr = ((unsigned int)p->addr) + 2;
			*((unsigned short*)insns + 16) = (addr & 0x0000ffff) | 0x1;
			*((unsigned short*)insns + 17) = addr >> 16;
		/* 32-bit insn: resume at +4 */
		addr = ((unsigned int)p->addr) + 4;
		*((unsigned short*)insns + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)insns + 17) = addr >> 16;
	/* --- generic path: template with the original insn in slot 2 --- */
	memcpy(insns, gen_insn_execbuf_thumb, 18 * 2);
	*((unsigned short*)insns + 13) = 0xdeff;
	if (!isThumb2(insn[0])) {
		addr = ((unsigned int)p->addr) + 2;
		*((unsigned short*)insns + 2) = insn[0];
		*((unsigned short*)insns + 16) = (addr & 0x0000ffff) | 0x1;
		*((unsigned short*)insns + 17) = addr >> 16;
	addr = ((unsigned int)p->addr) + 4;
	*((unsigned short*)insns + 16) = (addr & 0x0000ffff) | 0x1;
	*((unsigned short*)insns + 17) = addr >> 16;
	/* copy the finished trampoline into the probed task's address space */
	if (!write_proc_vm_atomic (task, (unsigned long)p->ainsn.insn_thumb, insns, 18 * 2)) {
		panic("failed to write memory %p!\n", p->ainsn.insn_thumb);
		// Mr_Nobody: we have to panic, really??...
		//free_insn_slot (&uprobe_insn_pages, task, p->ainsn.insn_thumb, 0);
/*
 * Prepare a uprobe: read the original instruction from the target
 * task's memory, allocate instruction slots and build both the ARM and
 * the Thumb trampolines via arch_copy_trampoline_*_uprobe().  If a copy
 * fails its slot is freed; if both fail (safe_arm == -1 &&
 * safe_thumb == -1) the original opcode is written back to the probe
 * address and both slots are released.
 *
 * NOTE(review): the copy helpers are called with a literal 1 instead of
 * forwarding the 'atomic' parameter — confirm this is intentional.
 * NOTE(review): where safe_arm/safe_thumb get set to -1 is on lines
 * elided from this chunk.
 */
int arch_prepare_uprobe(struct kprobe *p, struct task_struct *task, int atomic)
	kprobe_opcode_t insn[MAX_INSN_SIZE];
	/* probe addresses must be halfword-aligned at minimum */
	if ((unsigned long)p->addr & 0x01) {
		printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);
	/* fetch the original instruction from the probed task */
	if (!read_proc_vm_atomic(task, (unsigned long)p->addr, &insn, MAX_INSN_SIZE * sizeof(kprobe_opcode_t))) {
		panic("Failed to read memory task[tgid=%u, comm=%s] %p!\n", task->tgid, task->comm, p->addr);
	/* ARM trampoline: allocate a slot, then build/copy into it */
	p->ainsn.insn_arm = get_insn_slot(task, &uprobe_insn_pages, atomic);
	if (!p->ainsn.insn_arm) {
		printk("Error in %s at %d: kprobe slot allocation error (arm)\n", __FILE__, __LINE__);
	ret = arch_copy_trampoline_arm_uprobe(p, task, 1);
	free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm);
	/* Thumb trampoline: same dance with a second slot */
	p->ainsn.insn_thumb = get_insn_slot(task, &uprobe_insn_pages, atomic);
	if (!p->ainsn.insn_thumb) {
		printk("Error in %s at %d: kprobe slot allocation error (thumb)\n", __FILE__, __LINE__);
	ret = arch_copy_trampoline_thumb_uprobe(p, task, 1);
	free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm);
	free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_thumb);
	/* neither mode succeeded: undo everything and restore the opcode */
	if ((p->safe_arm == -1) && (p->safe_thumb == -1)) {
		printk("Error in %s at %d: failed arch_copy_trampoline_*_uprobe() (both) [tgid=%u, addr=%lx, data=%lx]\n",
		       __FILE__, __LINE__, task->tgid, (unsigned long)p->addr, (unsigned long)p->opcode);
		if (!write_proc_vm_atomic(task, (unsigned long)p->addr, &p->opcode, sizeof(p->opcode))) {
			panic("Failed to write memory %p!\n", p->addr);
		free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm);
		free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_thumb);
/*
 * Userspace probes hook (ARM): route the 32-bit breakpoint instruction,
 * when hit in user mode, to kprobe_trap_handler().
 */
static struct undef_hook undef_hook_for_us_arm = {
	.instr_mask = 0xffffffff,	/* match the full 32-bit encoding */
	.instr_val = BREAKPOINT_INSTRUCTION,
	.cpsr_mask = MODE_MASK,
	.cpsr_val = USR_MODE,	/* fire only for user-mode faults */
	.fn = kprobe_trap_handler
/*
 * Userspace probes hook (Thumb): same handler, matching only the low
 * 16 bits of the breakpoint encoding (Thumb instructions are halfwords).
 */
static struct undef_hook undef_hook_for_us_thumb = {
	.instr_mask = 0xffffffff,
	.instr_val = BREAKPOINT_INSTRUCTION & 0x0000ffff,	/* Thumb halfword form */
	.cpsr_mask = MODE_MASK,
	.cpsr_val = USR_MODE,	/* fire only for user-mode faults */
	.fn = kprobe_trap_handler
/* Register the user-mode undef-instruction hooks (ARM, then Thumb). */
int swap_arch_init_uprobes(void)
	swap_register_undef_hook(&undef_hook_for_us_arm);
	swap_register_undef_hook(&undef_hook_for_us_thumb);
/* Unregister the hooks in reverse order of registration. */
void swap_arch_exit_uprobes(void)
	swap_unregister_undef_hook(&undef_hook_for_us_thumb);
	swap_unregister_undef_hook(&undef_hook_for_us_arm);