1 #include <dbi_kprobes.h>
2 #include <asm/dbi_kprobes.h>
4 #include <swap_uprobes.h>
5 #include <asm/swap_uprobes.h>
8 #include <dbi_kdebug.h>
9 extern struct hlist_head uprobe_insn_pages;
10 kprobe_opcode_t *get_insn_slot(struct task_struct *task, struct hlist_head *page_list, int atomic);
11 int arch_check_insn_arm(struct arch_specific_insn *ainsn);
12 int prep_pc_dep_insn_execbuf(kprobe_opcode_t *insns, kprobe_opcode_t insn, int uregs);
13 void free_insn_slot(struct hlist_head *page_list, struct task_struct *task, kprobe_opcode_t *slot);
14 void pc_dep_insn_execbuf(void);
15 void gen_insn_execbuf(void);
16 void gen_insn_execbuf_thumb(void);
17 void pc_dep_insn_execbuf_thumb(void);
18 int kprobe_trap_handler(struct pt_regs *regs, unsigned int instr);
19 void add_rp_inst(struct kretprobe_instance *ri);
20 struct kretprobe_instance *get_free_rp_inst (struct kretprobe *rp);
/* Sign-extend x, treating bit 'signbit' as the sign bit (two's complement). */
23 #define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))
/*
 * Decode the signed byte displacement of an ARM B/BL instruction:
 * the 24-bit immediate is shifted left by 2 (word offset -> byte offset),
 * giving a 26-bit value whose sign bit is bit 25.
 */
24 #define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)
/*
 * get_addr_b - compute the absolute target address of an ARM branch
 * instruction 'insn' located at 'addr'.
 */
26 static kprobe_opcode_t get_addr_b(kprobe_opcode_t insn, kprobe_opcode_t *addr)
// On ARM the PC reads as the instruction address + 8, hence the +8 bias.
29 return (kprobe_opcode_t)((long)addr + 8 + branch_displacement(insn));
32 /* is instruction Thumb2 and NOT a branch, etc... */
33 static int is_thumb2(kprobe_opcode_t insn)
// A 32-bit Thumb-2 encoding has its first halfword with the top five
// bits equal to 11101, 11110 or 11111 (0xe800/0xf000/0xf800 under mask
// 0xf800). NOTE(review): only the 32-bit-encoding check is visible here;
// the "NOT a branch" part of the comment is not enforced by these tests.
35 return ((insn & 0xf800) == 0xe800 ||
36 (insn & 0xf800) == 0xf000 ||
37 (insn & 0xf800) == 0xf800);
/*
 * arch_copy_trampoline_arm_uprobe - build the ARM out-of-line execution
 * buffer ("trampoline") for the probed instruction at p->addr and write
 * it into the target task's instruction slot (p->ainsn.insn_arm).
 * Returns status; exact return values are in lines elided from this view.
 */
40 static int arch_copy_trampoline_arm_uprobe(struct kprobe *p, struct task_struct *task, int atomic)
42 kprobe_opcode_t insns[UPROBES_TRAMP_LEN];
44 kprobe_opcode_t insn[MAX_INSN_SIZE];
45 struct arch_specific_insn ainsn;
// ARM probes must be word-aligned; bit 0 set would indicate a Thumb address.
48 if ((unsigned long)p->addr & 0x01) {
49 printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);
54 ainsn.insn_arm = insn;
55 if (!arch_check_insn_arm(&ainsn)) {
// Classify the instruction; if it reads PC (r15) it needs the
// PC-dependent trampoline (pc_dep/uregs are set in lines elided here).
61 if (ARM_INSN_MATCH(DPIS, insn[0]) || ARM_INSN_MATCH(LRO, insn[0]) ||
62 ARM_INSN_MATCH(SRO, insn[0])) {
64 if ((ARM_INSN_REG_RN(insn[0]) == 15) || (ARM_INSN_REG_RM(insn[0]) == 15) ||
65 (ARM_INSN_MATCH(SRO, insn[0]) && (ARM_INSN_REG_RD(insn[0]) == 15))) {
66 DBPRINTF("Unboostable insn %lx, DPIS/LRO/SRO\n", insn[0]);
71 } else if (ARM_INSN_MATCH(DPI, insn[0]) || ARM_INSN_MATCH(LIO, insn[0]) ||
72 ARM_INSN_MATCH (SIO, insn[0])) {
74 if ((ARM_INSN_REG_RN(insn[0]) == 15) || (ARM_INSN_MATCH(SIO, insn[0]) &&
75 (ARM_INSN_REG_RD(insn[0]) == 15))) {
77 DBPRINTF("Unboostable insn %lx/%p, DPI/LIO/SIO\n", insn[0], p);
81 } else if (ARM_INSN_MATCH(DPRS, insn[0])) {
83 if ((ARM_INSN_REG_RN(insn[0]) == 15) || (ARM_INSN_REG_RM(insn[0]) == 15) ||
84 (ARM_INSN_REG_RS(insn[0]) == 15)) {
86 DBPRINTF("Unboostable insn %lx, DPRS\n", insn[0]);
90 } else if (ARM_INSN_MATCH(SM, insn[0])) {
// Load/store-multiple whose register list includes PC cannot be boosted.
92 if (ARM_INSN_REG_MR (insn[0], 15))
94 DBPRINTF ("Unboostable insn %lx, SM\n", insn[0]);
99 // check instructions that can write their result to SP and use PC
100 if (pc_dep && (ARM_INSN_REG_RD (ainsn.insn_arm[0]) == 13)) {
101 printk("Error in %s at %d: instruction check failed (arm)\n", __FILE__, __LINE__);
103 // TODO: move free to later phase
104 //free_insn_slot (&uprobe_insn_pages, task, p->ainsn.insn_arm, 0);
// PC-dependent case: use the template that emulates PC via a saved value.
108 if (unlikely(uregs && pc_dep)) {
109 memcpy(insns, pc_dep_insn_execbuf, sizeof(insns));
110 if (prep_pc_dep_insn_execbuf(insns, insn[0], uregs) != 0) {
111 printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!",
112 __FILE__, __LINE__, insn[0]);
114 // TODO: move free to later phase
115 //free_insn_slot (&uprobe_insn_pages, task, p->ainsn.insn_arm, 0);
// Slot word 6 holds the PC value the template loads (addr + 8 bytes).
119 insns[6] = (kprobe_opcode_t) (p->addr + 2);
121 memcpy(insns, gen_insn_execbuf, sizeof(insns));
122 insns[UPROBES_TRAMP_INSN_IDX] = insn[0];
// Breakpoint after the copied insn returns control to the handler;
// slot word 7 holds the resume address (addr + 4 bytes).
125 insns[UPROBES_TRAMP_RET_BREAK_IDX] = BREAKPOINT_INSTRUCTION;
126 insns[7] = (kprobe_opcode_t) (p->addr + 1);
// Branches get the PC-dependent template with the precomputed target.
129 if(ARM_INSN_MATCH(B, ainsn.insn_arm[0])) {
130 memcpy(insns, pc_dep_insn_execbuf, sizeof(insns));
131 insns[UPROBES_TRAMP_RET_BREAK_IDX] = BREAKPOINT_INSTRUCTION;
132 insns[6] = (kprobe_opcode_t)(p->addr + 2);
133 insns[7] = get_addr_b(p->opcode, p->addr);
136 DBPRINTF("arch_prepare_uprobe: to %p - %lx %lx %lx %lx %lx %lx %lx %lx %lx",
137 p->ainsn.insn_arm, insns[0], insns[1], insns[2], insns[3], insns[4],
138 insns[5], insns[6], insns[7], insns[8]);
// Copy the finished trampoline into the target task's address space.
139 if (!write_proc_vm_atomic(task, (unsigned long)p->ainsn.insn_arm, insns, sizeof(insns))) {
140 panic("failed to write memory %p!\n", p->ainsn.insn_arm);
141 // Mr_Nobody: we have to panic, really??...
142 //free_insn_slot (&uprobe_insn_pages, task, p->ainsn.insn_arm, 0);
/*
 * arch_check_insn_thumb - reject Thumb/Thumb-2 instructions that cannot
 * be safely single-stepped out of line: branches and calls, instructions
 * with PC as destination, PC-relative stores, LDM/LDMDB, and LDRD/STRD
 * forms (see the inline section comments below).
 * Non-zero result paths are in lines elided from this view.
 */
149 static int arch_check_insn_thumb(struct arch_specific_insn *ainsn)
153 // check instructions that can change PC
154 if (THUMB_INSN_MATCH(UNDEF, ainsn->insn_thumb[0]) ||
155 THUMB_INSN_MATCH(SWI, ainsn->insn_thumb[0]) ||
156 THUMB_INSN_MATCH(BREAK, ainsn->insn_thumb[0]) ||
157 THUMB2_INSN_MATCH(BL, ainsn->insn_thumb[0]) ||
158 THUMB_INSN_MATCH(B1, ainsn->insn_thumb[0]) ||
159 THUMB_INSN_MATCH(B2, ainsn->insn_thumb[0]) ||
160 THUMB_INSN_MATCH(CBZ, ainsn->insn_thumb[0]) ||
161 THUMB2_INSN_MATCH(B1, ainsn->insn_thumb[0]) ||
162 THUMB2_INSN_MATCH(B2, ainsn->insn_thumb[0]) ||
163 THUMB2_INSN_MATCH(BLX1, ainsn->insn_thumb[0]) ||
164 THUMB_INSN_MATCH(BLX2, ainsn->insn_thumb[0]) ||
165 THUMB_INSN_MATCH(BX, ainsn->insn_thumb[0]) ||
166 THUMB2_INSN_MATCH(BXJ, ainsn->insn_thumb[0]) ||
// ALU/load instructions whose destination register (Rd/Rt) is PC.
167 (THUMB2_INSN_MATCH(ADR, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
168 (THUMB2_INSN_MATCH(LDRW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
169 (THUMB2_INSN_MATCH(LDRW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
170 (THUMB2_INSN_MATCH(LDRHW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
171 (THUMB2_INSN_MATCH(LDRHW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
172 (THUMB2_INSN_MATCH(LDRWL, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RT(ainsn->insn_thumb[0]) == 15) ||
173 THUMB2_INSN_MATCH(LDMIA, ainsn->insn_thumb[0]) ||
174 THUMB2_INSN_MATCH(LDMDB, ainsn->insn_thumb[0]) ||
175 (THUMB2_INSN_MATCH(DP, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
176 (THUMB2_INSN_MATCH(RSBW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
177 (THUMB2_INSN_MATCH(RORW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
178 (THUMB2_INSN_MATCH(ROR, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
179 (THUMB2_INSN_MATCH(LSLW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
180 (THUMB2_INSN_MATCH(LSLW2, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
181 (THUMB2_INSN_MATCH(LSRW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
182 (THUMB2_INSN_MATCH(LSRW2, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RD(ainsn->insn_thumb[0]) == 15) ||
183 /* skip PC, #-imm12 -> SP, #-imm8 and Tegra-hanging instructions */
184 (THUMB2_INSN_MATCH(STRW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
185 (THUMB2_INSN_MATCH(STRBW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
186 (THUMB2_INSN_MATCH(STRHW1, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
187 (THUMB2_INSN_MATCH(STRW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
188 (THUMB2_INSN_MATCH(STRHW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
189 (THUMB2_INSN_MATCH(LDRW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
190 (THUMB2_INSN_MATCH(LDRBW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
191 (THUMB2_INSN_MATCH(LDRHW, ainsn->insn_thumb[0]) && THUMB2_INSN_REG_RN(ainsn->insn_thumb[0]) == 15) ||
192 /* skip STRDx/LDRDx Rt, Rt2, [Rd, ...] */
193 (THUMB2_INSN_MATCH(LDRD, ainsn->insn_thumb[0]) || THUMB2_INSN_MATCH(LDRD1, ainsn->insn_thumb[0]) || THUMB2_INSN_MATCH(STRD, ainsn->insn_thumb[0])) ) {
194 DBPRINTF("Bad insn arch_check_insn_thumb: %lx\n", ainsn->insn_thumb[0]);
/*
 * prep_pc_dep_insn_execbuf_thumb - patch the Thumb PC-dependent
 * trampoline template 'insns' for one probed instruction 'insn'.
 * Phase 1: using the 'uregs' mask, extract the register number ('reg')
 * the instruction uses so a non-conflicting scratch register pair
 * (mreg, mreg+1) can be chosen (selection logic partly elided here).
 * Phase 2: rewrite the copied instruction so PC (r15) references become
 * SP (r13) references; SP is made to hold the saved PC value while the
 * buffer executes.
 * Returns 0 on success (error paths elided from this view).
 */
201 static int prep_pc_dep_insn_execbuf_thumb(kprobe_opcode_t * insns, kprobe_opcode_t insn, int uregs)
203 unsigned char mreg = 0;
204 unsigned char reg = 0;
// --- Phase 1: find which register the instruction writes/reads ---
206 if (THUMB_INSN_MATCH(APC, insn) || THUMB_INSN_MATCH(LRO3, insn)) {
207 reg = ((insn & 0xffff) & uregs) >> 8;
209 if (THUMB_INSN_MATCH(MOV3, insn)) {
// MOV Rd, Rm with Rm == PC (bits 6:3 of the halfword).
210 if (((((unsigned char) insn) & 0xff) >> 3) == 15) {
211 reg = (insn & 0xffff) & uregs;
216 if (THUMB2_INSN_MATCH(ADR, insn)) {
217 reg = ((insn >> 16) & uregs) >> 8;
222 if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRW1, insn) ||
223 THUMB2_INSN_MATCH(LDRHW, insn) || THUMB2_INSN_MATCH(LDRHW1, insn) ||
224 THUMB2_INSN_MATCH(LDRWL, insn)) {
225 reg = ((insn >> 16) & uregs) >> 12;
230 // LDRB.W PC, [PC, #immed] => PLD [PC, #immed], so Rt == PC is skipped
231 if (THUMB2_INSN_MATCH(LDRBW, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
232 THUMB2_INSN_MATCH(LDREX, insn)) {
233 reg = ((insn >> 16) & uregs) >> 12;
235 if (THUMB2_INSN_MATCH(DP, insn)) {
236 reg = ((insn >> 16) & uregs) >> 12;
241 if (THUMB2_INSN_MATCH(RSBW, insn)) {
242 reg = ((insn >> 12) & uregs) >> 8;
247 if (THUMB2_INSN_MATCH(RORW, insn)) {
248 reg = ((insn >> 12) & uregs) >> 8;
253 if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW1, insn) ||
254 THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW1, insn) ||
255 THUMB2_INSN_MATCH(LSRW2, insn)) {
256 reg = ((insn >> 12) & uregs) >> 8;
261 if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
264 if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
265 reg = THUMB2_INSN_REG_RM(insn);
278 if ((THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn) ||
279 THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
280 THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn) ||
281 THUMB2_INSN_MATCH(STRHW, insn)) && THUMB2_INSN_REG_RT(insn) == 15) {
282 reg = THUMB2_INSN_REG_RT(insn);
// Patch the template's save/restore halfwords to use the chosen scratch
// pair (mreg, mreg+1) — presumably push/mov/pop slots; confirm against
// the pc_dep_insn_execbuf_thumb template definition.
285 if (reg == 6 || reg == 7) {
286 *((unsigned short*)insns + 0) = (*((unsigned short*)insns + 0) & 0x00ff) | ((1 << mreg) | (1 << (mreg + 1)));
287 *((unsigned short*)insns + 1) = (*((unsigned short*)insns + 1) & 0xf8ff) | (mreg << 8);
288 *((unsigned short*)insns + 2) = (*((unsigned short*)insns + 2) & 0xfff8) | (mreg + 1);
289 *((unsigned short*)insns + 3) = (*((unsigned short*)insns + 3) & 0xffc7) | (mreg << 3);
290 *((unsigned short*)insns + 7) = (*((unsigned short*)insns + 7) & 0xf8ff) | (mreg << 8);
291 *((unsigned short*)insns + 8) = (*((unsigned short*)insns + 8) & 0xffc7) | (mreg << 3);
292 *((unsigned short*)insns + 9) = (*((unsigned short*)insns + 9) & 0xffc7) | ((mreg + 1) << 3);
293 *((unsigned short*)insns + 10) = (*((unsigned short*)insns + 10) & 0x00ff) | (( 1 << mreg) | (1 << (mreg + 1)));
// --- Phase 2: rewrite the probed instruction: PC operands -> SP ---
296 if (THUMB_INSN_MATCH(APC, insn)) {
297 // ADD Rd, PC, #immed_8*4 -> ADD Rd, SP, #immed_8*4
298 *((unsigned short*)insns + 4) = ((insn & 0xffff) | 0x800); // ADD Rd, SP, #immed_8*4
300 if (THUMB_INSN_MATCH(LRO3, insn)) {
301 // LDR Rd, [PC, #immed_8*4] -> LDR Rd, [SP, #immed_8*4]
302 *((unsigned short*)insns + 4) = ((insn & 0xffff) + 0x5000); // LDR Rd, [SP, #immed_8*4]
304 if (THUMB_INSN_MATCH(MOV3, insn)) {
305 // MOV Rd, PC -> MOV Rd, SP
306 *((unsigned short*)insns + 4) = ((insn & 0xffff) ^ 0x10); // MOV Rd, SP
308 if (THUMB2_INSN_MATCH(ADR, insn)) {
309 // ADDW Rd, PC, #imm -> ADDW Rd, SP, #imm
310 insns[2] = (insn & 0xfffffff0) | 0x0d; // ADDW Rd, SP, #imm
312 if (THUMB2_INSN_MATCH(LDRW, insn) || THUMB2_INSN_MATCH(LDRBW, insn) ||
313 THUMB2_INSN_MATCH(LDRHW, insn)) {
314 // LDR.W Rt, [PC, #-<imm_12>] -> LDR.W Rt, [SP, #-<imm_8>]
315 // !!!!!!!!!!!!!!!!!!!!!!!!
316 // !!! imm_12 vs. imm_8 !!!
317 // !!!!!!!!!!!!!!!!!!!!!!!!
318 insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // LDR.W Rt, [SP, #-<imm_8>]
320 if (THUMB2_INSN_MATCH(LDRW1, insn) || THUMB2_INSN_MATCH(LDRBW1, insn) ||
321 THUMB2_INSN_MATCH(LDRHW1, insn) || THUMB2_INSN_MATCH(LDRD, insn) ||
322 THUMB2_INSN_MATCH(LDRD1, insn) || THUMB2_INSN_MATCH(LDREX, insn)) {
323 // LDRx.W Rt, [PC, #+<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
324 insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]
326 if (THUMB2_INSN_MATCH(MUL, insn)) {
327 insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // MUL Rd, Rn, SP
329 if (THUMB2_INSN_MATCH(DP, insn)) {
330 if (THUMB2_INSN_REG_RM(insn) == 15) {
331 insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // DP Rd, Rn, PC
332 } else if (THUMB2_INSN_REG_RN(insn) == 15) {
333 insns[2] = (insn & 0xfffffff0) | 0xd; // DP Rd, PC, Rm
336 if (THUMB2_INSN_MATCH(LDRWL, insn)) {
337 // LDRx.W Rt, [PC, #<imm_12>] -> LDRx.W Rt, [SP, #+<imm_12>] (+/-imm_8 for LDRD Rt, Rt2, [PC, #<imm_8>]
338 insns[2] = (insn & 0xfffffff0) | 0xd; // LDRx.W Rt, [SP, #+<imm_12>]
340 if (THUMB2_INSN_MATCH(RSBW, insn)) {
341 insns[2] = (insn & 0xfffffff0) | 0xd; // RSB{S}.W Rd, PC, #<const> -> RSB{S}.W Rd, SP, #<const>
343 if (THUMB2_INSN_MATCH(RORW, insn) || THUMB2_INSN_MATCH(LSLW1, insn) || THUMB2_INSN_MATCH(LSRW1, insn)) {
344 if ((THUMB2_INSN_REG_RM(insn) == 15) && (THUMB2_INSN_REG_RN(insn) == 15)) {
345 insns[2] = (insn & 0xfffdfffd); // ROR.W Rd, PC, PC
346 } else if (THUMB2_INSN_REG_RM(insn) == 15) {
347 insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR.W Rd, Rn, PC
348 } else if (THUMB2_INSN_REG_RN(insn) == 15) {
349 insns[2] = (insn & 0xfffffff0) | 0xd; // ROR.W Rd, PC, Rm
352 if (THUMB2_INSN_MATCH(ROR, insn) || THUMB2_INSN_MATCH(LSLW2, insn) || THUMB2_INSN_MATCH(LSRW2, insn)) {
353 insns[2] = (insn & 0xfff0ffff) | 0xd0000; // ROR{S} Rd, PC, #<const> -> ROR{S} Rd, SP, #<const>
367 if (THUMB2_INSN_MATCH(STRW, insn) || THUMB2_INSN_MATCH(STRBW, insn)) {
368 insns[2] = (insn & 0xfff0ffff) | 0x000d0000; // STRx.W Rt, [Rn, SP]
370 if (THUMB2_INSN_MATCH(STRD, insn) || THUMB2_INSN_MATCH(STRHT, insn) ||
371 THUMB2_INSN_MATCH(STRT, insn) || THUMB2_INSN_MATCH(STRHW1, insn)) {
372 if (THUMB2_INSN_REG_RN(insn) == 15) {
373 insns[2] = (insn & 0xfffffff0) | 0xd; // STRD/T/HT{.W} Rt, [SP, ...]
// NOTE(review): the inner Rn==15 test below is redundant — the outer
// condition already checks it; likely copy/paste from the block above.
378 if (THUMB2_INSN_MATCH(STRHW, insn) && (THUMB2_INSN_REG_RN(insn) == 15)) {
379 if (THUMB2_INSN_REG_RN(insn) == 15) {
380 insns[2] = (insn & 0xf0fffff0) | 0x0c00000d; // STRH.W Rt, [SP, #-<imm_8>]
// Stores whose source Rt is PC: substitute SP in the Rt field too.
389 if ((reg == 15) && (THUMB2_INSN_MATCH(STRW, insn) ||
390 THUMB2_INSN_MATCH(STRBW, insn) ||
391 THUMB2_INSN_MATCH(STRD, insn) ||
392 THUMB2_INSN_MATCH(STRHT, insn) ||
393 THUMB2_INSN_MATCH(STRT, insn) ||
394 THUMB2_INSN_MATCH(STRHW1, insn) ||
395 THUMB2_INSN_MATCH(STRHW, insn) )) {
396 insns[2] = (insns[2] & 0x0fffffff) | 0xd0000000;
399 if (THUMB2_INSN_MATCH(TEQ1, insn) || THUMB2_INSN_MATCH(TST1, insn)) {
400 insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ SP, #<const>
402 if (THUMB2_INSN_MATCH(TEQ2, insn) || THUMB2_INSN_MATCH(TST2, insn)) {
403 if ((THUMB2_INSN_REG_RN(insn) == 15) && (THUMB2_INSN_REG_RM(insn) == 15)) {
404 insns[2] = (insn & 0xfffdfffd); // TEQ/TST PC, PC
405 } else if (THUMB2_INSN_REG_RM(insn) == 15) {
406 insns[2] = (insn & 0xfff0ffff) | 0xd0000; // TEQ/TST Rn, PC
407 } else if (THUMB2_INSN_REG_RN(insn) == 15) {
408 insns[2] = (insn & 0xfffffff0) | 0xd; // TEQ/TST PC, Rm
/*
 * arch_copy_trampoline_thumb_uprobe - build the Thumb out-of-line
 * execution buffer for the probed instruction and write it into the
 * target task's instruction slot (p->ainsn.insn_thumb).
 * The long if/else-if chain computes 'uregs', a bitmask of the encoding
 * fields that may reference PC, which drives the PC-dependent template.
 * Returns status; exact return values are in lines elided from this view.
 */
416 static int arch_copy_trampoline_thumb_uprobe(struct kprobe *p, struct task_struct *task, int atomic)
420 kprobe_opcode_t insn[MAX_INSN_SIZE];
421 struct arch_specific_insn ainsn;
422 kprobe_opcode_t insns[UPROBES_TRAMP_LEN * 2];
// p->addr must not carry the Thumb bit; slot addresses are even.
425 if ((unsigned long)p->addr & 0x01) {
426 printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);
431 ainsn.insn_thumb = insn;
432 if (!arch_check_insn_thumb(&ainsn)) {
// Determine which encoding fields can hold PC for this instruction.
439 if (THUMB_INSN_MATCH(APC, insn[0]) || THUMB_INSN_MATCH(LRO3, insn[0])) {
440 uregs = 0x0700; // 8-10
442 } else if (THUMB_INSN_MATCH(MOV3, insn[0]) && (((((unsigned char)insn[0]) & 0xff) >> 3) == 15)) {
// NOTE(review): "if THUMB2_INSN_MATCH(...)" below relies on the macro
// expanding with its own parentheses — confirm the macro definition.
446 } else if THUMB2_INSN_MATCH(ADR, insn[0]) {
447 uregs = 0x0f00; // Rd 8-11
449 } else if (((THUMB2_INSN_MATCH(LDRW, insn[0]) || THUMB2_INSN_MATCH(LDRW1, insn[0]) ||
450 THUMB2_INSN_MATCH(LDRBW, insn[0]) || THUMB2_INSN_MATCH(LDRBW1, insn[0]) ||
451 THUMB2_INSN_MATCH(LDRHW, insn[0]) || THUMB2_INSN_MATCH(LDRHW1, insn[0]) ||
452 THUMB2_INSN_MATCH(LDRWL, insn[0])) && THUMB2_INSN_REG_RN(insn[0]) == 15) ||
453 THUMB2_INSN_MATCH(LDREX, insn[0]) ||
454 ((THUMB2_INSN_MATCH(STRW, insn[0]) || THUMB2_INSN_MATCH(STRBW, insn[0]) ||
455 THUMB2_INSN_MATCH(STRHW, insn[0]) || THUMB2_INSN_MATCH(STRHW1, insn[0])) &&
456 (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RT(insn[0]) == 15)) ||
457 ((THUMB2_INSN_MATCH(STRT, insn[0]) || THUMB2_INSN_MATCH(STRHT, insn[0])) &&
458 (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RT(insn[0]) == 15))) {
459 uregs = 0xf000; // Rt 12-15
461 } else if ((THUMB2_INSN_MATCH(LDRD, insn[0]) || THUMB2_INSN_MATCH(LDRD1, insn[0])) && (THUMB2_INSN_REG_RN(insn[0]) == 15)) {
462 uregs = 0xff00; // Rt 12-15, Rt2 8-11
464 } else if (THUMB2_INSN_MATCH(MUL, insn[0]) && THUMB2_INSN_REG_RM(insn[0]) == 15) {
467 } else if (THUMB2_INSN_MATCH(DP, insn[0]) && (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
468 uregs = 0xf000; // Rd 12-15
470 } else if (THUMB2_INSN_MATCH(STRD, insn[0]) && ((THUMB2_INSN_REG_RN(insn[0]) == 15) || (THUMB2_INSN_REG_RT(insn[0]) == 15) || THUMB2_INSN_REG_RT2(insn[0]) == 15)) {
471 uregs = 0xff00; // Rt 12-15, Rt2 8-11
473 } else if (THUMB2_INSN_MATCH(RSBW, insn[0]) && THUMB2_INSN_REG_RN(insn[0]) == 15) {
474 uregs = 0x0f00; // Rd 8-11
476 } else if (THUMB2_INSN_MATCH (RORW, insn[0]) && (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
479 } else if ((THUMB2_INSN_MATCH(ROR, insn[0]) || THUMB2_INSN_MATCH(LSLW2, insn[0]) || THUMB2_INSN_MATCH(LSRW2, insn[0])) && THUMB2_INSN_REG_RM(insn[0]) == 15) {
480 uregs = 0x0f00; // Rd 8-11
482 } else if ((THUMB2_INSN_MATCH(LSLW1, insn[0]) || THUMB2_INSN_MATCH(LSRW1, insn[0])) && (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
483 uregs = 0x0f00; // Rd 8-11
485 } else if ((THUMB2_INSN_MATCH(TEQ1, insn[0]) || THUMB2_INSN_MATCH(TST1, insn[0])) && THUMB2_INSN_REG_RN(insn[0]) == 15) {
486 uregs = 0xf0000; //Rn 0-3 (16-19)
488 } else if ((THUMB2_INSN_MATCH(TEQ2, insn[0]) || THUMB2_INSN_MATCH(TST2, insn[0])) &&
489 (THUMB2_INSN_REG_RN(insn[0]) == 15 || THUMB2_INSN_REG_RM(insn[0]) == 15)) {
490 uregs = 0xf0000; //Rn 0-3 (16-19)
// PC-dependent instruction: use the special template (18 halfwords).
494 if (unlikely(uregs && pc_dep)) {
495 memcpy(insns, pc_dep_insn_execbuf_thumb, 18 * 2);
496 if (prep_pc_dep_insn_execbuf_thumb(insns, insn[0], uregs) != 0) {
497 printk("Error in %s at %d: failed to prepare exec buffer for insn %lx!",
498 __FILE__, __LINE__, insn[0]);
500 //free_insn_slot (&uprobe_insn_pages, task, p->ainsn.insn_thumb, 0);
// Halfword 13 holds the return breakpoint (0xdeff); 14-15 the saved PC
// value; 16-17 the resume address with the Thumb bit (|0x1) set.
504 addr = ((unsigned int)p->addr) + 4;
505 *((unsigned short*)insns + 13) = 0xdeff;
506 *((unsigned short*)insns + 14) = addr & 0x0000ffff;
507 *((unsigned short*)insns + 15) = addr >> 16;
// Resume offset depends on the probed instruction's width: +2 for a
// 16-bit Thumb insn, +4 for a 32-bit Thumb-2 insn.
508 if (!is_thumb2(insn[0])) {
509 addr = ((unsigned int)p->addr) + 2;
510 *((unsigned short*)insns + 16) = (addr & 0x0000ffff) | 0x1;
511 *((unsigned short*)insns + 17) = addr >> 16;
513 addr = ((unsigned int)p->addr) + 4;
514 *((unsigned short*)insns + 16) = (addr & 0x0000ffff) | 0x1;
515 *((unsigned short*)insns + 17) = addr >> 16;
// Generic (non-PC-dependent) template.
518 memcpy(insns, gen_insn_execbuf_thumb, 18 * 2);
519 *((unsigned short*)insns + 13) = 0xdeff;
520 if (!is_thumb2(insn[0])) {
521 addr = ((unsigned int)p->addr) + 2;
522 *((unsigned short*)insns + 2) = insn[0];
523 *((unsigned short*)insns + 16) = (addr & 0x0000ffff) | 0x1;
524 *((unsigned short*)insns + 17) = addr >> 16;
526 addr = ((unsigned int)p->addr) + 4;
528 *((unsigned short*)insns + 16) = (addr & 0x0000ffff) | 0x1;
529 *((unsigned short*)insns + 17) = addr >> 16;
// Copy the finished trampoline into the target task's address space.
533 if (!write_proc_vm_atomic (task, (unsigned long)p->ainsn.insn_thumb, insns, 18 * 2)) {
534 panic("failed to write memory %p!\n", p->ainsn.insn_thumb);
535 // Mr_Nobody: we have to panic, really??...
536 //free_insn_slot (&uprobe_insn_pages, task, p->ainsn.insn_thumb, 0);
/*
 * arch_prepare_uprobe - prepare a user-space probe: read the original
 * instruction from the task, allocate ARM and Thumb instruction slots,
 * and build both trampolines.  If both copies fail, the original opcode
 * is written back and the slots are freed.
 * Returns status; exact return values are in lines elided from this view.
 */
543 int arch_prepare_uprobe(struct kprobe *p, struct task_struct *task, int atomic)
546 kprobe_opcode_t insn[MAX_INSN_SIZE];
548 if ((unsigned long)p->addr & 0x01) {
549 printk("Error in %s at %d: attempt to register kprobe at an unaligned address\n", __FILE__, __LINE__);
553 if (!read_proc_vm_atomic(task, (unsigned long)p->addr, &insn, MAX_INSN_SIZE * sizeof(kprobe_opcode_t))) {
554 panic("Failed to read memory task[tgid=%u, comm=%s] %p!\n", task->tgid, task->comm, p->addr);
// ARM slot + trampoline.
558 p->ainsn.insn_arm = get_insn_slot(task, &uprobe_insn_pages, atomic);
559 if (!p->ainsn.insn_arm) {
560 printk("Error in %s at %d: kprobe slot allocation error (arm)\n", __FILE__, __LINE__);
564 ret = arch_copy_trampoline_arm_uprobe(p, task, 1);
566 free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm);
// Thumb slot + trampoline.
570 p->ainsn.insn_thumb = get_insn_slot(task, &uprobe_insn_pages, atomic);
571 if (!p->ainsn.insn_thumb) {
572 printk("Error in %s at %d: kprobe slot allocation error (thumb)\n", __FILE__, __LINE__);
576 ret = arch_copy_trampoline_thumb_uprobe(p, task, 1);
578 free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm);
579 free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_thumb);
// Both modes failed: undo everything and restore the original opcode.
583 if ((p->safe_arm == -1) && (p->safe_thumb == -1)) {
584 printk("Error in %s at %d: failed arch_copy_trampoline_*_uprobe() (both) [tgid=%u, addr=%lx, data=%lx]\n",
585 __FILE__, __LINE__, task->tgid, (unsigned long)p->addr, (unsigned long)p->opcode);
586 if (!write_proc_vm_atomic(task, (unsigned long)p->addr, &p->opcode, sizeof(p->opcode))) {
587 panic("Failed to write memory %p!\n", p->addr);
590 free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_arm);
591 free_insn_slot(&uprobe_insn_pages, task, p->ainsn.insn_thumb);
/*
 * arch_prepare_uretprobe_hl - hijack the function return address: save
 * the real LR and SP in the kretprobe instance, then point LR at the
 * return trampoline inside the probe's instruction slot.
 */
599 void arch_prepare_uretprobe_hl(struct kretprobe_instance *ri,
600 struct pt_regs *regs)
602 ri->ret_addr = (kprobe_opcode_t *)regs->ARM_lr;
603 ri->sp = (kprobe_opcode_t *)regs->ARM_sp;
605 /* Set flag of current mode */
// Bit 0 of the saved SP records whether the task was in Thumb mode.
606 ri->sp = (kprobe_opcode_t *)((long)ri->sp | !!thumb_mode(regs));
// Thumb uses a fixed byte offset (0x1b) into the slot; ARM uses the
// word index of the return-break instruction. Must match
// trampoline_uprobe_handler's trampoline_address computation.
608 if (thumb_mode(regs)) {
609 regs->ARM_lr = (unsigned long)(ri->rp->kp.ainsn.insn) + 0x1b;
611 regs->ARM_lr = (unsigned long)(ri->rp->kp.ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
/*
 * setjmp_upre_handler - jprobe pre-handler for uprobes: run the probe's
 * pre-entry hook to get the single-step address, invoke the user entry
 * function with the first six argument registers, then return via
 * dbi_arch_uprobe_return().
 */
615 int setjmp_upre_handler(struct kprobe *p, struct pt_regs *regs)
617 struct jprobe *jp = container_of(p, struct jprobe, kp);
618 kprobe_pre_entry_handler_t pre_entry = (kprobe_pre_entry_handler_t)jp->pre_entry;
619 entry_point_t entry = (entry_point_t)jp->entry;
622 p->ss_addr = (kprobe_opcode_t *)pre_entry(jp->priv_arg, regs);
// AAPCS: r0-r3 carry the first four arguments; r4/r5 presumably carry
// extra arguments for this entry_point_t signature — confirm its typedef.
626 entry(regs->ARM_r0, regs->ARM_r1, regs->ARM_r2,
627 regs->ARM_r3, regs->ARM_r4, regs->ARM_r5);
629 dbi_arch_uprobe_return();
/*
 * trampoline_uprobe_handler - called when a hijacked return hits the
 * uretprobe trampoline.  Walks the current task's kretprobe instances,
 * runs their handlers, recovers the real return address, and restores
 * ARM_pc (fixing the CPSR Thumb bit if the return target's mode differs).
 * Runs under kretprobe_lock with interrupts disabled.
 */
635 int trampoline_uprobe_handler(struct kprobe *p, struct pt_regs *regs)
637 struct kretprobe_instance *ri = NULL;
638 struct hlist_head *head;
639 struct hlist_node *node, *tmp;
640 unsigned long flags, orig_ret_address = 0;
641 unsigned long trampoline_address = 0;
// Must mirror the LR computation in arch_prepare_uretprobe_hl.
643 if (thumb_mode(regs)) {
644 trampoline_address = (unsigned long)(p->ainsn.insn) + 0x1b;
646 trampoline_address = (unsigned long)(p->ainsn.insn + UPROBES_TRAMP_RET_BREAK_IDX);
649 spin_lock_irqsave(&kretprobe_lock, flags);
651 head = kretprobe_inst_table_head(current->mm);
654 * It is possible to have multiple instances associated with a given
655 * task either because multiple functions in the call path
656 * have a return probe installed on them, and/or more than one
657 * return probe was registered for a target function.
659 * We can handle this because:
660 * - instances are always inserted at the head of the list
661 * - when multiple return probes are registered for the same
662 * function, the first instance's ret_addr will point to the
663 * real return address, and all the rest will point to
664 * kretprobe_trampoline
666 hlist_for_each_entry_safe(ri, node, tmp, head, hlist) {
667 if (ri->task != current) {
668 /* another task is sharing our hash bucket */
672 if (ri->rp && ri->rp->handler) {
673 ri->rp->handler(ri, regs, ri->rp->priv_arg);
676 orig_ret_address = (unsigned long)ri->ret_addr;
679 if (orig_ret_address != trampoline_address) {
681 * This is the real return address. Any other
682 * instances associated with this task are for
683 * other calls deeper on the call stack
// Resume at the real return address; adjust the CPSR Thumb bit (0x20)
// to match bit 0 of the saved LR.
689 regs->ARM_pc = orig_ret_address;
690 if (thumb_mode(regs) && !(regs->ARM_lr & 0x01)) {
691 regs->ARM_cpsr &= 0xFFFFFFDF;
692 } else if (user_mode(regs) && (regs->ARM_lr & 0x01)) {
693 regs->ARM_cpsr |= 0x20;
696 spin_unlock_irqrestore(&kretprobe_lock, flags);
699 * By returning a non-zero value, we are telling
700 * kprobe_handler() that we don't want the post_handler
701 * to run (and have re-enabled preemption)
/*
 * check_validity_insn - select the correct trampoline (ARM or Thumb)
 * for the mode the task trapped in.  safe_thumb/safe_arm == -1 means
 * the corresponding trampoline could not be built; in that case the
 * probe is disarmed and an error is reported.
 */
707 static int check_validity_insn(struct kprobe *p, struct pt_regs *regs, struct task_struct *task)
711 if (unlikely(thumb_mode(regs))) {
712 if (p->safe_thumb != -1) {
// Point this probe (and all aggregated probes on the same address)
// at the Thumb slot.
713 p->ainsn.insn = p->ainsn.insn_thumb;
714 list_for_each_entry_rcu(kp, &p->list, list) {
715 kp->ainsn.insn = p->ainsn.insn_thumb;
718 printk("Error in %s at %d: we are in thumb mode (!) and check instruction was fail \
719 (%0lX instruction at %p address)!\n", __FILE__, __LINE__, p->opcode, p->addr);
720 // Test case when we do our actions on already running application
721 disarm_uprobe(p, task);
// ARM mode: same selection against the ARM slot.
725 if (p->safe_arm != -1) {
726 p->ainsn.insn = p->ainsn.insn_arm;
727 list_for_each_entry_rcu(kp, &p->list, list) {
728 kp->ainsn.insn = p->ainsn.insn_arm;
731 printk("Error in %s at %d: we are in arm mode (!) and check instruction was fail \
732 (%0lX instruction at %p address)!\n", __FILE__, __LINE__, p->opcode, p->addr);
733 // Test case when we do our actions on already running application
734 disarm_uprobe(p, task);
/*
 * uprobe_handler - main breakpoint dispatcher for user-space probes.
 * Looks up the probe registered at the faulting PC for the current
 * thread group; if the PC is inside an instruction slot instead, this
 * is the return trampoline and trampoline_uprobe_handler runs.
 */
742 static int uprobe_handler(struct pt_regs *regs)
744 kprobe_opcode_t *addr = (kprobe_opcode_t *)(regs->ARM_pc);
745 struct task_struct *task = current;
746 pid_t tgid = task->tgid;
749 p = get_uprobe(addr, tgid);
751 if (p && (check_validity_insn(p, regs, task) != 0)) {
752 printk("no_uprobe live\n");
// No probe at this address: maybe we hit the return-break inside a slot.
757 p = get_kprobe_by_insn_slot(addr, tgid, regs);
759 printk("no_uprobe\n");
763 trampoline_uprobe_handler(p, regs);
767 /* restore opcode for thumb app */
// A 16-bit Thumb probe overwrote two halfwords; put back the second one.
768 if (thumb_mode(regs)) {
769 if (!is_thumb2(p->opcode)) {
770 unsigned long tmp = p->opcode >> 16;
771 write_proc_vm_atomic(task, (unsigned long)((unsigned short*)p->addr + 1), &tmp, 2);
773 // "2*sizeof(kprobe_opcode_t)" - strange. Should be "sizeof(kprobe_opcode_t)", need to test
774 flush_icache_range((unsigned int)p->addr, ((unsigned int)p->addr) + (2 * sizeof(kprobe_opcode_t)));
// Run the pre-handler; if it does not consume the event, single-step.
778 if (!p->pre_handler || !p->pre_handler(p, regs)) {
779 prepare_singlestep(p, regs);
/*
 * uprobe_trap_handler - undef-instruction hook entry point; runs the
 * uprobe dispatcher with local interrupts disabled.
 */
785 int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
789 local_irq_save(flags);
792 ret = uprobe_handler(regs);
// NOTE(review): preempt_enable_no_resched() without a visible matching
// preempt_disable() in this view — the disable is presumably in an
// elided line; confirm pairing.
793 preempt_enable_no_resched();
795 local_irq_restore(flags);
799 /* userspace probes hook (arm) */
// Match the full 32-bit ARM breakpoint opcode, user mode only.
800 static struct undef_hook undef_hook_for_us_arm = {
801 .instr_mask = 0xffffffff,
802 .instr_val = BREAKPOINT_INSTRUCTION,
803 .cpsr_mask = MODE_MASK,
804 .cpsr_val = USR_MODE,
805 .fn = uprobe_trap_handler
808 /* userspace probes hook (thumb) */
// NOTE(review): instr_mask is the full 32 bits while instr_val keeps only
// the low 16 bits of BREAKPOINT_INSTRUCTION — this matches only if the
// trap reports Thumb instructions zero-extended to 32 bits; confirm
// against the undef-hook dispatch code.
809 static struct undef_hook undef_hook_for_us_thumb = {
810 .instr_mask = 0xffffffff,
811 .instr_val = BREAKPOINT_INSTRUCTION & 0x0000ffff,
812 .cpsr_mask = MODE_MASK,
813 .cpsr_val = USR_MODE,
814 .fn = uprobe_trap_handler
/*
 * swap_arch_init_uprobes - register the ARM and Thumb undef-instruction
 * hooks that route user-space breakpoints to uprobe_trap_handler.
 */
817 int swap_arch_init_uprobes(void)
819 swap_register_undef_hook(&undef_hook_for_us_arm);
820 swap_register_undef_hook(&undef_hook_for_us_thumb);
/*
 * swap_arch_exit_uprobes - unregister the undef-instruction hooks,
 * in reverse order of registration.
 */
825 void swap_arch_exit_uprobes(void)
827 swap_unregister_undef_hook(&undef_hook_for_us_thumb);
828 swap_unregister_undef_hook(&undef_hook_for_us_arm);