1 /* Copyright (c) 2009, 2010, 2011, 2012 ARM Ltd.
3 Permission is hereby granted, free of charge, to any person obtaining
4 a copy of this software and associated documentation files (the
5 ``Software''), to deal in the Software without restriction, including
6 without limitation the rights to use, copy, modify, merge, publish,
7 distribute, sublicense, and/or sell copies of the Software, and to
8 permit persons to whom the Software is furnished to do so, subject to
9 the following conditions:
11 The above copyright notice and this permission notice shall be
12 included in all copies or substantial portions of the Software.
14 THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND,
15 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
17 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
18 CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
19 TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
20 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
22 #if defined(__aarch64__) || defined(__arm64__)
24 #include <fficonfig.h>
29 #ifdef HAVE_MACHINE_ASM_H
30 #include <machine/asm.h>
32 #ifdef __USER_LABEL_PREFIX__
33 #define CONCAT1(a, b) CONCAT2(a, b)
34 #define CONCAT2(a, b) a ## b
36 /* Use the right prefix for global labels. */
37 #define CNAME(x) CONCAT1 (__USER_LABEL_PREFIX__, x)
/* PTR_REG(n) names argument register n at pointer width: the 32-bit w-view
   in one configuration and the 64-bit x-view in the other.
   NOTE(review): the #if/#else guards that separate these two definitions
   (presumably keyed on an ILP32 pointer model) are not visible in this
   excerpt — confirm against the full file.  */
50 #define PTR_REG(n) w##n
52 #define PTR_REG(n) x##n
/* On Mach-O targets built with pointer authentication, indirect branches
   must authenticate the target address, so BR/BLR expand to the
   authenticating forms braaz/blraaz instead of plain br/blr.  */
61 #if FFI_EXEC_TRAMPOLINE_TABLE && defined(__MACH__) && defined(HAVE_PTRAUTH)
62 # define BR(r) braaz r
63 # define BLR(r) blraaz r
/* ffi_call_SYSV: perform an outgoing foreign-function call.
   The C-visible contract is the extern declaration below: the caller has
   built the argument context on `stack` and allocated `frame` for us; this
   routine loads the argument registers from the context, calls `fn`, and
   stores the return value as selected by `flags`.
   NOTE(review): this excerpt is gapped — the function label itself, the
   full entry-state comment, and a few instructions (e.g. the q0/q1 load
   and the dispatch branch into the store table) fall on lines not visible
   here.  */
73 extern void ffi_call_SYSV (void *stack, void *frame,
74 void (*fn)(void), void *rvalue,
75 int flags, void *closure);
77 Therefore on entry we have:
89 /* Use a stack frame allocated by our caller. */
/* CFA is tracked via x29 while sp moves below; x29/x30 live at frame+0/+8. */
94 cfi_def_cfa_register(x29)
95 cfi_rel_offset (x29, 0)
96 cfi_rel_offset (x30, 8)
/* Stash the call target in scratch x9 and the indirect-struct-return
   pointer in x8 (the AAPCS64 result-pointer register).  */
98 mov x9, x2 /* save fn */
99 mov x8, x3 /* install structure return */
100 #ifdef FFI_GO_CLOSURES
101 mov x18, x5 /* install static chain */
/* rvalue/flags are needed again after the call, so park them in the frame. */
103 stp x3, x4, [x29, #16] /* save rvalue and flags */
105 /* Load the vector argument passing registers, if necessary. */
106 tbz w4, #AARCH64_FLAG_ARG_V_BIT, 1f
108 ldp q2, q3, [sp, #32]
109 ldp q4, q5, [sp, #64]
110 ldp q6, q7, [sp, #96]
112 /* Load the core argument passing registers, including
113 the structure return pointer. */
114 ldp x0, x1, [sp, #16*N_V_ARG_REG + 0]
115 ldp x2, x3, [sp, #16*N_V_ARG_REG + 16]
116 ldp x4, x5, [sp, #16*N_V_ARG_REG + 32]
117 ldp x6, x7, [sp, #16*N_V_ARG_REG + 48]
119 /* Deallocate the context, leaving the stacked arguments. */
120 add sp, sp, #CALL_CONTEXT_SIZE
/* BLR may be the authenticating blraaz form on ptrauth targets (see top). */
122 BLR(x9) /* call fn */
124 ldp x3, x4, [x29, #16] /* reload rvalue and flags */
126 /* Partially deconstruct the stack frame. */
128 cfi_def_cfa_register (sp)
131 /* Save the return value as directed. */
/* Reduce flags to the return-type code, then index the 8-byte-per-entry
   store table below: x5 += code * 8.  */
133 and w4, w4, #AARCH64_RET_MASK
134 add x5, x5, x4, lsl #3
137 /* Note that each table entry is 2 insns, and thus 8 bytes.
138 For integer data, note that we're storing into ffi_arg
139 and therefore we want to extend to 64 bits; these types
140 have two consecutive entries allocated for them. */
/* Store table, indexed by return-type code; x3 = rvalue buffer.
   NOTE(review): the second instruction of several entries (the store/ret
   halves) sits on lines not visible in this excerpt.  */
144 1: str x0, [x3] /* INT64 */
146 2: stp x0, x1, [x3] /* INT128 */
148 3: brk #1000 /* UNUSED */
150 4: brk #1000 /* UNUSED */
152 5: brk #1000 /* UNUSED */
154 6: brk #1000 /* UNUSED */
156 7: brk #1000 /* UNUSED */
158 8: st4 { v0.s, v1.s, v2.s, v3.s }[0], [x3] /* S4 */
160 9: st3 { v0.s, v1.s, v2.s }[0], [x3] /* S3 */
162 10: stp s0, s1, [x3] /* S2 */
164 11: str s0, [x3] /* S1 */
166 12: st4 { v0.d, v1.d, v2.d, v3.d }[0], [x3] /* D4 */
168 13: st3 { v0.d, v1.d, v2.d }[0], [x3] /* D3 */
170 14: stp d0, d1, [x3] /* D2 */
172 15: str d0, [x3] /* D1 */
174 16: str q3, [x3, #48] /* Q4 */
176 17: str q2, [x3, #32] /* Q3 */
178 18: stp q0, q1, [x3] /* Q2 */
180 19: str q0, [x3] /* Q1 */
/* Small-integer returns: zero/sign-extend into the full ffi_arg slot. */
182 20: uxtb w0, w0 /* UINT8 */
184 21: ret /* reserved */
186 22: uxth w0, w0 /* UINT16 */
188 23: ret /* reserved */
190 24: mov w0, w0 /* UINT32 */
192 25: ret /* reserved */
194 26: sxtb x0, w0 /* SINT8 */
196 27: ret /* reserved */
198 28: sxth x0, w0 /* SINT16 */
200 29: ret /* reserved */
202 30: sxtw x0, w0 /* SINT32 */
204 31: ret /* reserved */
209 .globl CNAME(ffi_call_SYSV)
210 FFI_HIDDEN(CNAME(ffi_call_SYSV))
212 .type CNAME(ffi_call_SYSV), #function
213 .size CNAME(ffi_call_SYSV), .-CNAME(ffi_call_SYSV)
218 Closure invocation glue. This is the low level code invoked directly by
219 the closure trampoline to setup and call a closure.
221 On entry x17 points to a struct ffi_closure, x16 has been clobbered;
222 all other registers are preserved.
224 We allocate a call context and save the argument passing registers,
225 then invoke the generic C ffi_closure_SYSV_inner() function to do all
226 the real work, on return we load the result passing registers back from
/* Closure frame size: 16 bytes for the saved x29/x30 pair, the full
   register-save call context, plus 64 bytes (presumably rvalue scratch
   large enough for the biggest 4x q-register return — confirm against the
   full file).  */
230 #define ffi_closure_SYSV_FS (8*2 + CALL_CONTEXT_SIZE + 64)
/* Entry point used when the closure's signature involves vector (FP/SIMD)
   arguments: save q0-q7 first, then fall through into the shared core-
   register path.  NOTE(review): the fall-through/branch into that shared
   path is on lines not visible in this excerpt.  */
233 CNAME(ffi_closure_SYSV_V):
235 stp x29, x30, [sp, #-ffi_closure_SYSV_FS]!
236 cfi_adjust_cfa_offset (ffi_closure_SYSV_FS)
237 cfi_rel_offset (x29, 0)
238 cfi_rel_offset (x30, 8)
240 /* Save the argument passing vector registers. */
241 stp q0, q1, [sp, #16 + 0]
242 stp q2, q3, [sp, #16 + 32]
243 stp q4, q5, [sp, #16 + 64]
244 stp q6, q7, [sp, #16 + 96]
248 .globl CNAME(ffi_closure_SYSV_V)
249 FFI_HIDDEN(CNAME(ffi_closure_SYSV_V))
251 .type CNAME(ffi_closure_SYSV_V), #function
252 .size CNAME(ffi_closure_SYSV_V), . - CNAME(ffi_closure_SYSV_V)
/* Main closure entry: x17 points at the struct ffi_closure (set up by the
   trampoline).  Save the incoming argument registers into the context,
   call the C worker ffi_closure_SYSV_inner, then load the return registers
   from the rvalue area according to the returned type code.  */
257 CNAME(ffi_closure_SYSV):
258 stp x29, x30, [sp, #-ffi_closure_SYSV_FS]!
259 cfi_adjust_cfa_offset (ffi_closure_SYSV_FS)
260 cfi_rel_offset (x29, 0)
261 cfi_rel_offset (x30, 8)
265 /* Save the argument passing core registers. */
266 stp x0, x1, [sp, #16 + 16*N_V_ARG_REG + 0]
267 stp x2, x3, [sp, #16 + 16*N_V_ARG_REG + 16]
268 stp x4, x5, [sp, #16 + 16*N_V_ARG_REG + 32]
269 stp x6, x7, [sp, #16 + 16*N_V_ARG_REG + 48]
271 /* Load ffi_closure_inner arguments. */
/* cif/fn/user_data live inside the closure object at a fixed offset past
   the trampoline code; PTR_REG picks pointer-width views of x0-x2.  */
272 ldp PTR_REG(0), PTR_REG(1), [x17, #FFI_TRAMPOLINE_CLOSURE_OFFSET] /* load cif, fn */
273 ldr PTR_REG(2), [x17, #FFI_TRAMPOLINE_CLOSURE_OFFSET+PTR_SIZE*2] /* load user_data */
275 add x3, sp, #16 /* load context */
276 add x4, sp, #ffi_closure_SYSV_FS /* load stack */
277 add x5, sp, #16+CALL_CONTEXT_SIZE /* load rvalue */
278 mov x6, x8 /* load struct_rval */
279 bl CNAME(ffi_closure_SYSV_inner)
281 /* Load the return value as directed. */
282 #if FFI_EXEC_TRAMPOLINE_TABLE && defined(__MACH__) && defined(HAVE_PTRAUTH)
/* w0 (return-type code from the inner function) indexes the 8-byte-per-
   entry load table below; x3 = rvalue area in our frame.  */
286 and w0, w0, #AARCH64_RET_MASK
287 add x1, x1, x0, lsl #3
288 add x3, sp, #16+CALL_CONTEXT_SIZE
291 /* Note that each table entry is 2 insns, and thus 8 bytes. */
/* NOTE(review): some entries' second instruction (e.g. the loads paired
   with the S4/D4 cases, and the branches to 99f) are on lines not visible
   in this excerpt.  */
295 1: ldr x0, [x3] /* INT64 */
297 2: ldp x0, x1, [x3] /* INT128 */
299 3: brk #1000 /* UNUSED */
301 4: brk #1000 /* UNUSED */
303 5: brk #1000 /* UNUSED */
305 6: brk #1000 /* UNUSED */
307 7: brk #1000 /* UNUSED */
309 8: ldr s3, [x3, #12] /* S4 */
311 9: ldr s2, [x3, #8] /* S3 */
313 10: ldp s0, s1, [x3] /* S2 */
315 11: ldr s0, [x3] /* S1 */
317 12: ldr d3, [x3, #24] /* D4 */
319 13: ldr d2, [x3, #16] /* D3 */
321 14: ldp d0, d1, [x3] /* D2 */
323 15: ldr d0, [x3] /* D1 */
325 16: ldr q3, [x3, #48] /* Q4 */
327 17: ldr q2, [x3, #32] /* Q3 */
329 18: ldp q0, q1, [x3] /* Q2 */
331 19: ldr q0, [x3] /* Q1 */
/* Small-integer loads use BE() to address the correct bytes on big-endian. */
333 20: ldrb w0, [x3, #BE(7)] /* UINT8 */
335 21: brk #1000 /* reserved */
337 22: ldrh w0, [x3, #BE(6)] /* UINT16 */
339 23: brk #1000 /* reserved */
341 24: ldr w0, [x3, #BE(4)] /* UINT32 */
343 25: brk #1000 /* reserved */
345 26: ldrsb x0, [x3, #BE(7)] /* SINT8 */
347 27: brk #1000 /* reserved */
349 28: ldrsh x0, [x3, #BE(6)] /* SINT16 */
351 29: brk #1000 /* reserved */
353 30: ldrsw x0, [x3, #BE(4)] /* SINT32 */
/* Common exit: pop the whole closure frame and (on a line not shown) ret. */
356 99: ldp x29, x30, [sp], #ffi_closure_SYSV_FS
357 cfi_adjust_cfa_offset (-ffi_closure_SYSV_FS)
363 .globl CNAME(ffi_closure_SYSV)
364 FFI_HIDDEN(CNAME(ffi_closure_SYSV))
366 .type CNAME(ffi_closure_SYSV), #function
367 .size CNAME(ffi_closure_SYSV), . - CNAME(ffi_closure_SYSV)
370 #if defined(FFI_EXEC_STATIC_TRAMP)
/* Static-trampoline adapter: the trampoline (see trampoline_code_table)
   left the closure pointer at [sp, #8] and x17 saved at [sp].  Recover the
   closure into x17, undo the trampoline's 16-byte push, and tail-branch to
   the normal vector-register closure entry.  */
372 CNAME(ffi_closure_SYSV_V_alt):
373 /* See the comments above trampoline_code_table. */
374 ldr x17, [sp, #8] /* Load closure in x17 */
375 add sp, sp, #16 /* Restore the stack */
376 b CNAME(ffi_closure_SYSV_V)
378 .globl CNAME(ffi_closure_SYSV_V_alt)
379 FFI_HIDDEN(CNAME(ffi_closure_SYSV_V_alt))
381 .type CNAME(ffi_closure_SYSV_V_alt), #function
382 .size CNAME(ffi_closure_SYSV_V_alt), . - CNAME(ffi_closure_SYSV_V_alt)
/* Static-trampoline adapter for the non-vector path: identical to
   ffi_closure_SYSV_V_alt except it branches to ffi_closure_SYSV.  */
386 CNAME(ffi_closure_SYSV_alt):
387 /* See the comments above trampoline_code_table. */
388 ldr x17, [sp, #8] /* Load closure in x17 */
389 add sp, sp, #16 /* Restore the stack */
390 b CNAME(ffi_closure_SYSV)
392 .globl CNAME(ffi_closure_SYSV_alt)
393 FFI_HIDDEN(CNAME(ffi_closure_SYSV_alt))
395 .type CNAME(ffi_closure_SYSV_alt), #function
396 .size CNAME(ffi_closure_SYSV_alt), . - CNAME(ffi_closure_SYSV_alt)
400 * Below is the definition of the trampoline code table. Each element in
401 * the code table is a trampoline.
404 * The trampoline uses register x17. It saves the original value of x17 on
407 * The trampoline has two parameters - target code to jump to and data for
408 * the target code. The trampoline extracts the parameters from its parameter
409 * block (see tramp_table_map()). The trampoline saves the data address on
410 * the stack. Finally, it jumps to the target code.
412 * The target code can choose to:
414 * - restore the value of x17
415 * - load the data address in a register
416 * - restore the stack pointer to what it was when the trampoline was invoked.
/* Table of identical static trampolines, one per AARCH64_TRAMP_SIZE slot,
   filling an AARCH64_TRAMP_MAP_SIZE-aligned mapping.  Each trampoline
   saves x17, loads its (code, data) parameter pair PC-relatively from the
   parameter block, stashes the data pointer at [sp, #8], and jumps to the
   target code.  NOTE(review): the literal adr offsets #16376/#16372
   presumably reach the parameter block mapped one map-size beyond this
   code page (see tramp_table_map()) — confirm against the full file.  */
418 .align AARCH64_TRAMP_MAP_SHIFT
419 CNAME(trampoline_code_table):
420 .rept AARCH64_TRAMP_MAP_SIZE / AARCH64_TRAMP_SIZE
421 sub sp, sp, #16 /* Make space on the stack */
422 str x17, [sp] /* Save x17 on stack */
423 adr x17, #16376 /* Get data address */
424 ldr x17, [x17] /* Copy data into x17 */
425 str x17, [sp, #8] /* Save data on stack */
426 adr x17, #16372 /* Get code address */
427 ldr x17, [x17] /* Load code address into x17 */
428 br x17 /* Jump to code */
431 .globl CNAME(trampoline_code_table)
432 FFI_HIDDEN(CNAME(trampoline_code_table))
434 .type CNAME(trampoline_code_table), #function
435 .size CNAME(trampoline_code_table), . - CNAME(trampoline_code_table)
/* Pad so anything following starts on the next map-aligned boundary. */
437 .align AARCH64_TRAMP_MAP_SHIFT
438 #endif /* FFI_EXEC_STATIC_TRAMP */
440 #if FFI_EXEC_TRAMPOLINE_TABLE
/* Apple/Mach-O trampoline page: a page of identical entries, each pairing
   with a config slot located exactly one page earlier (hence the
   adr x16, -PAGE_MAX_SIZE).  NOTE(review): the instructions between the
   adr and the nop (the config-slot loads and the indirect branch) are on
   lines not visible in this excerpt.  */
443 #include <mach/machine/vm_param.h>
444 .align PAGE_MAX_SHIFT
445 CNAME(ffi_closure_trampoline_table_page):
446 .rept PAGE_MAX_SIZE / FFI_TRAMPOLINE_SIZE
447 adr x16, -PAGE_MAX_SIZE
450 nop /* each entry in the trampoline config page is 2*sizeof(void*) so the trampoline itself cannot be smaller than 16 bytes */
453 .globl CNAME(ffi_closure_trampoline_table_page)
454 FFI_HIDDEN(CNAME(ffi_closure_trampoline_table_page))
456 .type CNAME(ffi_closure_trampoline_table_page), #function
457 .size CNAME(ffi_closure_trampoline_table_page), . - CNAME(ffi_closure_trampoline_table_page)
461 #endif /* FFI_EXEC_TRAMPOLINE_TABLE */
463 #ifdef FFI_GO_CLOSURES
/* Go-closure entry for signatures with vector arguments: same frame and
   q0-q7 save as ffi_closure_SYSV_V, but the closure data arrives via the
   static-chain register x18 rather than x17 (see ffi_go_closure_SYSV).
   NOTE(review): the fall-through/branch into the shared core path is on
   lines not visible in this excerpt.  */
465 CNAME(ffi_go_closure_SYSV_V):
467 stp x29, x30, [sp, #-ffi_closure_SYSV_FS]!
468 cfi_adjust_cfa_offset (ffi_closure_SYSV_FS)
469 cfi_rel_offset (x29, 0)
470 cfi_rel_offset (x30, 8)
472 /* Save the argument passing vector registers. */
473 stp q0, q1, [sp, #16 + 0]
474 stp q2, q3, [sp, #16 + 32]
475 stp q4, q5, [sp, #16 + 64]
476 stp q6, q7, [sp, #16 + 96]
480 .globl CNAME(ffi_go_closure_SYSV_V)
481 FFI_HIDDEN(CNAME(ffi_go_closure_SYSV_V))
483 .type CNAME(ffi_go_closure_SYSV_V), #function
484 .size CNAME(ffi_go_closure_SYSV_V), . - CNAME(ffi_go_closure_SYSV_V)
/* Go-closure entry: like ffi_closure_SYSV, but the descriptor arrives in
   the static-chain register x18 — cif/fn are loaded from it and the
   descriptor itself is passed as user_data.  NOTE(review): the call into
   ffi_closure_SYSV_inner and the return-value dispatch are shared with the
   normal closure path on lines not visible in this excerpt.  */
489 CNAME(ffi_go_closure_SYSV):
490 stp x29, x30, [sp, #-ffi_closure_SYSV_FS]!
491 cfi_adjust_cfa_offset (ffi_closure_SYSV_FS)
492 cfi_rel_offset (x29, 0)
493 cfi_rel_offset (x30, 8)
497 /* Save the argument passing core registers. */
498 stp x0, x1, [sp, #16 + 16*N_V_ARG_REG + 0]
499 stp x2, x3, [sp, #16 + 16*N_V_ARG_REG + 16]
500 stp x4, x5, [sp, #16 + 16*N_V_ARG_REG + 32]
501 stp x6, x7, [sp, #16 + 16*N_V_ARG_REG + 48]
503 /* Load ffi_closure_inner arguments. */
504 ldp PTR_REG(0), PTR_REG(1), [x18, #PTR_SIZE]/* load cif, fn */
505 mov x2, x18 /* load user_data */
509 .globl CNAME(ffi_go_closure_SYSV)
510 FFI_HIDDEN(CNAME(ffi_go_closure_SYSV))
512 .type CNAME(ffi_go_closure_SYSV), #function
513 .size CNAME(ffi_go_closure_SYSV), . - CNAME(ffi_go_closure_SYSV)
515 #endif /* FFI_GO_CLOSURES */
516 #endif /* __arm64__ */
/* Mark the stack non-executable for GNU/Linux ELF toolchains; without this
   note the linker may force an executable stack.
   NOTE(review): the matching #endif is not visible in this excerpt.  */
518 #if defined __ELF__ && defined __linux__
519 .section .note.GNU-stack,"",%progbits