2 ; Copyright (c) Microsoft. All rights reserved.
3 ; Licensed under the MIT license. See LICENSE file in the project root for full license information.
12 include <AsmMacros.inc>
13 include AsmConstants.inc
15 ifdef FEATURE_MIXEDMODE
16 IJWNOADThunk__MakeCall equ ?MakeCall@IJWNOADThunk@@KAXXZ
17 IJWNOADThunk__FindThunkTarget equ ?FindThunkTarget@IJWNOADThunk@@QEAAPEBXXZ
19 gfHostConfig equ ?g_fHostConfig@@3KA
20 NDirect__IsHostHookEnabled equ ?IsHostHookEnabled@NDirect@@SAHXZ
22 extern CreateThreadBlockThrow:proc
23 extern TheUMEntryPrestubWorker:proc
24 ifdef FEATURE_MIXEDMODE
25 extern IJWNOADThunk__FindThunkTarget:proc
27 extern UMEntryPrestubUnwindFrameChainHandler:proc
28 extern UMThunkStubUnwindFrameChainHandler:proc
29 extern g_TrapReturningThreads:dword
30 extern UM2MDoADCallBack:proc
31 extern ReverseEnterRuntimeHelper:proc
32 extern ReverseLeaveRuntimeHelper:proc
33 ifdef FEATURE_INCLUDE_ALL_INTERFACES
34 extern gfHostConfig:dword
35 extern NDirect__IsHostHookEnabled:proc
37 extern UMThunkStubRareDisableWorker:proc
41 ; METHODDESC_REGISTER: UMEntryThunk*
;-----------------------------------------------------------------------------
; TheUMEntryPrestub
;   In:    METHODDESC_REGISTER = UMEntryThunk* (per the comment above)
;   Spills the four integer and four xmm argument registers, calls
;   TheUMEntryPrestubWorker(UMEntryThunk*) to set up the exec stub, reloads
;   all argument registers, pops the frame, and then (per the comment below)
;   tail-calls the exec stub; the tail jump itself is not visible in this
;   view.  Unwinds via UMEntryPrestubUnwindFrameChainHandler.
;-----------------------------------------------------------------------------
43 NESTED_ENTRY TheUMEntryPrestub, _TEXT, UMEntryPrestubUnwindFrameChainHandler

; Frame layout: outgoing argument homes at the bottom, xmm save area above.
45 TheUMEntryPrestub_STACK_FRAME_SIZE = SIZEOF_MAX_OUTGOING_ARGUMENT_HOMES

48 TheUMEntryPrestub_XMM_SAVE_OFFSET = TheUMEntryPrestub_STACK_FRAME_SIZE
49 TheUMEntryPrestub_STACK_FRAME_SIZE = TheUMEntryPrestub_STACK_FRAME_SIZE + SIZEOF_MAX_FP_ARG_SPILL

51 ; Ensure that the new rsp will be 16-byte aligned. Note that the caller has
52 ; already pushed the return address.
53 if ((TheUMEntryPrestub_STACK_FRAME_SIZE + 8) MOD 16) ne 0
54 TheUMEntryPrestub_STACK_FRAME_SIZE = TheUMEntryPrestub_STACK_FRAME_SIZE + 8
; NOTE(review): the matching 'endif' is outside the visible lines of this view.

57 alloc_stack TheUMEntryPrestub_STACK_FRAME_SIZE

; Save integer argument registers into the caller-allocated argument homes,
; which sit just above the return address (hence FRAME_SIZE + 8h and up).
59 save_reg_postrsp rcx, TheUMEntryPrestub_STACK_FRAME_SIZE + 8h
60 save_reg_postrsp rdx, TheUMEntryPrestub_STACK_FRAME_SIZE + 10h
61 save_reg_postrsp r8, TheUMEntryPrestub_STACK_FRAME_SIZE + 18h
62 save_reg_postrsp r9, TheUMEntryPrestub_STACK_FRAME_SIZE + 20h

; Save xmm argument registers in the 16-byte-aligned spill area inside the frame.
64 save_xmm128_postrsp xmm0, TheUMEntryPrestub_XMM_SAVE_OFFSET
65 save_xmm128_postrsp xmm1, TheUMEntryPrestub_XMM_SAVE_OFFSET + 10h
66 save_xmm128_postrsp xmm2, TheUMEntryPrestub_XMM_SAVE_OFFSET + 20h
67 save_xmm128_postrsp xmm3, TheUMEntryPrestub_XMM_SAVE_OFFSET + 30h

72 ; Do prestub-specific stuff
74 mov rcx, METHODDESC_REGISTER
75 call TheUMEntryPrestubWorker

78 ; we're going to tail call to the exec stub that we just setup

; Reload the integer argument registers from the argument homes...
81 mov rcx, [rsp + TheUMEntryPrestub_STACK_FRAME_SIZE + 8h]
82 mov rdx, [rsp + TheUMEntryPrestub_STACK_FRAME_SIZE + 10h]
83 mov r8, [rsp + TheUMEntryPrestub_STACK_FRAME_SIZE + 18h]
84 mov r9, [rsp + TheUMEntryPrestub_STACK_FRAME_SIZE + 20h]

; ...and the xmm argument registers from the aligned spill area.
86 movdqa xmm0, [rsp + TheUMEntryPrestub_XMM_SAVE_OFFSET]
87 movdqa xmm1, [rsp + TheUMEntryPrestub_XMM_SAVE_OFFSET + 10h]
88 movdqa xmm2, [rsp + TheUMEntryPrestub_XMM_SAVE_OFFSET + 20h]
89 movdqa xmm3, [rsp + TheUMEntryPrestub_XMM_SAVE_OFFSET + 30h]

; Pop the frame; the tail jump to the exec stub follows (elided in this view).
94 add rsp, TheUMEntryPrestub_STACK_FRAME_SIZE

97 NESTED_END TheUMEntryPrestub, _TEXT
101 ; METHODDESC_REGISTER: UMEntryThunk*
;-----------------------------------------------------------------------------
; UMThunkStub
;   In:    METHODDESC_REGISTER = UMEntryThunk* (per the comment above)
;   Unmanaged-to-managed thunk.  Establishes an rbp frame, obtains the
;   current Thread* (r12), switches the thread to cooperative GC mode,
;   optionally notifies the host (FEATURE_INCLUDE_ALL_INTERFACES), compares
;   the thread's AppDomain id against the thunk's, copies stack args if any,
;   calls the IL stub, then re-enables preemptive mode and returns.
;   NOTE(review): several branch targets referenced below (InCooperativeMode,
;   CopyStackArgs, and the rare-path entry labels) are outside the visible
;   lines of this view.
;-----------------------------------------------------------------------------
103 NESTED_ENTRY UMThunkStub, _TEXT, UMThunkStubUnwindFrameChainHandler

; --- Frame size computation (accumulated top-down, then negated below) ---
105 UMThunkStubAMD64_STACK_FRAME_SIZE = 0

107 ; number of integer registers saved in prologue
108 UMThunkStubAMD64_NUM_REG_PUSHES = 2
109 UMThunkStubAMD64_STACK_FRAME_SIZE = UMThunkStubAMD64_STACK_FRAME_SIZE + (UMThunkStubAMD64_NUM_REG_PUSHES * 8)

111 ; rare path spill area
112 UMThunkStubAMD64_RARE_PATH_SPILL_SIZE = 10h
113 UMThunkStubAMD64_STACK_FRAME_SIZE = UMThunkStubAMD64_STACK_FRAME_SIZE + UMThunkStubAMD64_RARE_PATH_SPILL_SIZE
114 UMThunkStubAMD64_RARE_PATH_SPILL_NEGOFFSET = UMThunkStubAMD64_STACK_FRAME_SIZE

; One byte-wide flag slot (stored/loaded as a byte below) recording whether
; ReverseLeaveRuntime must be called on the way out.
119 UMThunkStubAMD64_STACK_FRAME_SIZE = UMThunkStubAMD64_STACK_FRAME_SIZE + 8
120 UMThunkStubAMD64_HOST_NOTIFY_FLAG_NEGOFFSET = UMThunkStubAMD64_STACK_FRAME_SIZE

; xmm0-xmm3 save area (used with movdqa, so it must be 16-byte aligned).
123 UMThunkStubAMD64_STACK_FRAME_SIZE = UMThunkStubAMD64_STACK_FRAME_SIZE + SIZEOF_MAX_FP_ARG_SPILL

125 ; Ensure that the offset of the XMM save area will be 16-byte aligned.
126 if ((UMThunkStubAMD64_STACK_FRAME_SIZE + 8) MOD 16) ne 0 ; +8 for caller-pushed return address
127 UMThunkStubAMD64_STACK_FRAME_SIZE = UMThunkStubAMD64_STACK_FRAME_SIZE + 8
; NOTE(review): the matching 'endif' is outside the visible lines of this view.

130 UMThunkStubAMD64_XMM_SAVE_NEGOFFSET = UMThunkStubAMD64_STACK_FRAME_SIZE

132 ; Add in the callee scratch area size.
133 UMThunkStubAMD64_CALLEE_SCRATCH_SIZE = SIZEOF_MAX_OUTGOING_ARGUMENT_HOMES
134 UMThunkStubAMD64_STACK_FRAME_SIZE = UMThunkStubAMD64_STACK_FRAME_SIZE + UMThunkStubAMD64_CALLEE_SCRATCH_SIZE

136 ; Now we have the full size of the stack frame. The offsets have been computed relative to the
137 ; top, so negate them to make them relative to the post-prologue rsp.
138 UMThunkStubAMD64_FRAME_OFFSET = UMThunkStubAMD64_CALLEE_SCRATCH_SIZE
139 UMThunkStubAMD64_RARE_PATH_SPILL_OFFSET = UMThunkStubAMD64_STACK_FRAME_SIZE - UMThunkStubAMD64_FRAME_OFFSET - UMThunkStubAMD64_RARE_PATH_SPILL_NEGOFFSET
140 UMThunkStubAMD64_HOST_NOTIFY_FLAG_OFFSET = UMThunkStubAMD64_STACK_FRAME_SIZE - UMThunkStubAMD64_FRAME_OFFSET - UMThunkStubAMD64_HOST_NOTIFY_FLAG_NEGOFFSET
141 UMThunkStubAMD64_XMM_SAVE_OFFSET = UMThunkStubAMD64_STACK_FRAME_SIZE - UMThunkStubAMD64_FRAME_OFFSET - UMThunkStubAMD64_XMM_SAVE_NEGOFFSET
142 UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET = UMThunkStubAMD64_STACK_FRAME_SIZE + 8 - UMThunkStubAMD64_FRAME_OFFSET ; +8 for return address
143 UMThunkStubAMD64_FIXED_STACK_ALLOC_SIZE = UMThunkStubAMD64_STACK_FRAME_SIZE - (UMThunkStubAMD64_NUM_REG_PUSHES * 8)

; Build-time guard: the C++-side constant must match the offset computed here.
145 .errnz UMTHUNKSTUB_HOST_NOTIFY_FLAG_RBPOFFSET - UMThunkStubAMD64_HOST_NOTIFY_FLAG_OFFSET, update UMTHUNKSTUB_HOST_NOTIFY_FLAG_RBPOFFSET

; --- Stack layout (top of frame to entry rsp) ---
149 ; [ callee scratch ] <-- new RSP
153 ; {optional stack args passed to callee}
158 ; {optional padding to align xmm regs}
159 ; HOST_NOTIFY_FLAG (needs to make ReverseLeaveRuntime call flag)
160 ; [rare path spill area]
161 ; [rare path spill area]
164 ; return address <-- entry RSP

; --- Prologue: push rbp, allocate the fixed frame, anchor rbp ---
174 push_nonvol_reg rbp ; stack_args
175 alloc_stack UMThunkStubAMD64_FIXED_STACK_ALLOC_SIZE
176 set_frame rbp, UMThunkStubAMD64_FRAME_OFFSET ; stack_args
; Default: no host notification to undo on exit.
177 mov byte ptr [rbp + UMThunkStubAMD64_HOST_NOTIFY_FLAG_OFFSET], 0 ; hosted

; Get the current Thread* into rax, then keep it in r12 across calls.
183 CALL_GETTHREAD ; will not trash r10
189 mov r12, rax ; r12 <- Thread*

192 ; disable preemptive GC
194 mov dword ptr [r12 + OFFSETOF__Thread__m_fPreemptiveGCDisabled], 1

197 ; catch returning thread here if a GC is in progress
199 cmp [g_TrapReturningThreads], 0
200 jnz DoTrapReturningThreadsTHROW

; Hosted builds: ask whether the host wants reverse-enter notifications.
204 ifdef FEATURE_INCLUDE_ALL_INTERFACES
205 test [gfHostConfig], ASM_CLRTASKHOSTED ; inlined NDirect::IsHostHookEnabled ; hosted
207 call IsHostHookEnabledHelper
210 jnz NotifyHost_ReverseEnterRuntime ; hosted
211 Done_NotifyHost_ReverseEnterRuntime:

; Compare the thread's current AppDomain id with the thunk's target domain
; id; the branch on a mismatch is outside the visible lines of this view.
214 mov rax, [r12 + OFFSETOF__Thread__m_pDomain]
215 mov eax, [rax + OFFSETOF__AppDomain__m_dwId]
217 mov r11d, [METHODDESC_REGISTER + OFFSETOF__UMEntryThunk__m_dwDomainId]

; If the signature has stack arguments, go copy them to the outgoing area.
222 mov r11, [METHODDESC_REGISTER + OFFSETOF__UMEntryThunk__m_pUMThunkMarshInfo]
223 mov eax, [r11 + OFFSETOF__UMThunkMarshInfo__m_cbActualArgSize] ; stack_args
224 test rax, rax ; stack_args
225 jnz CopyStackArgs ; stack_args

; rax <- IL stub entry point; the call through it is elided in this view.
229 mov rax, [r11 + OFFSETOF__UMThunkMarshInfo__m_pILStub] ; rax <- Stub*

234 ; enable preemptive GC
236 mov dword ptr [r12 + OFFSETOF__Thread__m_fPreemptiveGCDisabled], 0

; If we notified the host on entry, balance it with a ReverseLeaveRuntime.
238 ifdef FEATURE_INCLUDE_ALL_INTERFACES
239 cmp byte ptr [rbp + UMThunkStubAMD64_HOST_NOTIFY_FLAG_OFFSET], 0 ; hosted
240 jnz NotifyHost_ReverseLeaveRuntime ; hosted
241 Done_NotifyHost_ReverseLeaveRuntime:

; Epilogue: restore rsp from rbp (the pop rbp / ret are elided in this view).
245 lea rsp, [rbp - UMThunkStubAMD64_FRAME_OFFSET + UMThunkStubAMD64_FIXED_STACK_ALLOC_SIZE]

; --- Rare path (entry label elided): save arg registers, create the Thread
; block (may throw), then restore the arg registers and METHODDESC_REGISTER.
252 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h], rcx
253 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 8h], rdx
254 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 10h], r8
255 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 18h], r9

257 ; @CONSIDER: mark UMEntryThunks that have FP params and only save/restore xmm regs on those calls
258 ; initial measurements indicate that this could be worth about a 5% savings in reverse
260 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0h], xmm0
261 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 10h], xmm1
262 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 20h], xmm2
263 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 30h], xmm3

; METHODDESC_REGISTER is volatile across the call, so spill/reload it.
265 mov [rbp + UMThunkStubAMD64_RARE_PATH_SPILL_OFFSET], METHODDESC_REGISTER
266 call CreateThreadBlockThrow
267 mov METHODDESC_REGISTER, [rbp + UMThunkStubAMD64_RARE_PATH_SPILL_OFFSET]

269 mov rcx, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h]
270 mov rdx, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 8h]
271 mov r8, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 10h]
272 mov r9, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 18h]

274 ; @CONSIDER: mark UMEntryThunks that have FP params and only save/restore xmm regs on those calls
275 movdqa xmm0, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0h]
276 movdqa xmm1, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 10h]
277 movdqa xmm2, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 20h]
278 movdqa xmm3, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 30h]

; --- Rare path: a GC is in progress, cooperate with it, then resume ---
282 DoTrapReturningThreadsTHROW:

284 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h], rcx
285 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 8h], rdx
286 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 10h], r8
287 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 18h], r9

289 ; @CONSIDER: mark UMEntryThunks that have FP params and only save/restore xmm regs on those calls
290 ; initial measurements indicate that this could be worth about a 5% savings in reverse
292 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0h], xmm0
293 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 10h], xmm1
294 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 20h], xmm2
295 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 30h], xmm3

297 mov [rbp + UMThunkStubAMD64_RARE_PATH_SPILL_OFFSET], METHODDESC_REGISTER
298 mov rcx, r12 ; Thread* pThread
299 mov rdx, METHODDESC_REGISTER ; UMEntryThunk* pUMEntry
300 call UMThunkStubRareDisableWorker
301 mov METHODDESC_REGISTER, [rbp + UMThunkStubAMD64_RARE_PATH_SPILL_OFFSET]

303 mov rcx, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h]
304 mov rdx, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 8h]
305 mov r8, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 10h]
306 mov r9, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 18h]

308 ; @CONSIDER: mark UMEntryThunks that have FP params and only save/restore xmm regs on those calls
309 movdqa xmm0, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0h]
310 movdqa xmm1, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 10h]
311 movdqa xmm2, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 20h]
312 movdqa xmm3, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 30h]

; InCooperativeMode is defined outside the visible lines of this view.
314 jmp InCooperativeMode

; --- Stack-argument copy path (CopyStackArgs label elided in this view) ---
317 ; rax = cbStackArgs (with 20h for register args subtracted out already)

322 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h], rcx
323 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 8h], rdx
324 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 10h], r8

; rcx = source (caller's stack args, past the four register homes);
; rdx = destination (outgoing area past the callee scratch).  The copy
; loop itself is outside the visible lines of this view.
326 ; rax = number of bytes
328 lea rcx, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + SIZEOF_MAX_OUTGOING_ARGUMENT_HOMES]
329 lea rdx, [rsp + UMThunkStubAMD64_CALLEE_SCRATCH_SIZE]

332 ; rax = number of bytes

342 mov rcx, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h]
343 mov rdx, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 8h]
344 mov r8, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 10h]

; --- Hosted-build rare paths: notify host on runtime enter/leave ---
348 ifdef FEATURE_INCLUDE_ALL_INTERFACES
349 NotifyHost_ReverseEnterRuntime:
350 mov [rbp + UMThunkStubAMD64_RARE_PATH_SPILL_OFFSET], METHODDESC_REGISTER

352 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h], rcx
353 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 8h], rdx
354 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 10h], r8
355 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 18h], r9

357 ; @CONSIDER: mark UMEntryThunks that have FP params and only save/restore xmm regs on those calls
358 ; initial measurements indicate that this could be worth about a 5% savings in reverse
360 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0h], xmm0
361 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 10h], xmm1
362 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 20h], xmm2
363 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 30h], xmm3

; Record that a matching ReverseLeaveRuntime call is now required on exit.
366 call ReverseEnterRuntimeHelper
367 mov byte ptr [rbp + UMThunkStubAMD64_HOST_NOTIFY_FLAG_OFFSET], 1

369 mov rcx, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h]
370 mov rdx, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 8h]
371 mov r8, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 10h]
372 mov r9, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 18h]

374 ; @CONSIDER: mark UMEntryThunks that have FP params and only save/restore xmm regs on those calls
375 movdqa xmm0, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0h]
376 movdqa xmm1, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 10h]
377 movdqa xmm2, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 20h]
378 movdqa xmm3, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 30h]

380 mov METHODDESC_REGISTER, [rbp + UMThunkStubAMD64_RARE_PATH_SPILL_OFFSET]

382 jmp Done_NotifyHost_ReverseEnterRuntime

; Preserve the integer and FP return values across the host notification.
384 NotifyHost_ReverseLeaveRuntime:

387 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h], rax
388 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0h], xmm0

391 call ReverseLeaveRuntimeHelper
392 mov byte ptr [rbp + UMThunkStubAMD64_HOST_NOTIFY_FLAG_OFFSET], 0

395 mov rax, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h]
396 movdqa xmm0, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0h]

398 jmp Done_NotifyHost_ReverseLeaveRuntime

; --- Wrong-AppDomain path (entry label elided): home all args, then marshal
; the call through UM2MDoADCallBack to transition AppDomains.
403 ; home register args to the stack
405 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h], rcx
406 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 8h], rdx
407 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 10h], r8
408 mov [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 18h], r9

411 ; save off xmm registers
413 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0h], xmm0
414 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 10h], xmm1
415 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 20h], xmm2
416 movdqa [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 30h], xmm3

419 ; call our helper to perform the AD transition
; rcx = UMEntryThunk*, r8 = pointer to homed args, r9d = stack arg size.
; The rdx argument setup is outside the visible lines of this view.
421 mov rcx, METHODDESC_REGISTER
422 lea r8, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET]
423 mov rax, [METHODDESC_REGISTER + OFFSETOF__UMEntryThunk__m_pUMThunkMarshInfo]
424 mov r9d, [rax + OFFSETOF__UMThunkMarshInfo__m_cbActualArgSize]
425 call UM2MDoADCallBack

427 ; restore return value
428 mov rax, [rbp + UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h]
429 movdqa xmm0, [rbp + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0h]

433 NESTED_END UMThunkStub, _TEXT
436 ; EXTERN_C void __stdcall UM2MThunk_WrapperHelper(
437 ; void *pThunkArgs, ; rcx
439 ; void *pAddr, ; r8 // not used
440 ; UMEntryThunk *pEntryThunk, ; r9
441 ; Thread *pThread); ; [entry_sp + 28h]
;-----------------------------------------------------------------------------
; UM2MThunk_WrapperHelper
;   Signature per the comment above:
;     void UM2MThunk_WrapperHelper(void *pThunkArgs,   ; rcx
;                                  ...,                ; rdx (elided above)
;                                  void *pAddr,        ; r8  (not used)
;                                  UMEntryThunk *pEntryThunk, ; r9
;                                  Thread *pThread)    ; [entry_sp + 28h]
;   Runs on the correct-AppDomain side of an AD callback: reloads the xmm
;   argument registers from the homed-args block saved by UMThunkStub, looks
;   up the IL stub, and invokes it (the call itself is elided in this view).
;   NOTE(review): the prologue's three register pushes implied by
;   UM2MThunk_WrapperHelper_NUM_REG_PUSHES are not visible in this view.
;-----------------------------------------------------------------------------
443 NESTED_ENTRY UM2MThunk_WrapperHelper, _TEXT

446 UM2MThunk_WrapperHelper_STACK_FRAME_SIZE = 0

448 ; number of integer registers saved in prologue
449 UM2MThunk_WrapperHelper_NUM_REG_PUSHES = 3
450 UM2MThunk_WrapperHelper_STACK_FRAME_SIZE = UM2MThunk_WrapperHelper_STACK_FRAME_SIZE + (UM2MThunk_WrapperHelper_NUM_REG_PUSHES * 8)

452 UM2MThunk_WrapperHelper_CALLEE_SCRATCH_SIZE = SIZEOF_MAX_OUTGOING_ARGUMENT_HOMES
453 UM2MThunk_WrapperHelper_STACK_FRAME_SIZE = UM2MThunk_WrapperHelper_STACK_FRAME_SIZE + UM2MThunk_WrapperHelper_CALLEE_SCRATCH_SIZE

455 ; Ensure that rsp remains 16-byte aligned
456 if ((UM2MThunk_WrapperHelper_STACK_FRAME_SIZE + 8) MOD 16) ne 0 ; +8 for caller-pushed return address
457 UM2MThunk_WrapperHelper_STACK_FRAME_SIZE = UM2MThunk_WrapperHelper_STACK_FRAME_SIZE + 8
; NOTE(review): the matching 'endif' is outside the visible lines of this view.

460 UM2MThunk_WrapperHelper_FRAME_OFFSET = UM2MThunk_WrapperHelper_CALLEE_SCRATCH_SIZE
461 UM2MThunk_WrapperHelper_FIXED_STACK_ALLOC_SIZE = UM2MThunk_WrapperHelper_STACK_FRAME_SIZE - (UM2MThunk_WrapperHelper_NUM_REG_PUSHES * 8)

466 alloc_stack UM2MThunk_WrapperHelper_FIXED_STACK_ALLOC_SIZE
467 set_frame rbp, UM2MThunk_WrapperHelper_FRAME_OFFSET

471 ; We are in cooperative mode and in the correct domain.
472 ; The host has also been notified that we've entered the
473 ; runtime. All we have left to do is to copy the stack,
474 ; setup the register args and then call the managed target

; Keep the thunk and the homed-args pointer in registers that survive calls.
481 mov METHODDESC_REGISTER, r9

483 mov rsi, rcx ; rsi <- pThunkArgs

; Reload xmm arg registers from UMThunkStub's save area; the offsets are
; expressed relative to the homed-args block that rsi points at.
489 movdqa xmm0, [rsi + UMThunkStubAMD64_XMM_SAVE_OFFSET - UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h]
490 movdqa xmm1, [rsi + UMThunkStubAMD64_XMM_SAVE_OFFSET - UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 10h]
491 movdqa xmm2, [rsi + UMThunkStubAMD64_XMM_SAVE_OFFSET - UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 20h]
492 movdqa xmm3, [rsi + UMThunkStubAMD64_XMM_SAVE_OFFSET - UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 30h]

; Resolve the IL stub entry point (the call through rax is elided here).
494 mov rax, [METHODDESC_REGISTER + OFFSETOF__UMEntryThunk__m_pUMThunkMarshInfo] ; rax <- UMThunkMarshInfo*
495 mov rax, [rax + OFFSETOF__UMThunkMarshInfo__m_pILStub] ; rax <- Stub*

498 ; make sure we don't trash the return value

; Write xmm0 back to the shared save area so the caller can recover it.
500 movdqa [rsi + UMThunkStubAMD64_XMM_SAVE_OFFSET - UMThunkStubAMD64_ARGUMENTS_STACK_HOME_OFFSET + 0h], xmm0

; Epilogue: restore rsp (the register pops / ret are elided in this view).
502 lea rsp, [rbp - UM2MThunk_WrapperHelper_FRAME_OFFSET + UM2MThunk_WrapperHelper_FIXED_STACK_ALLOC_SIZE]

; --- Stack-argument copy setup (entry label elided in this view) ---
510 ; rdx = cbStackArgs (with 20h for register args subtracted out already)

; rsi = source (caller's stack args past the register homes),
; rdi = destination (outgoing area past the callee scratch).
518 lea rsi, [rcx + SIZEOF_MAX_OUTGOING_ARGUMENT_HOMES]
519 lea rdi, [rsp + UM2MThunk_WrapperHelper_CALLEE_SCRATCH_SIZE]

530 NESTED_END UM2MThunk_WrapperHelper, _TEXT
533 ifdef FEATURE_INCLUDE_ALL_INTERFACES
;-----------------------------------------------------------------------------
; IsHostHookEnabledHelper (FEATURE_INCLUDE_ALL_INTERFACES only)
;   Calls NDirect__IsHostHookEnabled while preserving the xmm argument
;   registers xmm0-xmm3 around the call, so the caller's FP arguments
;   survive.  Frame: 20h callee scratch + 40h xmm save area.
;   NOTE(review): any integer-register saves and the return sequence are
;   outside the visible lines of this view.
;-----------------------------------------------------------------------------
535 NESTED_ENTRY IsHostHookEnabledHelper, _TEXT

; 20h = outgoing argument scratch for the call; 40h = 4 x 16-byte xmm slots.
543 IsHostHookEnabledHelper_FIXED_STACK_ALLOC_SIZE = 20h + 40h

545 alloc_stack IsHostHookEnabledHelper_FIXED_STACK_ALLOC_SIZE

; Save the xmm argument registers above the scratch area (16-byte aligned).
549 movdqa [rsp + 20h + 0h], xmm0
550 movdqa [rsp + 20h + 10h], xmm1
551 movdqa [rsp + 20h + 20h], xmm2
552 movdqa [rsp + 20h + 30h], xmm3

554 call NDirect__IsHostHookEnabled

; Restore the xmm argument registers for the caller.
556 movdqa xmm0, [rsp + 20h + 0h]
557 movdqa xmm1, [rsp + 20h + 10h]
558 movdqa xmm2, [rsp + 20h + 20h]
559 movdqa xmm3, [rsp + 20h + 30h]

562 add rsp, IsHostHookEnabledHelper_FIXED_STACK_ALLOC_SIZE

569 NESTED_END IsHostHookEnabledHelper, _TEXT
571 endif ; FEATURE_INCLUDE_ALL_INTERFACES
574 ifdef FEATURE_MIXEDMODE
;-----------------------------------------------------------------------------
; IJWNOADThunk::MakeCall (FEATURE_MIXEDMODE only)
;   In:  METHODDESC_REGISTER = IJWNOADThunk* (per the comment below)
;   Saves the integer and xmm argument registers, calls
;   IJWNOADThunk::FindThunkTarget(this) to resolve the target, restores the
;   argument registers, and transfers control to the target in rax (the
;   frame allocation, integer-register reloads, and tail jump are outside
;   the visible lines of this view).
;-----------------------------------------------------------------------------
575 NESTED_ENTRY IJWNOADThunk__MakeCall, _TEXT
576 ; METHODDESC_REGISTER = IJWNOADThunk*

; Save integer argument registers; offsets 70h..88h place them in the
; caller-allocated argument homes above this frame.
580 save_reg_postrsp rcx, 70h
581 save_reg_postrsp rdx, 78h
582 save_reg_postrsp r8, 80h
583 save_reg_postrsp r9, 88h

; Save xmm argument registers in the 16-byte-aligned area at 20h..50h.
585 save_xmm128_postrsp xmm0, 20h
586 save_xmm128_postrsp xmm1, 30h
587 save_xmm128_postrsp xmm2, 40h
588 save_xmm128_postrsp xmm3, 50h

; rcx = this; FindThunkTarget returns the code address to invoke.
591 mov rcx, METHODDESC_REGISTER
592 call IJWNOADThunk__FindThunkTarget

; Restore xmm argument registers (integer-register reloads are elided here).
594 movdqa xmm0, [rsp + 20h]
595 movdqa xmm1, [rsp + 30h]
596 movdqa xmm2, [rsp + 40h]
597 movdqa xmm3, [rsp + 50h]

604 ; The target is in rax

607 NESTED_END IJWNOADThunk__MakeCall, _TEXT
608 endif ; FEATURE_MIXEDMODE