From fcf27188d7530a9931ba59e6231463c0acf0e8ba Mon Sep 17 00:00:00 2001
From: Craig Topper
Date: Sat, 21 Oct 2017 02:26:00 +0000
Subject: [PATCH] [X86] Do not generate __multi3 for mul i128 on X86

Summary: __multi3 is not available on x86 (32-bit). Setting the libcall
name for MUL_I128 to nullptr forces DAGTypeLegalizer::ExpandIntRes_MUL to
generate instructions for the 128-bit multiply instead of a call to an
undefined function. This fixes PR20871, though it may be worth looking at
why licm and indvars combine to generate 65-bit multiplies in that test.

Patch by Riyaz V Puthiyapurayil

Reviewers: craig.topper, schweitz

Reviewed By: craig.topper, schweitz

Subscribers: RKSimon, llvm-commits

Differential Revision: https://reviews.llvm.org/D38668

llvm-svn: 316254
---
 llvm/lib/Target/X86/X86ISelLowering.cpp |     1 +
 llvm/test/CodeGen/X86/libcall-sret.ll   |     6 +-
 llvm/test/CodeGen/X86/mul-i1024.ll      | 10723 ++++++++++++++++++------------
 llvm/test/CodeGen/X86/mul-i256.ll       |   470 +-
 llvm/test/CodeGen/X86/mul-i512.ll       |  2177 +++---
 llvm/test/CodeGen/X86/mul128.ll         |    79 +
 llvm/test/CodeGen/X86/overflow.ll       |    73 +-
 7 files changed, 8461 insertions(+), 5068 deletions(-)

diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index a1f5e8b..a40c236 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -1585,6 +1585,7 @@ X86TargetLowering::X86TargetLowering(const X86TargetMachine &TM,
 setLibcallName(RTLIB::SHL_I128, nullptr);
 setLibcallName(RTLIB::SRL_I128, nullptr);
 setLibcallName(RTLIB::SRA_I128, nullptr);
+ setLibcallName(RTLIB::MUL_I128, nullptr);
 }
 // Combine sin / cos into one node or libcall if possible.
diff --git a/llvm/test/CodeGen/X86/libcall-sret.ll b/llvm/test/CodeGen/X86/libcall-sret.ll
index 4ef0a78..3c484af 100644
--- a/llvm/test/CodeGen/X86/libcall-sret.ll
+++ b/llvm/test/CodeGen/X86/libcall-sret.ll
@@ -22,7 +22,7 @@ define void @test_sret_libcall(i128 %l, i128 %r) {
 ; CHECK: pushl 72(%esp)
 ; CHECK: pushl [[SRET_ADDR]]
-; CHECK: calll __multi3
+; CHECK: calll __udivti3
 ; CHECK: addl $44, %esp
 ; CHECK-DAG: movl 8(%esp), [[RES0:%[a-z]+]]
@@ -33,7 +33,7 @@ define void @test_sret_libcall(i128 %l, i128 %r) {
 ; CHECK-DAG: movl [[RES1]], var+4
 ; CHECK-DAG: movl [[RES2]], var+8
 ; CHECK-DAG: movl [[RES3]], var+12
- %prod = mul i128 %l, %r
- store i128 %prod, i128* @var
+ %quot = udiv i128 %l, %r
+ store i128 %quot, i128* @var
 ret void
 }
diff --git a/llvm/test/CodeGen/X86/mul-i1024.ll b/llvm/test/CodeGen/X86/mul-i1024.ll
index 87661004..dd8bdca 100644
--- a/llvm/test/CodeGen/X86/mul-i1024.ll
+++ b/llvm/test/CodeGen/X86/mul-i1024.ll
@@ -10,4226 +10,6715 @@ define void @test_1024(i1024* %a, i1024* %b, i1024* %out) nounwind {
 ; X32-NEXT: pushl %ebx
 ; X32-NEXT: pushl %edi
 ; X32-NEXT: pushl %esi
-; X32-NEXT: andl $-8, %esp
-; X32-NEXT: subl $2632, %esp # imm = 0xA48
-; X32-NEXT: movl 8(%ebp), %eax
-; X32-NEXT: movl 64(%eax), %ecx
-; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
-; X32-NEXT: movl 68(%eax), %ecx
-; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
-; X32-NEXT: movl 72(%eax), %ecx
-; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
-; X32-NEXT: movl 76(%eax), %ecx
-; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
-; X32-NEXT: movl 80(%eax), %ecx
-; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
-; X32-NEXT: movl 84(%eax), %ecx
-; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
-; X32-NEXT: movl 88(%eax), %ecx
-; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
-; X32-NEXT: movl 92(%eax),
%ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 96(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 100(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 104(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 108(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 112(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 116(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 120(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 124(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl (%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 4(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 8(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 12(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 16(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 20(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 24(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 28(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 32(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 36(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 40(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 44(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 48(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 52(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 56(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 60(%eax), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: subl $996, %esp # imm = 0x3E4 ; X32-NEXT: movl 12(%ebp), %eax -; X32-NEXT: movl 48(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 52(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 56(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 60(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 32(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 36(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 40(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 44(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 16(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 20(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 24(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 28(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 8(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 12(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 112(%eax), %ecx 
-; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 116(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 120(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 124(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 96(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 100(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 104(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 108(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 80(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 84(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 88(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 92(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 64(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 68(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 72(%eax), %edi -; X32-NEXT: movl 76(%eax), %esi -; X32-NEXT: movl (%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 4(%eax), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edx -; X32-NEXT: pushl %ecx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %esi +; X32-NEXT: movl 32(%eax), %eax +; X32-NEXT: movl %eax, -188(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl 8(%ebp), %esi +; X32-NEXT: movl 48(%esi), %eax +; X32-NEXT: movl %eax, -440(%ebp) # 4-byte Spill +; X32-NEXT: mull %ecx +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: movl %edx, -140(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -132(%ebp) # 4-byte Spill +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl %edx, -884(%ebp) # 4-byte Spill +; X32-NEXT: movl 32(%esi), %eax +; X32-NEXT: movl %eax, -416(%ebp) # 4-byte Spill +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -400(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -324(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: movl %edx, %eax +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %edi, %ecx +; X32-NEXT: movl %ecx, -204(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -892(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 36(%eax), %eax +; X32-NEXT: movl %eax, -148(%ebp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %edx, -236(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, %edi +; X32-NEXT: movl %edi, -304(%ebp) # 4-byte Spill +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movl %edi, -80(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %eax +; X32-NEXT: adcl $0, %eax +; X32-NEXT: movl %eax, -220(%ebp) # 4-byte Spill +; X32-NEXT: movl 36(%esi), 
%eax +; X32-NEXT: movl %eax, -316(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %ecx, -124(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -184(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, %edx +; X32-NEXT: movl -400(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %esi, %edx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl %ecx, -64(%ebp) # 4-byte Spill +; X32-NEXT: movl -324(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl %ebx, -100(%ebp) # 4-byte Spill +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -656(%ebp) # 4-byte Spill +; X32-NEXT: leal (%ebx,%edi), %eax +; X32-NEXT: movl %edx, %edi +; X32-NEXT: leal (%ecx,%edi), %edx +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: movl %edx, -700(%ebp) # 4-byte Spill +; X32-NEXT: seto %al +; X32-NEXT: lahf +; X32-NEXT: movl %eax, %eax +; X32-NEXT: movl %eax, -640(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -96(%ebp) # 4-byte Spill +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movl %edi, -112(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, -64(%ebp) # 4-byte Folded Spill ; X32-NEXT: movl %esi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: pushl %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 
4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; 
X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload +; X32-NEXT: setb -160(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl (%eax), %eax +; X32-NEXT: movl %eax, -168(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl 8(%ebp), %ecx +; X32-NEXT: movl 16(%ecx), %eax +; X32-NEXT: movl %eax, -348(%ebp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %edx, -320(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -180(%ebp) # 4-byte Spill +; X32-NEXT: addl %esi, %eax +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl %edx, -428(%ebp) # 4-byte Spill +; X32-NEXT: movl (%ecx), %eax +; X32-NEXT: movl %eax, -260(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -264(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -136(%ebp) # 4-byte Spill +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %edx, %eax +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, -452(%ebp) # 4-byte Spill +; X32-NEXT: movl -132(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl -140(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, -764(%ebp) # 4-byte Spill +; X32-NEXT: movl -324(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %esi, %ecx +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: movl %ebx, -424(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %ebx +; X32-NEXT: movl %ebx, -256(%ebp) # 4-byte Spill +; X32-NEXT: movl -100(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -80(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -204(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -220(%ebp) # 4-byte Folded Spill +; X32-NEXT: setb -388(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 4(%eax), %eax +; X32-NEXT: movl %eax, -92(%ebp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ebx, %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, -28(%ebp) # 4-byte Spill +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movl %edi, -16(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %esi +; X32-NEXT: setb %bh +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movl %esi, -76(%ebp) # 4-byte Spill +; X32-NEXT: movzbl %bh, %eax +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, %edi +; X32-NEXT: movl %edi, -72(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 8(%eax), 
%eax +; X32-NEXT: movl %eax, -108(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ebx, %ebx +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, -104(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -156(%ebp) # 4-byte Spill +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl -256(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: movl %ecx, -120(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, -60(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 52(%eax), %eax +; X32-NEXT: movl %eax, -340(%ebp) # 4-byte Spill +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: movl -140(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl -132(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl %ebx, %edi +; X32-NEXT: movl %edi, -192(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movl %ecx, %edi +; X32-NEXT: setb %cl +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movzbl %cl, %eax +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, -216(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 56(%eax), %eax +; X32-NEXT: movl %eax, -408(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, -392(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -412(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %ecx +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %edi +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: movl %ebx, -272(%ebp) # 4-byte Spill +; X32-NEXT: adcl -216(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -24(%ebp) # 4-byte Spill +; X32-NEXT: addl -28(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -68(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -16(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -420(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: adcl -120(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -616(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: adcl -60(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -612(%ebp) # 4-byte Spill +; X32-NEXT: movl -64(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -184(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -64(%ebp) # 4-byte Spill +; X32-NEXT: movzbl -160(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl -124(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -152(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 40(%eax), %eax +; X32-NEXT: movl %eax, -352(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, -364(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %ebx, -396(%ebp) # 4-byte Spill +; X32-NEXT: movl -324(%ebp), %edx # 4-byte Reload +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %eax, %edi +; X32-NEXT: movl -400(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ebx, %ecx +; X32-NEXT: addl %esi, %edi +; X32-NEXT: movl %edi, -44(%ebp) # 4-byte Spill +; X32-NEXT: adcl -152(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -52(%ebp) # 4-byte Spill +; X32-NEXT: addl -28(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -32(%ebp) # 4-byte Spill +; X32-NEXT: movl -112(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -16(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -196(%ebp) # 4-byte Spill +; X32-NEXT: seto %al +; X32-NEXT: lahf +; X32-NEXT: movl %eax, %eax +; X32-NEXT: movl %eax, 
-456(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: adcl -120(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -504(%ebp) # 4-byte Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: adcl -60(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -508(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %ecx +; X32-NEXT: movl 16(%ecx), %eax +; X32-NEXT: movl %eax, -212(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ebx, %ebx +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %esi, -84(%ebp) # 4-byte Spill +; X32-NEXT: movl 20(%ecx), %eax +; X32-NEXT: movl %eax, -252(%ebp) # 4-byte Spill +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl %edi, %ebx +; X32-NEXT: movl %ebx, -164(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movzbl %bl, %esi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 24(%eax), %eax +; X32-NEXT: movl %eax, -284(%ebp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, -308(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -208(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %ebx +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: movl -84(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: movl %ebx, -40(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %eax, %edx +; X32-NEXT: movl -324(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl %edi, -116(%ebp) # 4-byte Spill +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl -400(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -84(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl %eax, -768(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -296(%ebp) # 4-byte Spill +; X32-NEXT: movl -112(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -164(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %eax, -776(%ebp) # 4-byte Spill +; X32-NEXT: movl -44(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: movl %eax, -772(%ebp) # 4-byte Spill +; X32-NEXT: movl -52(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %ebx, -56(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -780(%ebp) # 4-byte Spill +; X32-NEXT: movl -132(%ebp), %edx # 4-byte Reload +; X32-NEXT: movl %edx, %eax +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl -140(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl %eax, -448(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %eax +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -332(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %eax, -648(%ebp) # 4-byte Spill +; X32-NEXT: movl -272(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -40(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -644(%ebp) # 4-byte Spill +; X32-NEXT: movl -24(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: movl %eax, -572(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 20(%eax), %eax +; X32-NEXT: movl %eax, -216(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl -320(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl %ebx, 
%esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl -180(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl %edi, %esi +; X32-NEXT: movl %esi, -48(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movzbl %bl, %esi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 24(%eax), %eax +; X32-NEXT: movl %eax, -288(%ebp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, -280(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -312(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %edx +; X32-NEXT: addl %eax, %edi +; X32-NEXT: movl -320(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -312(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movl %edi, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ebx +; X32-NEXT: movl %ebx, -20(%ebp) # 4-byte Spill +; X32-NEXT: addl -28(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -228(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -16(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -596(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: adcl -120(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -464(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: adcl -60(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -536(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 4(%eax), %eax +; X32-NEXT: movl %eax, -124(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl -264(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl -136(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl %ebx, %esi +; X32-NEXT: movl %esi, -276(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ecx, %edi +; X32-NEXT: setb %cl +; X32-NEXT: addl %eax, %edi +; X32-NEXT: movl %edi, -584(%ebp) # 4-byte Spill +; X32-NEXT: movzbl %cl, %eax +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, -432(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 8(%eax), %eax +; X32-NEXT: movl %eax, -184(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: movl %ecx, -160(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -268(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %esi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl -264(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %ecx +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -240(%ebp) # 4-byte Spill +; X32-NEXT: adcl -432(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %esi, %edx +; X32-NEXT: addl -28(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -344(%ebp) # 4-byte Spill +; X32-NEXT: movl -276(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %edx +; X32-NEXT: adcl -16(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -404(%ebp) # 4-byte Spill ; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl 
{{[0-9]+}}(%esp) # 4-byte Folded Reload +; X32-NEXT: seto %al +; X32-NEXT: lahf +; X32-NEXT: movl %eax, %edx +; X32-NEXT: popl %eax +; X32-NEXT: movl %edx, -736(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, %edx +; X32-NEXT: adcl -120(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -532(%ebp) # 4-byte Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl %ecx, -172(%ebp) # 4-byte Spill +; X32-NEXT: adcl -60(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -592(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %edx +; X32-NEXT: movl %edx, %eax +; X32-NEXT: movl -116(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -84(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: movl %eax, -328(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %eax +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %eax, -368(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: adcl -164(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -620(%ebp) # 4-byte Spill +; X32-NEXT: movl -240(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -40(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, -788(%ebp) # 4-byte Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: adcl -56(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -784(%ebp) # 4-byte Spill +; X32-NEXT: movl -180(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -100(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %edx, %eax +; X32-NEXT: movl -320(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -204(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl %eax, -804(%ebp) # 4-byte Spill +; X32-NEXT: movl -136(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %edx, %eax +; X32-NEXT: movl -264(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl %eax, -820(%ebp) # 4-byte Spill +; X32-NEXT: movl -180(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -116(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %edx, %eax +; X32-NEXT: adcl %ebx, %esi +; X32-NEXT: movl %esi, -576(%ebp) # 4-byte Spill +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: movl %ecx, -540(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -164(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -800(%ebp) # 4-byte Spill +; X32-NEXT: movl -36(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, -796(%ebp) # 4-byte Spill +; X32-NEXT: movl -20(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -56(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -792(%ebp) # 4-byte Spill +; X32-NEXT: movl -220(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -220(%ebp) # 4-byte Spill +; X32-NEXT: movzbl -388(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl -236(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -376(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 40(%eax), %eax +; X32-NEXT: movl %eax, -236(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, -304(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -128(%ebp) # 4-byte Spill +; X32-NEXT: movl -100(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %edi +; X32-NEXT: addl %eax, %edi +; X32-NEXT: movl -204(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: addl %esi, %edi +; X32-NEXT: adcl -376(%ebp), %ecx # 4-byte Folded Reload +; 
X32-NEXT: movl %ecx, %edx +; X32-NEXT: movl -180(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -468(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -80(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl %eax, -816(%ebp) # 4-byte Spill +; X32-NEXT: movl -36(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %edi, -372(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -812(%ebp) # 4-byte Spill +; X32-NEXT: movl -20(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %edx, -292(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, -808(%ebp) # 4-byte Spill +; X32-NEXT: movl -136(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -512(%ebp) # 4-byte Spill +; X32-NEXT: movl -276(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl %eax, -676(%ebp) # 4-byte Spill +; X32-NEXT: seto %al +; X32-NEXT: lahf +; X32-NEXT: movl %eax, %eax +; X32-NEXT: movl %eax, -740(%ebp) # 4-byte Spill +; X32-NEXT: movl -240(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, -624(%ebp) # 4-byte Spill +; X32-NEXT: movl -172(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, -628(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %esi +; X32-NEXT: movl 48(%esi), %eax +; X32-NEXT: movl %eax, -300(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: movl %ebx, -336(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl 52(%esi), %eax +; X32-NEXT: movl %eax, -144(%ebp) # 4-byte Spill +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %edi, %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl %ebx, %esi +; X32-NEXT: movl %esi, -200(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movzbl %bl, %esi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 56(%eax), %eax +; X32-NEXT: movl %eax, -244(%ebp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, -224(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -360(%ebp) # 4-byte Spill +; X32-NEXT: movl -336(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: movl %edi, %edx +; X32-NEXT: movl %edx, -176(%ebp) # 4-byte Spill +; X32-NEXT: adcl -360(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: movl %ebx, -472(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %edi +; X32-NEXT: movl %edi, -436(%ebp) # 4-byte Spill +; X32-NEXT: movl -136(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -336(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl -264(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, -824(%ebp) # 4-byte Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %eax, -588(%ebp) # 4-byte Spill +; X32-NEXT: movl -276(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -200(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -632(%ebp) # 4-byte Spill +; X32-NEXT: movl -240(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: movl %eax, -828(%ebp) # 4-byte Spill +; X32-NEXT: movl -172(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, -636(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: 
movl 64(%eax), %eax +; X32-NEXT: movl %eax, -476(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %esi, -384(%ebp) # 4-byte Spill +; X32-NEXT: movl -116(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %ecx +; X32-NEXT: movl %eax, %edx +; X32-NEXT: movl %edx, -480(%ebp) # 4-byte Spill +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: movl -84(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %eax, -920(%ebp) # 4-byte Spill +; X32-NEXT: movl -28(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: addl %edx, %eax +; X32-NEXT: movl -256(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: adcl -384(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -932(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 80(%eax), %eax +; X32-NEXT: movl %eax, -548(%ebp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, -380(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -380(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %edx, -356(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl %ecx, -948(%ebp) # 4-byte Spill +; X32-NEXT: addl %esi, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl %ebx, -960(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %ecx +; X32-NEXT: movl 80(%ecx), %eax +; X32-NEXT: movl %eax, -552(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ebx, %ebx +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, -528(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -524(%ebp) # 4-byte Spill +; X32-NEXT: movl -136(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %edx, %eax +; X32-NEXT: movl -264(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, -976(%ebp) # 4-byte Spill +; X32-NEXT: movl 64(%ecx), %eax +; X32-NEXT: movl %eax, -520(%ebp) # 4-byte Spill +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, -500(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %esi, -496(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %ecx +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: movl %ecx, -992(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: movl -180(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -320(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl %eax, -1008(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %eax +; X32-NEXT: movl -336(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl %edi, %eax +; X32-NEXT: adcl -176(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -832(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %eax +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -672(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -200(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -836(%ebp) # 4-byte Spill +; X32-NEXT: movl -36(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -472(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -840(%ebp) # 4-byte Spill +; X32-NEXT: movl -20(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -436(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -844(%ebp) # 4-byte Spill +; X32-NEXT: movl -132(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl -100(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -680(%ebp) # 4-byte Spill +; X32-NEXT: 
movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -80(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -856(%ebp) # 4-byte Spill +; X32-NEXT: movl -272(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -372(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, -852(%ebp) # 4-byte Spill +; X32-NEXT: movl -24(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -292(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl %eax, -848(%ebp) # 4-byte Spill +; X32-NEXT: movl -44(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -96(%ebp), %esi # 4-byte Reload ; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %esi -; 
X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl 
{{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: pushl %ecx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax 
-; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: movl %ebx, %esi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: 
pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax 
-; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: addb $127, %al +; X32-NEXT: sahf +; X32-NEXT: popl %eax +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, -860(%ebp) # 4-byte Spill +; X32-NEXT: movl -52(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl %eax, -864(%ebp) # 4-byte Spill +; X32-NEXT: movl -324(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl -400(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -176(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, -868(%ebp) # 4-byte Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -684(%ebp) # 4-byte Spill +; X32-NEXT: movl -112(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -200(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -876(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -472(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: movl %eax, -872(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -436(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %eax, -880(%ebp) # 4-byte Spill +; X32-NEXT: movl -132(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl -140(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, -888(%ebp) # 4-byte Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -688(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -200(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -900(%ebp) # 4-byte Spill +; X32-NEXT: movl -272(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: movl %eax, -896(%ebp) # 4-byte Spill +; X32-NEXT: movl -24(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %eax, -904(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 68(%eax), %eax +; X32-NEXT: movl %eax, -248(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl -384(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl %edi, %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl -480(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl %ebx, %esi +; X32-NEXT: movl %esi, -652(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb -96(%ebp) # 1-byte Folded Spill +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movzbl -96(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edx, %edi +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 72(%eax), %eax +; X32-NEXT: movl %eax, -516(%ebp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl %esi, -484(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -488(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl -384(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: movl -116(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl -480(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: movl %ecx, -692(%ebp) # 4-byte Spill +; X32-NEXT: movl -164(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl -652(%ebp), 
%ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movl %esi, -908(%ebp) # 4-byte Spill +; X32-NEXT: movl -40(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: movl %esi, -916(%ebp) # 4-byte Spill +; X32-NEXT: movl -56(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ebx, %esi +; X32-NEXT: movl %esi, -912(%ebp) # 4-byte Spill +; X32-NEXT: movl -28(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %edx, %esi +; X32-NEXT: movl %esi, -696(%ebp) # 4-byte Spill +; X32-NEXT: adcl -16(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -652(%ebp) # 4-byte Spill +; X32-NEXT: adcl -120(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -924(%ebp) # 4-byte Spill +; X32-NEXT: adcl -60(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -928(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %ecx +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl 84(%eax), %eax +; X32-NEXT: movl %eax, -544(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl -356(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl %ebx, %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl -380(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl %edi, %esi +; X32-NEXT: movl %esi, -660(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movzbl %bl, %esi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 88(%eax), %eax +; X32-NEXT: movl %eax, -580(%ebp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, -600(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -604(%ebp) # 4-byte Spill ; X32-NEXT: movl %edi, %ebx -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 
4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; 
X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: movl %ebx, %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl 
%edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl 
{{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 +; X32-NEXT: addl %eax, %edi +; X32-NEXT: movl -356(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl -28(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -704(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %edx # 4-byte Reload +; X32-NEXT: movl -660(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: movl %edx, -940(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl %edx, -944(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %edx +; X32-NEXT: movl -60(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl %esi, %edi +; X32-NEXT: movl %edi, -936(%ebp) # 4-byte Spill +; X32-NEXT: movl -116(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl %ebx, %edi +; X32-NEXT: movl %edi, -708(%ebp) # 4-byte Spill +; X32-NEXT: adcl -164(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -660(%ebp) # 4-byte Spill +; X32-NEXT: adcl -40(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -952(%ebp) # 4-byte Spill +; X32-NEXT: adcl -56(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl 
%esi, -956(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 84(%eax), %eax +; X32-NEXT: movl %eax, -460(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: movl -528(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %esi, %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -524(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -668(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movzbl %bl, %edi +; X32-NEXT: adcl %edx, %edi +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 88(%eax), %eax +; X32-NEXT: movl %eax, -492(%ebp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl %esi, -556(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -560(%ebp) # 4-byte Spill +; X32-NEXT: movl -524(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: movl -528(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: movl %ebx, -732(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: movl %esi, %edx +; X32-NEXT: movl %edx, -728(%ebp) # 4-byte Spill +; X32-NEXT: addl -136(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -712(%ebp) # 4-byte Spill +; X32-NEXT: movl -668(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -276(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -968(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: adcl -240(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -964(%ebp) # 4-byte Spill +; X32-NEXT: adcl -172(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -972(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 68(%eax), %eax +; X32-NEXT: movl %eax, -444(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: xorl %ebx, %ebx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl -496(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl %edi, %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -500(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -664(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb -96(%ebp) # 1-byte Folded Spill +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movzbl -96(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 72(%eax), %eax +; X32-NEXT: movl %eax, -388(%ebp) # 4-byte Spill +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, -564(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %ebx, -568(%ebp) # 4-byte Spill +; X32-NEXT: movl -500(%ebp), %edx # 4-byte Reload +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %eax, %edi +; X32-NEXT: movl -496(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: movl %edx, %eax +; X32-NEXT: addl -136(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -716(%ebp) # 4-byte Spill +; X32-NEXT: movl -664(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %esi +; X32-NEXT: adcl -276(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -988(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %esi +; X32-NEXT: adcl -240(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -984(%ebp) # 4-byte Spill +; X32-NEXT: movl %ecx, %esi +; X32-NEXT: adcl -172(%ebp), %esi # 4-byte Folded 
Reload +; X32-NEXT: movl %esi, -980(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl -180(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %edx, %esi +; X32-NEXT: movl %esi, -720(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %eax, -664(%ebp) # 4-byte Spill +; X32-NEXT: movl -36(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: movl %edi, -996(%ebp) # 4-byte Spill +; X32-NEXT: movl -20(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: movl %ecx, -1000(%ebp) # 4-byte Spill +; X32-NEXT: movl -524(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: addl %edx, %eax +; X32-NEXT: movl -528(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -320(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -1004(%ebp) # 4-byte Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: addl %edx, %eax +; X32-NEXT: movl %eax, -724(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, -668(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl %ebx, -732(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl %edi, -728(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 12(%eax), %eax +; X32-NEXT: movl %eax, -96(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: movl -156(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %esi, %edi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl -104(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movl %edi, -232(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ebx +; X32-NEXT: setb -88(%ebp) # 1-byte Folded Spill +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: movzbl -88(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %ecx, %edx +; X32-NEXT: addl %edx, %ebx +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %eax, -88(%ebp) # 4-byte Spill +; X32-NEXT: movl -28(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl -76(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %edi, %ecx +; X32-NEXT: movl -72(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -256(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl %edx, -72(%ebp) # 4-byte Spill +; X32-NEXT: movl -232(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl %edx, -76(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %edx +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl -88(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: addl %edi, -72(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -16(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl %edi, -76(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: setb %dl +; X32-NEXT: addl -104(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -232(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movzbl %dl, %edx +; X32-NEXT: adcl %ebx, %edx +; X32-NEXT: movl %edx, -608(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -88(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -28(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -116(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -164(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -40(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -120(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -56(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -60(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: addl %ecx, 
%ebx +; X32-NEXT: movl %ebx, -232(%ebp) # 4-byte Spill +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: movl %edx, -164(%ebp) # 4-byte Spill +; X32-NEXT: adcl -608(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -40(%ebp) # 4-byte Spill +; X32-NEXT: adcl -88(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -56(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 44(%eax), %eax +; X32-NEXT: movl %eax, -120(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: movl %ebx, %ecx +; X32-NEXT: movl -396(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl -364(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -60(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl %esi, %ecx +; X32-NEXT: setb -16(%ebp) # 1-byte Folded Spill +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movzbl -16(%ebp), %ebx # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %esi, %edx +; X32-NEXT: adcl %ecx, %ebx +; X32-NEXT: movl -64(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl -324(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl -152(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -400(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movl %esi, -64(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movl %esi, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %esi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %esi, -88(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %edi +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl -324(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %esi, -64(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -16(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -112(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -16(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, %eax +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -88(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb -88(%ebp) # 1-byte Folded Spill +; X32-NEXT: addl -364(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -60(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movzbl -88(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -60(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl -324(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl -132(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -88(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -112(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -44(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -272(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -52(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -24(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: addl %eax, -88(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl %edx, -192(%ebp) # 4-byte Spill +; X32-NEXT: adcl -60(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -44(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: movl %edi, -52(%ebp) # 4-byte Spill +; X32-NEXT: movl -64(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -456(%ebp), %ecx # 4-byte Reload +; X32-NEXT: pushl %eax +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: addb $127, %al +; X32-NEXT: sahf +; X32-NEXT: popl %eax +; X32-NEXT: adcl -72(%ebp), %eax # 4-byte Folded Reload +; 
X32-NEXT: movl %eax, -608(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -76(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -760(%ebp) # 4-byte Spill +; X32-NEXT: movl -88(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -232(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -756(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %eax +; X32-NEXT: adcl -164(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -752(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: adcl -40(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -748(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: adcl -56(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -744(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 12(%eax), %eax +; X32-NEXT: movl %eax, -60(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: movl -268(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl -160(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl %edi, %ecx +; X32-NEXT: movl %ecx, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edx +; X32-NEXT: setb %cl +; X32-NEXT: addl %eax, %edx +; X32-NEXT: movzbl %cl, %eax +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl %edi, %esi +; X32-NEXT: addl %esi, %edx +; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: movl %eax, -112(%ebp) # 4-byte Spill +; X32-NEXT: movl -136(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl -584(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %edi, %ecx +; X32-NEXT: movl -432(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -264(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: addl %ecx, %esi +; X32-NEXT: movl %esi, -432(%ebp) # 4-byte Spill +; X32-NEXT: movl -24(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: movl %esi, -456(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %esi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl -112(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl %edi, -432(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -276(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl %edi, -456(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: setb %bl +; X32-NEXT: addl -160(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -24(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movzbl %bl, %esi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -112(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -136(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -180(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %edi, %edx +; X32-NEXT: adcl -48(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -240(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -36(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -172(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -20(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: movl %ebx, -584(%ebp) # 4-byte Spill +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: movl %edx, -276(%ebp) # 4-byte Spill +; X32-NEXT: adcl -24(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -240(%ebp) # 4-byte Spill +; X32-NEXT: adcl -112(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -172(%ebp) # 4-byte Spill +; X32-NEXT: movl -736(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %eax +; X32-NEXT: addb $127, 
%al +; X32-NEXT: sahf +; X32-NEXT: movl -72(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -432(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -72(%ebp) # 4-byte Spill +; X32-NEXT: movl -76(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -456(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -76(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, -232(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl %edx, -164(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl %esi, -40(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl %edi, -56(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 44(%eax), %eax +; X32-NEXT: movl %eax, -112(%ebp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: movl -128(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl %edi, %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl -304(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: movl %ebx, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %bl +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movzbl %bl, %eax +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %ecx, %edx +; X32-NEXT: addl %edx, %esi +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, -48(%ebp) # 4-byte Spill +; X32-NEXT: movl -100(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl -220(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl -376(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -204(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edx +; X32-NEXT: movl %edx, -376(%ebp) # 4-byte Spill +; X32-NEXT: movl -36(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl %edx, -220(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %edx +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl %edi, -376(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -220(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -80(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -220(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, %eax +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -20(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl %ebx, %ecx +; X32-NEXT: setb %dl +; X32-NEXT: addl -304(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -36(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movzbl %dl, %edx +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl %edx, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -48(%ebp) # 4-byte Folded Spill ; X32-NEXT: movl %edi, %ebx -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; 
X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: movl %esi, %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal 
{{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl 
{{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal 
{{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 
-; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal 
{{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl -336(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -200(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -80(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -472(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -372(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -436(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -292(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl %edx, -200(%ebp) # 4-byte Spill +; X32-NEXT: adcl -36(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -48(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -740(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %eax +; X32-NEXT: addb $127, %al +; X32-NEXT: sahf +; X32-NEXT: movl -376(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edx, -432(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -220(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -456(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl %ebx, -584(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -200(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -276(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl %edi, -240(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl %esi, -172(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -640(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %eax +; X32-NEXT: addb $127, %al +; X32-NEXT: sahf +; X32-NEXT: adcl -64(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -376(%ebp) # 4-byte Spill +; X32-NEXT: adcl -16(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -220(%ebp) # 4-byte Spill +; X32-NEXT: adcl -88(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -640(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -200(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl -44(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -472(%ebp) # 4-byte Spill +; X32-NEXT: adcl -52(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -436(%ebp) # 4-byte Spill +; X32-NEXT: movl -408(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -168(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, -48(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 60(%eax), %eax +; X32-NEXT: movl %eax, -192(%ebp) # 4-byte Spill +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; 
X32-NEXT: addl -16(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -92(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -392(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -28(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -412(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -256(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -80(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl -440(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -168(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -24(%ebp) # 4-byte Spill +; X32-NEXT: movl -340(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -92(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -64(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -340(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -68(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -764(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -48(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl -36(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -80(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -16(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -440(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -108(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -44(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -48(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -44(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -96(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -52(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb %cl +; X32-NEXT: movl -340(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -132(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -104(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -140(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -156(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl -20(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -48(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -36(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -52(%ebp) # 4-byte Folded Spill +; X32-NEXT: 
adcl $0, %ecx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: addl -80(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -16(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: setb -36(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -408(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -108(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -80(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -80(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl -408(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -96(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -80(%ebp) # 4-byte Spill +; X32-NEXT: adcl -20(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -20(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -20(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -392(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -104(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -412(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -156(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl -16(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl -80(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: movzbl -36(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -68(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -16(%ebp) # 4-byte Spill +; X32-NEXT: adcl -420(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -80(%ebp) # 4-byte Spill +; X32-NEXT: adcl -616(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -88(%ebp) # 4-byte Spill +; X32-NEXT: adcl -612(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -272(%ebp) # 4-byte Spill +; X32-NEXT: movl -352(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -168(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -44(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %ecx, %esi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -92(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %eax, -68(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb %bl +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -364(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -28(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -396(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -256(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -36(%ebp) # 4-byte Spill +; X32-NEXT: movl -416(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -168(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: 
movl %edx, %edi +; X32-NEXT: movl %eax, -616(%ebp) # 4-byte Spill +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %edi, %ebx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -92(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -612(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -152(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -316(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -152(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -32(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -424(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -44(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -152(%ebp) # 4-byte Spill +; X32-NEXT: adcl -68(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -44(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -20(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -36(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -416(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -108(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, -424(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %edi, %ecx +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -96(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -420(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -324(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -104(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -400(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -156(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -152(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -424(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -44(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -420(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -20(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -36(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -68(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -352(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -108(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -36(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -20(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -44(%ebp) # 4-byte Spill +; X32-NEXT: movl -352(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -96(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl -44(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -44(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl 
-120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -44(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -364(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -104(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -396(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -156(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -44(%ebp) # 4-byte Spill +; X32-NEXT: movl -36(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %edi, %edx +; X32-NEXT: movl -20(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -68(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ebx +; X32-NEXT: movl -44(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -32(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -196(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -504(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -508(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: addl -24(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl -64(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl -48(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -292(%ebp) # 4-byte Spill +; X32-NEXT: adcl -52(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -44(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -16(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -80(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -88(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -272(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -352(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -212(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -52(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -48(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -52(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -252(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -64(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -252(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -364(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -116(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -396(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -84(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -52(%ebp) # 4-byte Spill +; X32-NEXT: movl -416(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -212(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -68(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -508(%ebp) # 4-byte Spill +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -68(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; 
X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -504(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -296(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -768(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -48(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -372(%ebp) # 4-byte Spill +; X32-NEXT: adcl -64(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -68(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -24(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -52(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -416(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -284(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, -152(%ebp) # 4-byte Spill +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 28(%eax), %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %ebx, %esi +; X32-NEXT: movl %esi, -48(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -64(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: setb %cl +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -324(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -308(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -400(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -208(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -372(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -152(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -64(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -68(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -64(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -24(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -52(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -372(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -352(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -284(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -24(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -52(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -24(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -68(%ebp) # 4-byte Spill +; X32-NEXT: movl -352(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -48(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl -68(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %edi +; X32-NEXT: setb -68(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl -68(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -364(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -308(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -396(%ebp), %esi 
# 4-byte Reload +; X32-NEXT: adcl -208(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -68(%ebp) # 4-byte Spill +; X32-NEXT: movl -52(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movl -24(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -372(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: movl -68(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -296(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -776(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -772(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -780(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl -36(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, -508(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -20(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -504(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -292(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -152(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -44(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -64(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: adcl $0, %edi +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -16(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -52(%ebp) # 4-byte Spill +; X32-NEXT: adcl -80(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl -88(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -44(%ebp) # 4-byte Spill +; X32-NEXT: adcl -272(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -68(%ebp) # 4-byte Spill +; X32-NEXT: setb -20(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -408(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -212(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -36(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -252(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -272(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -16(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -16(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -392(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -116(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -412(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -84(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -80(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl -440(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -212(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -292(%ebp) # 4-byte Spill +; X32-NEXT: movl -340(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -252(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; 
X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -372(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -88(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -340(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -88(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -332(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -448(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -36(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -448(%ebp) # 4-byte Spill +; X32-NEXT: adcl -272(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -80(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -16(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -440(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -284(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, -88(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -48(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -296(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -340(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -132(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -308(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -140(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -208(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -448(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -88(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -36(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -296(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -80(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -16(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -16(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -408(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -284(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -80(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -272(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -80(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -80(%ebp) # 4-byte Spill +; X32-NEXT: movl -408(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -48(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl -80(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %edi +; X32-NEXT: setb -80(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl -80(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -392(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -308(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -412(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -208(%ebp), %esi 
# 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -80(%ebp) # 4-byte Spill +; X32-NEXT: movl -272(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movl -36(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -16(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: movl -80(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -332(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: adcl -648(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl -644(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -332(%ebp) # 4-byte Spill +; X32-NEXT: adcl -572(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -80(%ebp) # 4-byte Spill +; X32-NEXT: movl -292(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -52(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -372(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -24(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -88(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -44(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -296(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -68(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movzbl -20(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ebx +; X32-NEXT: movl %ebx, -272(%ebp) # 4-byte Spill +; X32-NEXT: movl -36(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: movl -332(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, -80(%ebp) # 4-byte Folded Spill +; X32-NEXT: addl -32(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -292(%ebp) # 4-byte Spill +; X32-NEXT: adcl -196(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -372(%ebp) # 4-byte Spill +; X32-NEXT: adcl -608(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -88(%ebp) # 4-byte Spill +; X32-NEXT: adcl -760(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -296(%ebp) # 4-byte Spill +; X32-NEXT: movl -756(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -272(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl -752(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl -748(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -332(%ebp) # 4-byte Spill +; X32-NEXT: movl -744(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -80(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -288(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -168(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, -52(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 28(%eax), %eax +; X32-NEXT: movl %eax, -16(%ebp) # 4-byte Spill +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -92(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -280(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -28(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -312(%ebp), %esi # 
4-byte Reload +; X32-NEXT: adcl -256(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -44(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl -348(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -168(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -32(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -572(%ebp) # 4-byte Spill +; X32-NEXT: movl -216(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -32(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -92(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -448(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -216(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -228(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -428(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -52(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -32(%ebp) # 4-byte Spill +; X32-NEXT: adcl -24(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -52(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -44(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -20(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -348(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -108(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -24(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -196(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -24(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -96(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -428(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb %cl +; X32-NEXT: movl -216(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -180(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -104(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -320(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -156(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl -32(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -196(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -52(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -428(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: addl -44(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -20(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: setb -52(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -288(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -108(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -44(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -44(%ebp), %ebx # 4-byte Folded Reload +; 
X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -24(%ebp) # 4-byte Spill +; X32-NEXT: movl -288(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -96(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -44(%ebp) # 4-byte Spill +; X32-NEXT: adcl -24(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -24(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -24(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -280(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -104(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -312(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -156(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl -20(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl -44(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: movzbl -52(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -228(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl -596(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -44(%ebp) # 4-byte Spill +; X32-NEXT: adcl -464(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -464(%ebp) # 4-byte Spill +; X32-NEXT: adcl -536(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -68(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -168(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -32(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %ecx, %esi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -92(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %eax, -228(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb %bl +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -160(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -28(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -268(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -256(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -52(%ebp) # 4-byte Spill +; X32-NEXT: movl -260(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -168(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -648(%ebp) # 4-byte Spill +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -92(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -644(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -536(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -124(%ebp), %ebx # 4-byte Reload 
+; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -536(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -344(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -452(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -32(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -452(%ebp) # 4-byte Spill +; X32-NEXT: adcl -228(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -32(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -24(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -52(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -260(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -108(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, -536(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %edi, %ecx +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -96(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -596(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -136(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -104(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -264(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -156(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -452(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -536(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -32(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -596(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -24(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -52(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -228(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -108(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -24(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -52(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -24(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -32(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -96(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl -32(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -32(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -32(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -160(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -104(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -268(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -156(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -32(%ebp) # 4-byte Spill +; X32-NEXT: movl -52(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %edi, %edx +; X32-NEXT: movl 
-24(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -228(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ebx +; X32-NEXT: movl -32(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -344(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -404(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -532(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -592(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: addl -572(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -52(%ebp) # 4-byte Spill +; X32-NEXT: adcl -448(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl -196(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -572(%ebp) # 4-byte Spill +; X32-NEXT: adcl -428(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -32(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -20(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -44(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -464(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -68(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -184(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -212(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -228(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -428(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -228(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -252(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -452(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -252(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -160(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -116(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -268(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -84(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -196(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -228(%ebp) # 4-byte Spill +; X32-NEXT: movl -260(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -212(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -532(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -592(%ebp) # 4-byte Spill +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -532(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -532(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -124(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -368(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -328(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -428(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -448(%ebp) # 4-byte Spill +; X32-NEXT: adcl -452(%ebp), %edx # 4-byte 
Folded Reload +; X32-NEXT: movl %edx, -328(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -196(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -228(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -260(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -284(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, -428(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -48(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -452(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ebx +; X32-NEXT: setb %cl +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -136(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -308(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -264(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -208(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -448(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -428(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -328(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -452(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -196(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -228(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -448(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -284(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -196(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -228(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -196(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -328(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -48(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -196(%ebp) # 4-byte Spill +; X32-NEXT: adcl -328(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %edi +; X32-NEXT: setb -328(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl -328(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -160(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -308(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -268(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -208(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -328(%ebp) # 4-byte Spill +; X32-NEXT: movl -228(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movl -196(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -448(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: movl -328(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -368(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -620(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -788(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl 
-784(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl -52(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, -592(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -24(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -532(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -572(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -428(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -32(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -452(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: adcl $0, %edi +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -20(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -228(%ebp) # 4-byte Spill +; X32-NEXT: adcl -44(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -196(%ebp) # 4-byte Spill +; X32-NEXT: adcl -464(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -620(%ebp) # 4-byte Spill +; X32-NEXT: adcl -68(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -328(%ebp) # 4-byte Spill +; X32-NEXT: setb -464(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -288(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -212(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -252(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -44(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -44(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -280(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -116(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -312(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -84(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -52(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -44(%ebp) # 4-byte Spill +; X32-NEXT: movl -348(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -212(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -32(%ebp) # 4-byte Spill +; X32-NEXT: movl -216(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -252(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -68(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -368(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -216(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -368(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -540(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -576(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -20(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -576(%ebp) # 4-byte Spill +; X32-NEXT: adcl -24(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: 
movl %edx, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -52(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -44(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -348(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -284(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -368(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -368(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -48(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -368(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -216(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -180(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -308(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -320(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -208(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %edi +; X32-NEXT: movl -576(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -20(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -24(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -368(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: addl -52(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -44(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: setb -576(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -288(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -284(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -52(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -44(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -52(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -24(%ebp) # 4-byte Spill +; X32-NEXT: movl -288(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -48(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -52(%ebp) # 4-byte Spill +; X32-NEXT: adcl -24(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: setb -24(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -24(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -280(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -308(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -312(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -208(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movl %esi, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -44(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movl -52(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: movzbl -576(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: movl -24(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -540(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -44(%ebp) # 4-byte Spill +; X32-NEXT: adcl -800(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, 
-52(%ebp) # 4-byte Spill +; X32-NEXT: adcl -796(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl -792(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, %edi +; X32-NEXT: movl -32(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -228(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -68(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -196(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -20(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -620(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -368(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -328(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movzbl -464(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, -44(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -52(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -24(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edi +; X32-NEXT: addl -344(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -32(%ebp) # 4-byte Spill +; X32-NEXT: adcl -404(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -68(%ebp) # 4-byte Spill +; X32-NEXT: adcl -72(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl -76(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -44(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -232(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl -52(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -164(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -24(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -40(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -56(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -32(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -616(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -32(%ebp) # 4-byte Spill +; X32-NEXT: movl -68(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -612(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -68(%ebp) # 4-byte Spill +; X32-NEXT: movl -20(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -424(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl -420(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -368(%ebp) # 4-byte Spill +; X32-NEXT: adcl -508(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -44(%ebp) # 4-byte Spill +; X32-NEXT: adcl -504(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -52(%ebp) # 4-byte Spill +; X32-NEXT: adcl -152(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl -64(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -464(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -292(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -372(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -88(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -296(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -272(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -36(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -332(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -80(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -288(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -188(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -164(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax 
+; X32-NEXT: movl -148(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -76(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -280(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -100(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -312(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -204(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -56(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -40(%ebp) # 4-byte Spill +; X32-NEXT: movl -348(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -188(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -72(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -152(%ebp) # 4-byte Spill +; X32-NEXT: movl -216(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -72(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -148(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -228(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -216(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -468(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -804(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -164(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -72(%ebp) # 4-byte Spill +; X32-NEXT: adcl -76(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -76(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -56(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -40(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -348(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -236(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, -164(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -112(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -232(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: setb %cl +; X32-NEXT: movl -216(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -180(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -320(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -72(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -164(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -76(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -232(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -56(%ebp), %ebx # 4-byte Folded Reload +; 
X32-NEXT: adcl -40(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -72(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -288(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -236(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -40(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -56(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -40(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -40(%ebp) # 4-byte Spill +; X32-NEXT: movl -288(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -112(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -76(%ebp) # 4-byte Spill +; X32-NEXT: adcl -40(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %edi +; X32-NEXT: setb -40(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl -40(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -280(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -312(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -40(%ebp) # 4-byte Spill +; X32-NEXT: movl -56(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movl -76(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -72(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: movl -40(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -468(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -56(%ebp) # 4-byte Spill +; X32-NEXT: adcl -816(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -76(%ebp) # 4-byte Spill +; X32-NEXT: adcl -812(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -344(%ebp) # 4-byte Spill +; X32-NEXT: adcl -808(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -40(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -188(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, -196(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -148(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -328(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %bl +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -160(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -100(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -268(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -204(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -64(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -72(%ebp) # 4-byte Spill +; X32-NEXT: movl -260(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -188(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: 
movl %edx, -468(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -508(%ebp) # 4-byte Spill +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -468(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -148(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -504(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -124(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -512(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -820(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -196(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -404(%ebp) # 4-byte Spill +; X32-NEXT: adcl -328(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -196(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -64(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -72(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -260(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -236(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, -328(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -112(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -468(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: setb %cl +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -136(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -264(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -404(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -328(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -196(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -468(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %esi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -64(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -72(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -196(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -236(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -64(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -72(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -64(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -404(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -112(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -64(%ebp) # 4-byte Spill +; X32-NEXT: adcl -404(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -404(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl 
-60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -404(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -160(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -268(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl -72(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %esi, %edx +; X32-NEXT: movl -64(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -196(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -512(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -676(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -624(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -628(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl -152(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -72(%ebp) # 4-byte Spill +; X32-NEXT: adcl -228(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -64(%ebp) # 4-byte Spill +; X32-NEXT: adcl -164(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -628(%ebp) # 4-byte Spill +; X32-NEXT: adcl -232(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -624(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -56(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -76(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -344(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -40(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -184(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -300(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -232(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -164(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -232(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -144(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -228(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull -144(%ebp) # 4-byte Folded Reload +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -160(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -336(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -268(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -176(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -152(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -232(%ebp) # 4-byte Spill +; X32-NEXT: movl -260(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -300(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, -404(%ebp) # 4-byte Spill +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -144(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -540(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; 
X32-NEXT: setb -196(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -124(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -196(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -588(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -824(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -164(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -420(%ebp) # 4-byte Spill +; X32-NEXT: adcl -228(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -424(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -152(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -232(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -260(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -244(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, -228(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 60(%eax), %esi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %esi, -164(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -196(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb %cl +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -136(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -224(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -264(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -360(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl -420(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -228(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -424(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -196(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: addl -152(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -232(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: setb -232(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -244(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -152(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -424(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -152(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -152(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -164(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -420(%ebp) # 4-byte Spill +; X32-NEXT: adcl -152(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -152(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -152(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -160(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -224(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -268(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -360(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi 
+; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl -424(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl -420(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: movzbl -232(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -588(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -632(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -828(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -636(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -72(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -404(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -64(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -540(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -628(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -228(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -624(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -196(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -56(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -424(%ebp) # 4-byte Spill +; X32-NEXT: adcl -76(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -420(%ebp) # 4-byte Spill +; X32-NEXT: adcl -344(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -636(%ebp) # 4-byte Spill +; X32-NEXT: adcl -40(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -632(%ebp) # 4-byte Spill +; X32-NEXT: setb -588(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -288(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -300(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, -76(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -144(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -72(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -280(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -336(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -312(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -176(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -56(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -40(%ebp) # 4-byte Spill +; X32-NEXT: movl -348(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -300(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -232(%ebp) # 4-byte Spill +; X32-NEXT: movl -216(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -144(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -152(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -64(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -216(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl 
%edi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -64(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -672(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -832(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -76(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -344(%ebp) # 4-byte Spill +; X32-NEXT: adcl -72(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -76(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -56(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -40(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -348(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -244(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, -72(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -164(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -64(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -216(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -180(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -224(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -320(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -360(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -344(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -72(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -76(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -64(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -56(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -40(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -56(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -288(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -244(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -40(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -344(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -40(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -76(%ebp) # 4-byte Spill +; X32-NEXT: movl -288(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -164(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -40(%ebp) # 4-byte Spill +; X32-NEXT: adcl -76(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %edi +; X32-NEXT: setb -76(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl -76(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -280(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -224(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -312(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -360(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -76(%ebp) # 4-byte Spill +; X32-NEXT: movl -344(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movl -40(%ebp), %esi # 4-byte 
Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -56(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: movl -76(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -672(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: adcl -836(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -40(%ebp) # 4-byte Spill +; X32-NEXT: adcl -840(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -56(%ebp) # 4-byte Spill +; X32-NEXT: adcl -844(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -76(%ebp) # 4-byte Spill +; X32-NEXT: movl -232(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -424(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -152(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -420(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -72(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -636(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -64(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -632(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movzbl -588(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ebx +; X32-NEXT: movl %ebx, -344(%ebp) # 4-byte Spill +; X32-NEXT: movl -40(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: movl -56(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, -76(%ebp) # 4-byte Folded Spill +; X32-NEXT: addl -512(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -232(%ebp) # 4-byte Spill +; X32-NEXT: adcl -676(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -152(%ebp) # 4-byte Spill +; X32-NEXT: adcl -432(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -72(%ebp) # 4-byte Spill +; X32-NEXT: adcl -456(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -64(%ebp) # 4-byte Spill +; X32-NEXT: movl -344(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -584(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -276(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -40(%ebp) # 4-byte Spill +; X32-NEXT: adcl -240(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -56(%ebp) # 4-byte Spill +; X32-NEXT: movl -76(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -172(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl -32(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %edx, -508(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -68(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edx, -504(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -20(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edx, -328(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -368(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edx, -468(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -44(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edx, -404(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -52(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edx, -540(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -24(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edx, -228(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -464(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl %edx, -196(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -232(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl -152(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl -72(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl -64(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl $0, -40(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -56(%ebp) # 4-byte Folded Spill 
+; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -292(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -232(%ebp) # 4-byte Spill +; X32-NEXT: adcl -372(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -152(%ebp) # 4-byte Spill +; X32-NEXT: adcl -88(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -72(%ebp) # 4-byte Spill +; X32-NEXT: adcl -296(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -64(%ebp) # 4-byte Spill +; X32-NEXT: adcl -272(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -344(%ebp) # 4-byte Spill +; X32-NEXT: movl -40(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -36(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -40(%ebp) # 4-byte Spill +; X32-NEXT: movl -56(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -332(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -56(%ebp) # 4-byte Spill +; X32-NEXT: adcl -80(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -76(%ebp) # 4-byte Spill +; X32-NEXT: setb -372(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -408(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -188(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -240(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -276(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -240(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -148(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -240(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %bl +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -392(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -100(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -412(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -204(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -80(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -172(%ebp) # 4-byte Spill +; X32-NEXT: movl -440(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -188(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -36(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -272(%ebp) # 4-byte Spill +; X32-NEXT: movl -340(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -36(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -148(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -296(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -340(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -680(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -884(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -276(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl -240(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -36(%ebp) # 4-byte 
Spill +; X32-NEXT: adcl $0, -80(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -172(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -440(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -236(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, -276(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -112(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -240(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: setb %cl +; X32-NEXT: movl -340(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -132(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -140(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -20(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -276(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -36(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -240(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -80(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -172(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -20(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -408(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -236(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -172(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -80(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -172(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -172(%ebp) # 4-byte Spill +; X32-NEXT: movl -408(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -112(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl -172(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %edi +; X32-NEXT: setb -172(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl -172(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -392(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -412(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -172(%ebp) # 4-byte Spill +; X32-NEXT: movl -80(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movl -36(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -20(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: movl -172(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -680(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -80(%ebp) # 4-byte Spill +; X32-NEXT: adcl -856(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl -852(%ebp), %edi # 4-byte 
Folded Reload +; X32-NEXT: movl %edi, -292(%ebp) # 4-byte Spill +; X32-NEXT: adcl -848(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -172(%ebp) # 4-byte Spill +; X32-NEXT: movl -352(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -188(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -44(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -20(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -148(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -52(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -148(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -364(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -100(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -396(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -204(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -24(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %edi +; X32-NEXT: movl %edi, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl -416(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -188(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -88(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -432(%ebp) # 4-byte Spill +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -88(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -456(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -316(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -656(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -892(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -44(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -32(%ebp) # 4-byte Spill +; X32-NEXT: adcl -52(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -88(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -24(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -20(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -416(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -236(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, -44(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -112(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -52(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: setb %cl +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: 
movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -324(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -400(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl -32(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -44(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -88(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -52(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: addl -24(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -20(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: setb -24(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -352(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -236(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -88(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -88(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -32(%ebp) # 4-byte Spill +; X32-NEXT: movl -352(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -112(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -88(%ebp) # 4-byte Spill +; X32-NEXT: adcl -32(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -32(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -32(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -364(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -396(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl -20(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl -88(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: movzbl -24(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -656(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -700(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -860(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -864(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl -272(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl -296(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -88(%ebp) # 4-byte Spill +; X32-NEXT: adcl -276(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -332(%ebp) # 4-byte Spill +; X32-NEXT: adcl -240(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -368(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -80(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -36(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -292(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -172(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -352(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -300(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, -276(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl 
%esi, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -144(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -240(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -364(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -336(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -396(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -176(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -32(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -296(%ebp) # 4-byte Spill +; X32-NEXT: movl -416(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -300(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -24(%ebp) # 4-byte Spill +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -144(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -272(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -68(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -316(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -68(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -684(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -868(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -276(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -512(%ebp) # 4-byte Spill +; X32-NEXT: adcl -240(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -68(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -32(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -296(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -416(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -244(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, -276(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -164(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -240(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %edi +; X32-NEXT: setb %cl +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -324(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -224(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -400(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -360(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -512(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -276(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -68(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -240(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebx +; 
X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -32(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -296(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -512(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -352(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -244(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -32(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -296(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -32(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -68(%ebp) # 4-byte Spill +; X32-NEXT: movl -352(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -164(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -32(%ebp) # 4-byte Spill +; X32-NEXT: adcl -68(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %edi +; X32-NEXT: setb -68(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl -68(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -364(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -224(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -396(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -360(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -68(%ebp) # 4-byte Spill +; X32-NEXT: movl -296(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movl -32(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -512(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: movl -68(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -684(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -876(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -872(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -880(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl -20(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, -24(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -88(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -272(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -332(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -276(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -368(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -240(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: adcl $0, %edi +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -80(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -296(%ebp) # 4-byte Spill +; X32-NEXT: adcl -36(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -32(%ebp) # 4-byte Spill +; X32-NEXT: adcl -292(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -292(%ebp) # 4-byte Spill +; X32-NEXT: adcl -172(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -68(%ebp) # 4-byte Spill +; X32-NEXT: setb -88(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -408(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -300(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -36(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %ecx, %esi +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl 
-144(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %eax, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb -172(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -172(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -336(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -392(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -176(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -412(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movl %esi, -336(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl %ecx, -176(%ebp) # 4-byte Spill +; X32-NEXT: movl -440(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -300(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -172(%ebp) # 4-byte Spill +; X32-NEXT: movl -340(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -144(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -80(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -332(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -340(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movzbl -332(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: addl -688(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -888(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -36(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -20(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -332(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -336(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -176(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -440(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -244(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -20(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -36(%ebp) # 4-byte Spill +; X32-NEXT: movl -340(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -20(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull -164(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -340(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -164(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -132(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -224(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -140(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -360(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: addl %edi, -36(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -332(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -20(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %esi +; X32-NEXT: adcl $0, %ecx +; 
X32-NEXT: addl -336(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -132(%ebp) # 4-byte Spill +; X32-NEXT: adcl -176(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -140(%ebp) # 4-byte Spill +; X32-NEXT: setb -176(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -408(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -244(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -332(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -336(%ebp) # 4-byte Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -332(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, %edi +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -332(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -192(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -332(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -392(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -224(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -412(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -360(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -336(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -132(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -140(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movzbl -176(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -688(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -900(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -360(%ebp) # 4-byte Spill +; X32-NEXT: adcl -896(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -392(%ebp) # 4-byte Spill +; X32-NEXT: adcl -904(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -412(%ebp) # 4-byte Spill +; X32-NEXT: movl -172(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -296(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -80(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -32(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -36(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -292(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -20(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -68(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movzbl -88(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ebx +; X32-NEXT: movl %ebx, -336(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -360(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -392(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: movl -412(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -656(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -172(%ebp) # 4-byte Spill +; X32-NEXT: adcl -700(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -80(%ebp) # 4-byte Spill +; X32-NEXT: adcl -376(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -220(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -336(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -640(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -360(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -200(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -472(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -392(%ebp) # 4-byte Spill +; X32-NEXT: adcl -436(%ebp), 
%ebx # 4-byte Folded Reload +; X32-NEXT: movl -232(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -432(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -152(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -456(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -72(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -44(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -64(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -52(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -344(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -24(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -40(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -272(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -56(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -276(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -76(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -240(%ebp) # 4-byte Folded Spill +; X32-NEXT: movzbl -372(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, -172(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -80(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %esi, -36(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %edi, -20(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl %ecx, -336(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -360(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -392(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %ebx, -412(%ebp) # 4-byte Spill +; X32-NEXT: movl -284(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -476(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -140(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -132(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -140(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -248(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -140(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %bl +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -308(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -480(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -208(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -384(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -200(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -176(%ebp) # 4-byte Spill +; X32-NEXT: movl -212(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -476(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -64(%ebp) # 4-byte Spill +; X32-NEXT: movl -252(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -248(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -220(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb -40(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -252(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; 
X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: movzbl -40(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: addl -692(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -920(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -132(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -140(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -56(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -200(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -176(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -212(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -516(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -132(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -140(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -132(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 76(%eax), %edx +; X32-NEXT: movl %edx, -132(%ebp) # 4-byte Spill +; X32-NEXT: movl -212(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -40(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %edi +; X32-NEXT: setb %cl +; X32-NEXT: movl -252(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull -132(%ebp) # 4-byte Folded Reload +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -116(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -484(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -84(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -488(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: addl %ebx, -140(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -56(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -40(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: addl -200(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -176(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: setb -56(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -284(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -516(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -200(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -176(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -200(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -224(%ebp) # 4-byte Spill +; X32-NEXT: movl -284(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -132(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -200(%ebp) # 4-byte Spill +; X32-NEXT: adcl -224(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -224(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -224(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -308(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -484(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -208(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -488(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl -176(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl -200(%ebp), 
%ecx # 4-byte Reload +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: movzbl -56(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -692(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -176(%ebp) # 4-byte Spill +; X32-NEXT: adcl -908(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -200(%ebp) # 4-byte Spill +; X32-NEXT: adcl -916(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -68(%ebp) # 4-byte Spill +; X32-NEXT: adcl -912(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -32(%ebp) # 4-byte Spill +; X32-NEXT: movl -108(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -476(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -56(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -76(%ebp) # 4-byte Spill +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -56(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -248(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -72(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %bl +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -104(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -480(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -156(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -384(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -224(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -56(%ebp) # 4-byte Spill +; X32-NEXT: movl -168(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -476(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -436(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -344(%ebp) # 4-byte Spill +; X32-NEXT: movl -92(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -436(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -248(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -232(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -92(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -696(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -932(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -76(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -88(%ebp) # 4-byte Spill +; X32-NEXT: adcl -72(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -76(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -224(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -56(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -168(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -516(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -72(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -436(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; 
X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -72(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -132(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -472(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb %cl +; X32-NEXT: movl -92(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -28(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -484(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -256(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -488(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -88(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -436(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -76(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -472(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -224(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -56(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -56(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -108(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -516(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -76(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -224(%ebp) # 4-byte Spill +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -76(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -72(%ebp) # 4-byte Spill +; X32-NEXT: movl -108(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -132(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -76(%ebp) # 4-byte Spill +; X32-NEXT: adcl -72(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -72(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -72(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl -104(%ebp), %ebx # 4-byte Reload +; X32-NEXT: addl -484(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -156(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -488(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -72(%ebp) # 4-byte Spill +; X32-NEXT: movl -224(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %edi, %edx +; X32-NEXT: movl -76(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl -56(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ebx +; X32-NEXT: movl -72(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl -696(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -652(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -924(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -928(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: addl -64(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -224(%ebp) # 4-byte Spill +; X32-NEXT: adcl -220(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -76(%ebp) # 4-byte Spill +; X32-NEXT: adcl -140(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -152(%ebp) # 4-byte Spill +; X32-NEXT: adcl -40(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -72(%ebp) # 4-byte Spill +; X32-NEXT: 
adcl $0, -176(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -200(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -68(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -32(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -108(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -548(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -40(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -140(%ebp) # 4-byte Spill +; X32-NEXT: movl -96(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -40(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -544(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -40(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -544(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -104(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -380(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -156(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -356(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -220(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -64(%ebp) # 4-byte Spill +; X32-NEXT: movl -168(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -548(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -56(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -88(%ebp) # 4-byte Spill +; X32-NEXT: movl -92(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -56(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -296(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -56(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -92(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -56(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -704(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -948(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -140(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -292(%ebp) # 4-byte Spill +; X32-NEXT: adcl -40(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -376(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -220(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -64(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -168(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -580(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -140(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -40(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -140(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl 8(%ebp), %eax +; X32-NEXT: movl 92(%eax), %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %ebx, %esi +; X32-NEXT: movl %esi, -140(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %ebx +; 
X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -56(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: setb %cl +; X32-NEXT: movl -92(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -28(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -600(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -256(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -604(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl -292(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -40(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -376(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -56(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: addl -220(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -64(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: setb -376(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -108(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -580(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -220(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -64(%ebp) # 4-byte Spill +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -220(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -292(%ebp) # 4-byte Spill +; X32-NEXT: movl -108(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -140(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -220(%ebp) # 4-byte Spill +; X32-NEXT: adcl -292(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -292(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -292(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -104(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -600(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -156(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -604(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl -64(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl -220(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: movzbl -376(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -704(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -940(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -944(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -936(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -224(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -88(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -76(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -296(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -152(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -40(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -72(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -56(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -176(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -64(%ebp) # 4-byte Spill +; X32-NEXT: adcl -200(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -220(%ebp) # 4-byte Spill +; X32-NEXT: adcl -68(%ebp), 
%edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -68(%ebp) # 4-byte Spill +; X32-NEXT: adcl -32(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -152(%ebp) # 4-byte Spill +; X32-NEXT: setb -32(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -284(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -548(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, -176(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %edi, %ecx +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -544(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -200(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -380(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -308(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -356(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -208(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movl %esi, -380(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl %ecx, -356(%ebp) # 4-byte Spill +; X32-NEXT: movl -212(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -548(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -76(%ebp) # 4-byte Spill +; X32-NEXT: movl -252(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -544(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -72(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -252(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -708(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -960(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -176(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -376(%ebp) # 4-byte Spill +; X32-NEXT: adcl -200(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -224(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -380(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -356(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -212(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -580(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, -176(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx ; X32-NEXT: addl %edi, %ecx -; X32-NEXT: movl %edx, %eax -; X32-NEXT: adcl %esi, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl -212(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -140(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: 
addl %ecx, %eax +; X32-NEXT: movl %eax, -200(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -252(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edi, %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -116(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -600(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -84(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -604(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -376(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -176(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -224(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -200(%ebp) # 4-byte Folded Spill ; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl %edx, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %ecx, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl %ebx, %edi +; X32-NEXT: addl -380(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -116(%ebp) # 4-byte Spill +; X32-NEXT: adcl -356(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -84(%ebp) # 4-byte Spill +; X32-NEXT: setb -356(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -284(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -580(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -380(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -224(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -380(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, %edi ; X32-NEXT: adcl %esi, %ecx -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl %eax, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 
4-byte Spill -; X32-NEXT: movl %ecx, %edx -; X32-NEXT: adcl %eax, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl %ecx, %esi -; X32-NEXT: adcl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %eax, %ebx -; X32-NEXT: addl %edi, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: setb -380(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -380(%ebp), %ecx # 1-byte Folded Reload ; X32-NEXT: adcl %ecx, %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl %edx, %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %edx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %eax -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl %ebx, %eax -; X32-NEXT: adcl %edi, %ecx -; X32-NEXT: setb %dl -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movzbl %dl, %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl %esi, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %esi, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %edi, %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill 
-; X32-NEXT: adcl %edi, %esi -; X32-NEXT: addl %eax, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %ecx, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl -308(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -600(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -208(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -604(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi ; X32-NEXT: adcl %edx, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %edi, %edx +; X32-NEXT: movl -224(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl -116(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -84(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movzbl -356(%ebp), %eax # 1-byte Folded Reload ; X32-NEXT: adcl %eax, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; 
X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: addl %edx, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %esi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -708(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -660(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -952(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -956(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -64(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -76(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -220(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -72(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -68(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -176(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -152(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -200(%ebp) # 4-byte Folded Spill +; X32-NEXT: movzbl -32(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: movl %edx, -224(%ebp) # 4-byte Spill ; X32-NEXT: adcl $0, %edi -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl %edi, -380(%ebp) # 4-byte Spill ; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl %esi, -308(%ebp) # 4-byte Spill ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl %edx, %esi +; X32-NEXT: movl %ecx, -208(%ebp) # 4-byte Spill +; X32-NEXT: movl -516(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -188(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -116(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -356(%ebp) # 4-byte Spill +; X32-NEXT: movl -132(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -116(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -148(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -32(%ebp) # 4-byte Spill ; X32-NEXT: adcl %edi, %ecx -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded 
Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: addl %esi, %eax +; X32-NEXT: setb %bl +; X32-NEXT: movl -132(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx ; X32-NEXT: adcl %ecx, %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl -484(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -100(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -488(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -204(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -84(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -116(%ebp) # 4-byte Spill +; X32-NEXT: movl -476(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -188(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -220(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -64(%ebp) # 4-byte Spill +; X32-NEXT: movl -248(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -220(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -148(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -220(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -248(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -100(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -480(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -204(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -384(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: addl -356(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -100(%ebp) # 4-byte Spill +; X32-NEXT: adcl -32(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -204(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -84(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -116(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -476(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -236(%ebp), %esi # 
4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -356(%ebp) # 4-byte Spill +; X32-NEXT: movl -248(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -112(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, %edi +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -32(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -248(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -32(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -480(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -384(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -100(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -356(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl -204(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -32(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, %esi ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: addl -84(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -480(%ebp) # 4-byte Spill +; X32-NEXT: adcl -116(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -384(%ebp) # 4-byte Spill +; X32-NEXT: setb -204(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -516(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -236(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -100(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -116(%ebp) # 4-byte Spill +; X32-NEXT: movl -132(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -100(%ebp), %edi # 4-byte Folded Reload ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: addl %edx, %eax +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -112(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -100(%ebp) # 4-byte Spill ; X32-NEXT: adcl %esi, %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), 
%edi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl %edx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl %edx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %eax +; X32-NEXT: setb -84(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -112(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -84(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -484(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -304(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -488(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -480(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -116(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -384(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -100(%ebp) # 4-byte Folded Spill +; X32-NEXT: movzbl -204(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: movl %esi, -484(%ebp) # 4-byte Spill ; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl %ecx, -488(%ebp) # 4-byte Spill +; X32-NEXT: movl -548(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %edi, %ecx +; X32-NEXT: imull %eax, %ecx +; X32-NEXT: movl -236(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, -204(%ebp) # 4-byte Spill +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: imull -544(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %edx, %esi +; X32-NEXT: movl %esi, -236(%ebp) # 4-byte Spill +; X32-NEXT: movl -580(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl -148(%ebp), %ebx # 4-byte Reload +; X32-NEXT: imull %ebx, %esi +; X32-NEXT: movl -188(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %edx +; X32-NEXT: movl -140(%ebp), %esi # 4-byte Reload +; X32-NEXT: imull %edi, %esi +; X32-NEXT: addl %edx, %esi +; X32-NEXT: addl -204(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -84(%ebp) # 4-byte Spill +; X32-NEXT: adcl -236(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -140(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl %edi, %esi +; X32-NEXT: movl -548(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -236(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx ; X32-NEXT: adcl $0, %edi -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; 
X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -544(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -204(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %bl +; X32-NEXT: movl -148(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -84(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -304(%ebp) # 4-byte Spill +; X32-NEXT: adcl -140(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -84(%ebp) # 4-byte Spill +; X32-NEXT: movl -476(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -164(%ebp), %esi # 4-byte Reload +; X32-NEXT: imull %eax, %esi +; X32-NEXT: movl -244(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, -148(%ebp) # 4-byte Spill +; X32-NEXT: addl %esi, %edx +; X32-NEXT: imull -248(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: movl %ecx, -244(%ebp) # 4-byte Spill +; X32-NEXT: movl -516(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl -144(%ebp), %ebx # 4-byte Reload +; X32-NEXT: imull %ebx, %esi +; X32-NEXT: movl -300(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %esi, %edx +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -132(%ebp), %ecx # 4-byte Reload +; X32-NEXT: imull %eax, %ecx +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: addl -148(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -128(%ebp) # 4-byte Spill +; X32-NEXT: adcl -244(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -132(%ebp) # 4-byte Spill +; X32-NEXT: movl -476(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -148(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %ecx, %esi ; X32-NEXT: adcl $0, %edi -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl -300(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull -248(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: setb -244(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -144(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull -248(%ebp) # 4-byte Folded Reload +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -244(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: addl -128(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -132(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -148(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -236(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -204(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -304(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -84(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -116(%ebp), %esi # 4-byte 
Folded Reload +; X32-NEXT: movl %esi, -148(%ebp) # 4-byte Spill +; X32-NEXT: adcl -100(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -164(%ebp) # 4-byte Spill +; X32-NEXT: adcl -484(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -384(%ebp) # 4-byte Spill +; X32-NEXT: adcl -488(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -300(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %esi +; X32-NEXT: movl 104(%esi), %ebx +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl %ebx, -244(%ebp) # 4-byte Spill +; X32-NEXT: movl -168(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, -236(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl 108(%esi), %eax +; X32-NEXT: movl %eax, -100(%ebp) # 4-byte Spill +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi ; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl %eax, %esi -; X32-NEXT: adcl %edi, %edx -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movzbl %al, %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: addl %esi, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %edx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %edi, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %ebx, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -92(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -204(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ebx +; X32-NEXT: setb -116(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -100(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ebx, %edi +; X32-NEXT: movzbl -116(%ebp), %eax # 1-byte Folded Reload ; X32-NEXT: adcl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl $0, %eax -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl 
%edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl -244(%ebp), %eax # 4-byte Reload +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -128(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -248(%ebp) # 4-byte Spill +; X32-NEXT: addl -28(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -256(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -112(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl %edx, -140(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %ecx +; X32-NEXT: movl 96(%ecx), %edi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl %edi, -84(%ebp) # 4-byte Spill +; X32-NEXT: movl -168(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, -304(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -132(%ebp) # 4-byte Spill +; X32-NEXT: movl 100(%ecx), %eax +; X32-NEXT: movl %eax, -116(%ebp) # 4-byte Spill +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl -132(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -92(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi ; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %eax, -132(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb -144(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -116(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi ; X32-NEXT: movl %eax, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl %edi, %ecx +; X32-NEXT: movzbl -144(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: movl -84(%ebp), %eax # 4-byte Reload +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %edx, -188(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -144(%ebp) # 4-byte Spill +; X32-NEXT: movl -28(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: movl -256(%ebp), %eax # 4-byte Reload ; X32-NEXT: adcl %edx, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: addl -236(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -28(%ebp) # 4-byte Spill +; X32-NEXT: adcl -204(%ebp), %eax # 4-byte 
Folded Reload +; X32-NEXT: movl %eax, -256(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -112(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -140(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -84(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -108(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -204(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -236(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -204(%ebp), %ecx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl %edi, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %ecx, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -96(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -204(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: setb %cl +; X32-NEXT: movl -116(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -144(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -104(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -188(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -156(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -28(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -236(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -256(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -204(%ebp) # 4-byte Folded Spill ; X32-NEXT: adcl $0, %esi -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl %ebx, %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -112(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -144(%ebp) # 4-byte Spill +; X32-NEXT: adcl -140(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -188(%ebp) # 4-byte Spill +; X32-NEXT: setb -112(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -244(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -108(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -256(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -28(%ebp) # 4-byte Spill +; X32-NEXT: movl -100(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -256(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -96(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -256(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -96(%ebp), %esi # 
4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -248(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -104(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -128(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -156(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -144(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -28(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -188(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -256(%ebp) # 4-byte Folded Spill +; X32-NEXT: movzbl -112(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: movl %edi, -248(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl %ecx, -128(%ebp) # 4-byte Spill +; X32-NEXT: movl 8(%ebp), %ecx +; X32-NEXT: movl 112(%ecx), %eax +; X32-NEXT: movl %eax, -156(%ebp) # 4-byte Spill +; X32-NEXT: imull %eax, %esi +; X32-NEXT: movl -108(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, -144(%ebp) # 4-byte Spill +; X32-NEXT: addl %esi, %edx +; X32-NEXT: movl 116(%ecx), %eax +; X32-NEXT: movl %eax, -104(%ebp) # 4-byte Spill +; X32-NEXT: imull %eax, %edi +; X32-NEXT: addl %edx, %edi +; X32-NEXT: movl %edi, -108(%ebp) # 4-byte Spill +; X32-NEXT: movl 120(%ecx), %eax +; X32-NEXT: movl %ecx, %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: movl -92(%ebp), %esi # 4-byte Reload +; X32-NEXT: imull %esi, %edi +; X32-NEXT: movl -168(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, -96(%ebp) # 4-byte Spill +; X32-NEXT: addl %edi, %edx +; X32-NEXT: movl 124(%ebx), %ebx +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: imull %eax, %ebx +; X32-NEXT: addl %edx, %ebx +; X32-NEXT: movl -144(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, -96(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl -108(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -156(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -144(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -108(%ebp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -144(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl -168(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull -104(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -168(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -92(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull -104(%ebp) # 4-byte Folded Reload +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -96(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -92(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edx +; X32-NEXT: movl %edx, -96(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl -84(%ebp), %eax # 4-byte Reload +; X32-NEXT: imull %eax, %edi +; X32-NEXT: movl -284(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, -104(%ebp) # 4-byte Spill +; X32-NEXT: addl %edi, %edx +; X32-NEXT: imull -116(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: movl %ecx, -284(%ebp) # 4-byte Spill +; X32-NEXT: movl -244(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: movl -252(%ebp), %ebx # 4-byte Reload +; X32-NEXT: imull %ebx, %ecx +; X32-NEXT: movl -212(%ebp), 
%edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl -100(%ebp), %ecx # 4-byte Reload +; X32-NEXT: imull %edi, %ecx +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: addl -104(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -104(%ebp) # 4-byte Spill +; X32-NEXT: adcl -284(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -100(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl %edi, %ecx +; X32-NEXT: movl -84(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, -284(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -116(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %bl +; X32-NEXT: movl -252(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull -116(%ebp) # 4-byte Folded Reload +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %bl, %esi +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: addl -104(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -100(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -284(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -108(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -168(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -92(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -96(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -28(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, %edi +; X32-NEXT: adcl -256(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, %ebx +; X32-NEXT: adcl -248(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -116(%ebp) # 4-byte Spill +; X32-NEXT: adcl -128(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -256(%ebp) # 4-byte Spill +; X32-NEXT: movl -304(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl -64(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl -132(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -220(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -236(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -356(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -204(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -32(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -148(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -284(%ebp) # 4-byte Spill +; X32-NEXT: adcl -164(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -384(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl %edi, -116(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -256(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -300(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: addl -76(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -304(%ebp) # 4-byte Spill +; X32-NEXT: adcl -72(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -132(%ebp) # 4-byte Spill +; X32-NEXT: adcl -176(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -236(%ebp) # 4-byte Spill +; X32-NEXT: adcl -200(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -204(%ebp) # 4-byte Spill +; X32-NEXT: movl -224(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -284(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl -380(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -140(%ebp) # 4-byte Spill +; X32-NEXT: movl -308(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -116(%ebp) # 
4-byte Folded Spill +; X32-NEXT: adcl -208(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -256(%ebp) # 4-byte Spill +; X32-NEXT: movl -492(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -260(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, -108(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -28(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 92(%eax), %eax +; X32-NEXT: movl %eax, -96(%ebp) # 4-byte Spill +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -28(%ebp), %ebx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -124(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx ; X32-NEXT: addl %ebx, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: movl %eax, -104(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -556(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -136(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -560(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -264(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -92(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -28(%ebp) # 4-byte Spill +; X32-NEXT: movl -552(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -260(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -168(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -148(%ebp) # 4-byte Spill +; X32-NEXT: movl -460(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi ; X32-NEXT: movl %eax, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %ecx, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %ecx, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: addl %esi, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %edx, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %edi, %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 
4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %ebx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %edx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %esi, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: addl -168(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -124(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -128(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -460(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -712(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -976(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -108(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -48(%ebp) # 4-byte Spill +; X32-NEXT: adcl -104(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -168(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -92(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -28(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -552(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -184(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -104(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -108(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -104(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -60(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -104(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb %cl +; X32-NEXT: movl -460(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -524(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -160(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -528(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -268(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl -48(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -108(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -168(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -104(%ebp) # 4-byte Folded Spill ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edi -; X32-NEXT: 
adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: addl %edi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %esi, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx +; X32-NEXT: addl -92(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -28(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: setb -28(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -492(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -184(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -92(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -212(%ebp) # 4-byte Spill +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -92(%ebp), %ebx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: addl %edx, %eax +; X32-NEXT: movl %edx, -92(%ebp) # 4-byte Spill +; X32-NEXT: movl -492(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -60(%ebp), %edi # 4-byte Reload +; 
X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -208(%ebp) # 4-byte Spill +; X32-NEXT: adcl -92(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -92(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -92(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -556(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -160(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -560(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -268(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl -212(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl -208(%ebp), %ecx # 4-byte Reload ; X32-NEXT: adcl %esi, %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: addl %edx, %ecx -; X32-NEXT: adcl %esi, %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %eax, %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl 
%eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: adcl $0, %edi +; X32-NEXT: movzbl -28(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi ; X32-NEXT: adcl $0, %ebx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi +; X32-NEXT: addl -712(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -212(%ebp) # 4-byte Spill +; X32-NEXT: adcl -968(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -208(%ebp) # 4-byte Spill +; X32-NEXT: adcl -964(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -244(%ebp) # 4-byte Spill +; X32-NEXT: adcl -972(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -248(%ebp) # 4-byte Spill +; X32-NEXT: movl -388(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -260(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, -92(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -168(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 76(%eax), %eax +; X32-NEXT: movl %eax, -28(%ebp) # 4-byte Spill +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -168(%ebp), %ebx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edi -; X32-NEXT: addl %edx, %esi -; X32-NEXT: adcl %ecx, %edi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movzbl %al, %ebx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl 
{{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: addl %esi, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %edi, %eax -; X32-NEXT: movl %eax, %edi -; X32-NEXT: adcl %ebx, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl %eax, %esi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -124(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -252(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -28(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -564(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -136(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -568(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -264(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -156(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -48(%ebp) # 4-byte Spill +; X32-NEXT: movl -520(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -260(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -308(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -168(%ebp) # 4-byte Spill +; X32-NEXT: movl -444(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -308(%ebp), %ebx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -124(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -308(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -444(%ebp), %edi # 4-byte Reload ; X32-NEXT: movl %edi, %eax -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl 
{{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl %edi, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl %edx, %eax -; X32-NEXT: adcl %ecx, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -716(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -992(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -92(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -84(%ebp) # 4-byte Spill +; X32-NEXT: adcl -252(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -100(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -156(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -48(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -520(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -184(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -92(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -252(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -92(%ebp), %ecx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -60(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: 
addl %ecx, %eax +; X32-NEXT: movl %eax, -92(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb %cl +; X32-NEXT: movl -444(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -500(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -160(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -496(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -268(%ebp), %ecx # 4-byte Folded Reload ; X32-NEXT: addl %eax, %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %edi -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl %ebx, %edi ; X32-NEXT: adcl %edx, %ecx -; X32-NEXT: setb %dl -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movzbl %dl, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl -84(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -252(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -100(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -92(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %esi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl -156(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -48(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb -48(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -388(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -184(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -156(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -100(%ebp) # 4-byte Spill +; X32-NEXT: movl -28(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -156(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, -156(%ebp) # 4-byte Spill +; X32-NEXT: movl -388(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -60(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -84(%ebp) # 4-byte Spill +; X32-NEXT: adcl -156(%ebp), %edx # 4-byte Folded Reload ; X32-NEXT: movl %edx, %ebx -; X32-NEXT: addl %eax, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %eax, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %edx, %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: setb -156(%ebp) # 
1-byte Folded Spill +; X32-NEXT: movl -28(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -156(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -564(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -160(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -568(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -268(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl -100(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %esi, %edx +; X32-NEXT: movl -84(%ebp), %esi # 4-byte Reload ; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movzbl -48(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -716(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -988(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -984(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -980(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl -148(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -100(%ebp) # 4-byte Spill +; X32-NEXT: adcl -128(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -84(%ebp) # 4-byte Spill +; X32-NEXT: adcl -108(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -144(%ebp) # 4-byte Spill +; X32-NEXT: adcl -104(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -188(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -212(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -208(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -244(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -248(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -388(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -348(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -108(%ebp) # 4-byte Spill +; X32-NEXT: movl -28(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -216(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -48(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -28(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -564(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -180(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -568(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -320(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -128(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -148(%ebp) # 4-byte Spill +; X32-NEXT: movl -520(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -348(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, -156(%ebp) # 4-byte Spill +; X32-NEXT: movl -444(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -216(%ebp), %esi # 4-byte Reload +; X32-NEXT: 
mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -104(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb -112(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -444(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -112(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -720(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -1008(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -108(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -300(%ebp) # 4-byte Spill +; X32-NEXT: adcl -48(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -112(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -128(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -148(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -520(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -288(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, -48(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx ; X32-NEXT: movl %eax, %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %ebx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %esi, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %edi, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: addl %edi, %ecx +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -16(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -108(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb %cl +; X32-NEXT: movl -444(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -500(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -280(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -496(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -312(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl -300(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -48(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -112(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -108(%ebp) # 4-byte Folded Spill ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; 
X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: addl %edx, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %esi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx +; X32-NEXT: addl -128(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -148(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: setb -112(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -388(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -288(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, -128(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -148(%ebp) # 4-byte Spill +; X32-NEXT: movl -28(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -128(%ebp), %ebx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; 
X32-NEXT: addl %edx, %eax +; X32-NEXT: movl %edx, -300(%ebp) # 4-byte Spill +; X32-NEXT: movl -388(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -16(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -128(%ebp) # 4-byte Spill +; X32-NEXT: adcl -300(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: setb -300(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -28(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -300(%ebp), %edi # 1-byte Folded Reload +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl -564(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -280(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -568(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -312(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl -148(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl -128(%ebp), %ecx # 4-byte Reload ; X32-NEXT: adcl %esi, %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx +; X32-NEXT: movzbl -112(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl -720(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -664(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -996(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -1000(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -100(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -156(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -84(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -104(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -144(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -48(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -188(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -108(%ebp) # 4-byte Folded Spill ; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, 
{{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: addl %edx, %ecx -; X32-NEXT: adcl %esi, %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %eax, %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill ; X32-NEXT: adcl $0, %ecx ; X32-NEXT: adcl $0, %edi ; X32-NEXT: adcl $0, %ebx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi +; X32-NEXT: addl -212(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -148(%ebp) # 4-byte Spill +; X32-NEXT: adcl -208(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -128(%ebp) # 4-byte Spill +; X32-NEXT: adcl -244(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -84(%ebp) # 4-byte Spill +; X32-NEXT: adcl -248(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -144(%ebp) # 4-byte Spill +; X32-NEXT: setb -100(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -492(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -348(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -212(%ebp) # 4-byte Spill +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %ecx, %esi ; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; 
X32-NEXT: addl %edi, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -216(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %eax, -208(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb -248(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -96(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -248(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -180(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -556(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -320(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -560(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movl %esi, -180(%ebp) # 4-byte Spill ; X32-NEXT: adcl %edx, %ecx -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movzbl %al, %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx +; X32-NEXT: movl %ecx, -320(%ebp) # 4-byte Spill +; X32-NEXT: movl -552(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -348(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -248(%ebp) # 4-byte Spill +; X32-NEXT: movl -460(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -216(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -244(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -188(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -460(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movzbl -188(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: addl -724(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -1004(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -212(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -208(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -188(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -180(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -320(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -552(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -288(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -208(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -212(%ebp) # 4-byte Spill +; X32-NEXT: movl -460(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -208(%ebp), %ecx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: addl %esi, {{[0-9]+}}(%esp) # 4-byte Folded Spill 
+; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull -16(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -208(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl -460(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull -16(%ebp) # 4-byte Folded Reload +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx ; X32-NEXT: adcl %ecx, %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %edi, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl %ebx, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl %eax, %esi -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl $0, %eax -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: movl -524(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -280(%ebp), %esi # 4-byte Folded Reload 
+; X32-NEXT: movl -528(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -312(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: addl %edi, -212(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -188(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -208(%ebp) # 4-byte Folded Spill ; X32-NEXT: adcl $0, %esi -; X32-NEXT: adcl $0, %edx -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl $0, %eax -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, 
{{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: addl -180(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -524(%ebp) # 4-byte Spill +; X32-NEXT: adcl -320(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -528(%ebp) # 4-byte Spill +; X32-NEXT: setb -180(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -492(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -288(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -188(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -320(%ebp) # 4-byte Spill +; X32-NEXT: movl -96(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -188(%ebp), %edi # 4-byte Folded Reload ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %edi, %edx +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -16(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, %edi +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -188(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull -16(%ebp) # 4-byte Folded Reload +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -188(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -556(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -280(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -560(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -312(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -320(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl -524(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -528(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movzbl -180(%ebp), %eax # 1-byte Folded Reload ; X32-NEXT: adcl %eax, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: addl %edx, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %esi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, 
{{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: addl %edx, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl %esi, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl %edi, %esi -; X32-NEXT: adcl %ebx, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: addl -724(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -668(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -732(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -728(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -148(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -248(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -128(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -244(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -84(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -212(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -144(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -208(%ebp) # 4-byte Folded Spill +; X32-NEXT: movzbl -100(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: movl %edx, -320(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %edi, 
-300(%ebp) # 4-byte Spill ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: addl %edx, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl %esi, %ebx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl %ecx, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %esi, -556(%ebp) # 4-byte Spill ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: movl %ebx, %eax -; X32-NEXT: adcl $0, %eax +; X32-NEXT: movl %ecx, -560(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %ebx +; X32-NEXT: movl 96(%ebx), %ecx +; X32-NEXT: movl %ecx, -312(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -100(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -180(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -100(%ebp), %ecx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edi -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl 
{{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl 100(%ebx), %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %ebx, -100(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -148(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb -280(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: movzbl -280(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ecx +; X32-NEXT: movl -312(%ebp), %eax # 4-byte Reload +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, -84(%ebp) # 4-byte Spill +; X32-NEXT: movl %edx, -280(%ebp) # 4-byte Spill +; X32-NEXT: movl -160(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: movl -268(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: addl %ebx, %edi +; X32-NEXT: movl %edi, -188(%ebp) # 4-byte Spill +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movl %esi, -144(%ebp) # 4-byte Spill +; X32-NEXT: movl -260(%ebp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -312(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, -164(%ebp) # 4-byte Spill +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx ; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl $0, %edi -; X32-NEXT: addl %ecx, %esi -; X32-NEXT: adcl %edx, %edi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -100(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -384(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %edi ; X32-NEXT: setb %cl -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movzbl %cl, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: addl %esi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl %eax, %esi -; X32-NEXT: adcl %edi, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl -124(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -84(%ebp), %edi # 4-byte Reload +; X32-NEXT: addl -136(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -280(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -264(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi ; X32-NEXT: adcl 
%edx, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %esi +; X32-NEXT: addl -180(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -84(%ebp) # 4-byte Spill +; X32-NEXT: adcl -148(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -280(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -188(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -144(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 104(%eax), %ecx +; X32-NEXT: movl %ecx, -180(%ebp) # 4-byte Spill +; X32-NEXT: movl -260(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -128(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -148(%ebp) # 4-byte Spill ; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl -128(%ebp), %ecx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edi -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: movl 12(%ebp), %eax +; X32-NEXT: movl 108(%eax), %edx +; X32-NEXT: movl %ebx, %eax ; X32-NEXT: movl %edx, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, 
{{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebx, -112(%ebp) # 4-byte Spill +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -128(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb -176(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %esi, %edi +; X32-NEXT: movzbl -176(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ecx +; X32-NEXT: movl -180(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %edx, -200(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -176(%ebp) # 4-byte Spill +; X32-NEXT: movl -136(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movl -264(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: addl %edi, %esi +; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl -84(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, -148(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -280(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, -128(%ebp) # 4-byte Folded Spill ; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax ; X32-NEXT: adcl $0, %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: addl -188(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -136(%ebp) # 4-byte Spill +; X32-NEXT: adcl -144(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -264(%ebp) # 4-byte Spill +; X32-NEXT: setb -84(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -184(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl %ebx, %esi +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -144(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -280(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -144(%ebp), %edi # 4-byte Folded Reload ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %ecx, %edi -; X32-NEXT: adcl %eax, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl 
%al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl %eax, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %ecx, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -112(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, %edi +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb -144(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -112(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -144(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -160(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -176(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -268(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -200(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi ; X32-NEXT: adcl %edx, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: addl %edi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %esi, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl -136(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -280(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl -264(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -136(%ebp) # 4-byte Spill +; X32-NEXT: movzbl -84(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: movl %esi, -160(%ebp) # 4-byte Spill ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded 
Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl %edx, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl %esi, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl %eax, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %ecx, %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl %ecx, -268(%ebp) # 4-byte Spill +; X32-NEXT: movl -348(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %ebx, %ecx +; X32-NEXT: imull %eax, %ecx +; X32-NEXT: movl -180(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, -264(%ebp) # 4-byte Spill +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: imull -216(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %edx, %esi +; X32-NEXT: movl %esi, -180(%ebp) # 4-byte Spill +; X32-NEXT: movl -288(%ebp), %eax # 4-byte Reload ; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl -100(%ebp), %ebx # 4-byte Reload +; X32-NEXT: imull %ebx, %esi +; X32-NEXT: movl -312(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %edx +; X32-NEXT: movl -16(%ebp), %esi # 4-byte Reload +; X32-NEXT: imull %edi, %esi +; X32-NEXT: addl %edx, %esi +; X32-NEXT: addl -264(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -84(%ebp) # 4-byte Spill +; X32-NEXT: adcl -180(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -348(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -288(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; 
X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -216(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -264(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -100(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %edi ; X32-NEXT: addl %ecx, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %edi, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl %ebx, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: addl %edx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl -84(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -348(%ebp) # 4-byte Spill +; X32-NEXT: adcl -16(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -180(%ebp) # 4-byte Spill +; X32-NEXT: movl 12(%ebp), %edx +; X32-NEXT: movl 124(%edx), %ecx +; X32-NEXT: movl -260(%ebp), %eax # 4-byte Reload +; X32-NEXT: imull %eax, %ecx +; X32-NEXT: movl 120(%edx), %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, -216(%ebp) # 4-byte Spill +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: imull -124(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %edx, %esi +; X32-NEXT: movl 112(%edi), %ebx +; X32-NEXT: movl 116(%edi), %ecx +; X32-NEXT: movl %ecx, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %edi +; X32-NEXT: imull %ecx, %edi +; X32-NEXT: mull %ebx +; X32-NEXT: addl %edi, %edx +; X32-NEXT: movl -60(%ebp), %ecx # 4-byte Reload +; X32-NEXT: imull %ebx, %ecx +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: addl -216(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -184(%ebp) # 4-byte Spill ; X32-NEXT: adcl %esi, %ecx -; X32-NEXT: movl %edi, %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl %edi, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl %edi, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %eax -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: addl 
{{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl %ecx, -60(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -260(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -312(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -216(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi ; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi +; X32-NEXT: addl -312(%ebp), %esi # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edi -; X32-NEXT: addl %eax, %esi -; X32-NEXT: adcl %edx, %edi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: addl %esi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl %eax, %esi -; X32-NEXT: adcl %edi, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %edx, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl 
%ebx, %eax +; X32-NEXT: mull -124(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl %edi, %ebx +; X32-NEXT: setb -260(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull -124(%ebp) # 4-byte Folded Reload +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl -260(%ebp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: addl -184(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -60(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -216(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -288(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl -264(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -348(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -180(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: addl -280(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -216(%ebp) # 4-byte Spill +; X32-NEXT: adcl -136(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -264(%ebp) # 4-byte Spill +; X32-NEXT: adcl -160(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -180(%ebp) # 4-byte Spill +; X32-NEXT: adcl -268(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -288(%ebp) # 4-byte Spill +; X32-NEXT: movl -352(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -520(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -60(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -16(%ebp), %ebx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl -444(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -136(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -364(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -500(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -396(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -496(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, -160(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl -416(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -520(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -124(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -184(%ebp) # 4-byte Spill +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl -124(%ebp), %ebx # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl 
{{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %edi, %edx -; X32-NEXT: adcl %ebx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: addl %edx, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %esi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl %edx, %edi -; X32-NEXT: adcl %esi, %ebx -; X32-NEXT: movl %ecx, %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: adcl 
{{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl %esi, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -444(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -124(%ebp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -500(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -324(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -496(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -400(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: addl -60(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -500(%ebp) # 4-byte Spill +; X32-NEXT: adcl -136(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -496(%ebp) # 4-byte Spill +; X32-NEXT: adcl $0, -160(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, -16(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -416(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -388(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -60(%ebp) # 4-byte Spill +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl %edx, %eax +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -28(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; 
X32-NEXT: movl %eax, %edi ; X32-NEXT: adcl %esi, %ecx -; X32-NEXT: setb %dl -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movzbl %dl, %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: setb -136(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -316(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -136(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -324(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -564(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -400(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -568(%ebp), %ecx # 4-byte Folded Reload ; X32-NEXT: addl %eax, %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %ecx, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -500(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -60(%ebp) # 4-byte Folded Spill +; X32-NEXT: adcl -496(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -136(%ebp) # 4-byte Spill ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl %ecx, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), 
%edi # 4-byte Reload -; X32-NEXT: adcl %ecx, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %ebx, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: addl %edx, %eax -; X32-NEXT: adcl %esi, %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: addl -160(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -324(%ebp) # 4-byte Spill +; X32-NEXT: adcl -16(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -400(%ebp) # 4-byte Spill +; X32-NEXT: setb -160(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl -352(%ebp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl -388(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, -268(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl -120(%ebp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl -268(%ebp), %edi # 4-byte Folded Reload ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; 
X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: addl %edx, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull -28(%ebp) # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, -268(%ebp) # 4-byte Spill ; X32-NEXT: adcl %esi, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %edi, %edx -; X32-NEXT: adcl %ebx, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl $0, %eax +; X32-NEXT: setb -260(%ebp) # 1-byte Folded Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl -28(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl -260(%ebp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl -364(%ebp), %esi # 4-byte Reload +; X32-NEXT: addl -564(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -396(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -568(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl -324(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, -16(%ebp) # 4-byte Folded Spill +; X32-NEXT: movl -400(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, -268(%ebp) # 4-byte Folded Spill +; X32-NEXT: movzbl -160(%ebp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: movl %esi, -364(%ebp) # 4-byte Spill ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: adcl $0, %esi -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl %ecx, -396(%ebp) # 4-byte Spill +; X32-NEXT: movl -440(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %edi, %ecx +; X32-NEXT: imull %eax, %ecx +; X32-NEXT: movl -388(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, -28(%ebp) # 4-byte Spill +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: imull -340(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %edx, %esi +; X32-NEXT: movl %esi, -388(%ebp) # 4-byte Spill +; X32-NEXT: movl -408(%ebp), %eax # 4-byte Reload ; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; 
X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl -444(%ebp), %ebx # 4-byte Reload +; X32-NEXT: imull %ebx, %esi +; X32-NEXT: movl -520(%ebp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %edx +; X32-NEXT: movl -192(%ebp), %esi # 4-byte Reload +; X32-NEXT: imull %edi, %esi +; X32-NEXT: addl %edx, %esi +; X32-NEXT: addl -28(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -28(%ebp) # 4-byte Spill +; X32-NEXT: adcl -388(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -192(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -440(%ebp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, -324(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx ; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl %eax, %esi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl -340(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, -260(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %edi +; X32-NEXT: setb %bl +; X32-NEXT: movl -444(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %bl, %ecx ; X32-NEXT: adcl %ecx, %edx -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: addl %esi, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl %edx, %edi -; X32-NEXT: movl %ebx, %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %edi 
-; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: addl -28(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -340(%ebp) # 4-byte Spill +; X32-NEXT: adcl -192(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -192(%ebp) # 4-byte Spill +; X32-NEXT: movl -416(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -96(%ebp), %edi # 4-byte Reload +; X32-NEXT: imull %eax, %edi ; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl -492(%ebp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, -28(%ebp) # 4-byte Spill +; X32-NEXT: addl %edi, %edx +; X32-NEXT: imull -316(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: movl %ecx, -492(%ebp) # 4-byte Spill +; X32-NEXT: movl -352(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: movl -460(%ebp), %edi # 4-byte Reload +; X32-NEXT: imull %edi, %ecx +; X32-NEXT: movl -552(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl -120(%ebp), %ecx # 4-byte Reload +; X32-NEXT: imull %ebx, %ecx +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: addl -28(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -96(%ebp) # 4-byte Spill +; X32-NEXT: adcl -492(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -120(%ebp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, -28(%ebp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %ebx, %ecx ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: addl %edx, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %esi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 
{{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: addl %edx, %ebx -; X32-NEXT: adcl %ecx, %eax +; X32-NEXT: movl -552(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl -316(%ebp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, -160(%ebp) # 4-byte Spill +; X32-NEXT: adcl %esi, %edi ; X32-NEXT: setb %cl -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl -460(%ebp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %edi, %eax ; X32-NEXT: movzbl %cl, %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi -; X32-NEXT: addl %ebx, %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl %eax, %ebx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: adcl %edx, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 
4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 
4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl 16(%ebp), %edx -; X32-NEXT: movl %ecx, 4(%edx) -; X32-NEXT: movl %eax, (%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 8(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 12(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 16(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 20(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 24(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 28(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 32(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 36(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 40(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 44(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 48(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 52(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 56(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 60(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 64(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 68(%edx) -; X32-NEXT: movl 
{{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 72(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 76(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 80(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 84(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 88(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 92(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 96(%edx) -; X32-NEXT: movl %esi, 100(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 104(%edx) -; X32-NEXT: movl %edi, 108(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 112(%edx) -; X32-NEXT: movl %ebx, 116(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 120(%edx) -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: movl %eax, 124(%edx) -; X32-NEXT: leal -12(%ebp), %esp +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: addl -96(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -120(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -28(%ebp), %edx # 4-byte Reload +; X32-NEXT: addl -324(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -160(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -260(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl -340(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl -192(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: addl -16(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: adcl -268(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -160(%ebp) # 4-byte Spill +; X32-NEXT: movl %eax, %edx +; X32-NEXT: adcl -364(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -396(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl -164(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl -124(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -384(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl -60(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -148(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -136(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -128(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl -216(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -28(%ebp) # 4-byte Spill +; X32-NEXT: movl -160(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -264(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl -180(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -120(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -288(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -16(%ebp) # 4-byte Spill +; X32-NEXT: addl -248(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -184(%ebp) # 4-byte Spill +; X32-NEXT: adcl -244(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -124(%ebp) # 4-byte Spill +; X32-NEXT: adcl -212(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -60(%ebp) # 4-byte Spill +; X32-NEXT: adcl -208(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -136(%ebp) # 4-byte Spill +; X32-NEXT: movl -28(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -320(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -300(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -160(%ebp) # 4-byte Spill +; X32-NEXT: 
movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -556(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -120(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -560(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl -168(%ebp), %eax # 4-byte Reload +; X32-NEXT: addl -344(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -168(%ebp) # 4-byte Spill +; X32-NEXT: movl -308(%ebp), %esi # 4-byte Reload +; X32-NEXT: adcl -232(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl -252(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -436(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -92(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -472(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -92(%ebp) # 4-byte Spill +; X32-NEXT: movl -156(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -88(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -104(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -296(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -104(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -40(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -48(%ebp) # 4-byte Spill +; X32-NEXT: movl -108(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -56(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -108(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -304(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -184(%ebp) # 4-byte Spill +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -132(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -124(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -236(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -60(%ebp) # 4-byte Spill +; X32-NEXT: movl -136(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -204(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -136(%ebp) # 4-byte Spill +; X32-NEXT: adcl -284(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, -28(%ebp) # 4-byte Spill +; X32-NEXT: movl -160(%ebp), %edx # 4-byte Reload +; X32-NEXT: adcl -140(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: movl -120(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -116(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl -16(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -256(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl -168(%ebp), %ecx # 4-byte Reload +; X32-NEXT: addl -432(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -168(%ebp) # 4-byte Spill +; X32-NEXT: adcl -456(%ebp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, -308(%ebp) # 4-byte Spill +; X32-NEXT: adcl -44(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, -252(%ebp) # 4-byte Spill +; X32-NEXT: movl -92(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -52(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -92(%ebp) # 4-byte Spill +; X32-NEXT: adcl -24(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, -156(%ebp) # 4-byte Spill +; X32-NEXT: movl -104(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -272(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -104(%ebp) # 4-byte Spill +; X32-NEXT: movl -48(%ebp), %ebx # 4-byte Reload +; X32-NEXT: adcl -276(%ebp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl -108(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -240(%ebp), %ecx # 4-byte Folded Reload +; 
X32-NEXT: movl %ecx, -108(%ebp) # 4-byte Spill +; X32-NEXT: movl -184(%ebp), %edi # 4-byte Reload +; X32-NEXT: adcl -172(%ebp), %edi # 4-byte Folded Reload +; X32-NEXT: movl -124(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -80(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -124(%ebp) # 4-byte Spill +; X32-NEXT: movl -60(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -36(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -60(%ebp) # 4-byte Spill +; X32-NEXT: movl -136(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -20(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -136(%ebp) # 4-byte Spill +; X32-NEXT: movl -28(%ebp), %ecx # 4-byte Reload +; X32-NEXT: adcl -336(%ebp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, -28(%ebp) # 4-byte Spill +; X32-NEXT: adcl -360(%ebp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl -392(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -120(%ebp) # 4-byte Spill +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: adcl -412(%ebp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, -16(%ebp) # 4-byte Spill +; X32-NEXT: movl 16(%ebp), %ecx +; X32-NEXT: movl -648(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, (%ecx) +; X32-NEXT: movl -644(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 4(%ecx) +; X32-NEXT: movl -536(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 8(%ecx) +; X32-NEXT: movl -596(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 12(%ecx) +; X32-NEXT: movl -592(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 16(%ecx) +; X32-NEXT: movl -532(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 20(%ecx) +; X32-NEXT: movl -428(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 24(%ecx) +; X32-NEXT: movl -452(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 28(%ecx) +; X32-NEXT: movl -508(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 32(%ecx) +; X32-NEXT: movl -504(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 36(%ecx) +; X32-NEXT: movl -328(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 40(%ecx) +; X32-NEXT: movl -468(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 44(%ecx) +; X32-NEXT: movl -404(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 48(%ecx) +; X32-NEXT: movl -540(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 52(%ecx) +; X32-NEXT: movl -228(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 56(%ecx) +; X32-NEXT: movl -196(%ebp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, 60(%ecx) +; X32-NEXT: movl -168(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 64(%ecx) +; X32-NEXT: movl -308(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 68(%ecx) +; X32-NEXT: movl -252(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 72(%ecx) +; X32-NEXT: movl -92(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 76(%ecx) +; X32-NEXT: movl -156(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 80(%ecx) +; X32-NEXT: movl -104(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 84(%ecx) +; X32-NEXT: movl %ebx, 88(%ecx) +; X32-NEXT: movl -108(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 92(%ecx) +; X32-NEXT: movl %edi, 96(%ecx) +; X32-NEXT: movl -124(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 100(%ecx) +; X32-NEXT: movl -60(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 104(%ecx) +; X32-NEXT: movl -136(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 108(%ecx) +; X32-NEXT: movl -28(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 112(%ecx) +; X32-NEXT: movl %edx, 116(%ecx) +; X32-NEXT: movl -120(%ebp), %eax # 4-byte 
Reload +; X32-NEXT: movl %eax, 120(%ecx) +; X32-NEXT: movl -16(%ebp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, 124(%ecx) +; X32-NEXT: addl $996, %esp # imm = 0x3E4 ; X32-NEXT: popl %esi ; X32-NEXT: popl %edi ; X32-NEXT: popl %ebx diff --git a/llvm/test/CodeGen/X86/mul-i256.ll b/llvm/test/CodeGen/X86/mul-i256.ll index 8a66692..0a48ae7 100644 --- a/llvm/test/CodeGen/X86/mul-i256.ll +++ b/llvm/test/CodeGen/X86/mul-i256.ll @@ -9,180 +9,346 @@ define void @test(i256* %a, i256* %b, i256* %out) #0 { ; X32: # BB#0: # %entry ; X32-NEXT: pushl %ebp ; X32-NEXT: .cfi_def_cfa_offset 8 -; X32-NEXT: .cfi_offset %ebp, -8 -; X32-NEXT: movl %esp, %ebp -; X32-NEXT: .cfi_def_cfa_register %ebp ; X32-NEXT: pushl %ebx +; X32-NEXT: .cfi_def_cfa_offset 12 ; X32-NEXT: pushl %edi +; X32-NEXT: .cfi_def_cfa_offset 16 ; X32-NEXT: pushl %esi -; X32-NEXT: andl $-8, %esp -; X32-NEXT: subl $168, %esp +; X32-NEXT: .cfi_def_cfa_offset 20 +; X32-NEXT: subl $88, %esp +; X32-NEXT: .cfi_def_cfa_offset 108 ; X32-NEXT: .cfi_offset %esi, -20 ; X32-NEXT: .cfi_offset %edi, -16 ; X32-NEXT: .cfi_offset %ebx, -12 -; X32-NEXT: movl 8(%ebp), %eax -; X32-NEXT: movl 16(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 20(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 24(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 28(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 8(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 12(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl (%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 4(%eax), %ebx -; X32-NEXT: movl 12(%ebp), %eax -; X32-NEXT: movl 16(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 20(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 24(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 28(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl (%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: .cfi_offset %ebp, -8 +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl 12(%ecx), %ebp +; X32-NEXT: movl 8(%ecx), %edi +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl (%eax), %ebx +; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax ; X32-NEXT: movl 4(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 8(%eax), %esi -; X32-NEXT: movl 12(%eax), %edi -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %ecx, %edi ; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: pushl %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb 
{{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ebx +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %edx +; X32-NEXT: movl %edx, %ebp +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl %esi, (%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl %esi, %eax +; X32-NEXT: adcl %ebp, %edx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl (%esi), %ebp +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %ecx +; X32-NEXT: movl %ecx, %edi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl 4(%esi), %esi +; X32-NEXT: movl %esi, %eax ; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %ecx, %ebp +; X32-NEXT: movzbl %bl, %eax +; X32-NEXT: adcl %eax, %edi ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl 
$32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp +; X32-NEXT: movl %esi, %eax +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl (%esp), %ecx # 4-byte Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: addl %ebp, %ecx +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, (%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: movl 8(%ecx), %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl %esi, %edi +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ebp +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ebp +; X32-NEXT: movl 12(%ecx), %ecx +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ebp, %edi +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %edi, %ebp +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: movl %ebx, %edi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: addl %ebp, %ebx +; X32-NEXT: adcl %esi, %eax +; X32-NEXT: movl (%esp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: setb (%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; 
X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %ebx, %ebp +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movzbl (%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl 16(%ecx), %esi +; X32-NEXT: imull %esi, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movl 20(%ecx), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: imull %eax, %edi +; X32-NEXT: addl %edx, %edi +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl 24(%ecx), %eax +; X32-NEXT: movl %ecx, %ebp +; X32-NEXT: movl %eax, %edi +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: imull %ecx, %edi +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, (%esp) # 4-byte Spill +; X32-NEXT: addl %edi, %edx +; X32-NEXT: movl 28(%ebp), %ebp +; X32-NEXT: imull %ebx, %ebp +; X32-NEXT: addl %edx, %ebp +; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload +; X32-NEXT: addl %edx, (%esp) # 4-byte Folded Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ebx, %edi ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: adcl $0, %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; 
X32-NEXT: addl (%esp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, (%esp) # 4-byte Spill +; X32-NEXT: adcl %ebp, %edx ; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl $0, %edi ; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: addl %ecx, %edi -; X32-NEXT: adcl %eax, %ebx -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl 28(%ebx), %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: imull %esi, %ecx +; X32-NEXT: movl 24(%ebx), %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %edi ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: imull {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: addl %edx, %edi +; X32-NEXT: movl 16(%ebx), %ebp +; X32-NEXT: movl 20(%ebx), %ebx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: imull %ebx, %ecx +; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %ebp +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: imull %ebp, %ecx +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl %edi, %esi -; X32-NEXT: adcl %ebx, %ecx +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %edi, %ebx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %ebx, %esi +; X32-NEXT: adcl %ecx, %edi +; X32-NEXT: setb %cl +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ebp +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: movl 
{{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl (%esp), %eax # 4-byte Folded Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, %ebx +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl 16(%ebp), %edi -; X32-NEXT: movl %ebx, 4(%edi) -; X32-NEXT: movl 16(%ebp), %ebx +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, (%ecx) ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: movl %edi, (%ebx) +; X32-NEXT: movl %edi, 4(%ecx) ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: movl %edi, 8(%ebx) +; X32-NEXT: movl %edi, 8(%ecx) ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: movl %edi, 12(%ebx) -; X32-NEXT: movl %esi, 16(%ebx) -; X32-NEXT: movl %ecx, 20(%ebx) -; X32-NEXT: movl %edx, 24(%ebx) -; X32-NEXT: movl %eax, 28(%ebx) -; X32-NEXT: leal -12(%ebp), %esp +; X32-NEXT: movl %edi, 12(%ecx) +; X32-NEXT: movl %ebx, 16(%ecx) +; X32-NEXT: movl %esi, 20(%ecx) +; X32-NEXT: movl %eax, 24(%ecx) +; X32-NEXT: movl %edx, 28(%ecx) +; X32-NEXT: addl $88, %esp ; X32-NEXT: popl %esi ; X32-NEXT: popl %edi ; X32-NEXT: popl %ebx diff --git a/llvm/test/CodeGen/X86/mul-i512.ll b/llvm/test/CodeGen/X86/mul-i512.ll index 3da17b6..5e165fe 100644 --- a/llvm/test/CodeGen/X86/mul-i512.ll +++ b/llvm/test/CodeGen/X86/mul-i512.ll @@ -6,886 +6,1523 @@ define void @test_512(i512* %a, i512* %b, i512* %out) nounwind { ; X32-LABEL: test_512: ; X32: # BB#0: ; X32-NEXT: pushl %ebp -; X32-NEXT: movl %esp, %ebp ; X32-NEXT: pushl %ebx ; X32-NEXT: pushl %edi ; X32-NEXT: pushl %esi -; X32-NEXT: andl $-8, %esp -; X32-NEXT: subl $656, %esp # imm = 0x290 -; X32-NEXT: movl 8(%ebp), %eax -; X32-NEXT: movl 48(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 52(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 56(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 60(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 40(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 44(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 32(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 36(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl (%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 4(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 16(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 20(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 8(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 12(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 24(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte 
Spill -; X32-NEXT: movl 28(%eax), %eax +; X32-NEXT: subl $244, %esp +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl 20(%ecx), %edi +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl 16(%ecx), %eax ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 12(%ebp), %eax -; X32-NEXT: movl 48(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 52(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 56(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 60(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 32(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 36(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 40(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 44(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl (%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 4(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 8(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 12(%eax), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl 16(%eax), %esi -; X32-NEXT: movl 20(%eax), %edi -; X32-NEXT: movl 24(%eax), %ebx -; X32-NEXT: movl 28(%eax), %eax -; X32-NEXT: movl %eax, (%esp) # 4-byte Spill -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %eax -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi +; X32-NEXT: movl %ecx, %ebp +; X32-NEXT: xorl %ebx, %ebx +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl %esi, %edi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edi, (%esp) # 4-byte Spill +; X32-NEXT: adcl %ecx, %ebx +; X32-NEXT: movl %ecx, %edi ; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: pushl %esi +; X32-NEXT: setb %cl +; X32-NEXT: addl %eax, %ebx +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl 24(%ebp), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %edi +; X32-NEXT: addl %ebx, %esi ; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx +; X32-NEXT: adcl %ecx, %edi +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; 
X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl (%ecx), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: xorl %ebx, %ebx +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl 4(%ecx), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ecx, %esi +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %edi, %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl %ebp, %ebx ; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movzbl %bl, %ebx +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl 8(%esi), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebp, %esi +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: addl %ecx, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movl %ebp, %esi +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl (%eax), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: xorl %ebp, %ebp +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: movl %ecx, %edx +; X32-NEXT: addl %esi, %edx +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: adcl %edi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl 16(%eax), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %ebp +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ecx, %edi +; X32-NEXT: movl %ecx, %ebp +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl %eax, %edi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl (%esp), %eax # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: 
adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl 4(%eax), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: xorl %edi, %edi +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %ebx, %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl %ebp, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %ecx +; X32-NEXT: movl %ebx, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: setb %bl +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movzbl %bl, %ebx +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl 8(%eax), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebp, %edi +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl %edi, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl %esi, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl 20(%esi), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %ebx ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 +; X32-NEXT: addl %edi, %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: addl %ebp, %ebx +; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movzbl %bl, %ebx +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl 24(%esi), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte 
Spill +; X32-NEXT: movl %ebp, %esi +; X32-NEXT: addl %eax, %esi ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 +; X32-NEXT: adcl %edx, %edi +; X32-NEXT: addl %ecx, %esi +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebp, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: adcl %ebp, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl %esi, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl %edi, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: addl %edx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: addl %edx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl (%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, (%esp) # 4-byte Spill +; X32-NEXT: movl %edx, %ebp +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl 28(%eax), %esi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ebp, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebp +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ebp +; X32-NEXT: setb %bl +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movzbl 
%bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %ecx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl %edi, %ebp +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: addl (%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: 
mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %ecx, %ebp +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl 12(%eax), %ecx +; X32-NEXT: movl %ecx, (%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull (%esp) # 4-byte Folded Reload +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebp +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: addl %ebx, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl %edi, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebp +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload +; X32-NEXT: movl %ebx, %eax ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl (%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; 
X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload ; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload +; X32-NEXT: addl %ebp, %edx ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %esi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl (%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl 
{{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl 12(%eax), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %esi, %ebp +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax ; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte 
Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl %edi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: pushl %edi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: pushl %esi -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %ebx -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl %edi -; X32-NEXT: pushl %esi -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %esi -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebp, %eax ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl %ebx, %edi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %ecx, %ebp +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: addl 
{{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl (%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %bl +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ecx ; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebx ; X32-NEXT: adcl %edx, %ecx -; X32-NEXT: setb %dl -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movzbl %dl, %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx +; X32-NEXT: addl %ebp, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi ; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload ; X32-NEXT: adcl $0, %edx ; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl (%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebp +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %esi # 1-byte Folded Reload +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebp +; X32-NEXT: adcl %edx, %edi +; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload +; X32-NEXT: 
addl %ebx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ebp +; X32-NEXT: adcl $0, %edi +; X32-NEXT: addl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload ; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload ; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: addl %edx, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 
1-byte Folded Reload +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl 28(%eax), %ebp +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ebx +; X32-NEXT: setb %cl +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ebp +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %ebp +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: addl %edi, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %ebp +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: mull %edi ; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %edx, %ebx -; X32-NEXT: movl %ebx, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, (%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %esi, %ebx -; X32-NEXT: movl %ebx, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl %edx, %edi +; 
X32-NEXT: setb %bl +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl %bl, %esi +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload ; X32-NEXT: addl %eax, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: adcl %edx, %ebx +; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload +; X32-NEXT: addl %ebp, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: adcl $0, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: addl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload ; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload ; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload ; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload 
+; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ecx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl %ebp, %edi +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ecx, %ebx +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload ; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ebp +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edi, %ebp +; X32-NEXT: setb %cl +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: addl %ebx, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %esi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload ; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl 
%ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ecx, %edi ; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %edi, %ebp +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl $0, %ebp +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl 32(%ecx), %edi +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl (%esp), %ebp # 4-byte Reload +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %esi, %ebx +; X32-NEXT: adcl $0, %edi ; X32-NEXT: movl 
{{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax +; X32-NEXT: movl 36(%eax), %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %esi, %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %ebx, %eax ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %bl +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %esi, %ebp +; X32-NEXT: movzbl %bl, %eax +; X32-NEXT: adcl %eax, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload ; X32-NEXT: addl %eax, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: adcl %edx, %esi +; X32-NEXT: addl %ebp, %edi +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %eax, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl %ebx, %esi +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %edi ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %eax, (%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %ebx, %eax -; X32-NEXT: addl %edx, %edi -; X32-NEXT: movl %ecx, %edx -; X32-NEXT: adcl %esi, %edx -; X32-NEXT: movl (%esp), %ecx # 4-byte Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %edi, %ebp +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl %ebx, %esi +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload ; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, (%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte 
Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill ; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx +; X32-NEXT: movl 40(%eax), %ebp +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ebp ; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl 44(%ebx), %ebx +; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ecx, %eax ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: adcl %esi, %edi +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %edi, %esi +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %ecx +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: xorl %edx, %edx +; X32-NEXT: mull %edx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl %eax, %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: addl %eax, %edi ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: addl %esi, %edi ; X32-NEXT: adcl %ecx, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %edi, %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi +; X32-NEXT: addl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, %edi +; X32-NEXT: adcl $0, %eax +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload ; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %edi, {{[0-9]+}}(%esp) 
# 4-byte Folded Spill -; X32-NEXT: addl %edx, %ebx -; X32-NEXT: adcl %esi, %eax -; X32-NEXT: movl %eax, %esi +; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl (%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %esi +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: imull %eax, %ebp +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl %ebp, %edx +; X32-NEXT: imull {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %edx, %esi +; X32-NEXT: movl %esi, %ebp +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %esi +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: imull %ebx, %esi +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: imull %eax, %esi +; X32-NEXT: addl %edx, %esi +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ebp, %esi +; X32-NEXT: movl %esi, %edi +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebp +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %ebp, %ebx +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), 
%ebp # 4-byte Reload +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb %bl ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax +; X32-NEXT: mull %ebp +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl %bl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %edx +; X32-NEXT: movl 60(%edx), %ecx ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: imull %eax, %ecx +; X32-NEXT: movl 56(%edx), %esi +; X32-NEXT: movl %edx, %edi +; X32-NEXT: mull %esi ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl (%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: imull {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: addl %edx, %esi +; X32-NEXT: movl 48(%edi), %ebx +; X32-NEXT: movl 52(%edi), %ebp ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: adcl $0, %ebx -; X32-NEXT: movl %esi, %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl %eax, %edi +; X32-NEXT: imull %ebp, %edi +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %ebx +; X32-NEXT: addl %edi, %edx +; X32-NEXT: movl (%esp), %ecx # 4-byte Reload +; X32-NEXT: imull %ebx, %ecx +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: movl %ecx, (%esp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebp +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %esi, %edi +; X32-NEXT: adcl $0, %ebp +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %ebx +; X32-NEXT: addl %edi, %ebx +; X32-NEXT: adcl %ebp, %ecx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl (%esp), %edx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload ; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 
4-byte Spill ; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload ; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: setb (%esp) # 1-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl %esi, %ecx +; X32-NEXT: movl 40(%ecx), %ebx +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %eax, (%esp) # 4-byte Spill +; X32-NEXT: movl %edx, %ebp +; X32-NEXT: movl 44(%ecx), %ecx +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ebp, %edi ; X32-NEXT: adcl $0, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: addl %eax, %esi -; X32-NEXT: adcl %ecx, %edx -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movzbl %al, %ebx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %edi, %eax ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: adcl %esi, %ebx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl %ebx, %edi +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %esi ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl 32(%esi), %edi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 
4-byte Spill +; X32-NEXT: movl 36(%esi), %esi +; X32-NEXT: movl %esi, %eax +; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: adcl $0, %ebx +; X32-NEXT: movl %edi, %eax ; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload -; X32-NEXT: adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: addl %esi, %edi -; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl %ebx, %edi +; X32-NEXT: setb %bl +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %ebp ; X32-NEXT: movl %eax, %esi -; X32-NEXT: adcl %edx, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload -; X32-NEXT: adcl %ebx, %edx +; X32-NEXT: addl %edi, %esi +; X32-NEXT: movzbl %bl, %eax +; X32-NEXT: adcl %eax, %ebp ; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl %edi, %eax +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: addl %eax, %ecx ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: adcl %edx, %eax +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl %ebp, %eax +; X32-NEXT: addl (%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, (%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %ecx, %ebp +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %edi, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: adcl %esi, %edi +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %ecx, %ebx +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %esi +; X32-NEXT: adcl %edx, %ecx ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill -; X32-NEXT: movzbl (%esp), %eax # 1-byte Folded 
Reload -; X32-NEXT: adcl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: addl %eax, (%esp) # 4-byte Folded Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: adcl $0, %esi +; X32-NEXT: adcl $0, %ecx +; X32-NEXT: addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload ; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %edx +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: mull %esi ; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %esi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: adcl $0, %esi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: adcl %esi, %ecx +; X32-NEXT: setb {{[0-9]+}}(%esp) # 1-byte Folded Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ebx +; X32-NEXT: addl %ecx, %eax +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %ecx # 1-byte Folded Reload +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %eax, %edi +; X32-NEXT: adcl %edx, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: addl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload +; X32-NEXT: adcl %eax, %edi ; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl $0, %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edx -; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl %eax, (%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: 
adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl 48(%ecx), %ebp +; X32-NEXT: imull %ebp, %ebx +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: addl %ebx, %edx +; X32-NEXT: movl 52(%ecx), %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: imull %eax, %edi ; X32-NEXT: addl %edx, %edi ; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl %esi, %ebx -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: movl 56(%ecx), %eax +; X32-NEXT: movl %ecx, %ebx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: imull %esi, %edi +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl (%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: movl %eax, %esi -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: addl %edi, %edx +; X32-NEXT: movl 60(%ebx), %ebx +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: imull %eax, %ebx +; X32-NEXT: addl %edx, %ebx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: addl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %ecx +; X32-NEXT: movl %eax, %edi +; X32-NEXT: addl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload ; X32-NEXT: adcl $0, %ecx -; X32-NEXT: addl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl %edx, (%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl $0, %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl $0, %esi -; X32-NEXT: addl %eax, %edi +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %esi +; X32-NEXT: addl %edi, %eax +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: adcl %ecx, %esi -; X32-NEXT: setb %al -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movzbl %al, %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax +; X32-NEXT: setb %cl +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ebp +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %edx +; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl $0, %eax +; X32-NEXT: adcl %ebx, %edx +; X32-NEXT: movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; 
X32-NEXT: imull %ebp, %edi +; X32-NEXT: movl %ebp, %eax +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: mull %ecx +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %edi, %edx +; X32-NEXT: imull {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: imull %ebx, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: mull %edi +; X32-NEXT: addl %ecx, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: imull %edi, %ecx +; X32-NEXT: addl %edx, %ecx +; X32-NEXT: addl %esi, %eax ; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: leal {{[0-9]+}}(%esp), %eax -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl {{[0-9]+}}(%esp) # 4-byte Folded Reload -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx -; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx -; X32-NEXT: movl {{[0-9]+}}(%esp), %edx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx -; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax -; X32-NEXT: addl %edi, %ebx -; X32-NEXT: adcl %esi, %ecx -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %edi, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl %ebx, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ecx +; X32-NEXT: addl %esi, %ecx +; X32-NEXT: adcl $0, %edi +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %ecx, %ebp +; X32-NEXT: adcl %edi, %esi +; X32-NEXT: setb %cl +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax # 4-byte Reload +; X32-NEXT: mull %ebx +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: movzbl %cl, %ecx +; X32-NEXT: adcl %ecx, %ebx +; X32-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: addl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl (%esp), %edi # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl 
{{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload +; X32-NEXT: movl (%esp), %edx # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload -; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload +; X32-NEXT: movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload -; X32-NEXT: addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload -; X32-NEXT: movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill ; X32-NEXT: adcl {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload -; X32-NEXT: movl %edi, (%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload +; X32-NEXT: movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill +; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: movl %edx, (%esp) # 4-byte Spill ; X32-NEXT: adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %edx # 4-byte Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload ; X32-NEXT: adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi -; X32-NEXT: movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill -; X32-NEXT: movl {{[0-9]+}}(%esp), %edi -; X32-NEXT: movl 16(%ebp), %esi -; X32-NEXT: movl %edi, 4(%esi) -; X32-NEXT: movl 16(%ebp), %edi -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: movl %esi, (%edi) -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: movl %esi, 8(%edi) -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: movl %esi, 12(%edi) -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: movl %esi, 16(%edi) -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: movl %esi, 20(%edi) -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: movl %esi, 24(%edi) -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte 
Reload -; X32-NEXT: movl %esi, 28(%edi) -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: movl %esi, 32(%edi) -; X32-NEXT: movl %ebx, 36(%edi) -; X32-NEXT: movl (%esp), %esi # 4-byte Reload -; X32-NEXT: movl %esi, 40(%edi) -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: movl %esi, 44(%edi) -; X32-NEXT: movl {{[0-9]+}}(%esp), %esi # 4-byte Reload -; X32-NEXT: movl %esi, 48(%edi) -; X32-NEXT: movl %ecx, 52(%edi) -; X32-NEXT: movl %edx, 56(%edi) -; X32-NEXT: movl %eax, 60(%edi) -; X32-NEXT: leal -12(%ebp), %esp +; X32-NEXT: adcl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, (%ecx) +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, 4(%ecx) +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, 8(%ecx) +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, 12(%ecx) +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, 16(%ecx) +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, 20(%ecx) +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, 24(%ecx) +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, 28(%ecx) +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, 32(%ecx) +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, 36(%ecx) +; X32-NEXT: movl (%esp), %edi # 4-byte Reload +; X32-NEXT: movl %edi, 40(%ecx) +; X32-NEXT: movl %esi, 44(%ecx) +; X32-NEXT: movl %edx, 48(%ecx) +; X32-NEXT: movl %ebp, 52(%ecx) +; X32-NEXT: movl %eax, 56(%ecx) +; X32-NEXT: movl %ebx, 60(%ecx) +; X32-NEXT: addl $244, %esp ; X32-NEXT: popl %esi ; X32-NEXT: popl %edi ; X32-NEXT: popl %ebx diff --git a/llvm/test/CodeGen/X86/mul128.ll b/llvm/test/CodeGen/X86/mul128.ll index 2b3a1350..70a6173 100644 --- a/llvm/test/CodeGen/X86/mul128.ll +++ b/llvm/test/CodeGen/X86/mul128.ll @@ -1,5 +1,6 @@ ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py ; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefix=X64 +; RUN: llc < %s -mtriple=i386-unknown | FileCheck %s --check-prefix=X86 define i128 @foo(i128 %t, i128 %u) { ; X64-LABEL: foo: @@ -12,6 +13,84 @@ define i128 @foo(i128 %t, i128 %u) { ; X64-NEXT: imulq %r8, %rsi ; X64-NEXT: addq %rsi, %rdx ; X64-NEXT: retq +; +; X86-LABEL: foo: +; X86: # BB#0: +; X86-NEXT: pushl %ebp +; X86-NEXT: .cfi_def_cfa_offset 8 +; X86-NEXT: pushl %ebx +; X86-NEXT: .cfi_def_cfa_offset 12 +; X86-NEXT: pushl %edi +; X86-NEXT: .cfi_def_cfa_offset 16 +; X86-NEXT: pushl %esi +; X86-NEXT: .cfi_def_cfa_offset 20 +; X86-NEXT: subl $8, %esp +; X86-NEXT: .cfi_def_cfa_offset 28 +; X86-NEXT: .cfi_offset %esi, -20 +; X86-NEXT: .cfi_offset %edi, -16 +; X86-NEXT: .cfi_offset %ebx, -12 +; X86-NEXT: .cfi_offset %ebp, -8 +; X86-NEXT: movl {{[0-9]+}}(%esp), %edx +; X86-NEXT: movl {{[0-9]+}}(%esp), %edi +; X86-NEXT: movl {{[0-9]+}}(%esp), %esi +; X86-NEXT: imull %edx, %esi +; X86-NEXT: movl %edi, %eax +; X86-NEXT: mull %edx +; X86-NEXT: movl %eax, %ebx +; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X86-NEXT: imull %edi, %ecx +; X86-NEXT: addl %edx, %ecx +; X86-NEXT: movl {{[0-9]+}}(%esp), %eax +; X86-NEXT: addl %esi, %ecx +; X86-NEXT: movl %eax, %esi +; X86-NEXT: imull {{[0-9]+}}(%esp), %esi +; X86-NEXT: movl {{[0-9]+}}(%esp), %ebp +; X86-NEXT: mull %ebp +; X86-NEXT: addl %esi, %edx 
+; X86-NEXT: movl {{[0-9]+}}(%esp), %edi +; X86-NEXT: imull %ebp, %edi +; X86-NEXT: addl %edx, %edi +; X86-NEXT: addl %ebx, %eax +; X86-NEXT: movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill +; X86-NEXT: adcl %ecx, %edi +; X86-NEXT: movl %ebp, %eax +; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X86-NEXT: mull %ecx +; X86-NEXT: movl %edx, %ebx +; X86-NEXT: movl %eax, (%esp) # 4-byte Spill +; X86-NEXT: movl {{[0-9]+}}(%esp), %eax +; X86-NEXT: mull %ecx +; X86-NEXT: movl %edx, %esi +; X86-NEXT: movl %eax, %ecx +; X86-NEXT: addl %ebx, %ecx +; X86-NEXT: adcl $0, %esi +; X86-NEXT: movl %ebp, %eax +; X86-NEXT: mull {{[0-9]+}}(%esp) +; X86-NEXT: movl %edx, %ebx +; X86-NEXT: movl %eax, %ebp +; X86-NEXT: addl %ecx, %ebp +; X86-NEXT: adcl %esi, %ebx +; X86-NEXT: setb %cl +; X86-NEXT: movl {{[0-9]+}}(%esp), %eax +; X86-NEXT: mull {{[0-9]+}}(%esp) +; X86-NEXT: addl %ebx, %eax +; X86-NEXT: movzbl %cl, %ecx +; X86-NEXT: adcl %ecx, %edx +; X86-NEXT: addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload +; X86-NEXT: adcl %edi, %edx +; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X86-NEXT: movl (%esp), %esi # 4-byte Reload +; X86-NEXT: movl %esi, (%ecx) +; X86-NEXT: movl %ebp, 4(%ecx) +; X86-NEXT: movl %eax, 8(%ecx) +; X86-NEXT: movl %edx, 12(%ecx) +; X86-NEXT: movl %ecx, %eax +; X86-NEXT: addl $8, %esp +; X86-NEXT: popl %esi +; X86-NEXT: popl %edi +; X86-NEXT: popl %ebx +; X86-NEXT: popl %ebp +; X86-NEXT: retl $4 %k = mul i128 %t, %u ret i128 %k } diff --git a/llvm/test/CodeGen/X86/overflow.ll b/llvm/test/CodeGen/X86/overflow.ll index 00dadc4..1c68af2 100644 --- a/llvm/test/CodeGen/X86/overflow.ll +++ b/llvm/test/CodeGen/X86/overflow.ll @@ -6,39 +6,60 @@ define i128 @mulhioverflow(i64 %a, i64 %b, i64 %c) nounwind { ; X32-LABEL: mulhioverflow: ; X32: # BB#0: ; X32-NEXT: pushl %ebp -; X32-NEXT: movl %esp, %ebp +; X32-NEXT: pushl %ebx ; X32-NEXT: pushl %edi ; X32-NEXT: pushl %esi -; X32-NEXT: andl $-8, %esp -; X32-NEXT: subl $16, %esp -; X32-NEXT: movl 8(%ebp), %esi -; X32-NEXT: movl 28(%ebp), %edi -; X32-NEXT: movl %esp, %eax -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl 24(%ebp) -; X32-NEXT: pushl 20(%ebp) -; X32-NEXT: pushl $0 -; X32-NEXT: pushl $0 -; X32-NEXT: pushl 16(%ebp) -; X32-NEXT: pushl 12(%ebp) -; X32-NEXT: pushl %eax -; X32-NEXT: calll __multi3 -; X32-NEXT: addl $32, %esp +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %esi +; X32-NEXT: movl {{[0-9]+}}(%esp), %ebp +; X32-NEXT: movl {{[0-9]+}}(%esp), %edi +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: movl %esi, %eax +; X32-NEXT: mull %ebp +; X32-NEXT: movl %edx, %ebp +; X32-NEXT: movl %eax, %esi +; X32-NEXT: addl %ebx, %esi +; X32-NEXT: adcl $0, %ebp +; X32-NEXT: movl %ecx, %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %ebx +; X32-NEXT: addl %esi, %eax +; X32-NEXT: adcl %ebp, %ebx +; X32-NEXT: setb %al +; X32-NEXT: movzbl %al, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: mull %edi +; X32-NEXT: movl %edx, %esi +; X32-NEXT: movl %eax, %ebp +; X32-NEXT: addl %ebx, %ebp +; X32-NEXT: adcl %ecx, %esi +; X32-NEXT: xorl %ecx, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: mull %ecx +; X32-NEXT: movl %edx, %edi +; X32-NEXT: movl %eax, %ebx ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax -; X32-NEXT: andl $1, %edi -; X32-NEXT: addl {{[0-9]+}}(%esp), %edi -; X32-NEXT: adcl $0, %eax +; X32-NEXT: mull %ecx +; X32-NEXT: addl %ebx, %eax +; X32-NEXT: adcl %edi, %edx +; X32-NEXT: addl %ebp, %eax +; X32-NEXT: adcl %esi, %edx +; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx 
+; X32-NEXT: andl $1, %ecx +; X32-NEXT: addl %eax, %ecx +; X32-NEXT: movl {{[0-9]+}}(%esp), %eax +; X32-NEXT: movl %ecx, (%eax) +; X32-NEXT: adcl $0, %edx +; X32-NEXT: movl %edx, 4(%eax) ; X32-NEXT: setb %cl ; X32-NEXT: movzbl %cl, %ecx -; X32-NEXT: movl %edi, (%esi) -; X32-NEXT: movl %eax, 4(%esi) -; X32-NEXT: movl %ecx, 8(%esi) -; X32-NEXT: movl $0, 12(%esi) -; X32-NEXT: movl %esi, %eax -; X32-NEXT: leal -8(%ebp), %esp +; X32-NEXT: movl %ecx, 8(%eax) +; X32-NEXT: movl $0, 12(%eax) ; X32-NEXT: popl %esi ; X32-NEXT: popl %edi +; X32-NEXT: popl %ebx ; X32-NEXT: popl %ebp ; X32-NEXT: retl $4 ; -- 2.7.4
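A quick way to observe the effect of this change (a minimal sketch, not part of the patch itself): feed a bare 128-bit multiply to llc with the same 32-bit triple the new mul128.ll RUN line uses and check that no "calll __multi3" appears in the output; with the MUL_I128 libcall name cleared, the legalizer expands the multiply into mull/imull/adcl sequences as shown in the updated CHECK lines above. The file and function names below are placeholders for illustration only.

; repro.ll -- hypothetical reproducer; run with:
;   llc < repro.ll -mtriple=i386-unknown
; Expect inline expansion of the multiply (mull/imull) and no call
; to __multi3 in the emitted assembly.
define i128 @mul_sketch(i128 %t, i128 %u) {
  %k = mul i128 %t, %u
  ret i128 %k
}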