/* PLT trampolines.  x86-64 version.
   Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */
#include <config.h>
#include <sysdep.h>
#include <link-defines.h>

        .text
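        /* _dl_runtime_resolve is the lazy binding trampoline: the PLT
           pushes the link_map pointer and the relocation index and jumps
           here on the first call to a lazily bound function.  It preserves
           the argument registers, calls _dl_fixup to resolve the symbol,
           restores the registers, pops the two words the PLT pushed and
           tail-jumps to the resolved address.  */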
        .globl _dl_runtime_resolve
        .type _dl_runtime_resolve, @function
        cfi_adjust_cfa_offset(72)       # Incorporate PLT
        movq %rax, (%rsp)               # Preserve registers otherwise clobbered.
        movq 64(%rsp), %rsi             # Copy args pushed by PLT in register.
        movq 56(%rsp), %rdi             # %rdi: link_map, %rsi: reloc_index
        call _dl_fixup                  # Call resolver.
        movq %rax, %r11                 # Save return value.
        movq 48(%rsp), %r9              # Get register content back.
        addq $72, %rsp                  # Adjust stack (PLT did 2 pushes).
        cfi_adjust_cfa_offset(-72)
        jmp *%r11                       # Jump to function address.
        .size _dl_runtime_resolve, .-_dl_runtime_resolve
        .globl _dl_runtime_profile
        .type _dl_runtime_profile, @function
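        /* _dl_runtime_profile replaces _dl_runtime_resolve when auditing
           or profiling is enabled.  It captures the full register state in
           a La_x86_64_regs structure so audit modules can inspect (and
           possibly modify) the arguments of the intercepted call.  */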
        cfi_adjust_cfa_offset(16)       # Incorporate PLT
        /* The La_x86_64_regs data structure pointed to by the
           fourth parameter must be 16-byte aligned.  This must
           be explicitly enforced.  We have to set up a dynamically
           sized stack frame.  %rbx points to the top half which
           has a fixed size and preserves the original stack pointer.  */
        subq $32, %rsp                  # Allocate the local storage.
        cfi_adjust_cfa_offset(32)
        cfi_rel_offset(%rbx, 0)
        /* On the stack (offsets relative to %rbx):
                48(%rbx)        return address
                24(%rbx)        La_x86_64_regs pointer
        */
        cfi_def_cfa_register(%rbx)

        /* Actively align the La_x86_64_regs structure.  */
        andq $0xfffffffffffffff0, %rsp
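        /* 0xfffffffffffffff0 is -16, so the AND above rounds %rsp down to
           a 16-byte boundary, which the aligned movaps/vmovdqa stores into
           the La_x86_64_regs structure below rely on.  */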
# ifdef HAVE_AVX_SUPPORT
        /* sizeof(La_x86_64_regs).  Need extra space for 8 SSE registers
           to detect if any xmm0-xmm7 registers are changed by audit
           module.  */
        subq $(LR_SIZE + XMM_SIZE*8), %rsp
# else
        subq $LR_SIZE, %rsp             # sizeof(La_x86_64_regs)
# endif
        /* Fill the La_x86_64_regs structure.  */
        movq %rdx, LR_RDX_OFFSET(%rsp)
        movq %r8,  LR_R8_OFFSET(%rsp)
        movq %r9,  LR_R9_OFFSET(%rsp)
        movq %rcx, LR_RCX_OFFSET(%rsp)
        movq %rsi, LR_RSI_OFFSET(%rsp)
        movq %rdi, LR_RDI_OFFSET(%rsp)
        movq %rbp, LR_RBP_OFFSET(%rsp)
        movq %rax, LR_RSP_OFFSET(%rsp)
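        /* %rax holds the stack pointer value the resolved function will
           see (pointing at the return address), so the LR_RSP field
           records the caller's stack position.  */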
        /* We always store the XMM registers even if AVX is available.
           This is to provide backward binary compatibility for existing
           audit modules.  */
        movaps %xmm0, (LR_XMM_OFFSET)(%rsp)
        movaps %xmm1, (LR_XMM_OFFSET + XMM_SIZE)(%rsp)
        movaps %xmm2, (LR_XMM_OFFSET + XMM_SIZE*2)(%rsp)
        movaps %xmm3, (LR_XMM_OFFSET + XMM_SIZE*3)(%rsp)
        movaps %xmm4, (LR_XMM_OFFSET + XMM_SIZE*4)(%rsp)
        movaps %xmm5, (LR_XMM_OFFSET + XMM_SIZE*5)(%rsp)
        movaps %xmm6, (LR_XMM_OFFSET + XMM_SIZE*6)(%rsp)
        movaps %xmm7, (LR_XMM_OFFSET + XMM_SIZE*7)(%rsp)
# ifdef HAVE_AVX_SUPPORT
        cmpl $0, L(have_avx)(%rip)
        movq %rbx, %r11                 # Save rbx
        movq %r11, %rbx                 # Restore rbx
        testl $(1 << 28), %ecx
2:      movl %eax, L(have_avx)(%rip)
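        /* The check above executes CPUID leaf 1 and tests bit 28 of %ecx,
           the AVX feature flag.  The result is cached in L(have_avx) so
           the CPUID sequence runs only once per process.  */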
# include "dl-trampoline.h"
# include "dl-trampoline.h"
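/* dl-trampoline.h is included twice: the first copy is assembled with AVX
   register save/restore enabled, the second with plain SSE only, and the
   run-time AVX check above selects which variant gets used.  */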
        .size _dl_runtime_profile, .-_dl_runtime_profile

        .globl _dl_x86_64_save_sse
        .type _dl_x86_64_save_sse, @function
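        /* _dl_x86_64_save_sse stores the xmm (or, when AVX is available,
           ymm) argument registers into the thread-local save area
           RTLD_SAVESPACE_SSE, so the dynamic linker can use these
           registers itself without clobbering the application's values.  */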
# ifdef HAVE_AVX_SUPPORT
        cmpl $0, L(have_avx)(%rip)
        movq %rbx, %r11                 # Save rbx
        movq %r11, %rbx                 # Restore rbx
        testl $(1 << 28), %ecx
2:      movl %eax, L(have_avx)(%rip)
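        /* Same AVX detection as in _dl_runtime_profile: CPUID leaf 1,
           %ecx bit 28, cached in L(have_avx).  */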
        vmovdqa %ymm0, %fs:RTLD_SAVESPACE_SSE+0*YMM_SIZE
        vmovdqa %ymm1, %fs:RTLD_SAVESPACE_SSE+1*YMM_SIZE
        vmovdqa %ymm2, %fs:RTLD_SAVESPACE_SSE+2*YMM_SIZE
        vmovdqa %ymm3, %fs:RTLD_SAVESPACE_SSE+3*YMM_SIZE
        vmovdqa %ymm4, %fs:RTLD_SAVESPACE_SSE+4*YMM_SIZE
        vmovdqa %ymm5, %fs:RTLD_SAVESPACE_SSE+5*YMM_SIZE
        vmovdqa %ymm6, %fs:RTLD_SAVESPACE_SSE+6*YMM_SIZE
        vmovdqa %ymm7, %fs:RTLD_SAVESPACE_SSE+7*YMM_SIZE
        movdqa %xmm0, %fs:RTLD_SAVESPACE_SSE+0*XMM_SIZE
        movdqa %xmm1, %fs:RTLD_SAVESPACE_SSE+1*XMM_SIZE
        movdqa %xmm2, %fs:RTLD_SAVESPACE_SSE+2*XMM_SIZE
        movdqa %xmm3, %fs:RTLD_SAVESPACE_SSE+3*XMM_SIZE
        movdqa %xmm4, %fs:RTLD_SAVESPACE_SSE+4*XMM_SIZE
        movdqa %xmm5, %fs:RTLD_SAVESPACE_SSE+5*XMM_SIZE
        movdqa %xmm6, %fs:RTLD_SAVESPACE_SSE+6*XMM_SIZE
        movdqa %xmm7, %fs:RTLD_SAVESPACE_SSE+7*XMM_SIZE
        .size _dl_x86_64_save_sse, .-_dl_x86_64_save_sse

        .globl _dl_x86_64_restore_sse
        .type _dl_x86_64_restore_sse, @function
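        /* _dl_x86_64_restore_sse reloads the xmm/ymm registers from the
           RTLD_SAVESPACE_SSE area filled by _dl_x86_64_save_sse.  */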
_dl_x86_64_restore_sse:
# ifdef HAVE_AVX_SUPPORT
        cmpl $0, L(have_avx)(%rip)
        vmovdqa %fs:RTLD_SAVESPACE_SSE+0*YMM_SIZE, %ymm0
        vmovdqa %fs:RTLD_SAVESPACE_SSE+1*YMM_SIZE, %ymm1
        vmovdqa %fs:RTLD_SAVESPACE_SSE+2*YMM_SIZE, %ymm2
        vmovdqa %fs:RTLD_SAVESPACE_SSE+3*YMM_SIZE, %ymm3
        vmovdqa %fs:RTLD_SAVESPACE_SSE+4*YMM_SIZE, %ymm4
        vmovdqa %fs:RTLD_SAVESPACE_SSE+5*YMM_SIZE, %ymm5
        vmovdqa %fs:RTLD_SAVESPACE_SSE+6*YMM_SIZE, %ymm6
        vmovdqa %fs:RTLD_SAVESPACE_SSE+7*YMM_SIZE, %ymm7
        movdqa %fs:RTLD_SAVESPACE_SSE+0*XMM_SIZE, %xmm0
        movdqa %fs:RTLD_SAVESPACE_SSE+1*XMM_SIZE, %xmm1
        movdqa %fs:RTLD_SAVESPACE_SSE+2*XMM_SIZE, %xmm2
        movdqa %fs:RTLD_SAVESPACE_SSE+3*XMM_SIZE, %xmm3
        movdqa %fs:RTLD_SAVESPACE_SSE+4*XMM_SIZE, %xmm4
        movdqa %fs:RTLD_SAVESPACE_SSE+5*XMM_SIZE, %xmm5
        movdqa %fs:RTLD_SAVESPACE_SSE+6*XMM_SIZE, %xmm6
        movdqa %fs:RTLD_SAVESPACE_SSE+7*XMM_SIZE, %xmm7
        .size _dl_x86_64_restore_sse, .-_dl_x86_64_restore_sse