1 /* PLT trampolines. x86-64 version.
2 Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc.
3 This file is part of the GNU C Library.
5 The GNU C Library is free software; you can redistribute it and/or
6 modify it under the terms of the GNU Lesser General Public
7 License as published by the Free Software Foundation; either
8 version 2.1 of the License, or (at your option) any later version.
10 The GNU C Library is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 Lesser General Public License for more details.
15 You should have received a copy of the GNU Lesser General Public
16 License along with the GNU C Library; if not, write to the Free
17 Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307, USA.  */
22 #include <link-defines.h>
/* _dl_runtime_resolve — lazy-binding PLT trampoline.
   On entry, the PLT stub has pushed two 8-byte values: the link_map
   pointer and the relocation index (16 bytes total).  This trampoline
   preserves the argument-passing registers that _dl_fixup would clobber,
   calls _dl_fixup(link_map, reloc_index) to resolve the symbol, then
   restores the registers and tail-jumps to the resolved function so the
   original call's arguments arrive intact.
   NOTE(review): this excerpt is missing lines (the entry label, .align,
   cfi_startproc, and most of the register save/restore pairs) — the
   visible movq lines are only part of the full sequence; confirm against
   the complete file.  */
25 .globl _dl_runtime_resolve
26 .type _dl_runtime_resolve, @function
	/* CFA offset 72 = 56 bytes of register-save area allocated here
	   plus the 16 bytes the PLT pushed (see "PLT did 2 pushes" below).  */
31 cfi_adjust_cfa_offset(72) # Incorporate PLT
32 movq %rax,(%rsp) # Preserve registers otherwise clobbered.
	/* The PLT's two pushes sit above the 56-byte save area, at
	   56(%rsp) (link_map) and 64(%rsp) (reloc index).  Load them as
	   the two C arguments for _dl_fixup.  */
39 movq 64(%rsp), %rsi # Copy args pushed by PLT in register.
40 movq 56(%rsp), %rdi # %rdi: link_map, %rsi: reloc_index
41 call _dl_fixup # Call resolver.
	/* _dl_fixup returns the resolved target address in %rax; stash it
	   in %r11 (a call-clobbered scratch register the target may not
	   rely on) so %rax can be restored to its original value.  */
42 movq %rax, %r11 # Save return value
43 movq 48(%rsp), %r9 # Get register content back.
	/* Pop the save area AND the PLT's two pushes in one adjustment,
	   leaving the stack exactly as it was at the original call site.  */
50 addq $72, %rsp # Adjust stack(PLT did 2 pushes)
51 cfi_adjust_cfa_offset(-72)
	/* Tail-jump: the resolved function returns straight to the
	   original caller.  */
52 jmp *%r11 # Jump to function address.
54 .size _dl_runtime_resolve, .-_dl_runtime_resolve
/* _dl_runtime_profile — PLT trampoline used when LD_AUDIT / profiling is
   active.  Like _dl_runtime_resolve, but it captures the full register
   state into a La_x86_64_regs structure on the stack so audit modules
   (la_pltenter/la_pltexit) can inspect and modify the call.
   NOTE(review): this excerpt has many elided lines (entry label, pushes
   of %rbx and the PLT arguments, the full stack-layout diagram, label
   definitions and #else/#endif lines) — comments below describe only
   what is visible.  */
58 .globl _dl_runtime_profile
59 .type _dl_runtime_profile, @function
64 /* The La_x86_64_regs data structure pointed to by the
65 fourth paramater must be 16-byte aligned. This must
66 be explicitly enforced. We have the set up a dynamically
67 sized stack frame. %rbx points to the top half which
68 has a fixed size and preserves the original stack pointer. */
	/* Fixed-size upper frame: 32 bytes of local slots.  */
70 subq $32, %rsp # Allocate the local storage.
	/* CFA offset 48 = 32 bytes here + 16 pushed by the PLT;
	   presumably an earlier (elided) push of %rbx is accounted for by
	   the cfi_rel_offset below — confirm against the full file.  */
71 cfi_adjust_cfa_offset(48) # Incorporate PLT
73 cfi_rel_offset(%rbx, 0)
	/* Fixed-frame layout relative to %rbx (excerpt of the original
	   diagram; intervening slot descriptions were lost):  */
77 48(%rbx) return address
82 24(%rbx) La_x86_64_regs pointer
	/* %rbx anchors the fixed frame so %rsp is free to be realigned
	   and grown below for the dynamically sized part.  */
90 cfi_def_cfa_register(%rbx)
92 /* Actively align the La_x86_64_regs structure. */
	/* Round %rsp down to a 16-byte boundary as required above.  */
93 andq $0xfffffffffffffff0, %rsp
94 # ifdef HAVE_AVX_SUPPORT
95 /* sizeof(La_x86_64_regs). Need extra space for 8 SSE registers
96 to detect if any xmm0-xmm7 registers are changed by audit
	/* With AVX support compiled in, reserve LR_SIZE plus a shadow
	   copy of the 8 xmm argument registers (for change detection).  */
98 subq $(LR_SIZE + XMM_SIZE*8), %rsp
	/* (non-AVX path) */
100 subq $LR_SIZE, %rsp # sizeof(La_x86_64_regs)
104 /* Fill the La_x86_64_regs structure. */
	/* Save all six integer argument registers plus %rbp into the
	   structure at the LR_*_OFFSET slots from link-defines.h.  */
105 movq %rdx, LR_RDX_OFFSET(%rsp)
106 movq %r8, LR_R8_OFFSET(%rsp)
107 movq %r9, LR_R9_OFFSET(%rsp)
108 movq %rcx, LR_RCX_OFFSET(%rsp)
109 movq %rsi, LR_RSI_OFFSET(%rsp)
110 movq %rdi, LR_RDI_OFFSET(%rsp)
111 movq %rbp, LR_RBP_OFFSET(%rsp)
113 # ifdef HAVE_AVX_SUPPORT
	/* Indirect jump through a data slot that is lazily initialized
	   (see the CPUID code below) to either the SSE or the AVX
	   save/restore sequence.  */
114 jmp *L(save_and_restore_vector)(%rip)
	/* SSE variant: the shared body in dl-trampoline.h is included
	   with MOVXMM = movaps (stack is 16-byte aligned here).  */
117 L(save_and_restore_vector_sse):
120 # define MOVXMM movaps
121 # include "dl-trampoline.h"
123 # ifdef HAVE_AVX_SUPPORT
	/* AVX variant: same body, but vector moves use vmovdqa.  */
125 # define MOVXMM vmovdqa
128 L(save_and_restore_vector_avx):
129 # include "dl-trampoline.h"
133 .size _dl_runtime_profile, .-_dl_runtime_profile
/* One-time runtime dispatch: detect AVX with CPUID and record the
   address of the matching save/restore sequence in the writable data
   slot L(save_and_restore_vector), which _dl_runtime_profile jumps
   through.  NOTE(review): the entry label, the cpuid instruction, the
   conditional branch and the .quad initializer appear to be elided from
   this excerpt.  */
135 # ifdef HAVE_AVX_SUPPORT
	/* cpuid clobbers %rbx (callee-saved), so stash it in scratch %r11
	   around the (elided) cpuid.  */
137 mov %rbx,%r11 # Save rbx
140 mov %r11,%rbx # Restore rbx
	/* Default to the SSE sequence...  */
141 leaq L(save_and_restore_vector_sse)(%rip), %rax
	/* ...then test CPUID.1:ECX bit 28 (AVX feature flag);
	   a conditional jump (elided) skips the next leaq when clear.  */
142 andl $(1 << 28), %ecx # Check if AVX is available.
144 leaq L(save_and_restore_vector_avx)(%rip), %rax
	/* Publish the chosen handler for subsequent indirect jumps.  */
146 movq %rax,L(save_and_restore_vector)(%rip)
	/* The slot lives in writable, relocatable data since it holds a
	   code address computed at run time.  */
149 .section .data.rel.local,"aw",@progbits
151 L(save_and_restore_vector):