1 /* longjmp for PowerPC64.
2 Copyright (C) 1995-2012 Free Software Foundation, Inc.
3 This file is part of the GNU C Library.
5 The GNU C Library is free software; you can redistribute it and/or
6 modify it under the terms of the GNU Lesser General Public
7 License as published by the Free Software Foundation; either
8 version 2.1 of the License, or (at your option) any later version.
10 The GNU C Library is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 Lesser General Public License for more details.
15 You should have received a copy of the GNU Lesser General Public
16 License along with the GNU C Library; if not, see
17 <http://www.gnu.org/licenses/>. */
23 # include <novmxsetjmp.h>
25 # include <jmpbuf-offsets.h>
35 /* Inside ld.so we use the local alias to avoid runtime GOT relocations.  */
/* TOC entries: 64-bit addresses reachable via the TOC pointer (r2).
   NOTE(review): the #if/#elif lines selecting among these three entries
   (IS_IN_rtld vs. SHARED vs. static) are elided from this chunk — only one
   of them is assembled in any given build; confirm against the full file.  */
37 	.tc _rtld_local_ro[TC],_rtld_local_ro
39 	.tc _rtld_global_ro[TC],_rtld_global_ro
42 	.tc _dl_hwcap[TC],_dl_hwcap
/* __longjmp (jmp_buf env, int val)
   In:  r3 = pointer to the jmp_buf saved by setjmp; r4 = return value
        (r4 is handled in code elided from this chunk).
   Restores the callee-saved state (VMX v20-v31 when AltiVec is present,
   GPRs r1/r2/r14-r31, FPRs f14-f31, and — in elided lines — LR/CR/VRSAVE)
   and resumes execution after the matching setjmp.
   NOTE(review): this is a partial view; #else/#endif arms, the lvsl/lvx
   setup for the misaligned-VMX path, the LR/CR restores and the final
   mtlr/blr are not visible here.  */
48 ENTRY (BP_SYM (__longjmp))
/* Bounded-pointer check: verify r3 points at a full JB_SIZE buffer
   (no-op unless the bounded-pointers build is enabled).  */
50 CHECK_BOUNDS_BOTH_WIDE_LIT (r3, r8, r9, JB_SIZE)
/* Fetch the hwcap word so we only touch VMX registers when the CPU has
   AltiVec.  The three loads below are alternatives selected by elided
   preprocessor conditionals (ld.so-local / shared / static) — only one
   is assembled; each leaves the hwcap bits reachable through r5.  */
52 ld r5,.LC__dl_hwcap@toc(r2)
54 /* Load _rtld-global._dl_hwcap. */
55 ld r5,RTLD_GLOBAL_RO_DL_HWCAP_OFFSET(r5)
57 ld r5,0(r5) /* Load extern _dl_hwcap. */
/* Test PPC_FEATURE_HAS_ALTIVEC, which lives in the upper halfword
   (hence andis. with the constant shifted right 16).  Sets CR0.  */
59 andis. r5,r5,(PPC_FEATURE_HAS_ALTIVEC >> 16)
/* Load the saved VRSAVE word (bare "3" = r3, the jmp_buf pointer).  */
63 lwz r0,((JB_VRSAVE)*8)(3)
/* No AltiVec => skip the VMX restore entirely ("+" hints taken).  */
65 beq+ L(aligned_restore_vmx)
/* Restore one VMX register from a 16-byte-misaligned jmp_buf slot:
   advance the address pointer, load the high part into lovr (the lvx is
   in an elided line — NOTE(review): confirm), then vperm-merge the low
   part already in loadvr with it, using the permute control in shiftvr.
   Successive invocations alternate r5/r6 as address registers so the
   loads pipeline; each step's "next" register becomes the next step's
   "low part already loaded".  */
72 # define load_misaligned_vmx_lo_loaded(loadvr,lovr,shiftvr,loadgpr,addgpr) \
73 addi addgpr,addgpr,32; \
75 vperm loadvr,loadvr,lovr,shiftvr;
76 load_misaligned_vmx_lo_loaded(v21,v22,v0,r5,r6)
77 load_misaligned_vmx_lo_loaded(v22,v23,v0,r6,r5)
78 load_misaligned_vmx_lo_loaded(v23,v24,v0,r5,r6)
79 load_misaligned_vmx_lo_loaded(v24,v25,v0,r6,r5)
80 load_misaligned_vmx_lo_loaded(v25,v26,v0,r5,r6)
81 load_misaligned_vmx_lo_loaded(v26,v27,v0,r6,r5)
82 load_misaligned_vmx_lo_loaded(v27,v28,v0,r5,r6)
83 load_misaligned_vmx_lo_loaded(v28,v29,v0,r6,r5)
84 load_misaligned_vmx_lo_loaded(v29,v30,v0,r5,r6)
85 load_misaligned_vmx_lo_loaded(v30,v31,v0,r6,r5)
/* Aligned jmp_buf path (lvx loads, elided) joins the common code here.  */
89 L(aligned_restore_vmx):
/* Restore the saved stack pointer.  When pointer mangling (or the
   stack-pointer sanity check) is configured, the mangled value goes
   through r22 and is demangled with the guard in r25 before reaching r1;
   otherwise r1 is loaded directly.  The #else/#endif pairing for these
   two variants is in elided lines.  */
115 #if defined PTR_DEMANGLE || defined CHECK_SP
116 ld r22,(JB_GPR1*8)(r3)
118 ld r1,(JB_GPR1*8)(r3)
122 PTR_DEMANGLE3 (r22, r22, r25)
124 PTR_DEMANGLE3 (r1, r22, r25)
/* Restore the TOC pointer, then the callee-saved GPR/FPR pairs
   r14-r31 / f14-f31 interleaved to overlap the two load pipelines.  */
131 ld r2,(JB_GPR2*8)(r3)
133 ld r14,((JB_GPRS+0)*8)(r3)
134 lfd fp14,((JB_FPRS+0)*8)(r3)
135 #if defined SHARED && !defined IS_IN_rtld
/* ELFv1 ABI: the TOC save slot is at offset 40 in the caller's frame.  */
136 std r2,40(r1) /* Restore the caller's TOC save area. */
138 ld r15,((JB_GPRS+1)*8)(r3)
139 lfd fp15,((JB_FPRS+1)*8)(r3)
140 ld r16,((JB_GPRS+2)*8)(r3)
141 lfd fp16,((JB_FPRS+2)*8)(r3)
142 ld r17,((JB_GPRS+3)*8)(r3)
143 lfd fp17,((JB_FPRS+3)*8)(r3)
144 ld r18,((JB_GPRS+4)*8)(r3)
145 lfd fp18,((JB_FPRS+4)*8)(r3)
146 ld r19,((JB_GPRS+5)*8)(r3)
147 lfd fp19,((JB_FPRS+5)*8)(r3)
148 ld r20,((JB_GPRS+6)*8)(r3)
149 lfd fp20,((JB_FPRS+6)*8)(r3)
/* Demangle the saved return address in r0 with the guard in r25.
   NOTE(review): the load of r0 from the jmp_buf LR slot and the later
   mtlr are in elided lines — confirm against the full file.  */
151 PTR_DEMANGLE2 (r0, r25)
154 /* std r2,40(r1) Restore the TOC save area. */
155 ld r21,((JB_GPRS+7)*8)(r3)
156 lfd fp21,((JB_FPRS+7)*8)(r3)
157 ld r22,((JB_GPRS+8)*8)(r3)
158 lfd fp22,((JB_FPRS+8)*8)(r3)
160 ld r23,((JB_GPRS+9)*8)(r3)
161 lfd fp23,((JB_FPRS+9)*8)(r3)
162 ld r24,((JB_GPRS+10)*8)(r3)
163 lfd fp24,((JB_FPRS+10)*8)(r3)
/* r25 (the demangling guard) is overwritten only after its last use.  */
164 ld r25,((JB_GPRS+11)*8)(r3)
165 lfd fp25,((JB_FPRS+11)*8)(r3)
167 ld r26,((JB_GPRS+12)*8)(r3)
168 lfd fp26,((JB_FPRS+12)*8)(r3)
169 ld r27,((JB_GPRS+13)*8)(r3)
170 lfd fp27,((JB_FPRS+13)*8)(r3)
171 ld r28,((JB_GPRS+14)*8)(r3)
172 lfd fp28,((JB_FPRS+14)*8)(r3)
173 ld r29,((JB_GPRS+15)*8)(r3)
174 lfd fp29,((JB_FPRS+15)*8)(r3)
175 ld r30,((JB_GPRS+16)*8)(r3)
176 lfd fp30,((JB_FPRS+16)*8)(r3)
177 ld r31,((JB_GPRS+17)*8)(r3)
178 lfd fp31,((JB_FPRS+17)*8)(r3)
/* CR restore, r3 = val fixup, mtlr and blr are in elided lines.  */
181 END (BP_SYM (__longjmp))