Add hppa*-*-hpux* to targets which do not support split DWARF
[official-gcc.git] / gcc / config / riscv / riscv_crypto.h
blob1bfe3d7c675635e897d6cfcb25d0548ca9f40218
1 /* RISC-V 'Scalar Crypto' Extension intrinsics include file.
2 Copyright (C) 2024 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 Under Section 7 of GPL version 3, you are granted additional
17 permissions described in the GCC Runtime Library Exception, version
18 3.1, as published by the Free Software Foundation.
20 You should have received a copy of the GNU General Public License and
21 a copy of the GCC Runtime Library Exception along with this program;
22 see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 <http://www.gnu.org/licenses/>. */
25 #ifndef __RISCV_SCALAR_CRYPTO_H
26 #define __RISCV_SCALAR_CRYPTO_H
28 #include <stdint.h>
30 #ifdef __cplusplus
31 extern "C" {
32 #endif
/* Zknd: NIST AES decryption instructions.  */
#if defined (__riscv_zknd)

#if __riscv_xlen == 32

#ifdef __OPTIMIZE__

/* AES final-round decryption step on one byte column.  BS selects the
   byte of RS2 and must be a compile-time constant in [0, 3].  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes32dsi (uint32_t rs1, uint32_t rs2, const int bs)
{
  return __builtin_riscv_aes32dsi (rs1, rs2, bs);
}

/* AES middle-round decryption step (includes inverse MixColumns).
   BS must be a compile-time constant in [0, 3].  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes32dsmi (uint32_t rs1, uint32_t rs2, const int bs)
{
  return __builtin_riscv_aes32dsmi (rs1, rs2, bs);
}

#else
/* Without optimization the inline wrappers cannot guarantee that BS
   folds to a constant, so expand directly to the builtins.  */
#define __riscv_aes32dsi(x, y, bs) __builtin_riscv_aes32dsi (x, y, bs)
#define __riscv_aes32dsmi(x, y, bs) __builtin_riscv_aes32dsmi (x, y, bs)
#endif

#endif

#if __riscv_xlen == 64

/* AES final-round decryption of two columns held in RS1/RS2.  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes64ds (uint64_t rs1, uint64_t rs2)
{
  return __builtin_riscv_aes64ds (rs1, rs2);
}

/* AES middle-round decryption (includes inverse MixColumns).  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes64dsm (uint64_t rs1, uint64_t rs2)
{
  return __builtin_riscv_aes64dsm (rs1, rs2);
}

/* AES inverse MixColumns on RS1 (used for decryption key schedule).  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes64im (uint64_t rs1)
{
  return __builtin_riscv_aes64im (rs1);
}

#endif
#endif // __riscv_zknd
/* AES key-schedule instructions, shared by Zknd and Zkne (RV64 only).  */
#if (defined (__riscv_zknd) || defined (__riscv_zkne)) && (__riscv_xlen == 64)

#ifdef __OPTIMIZE__

/* AES key schedule part 1.  RNUM is the round number and must be a
   compile-time constant.  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes64ks1i (uint64_t rs1, const int rnum)
{
  return __builtin_riscv_aes64ks1i (rs1, rnum);
}

#else
/* Without optimization, expand directly so RNUM stays a constant.  */
#define __riscv_aes64ks1i(x, rnum) __builtin_riscv_aes64ks1i (x, rnum)
#endif

/* AES key schedule part 2.  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes64ks2 (uint64_t rs1, uint64_t rs2)
{
  return __builtin_riscv_aes64ks2 (rs1, rs2);
}

#endif // __riscv_zknd || __riscv_zkne
/* Zkne: NIST AES encryption instructions.  */
#if defined (__riscv_zkne)

#if __riscv_xlen == 32

#ifdef __OPTIMIZE__

/* AES final-round encryption step on one byte column.  BS selects the
   byte of RS2 and must be a compile-time constant in [0, 3].  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes32esi (uint32_t rs1, uint32_t rs2, const int bs)
{
  return __builtin_riscv_aes32esi (rs1, rs2, bs);
}

/* AES middle-round encryption step (includes MixColumns).
   BS must be a compile-time constant in [0, 3].  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes32esmi (uint32_t rs1, uint32_t rs2, const int bs)
{
  return __builtin_riscv_aes32esmi (rs1, rs2, bs);
}

#else
/* Without optimization, expand directly so BS stays a constant.  */
#define __riscv_aes32esi(x, y, bs) __builtin_riscv_aes32esi (x, y, bs)
#define __riscv_aes32esmi(x, y, bs) __builtin_riscv_aes32esmi (x, y, bs)
#endif

#endif

#if __riscv_xlen == 64

/* AES final-round encryption of two columns held in RS1/RS2.  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes64es (uint64_t rs1, uint64_t rs2)
{
  return __builtin_riscv_aes64es (rs1, rs2);
}

/* AES middle-round encryption (includes MixColumns).  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_aes64esm (uint64_t rs1, uint64_t rs2)
{
  return __builtin_riscv_aes64esm (rs1, rs2);
}

#endif
#endif // __riscv_zkne
/* Zknh: NIST SHA-2 hash function instructions.  */
#if defined (__riscv_zknh)

/* SHA-256 sigma0 transform of RS1.  Available on RV32 and RV64.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha256sig0 (uint32_t rs1)
{
  return __builtin_riscv_sha256sig0 (rs1);
}

/* SHA-256 sigma1 transform of RS1.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha256sig1 (uint32_t rs1)
{
  return __builtin_riscv_sha256sig1 (rs1);
}

/* SHA-256 sum0 transform of RS1.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha256sum0 (uint32_t rs1)
{
  return __builtin_riscv_sha256sum0 (rs1);
}

/* SHA-256 sum1 transform of RS1.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha256sum1 (uint32_t rs1)
{
  return __builtin_riscv_sha256sum1 (rs1);
}

#if __riscv_xlen == 32

/* On RV32 the 64-bit SHA-512 transforms are split into high/low (or
   rotated) halves operating on 32-bit register pairs.  */

/* SHA-512 sigma0, high half.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha512sig0h (uint32_t rs1, uint32_t rs2)
{
  return __builtin_riscv_sha512sig0h (rs1, rs2);
}

/* SHA-512 sigma0, low half.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha512sig0l (uint32_t rs1, uint32_t rs2)
{
  return __builtin_riscv_sha512sig0l (rs1, rs2);
}

/* SHA-512 sigma1, high half.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha512sig1h (uint32_t rs1, uint32_t rs2)
{
  return __builtin_riscv_sha512sig1h (rs1, rs2);
}

/* SHA-512 sigma1, low half.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha512sig1l (uint32_t rs1, uint32_t rs2)
{
  return __builtin_riscv_sha512sig1l (rs1, rs2);
}

/* SHA-512 sum0.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha512sum0r (uint32_t rs1, uint32_t rs2)
{
  return __builtin_riscv_sha512sum0r (rs1, rs2);
}

/* SHA-512 sum1.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha512sum1r (uint32_t rs1, uint32_t rs2)
{
  return __builtin_riscv_sha512sum1r (rs1, rs2);
}

#endif

#if __riscv_xlen == 64

/* SHA-512 sigma0 transform of RS1.  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha512sig0 (uint64_t rs1)
{
  return __builtin_riscv_sha512sig0 (rs1);
}

/* SHA-512 sigma1 transform of RS1.  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha512sig1 (uint64_t rs1)
{
  return __builtin_riscv_sha512sig1 (rs1);
}

/* SHA-512 sum0 transform of RS1.  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha512sum0 (uint64_t rs1)
{
  return __builtin_riscv_sha512sum0 (rs1);
}

/* SHA-512 sum1 transform of RS1.  */
extern __inline uint64_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sha512sum1 (uint64_t rs1)
{
  return __builtin_riscv_sha512sum1 (rs1);
}

#endif
#endif // __riscv_zknh
/* Zksh: ShangMi SM3 hash function instructions.  */
#if defined (__riscv_zksh)

/* SM3 P0 transform of RS1.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sm3p0 (uint32_t rs1)
{
  return __builtin_riscv_sm3p0 (rs1);
}

/* SM3 P1 transform of RS1.  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sm3p1 (uint32_t rs1)
{
  return __builtin_riscv_sm3p1 (rs1);
}

#endif // __riscv_zksh
/* Zksed: ShangMi SM4 block cipher instructions.  */
#if defined (__riscv_zksed)

#ifdef __OPTIMIZE__

/* SM4 encrypt/decrypt round step.  BS selects the byte of RS2 and must
   be a compile-time constant in [0, 3].  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sm4ed (uint32_t rs1, uint32_t rs2, const int bs)
{
  return __builtin_riscv_sm4ed (rs1, rs2, bs);
}

/* SM4 key-schedule step.  BS must be a compile-time constant in
   [0, 3].  */
extern __inline uint32_t
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
__riscv_sm4ks (uint32_t rs1, uint32_t rs2, const int bs)
{
  return __builtin_riscv_sm4ks (rs1, rs2, bs);
}

#else
/* Without optimization, expand directly so BS stays a constant.  No
   trailing semicolon in the expansion: a stray ';' would break any use
   of these intrinsics inside a larger expression, e.g.
   'x ^= __riscv_sm4ed (a, b, 0) ^ c;'.  */
#define __riscv_sm4ed(x, y, bs) __builtin_riscv_sm4ed (x, y, bs)
#define __riscv_sm4ks(x, y, bs) __builtin_riscv_sm4ks (x, y, bs)
#endif

#endif // __riscv_zksed
306 #if defined (__cplusplus)
308 #endif // __cplusplus
309 #endif // __RISCV_SCALAR_CRYPTO_H