1 /* Copyright (C) 1991-2017 Free Software Foundation, Inc.
2 This file is part of the GNU C Library.
4 The GNU C Library is free software; you can redistribute it and/or
5 modify it under the terms of the GNU Lesser General Public
6 License as published by the Free Software Foundation; either
7 version 2.1 of the License, or (at your option) any later version.
9 The GNU C Library is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 Lesser General Public License for more details.
14 You should have received a copy of the GNU Lesser General Public
15 License along with the GNU C Library; if not, see
16 <http://www.gnu.org/licenses/>. */
/* NOTE(review): this extraction drops many original lines -- the embedded
   old line numbers jump (22 -> 29 -> 37 ...), so the matching #endif for
   the inner byte-order test and any <string.h>/<endian.h> includes are not
   visible here.  Confirm against the full source before editing.  */
22 #if defined HAVE_STRING_H || defined _LIBC
/* Building inside glibc (or where <string.h> exists): export the real name.  */
29 # define MEMCMP memcmp
/* Derive WORDS_BIGENDIAN from the platform byte-order macros.  */
37 # if __BYTE_ORDER == __BIG_ENDIAN
38 # define WORDS_BIGENDIAN
41 #else /* Not in the GNU C library. */
43 # include <sys/types.h>
45 /* Type to use for aligned memory operations.
46 This should normally be the biggest type supported by a single load
47 and store. Must be an unsigned type. */
48 # define op_t unsigned long int
49 # define OPSIZ (sizeof(op_t))
51 /* Threshold value for when to enter the unrolled loops. */
52 # define OP_T_THRES 16
54 /* Type to use for unaligned operations. */
55 typedef unsigned char byte
;
57 #endif /* In the GNU C library. */
59 /* Provide the appropriate builtins to shift two registers based on
60 the alignment of a pointer held in a third register, and to reverse
61 the bytes in a word. */
/* NOTE(review): DBLALIGN/REVBYTES are defined twice below.  The original
   file presumably guards the first pair (tilegx spellings) and the second
   pair (tilepro spellings) with #ifdef __tilegx__ / #else / #endif; those
   guard lines are missing from this extraction -- confirm before editing.  */
63 #define DBLALIGN __insn_dblalign
64 #define REVBYTES __insn_revbytes
66 #define DBLALIGN __insn_dword_align
67 #define REVBYTES __insn_bytex
/* Ordering two unequal words: on big-endian, plain unsigned comparison of
   whole words agrees with bytewise (memcmp) order; otherwise byte-reverse
   both words first.  (The #else/#endif for this #ifdef are among the lines
   not visible in this extraction.)  */
70 #ifdef WORDS_BIGENDIAN
71 # define CMP_LT_OR_GT(a, b) ((a) > (b) ? 1 : -1)
73 # define CMP_LT_OR_GT(a, b) (REVBYTES(a) > REVBYTES(b) ? 1 : -1)
76 /* BE VERY CAREFUL IF YOU CHANGE THIS CODE! */
78 /* The strategy of this memcmp is:
80 1. Compare bytes until one of the block pointers is aligned.
82 2. Compare using memcmp_common_alignment or
83 memcmp_not_common_alignment, regarding the alignment of the other
84 block after the initial byte operations. The maximum number of
85 full words (of type op_t) are compared in this way.
87 3. Compare the few remaining bytes. */
89 static int memcmp_common_alignment (long, long, size_t) __THROW
;
91 /* memcmp_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN `op_t'
92 objects (not LEN bytes!). Both SRCP1 and SRCP2 should be aligned for
93 memory operations on `op_t's. */
/* NOTE(review): this definition is fragmentary.  The return type, opening
   brace, local declarations (a0, a1, b0, b1), the switch on the residue of
   len, the do1:/do2:/do3: jump labels, the srcp1/srcp2 pointer advances,
   the enclosing loop and the closing braces all fall on lines missing from
   this extraction (the embedded line numbers jump: 95 -> 102 -> 104 ...).
   What is visible matches the classic glibc software-pipelined word
   compare: load one word pair while testing the previously loaded pair,
   returning the byte-order-correct sign on the first mismatch.  Treat this
   block as read-only until the complete source is recovered.  */
95 memcmp_common_alignment (long int srcp1
, long int srcp2
, size_t len
)
102 default: /* Avoid warning about uninitialized local variables. */
/* Prime the pipeline: load the first word pair into (a0, b0).  */
104 a0
= ((op_t
*) srcp1
)[0];
105 b0
= ((op_t
*) srcp2
)[0];
/* Load the alternate pair (a1, b1); the pipeline rotates between them.  */
111 a1
= ((op_t
*) srcp1
)[0];
112 b1
= ((op_t
*) srcp2
)[0];
/* The OP_T_THRES <= 3 * OPSIZ part is a compile-time constant; when it
   is true this reduces to a len == 0 early exit from the unrolled loop.  */
118 if (OP_T_THRES
<= 3 * OPSIZ
&& len
== 0)
120 a0
= ((op_t
*) srcp1
)[0];
121 b0
= ((op_t
*) srcp2
)[0];
126 a1
= ((op_t
*) srcp1
)[0];
127 b1
= ((op_t
*) srcp2
)[0];
/* Same compile-time-guarded len == 0 exit as above.  */
131 if (OP_T_THRES
<= 3 * OPSIZ
&& len
== 0)
/* Main unrolled loop body (loop header itself is not visible here):
   each step loads the next word pair and tests the pair loaded on the
   previous step, so the load latency is hidden.  */
138 a0
= ((op_t
*) srcp1
)[0];
139 b0
= ((op_t
*) srcp2
)[0];
142 if (__glibc_likely (a1
!= b1
))
143 return CMP_LT_OR_GT (a1
, b1
);
146 a1
= ((op_t
*) srcp1
)[0];
147 b1
= ((op_t
*) srcp2
)[0];
150 if (__glibc_likely (a0
!= b0
))
151 return CMP_LT_OR_GT (a0
, b0
);
154 a0
= ((op_t
*) srcp1
)[0];
155 b0
= ((op_t
*) srcp2
)[0];
158 if (__glibc_likely (a1
!= b1
))
159 return CMP_LT_OR_GT (a1
, b1
);
162 a1
= ((op_t
*) srcp1
)[0];
163 b1
= ((op_t
*) srcp2
)[0];
166 if (__glibc_likely (a0
!= b0
))
167 return CMP_LT_OR_GT (a0
, b0
);
/* Final drain: test the last pair still in flight after the loop exits.
   (The do0 label and surrounding lines are missing from this view.)  */
173 /* This is the right position for do0. Please don't move
176 if (__glibc_likely (a1
!= b1
))
177 return CMP_LT_OR_GT (a1
, b1
);
181 static int memcmp_not_common_alignment (long, long, size_t) __THROW
;
183 /* memcmp_not_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN
184 `op_t' objects (not LEN bytes!). SRCP2 should be aligned for memory
185 operations on `op_t', but SRCP1 *should be unaligned*. */
/* NOTE(review): fragmentary, like memcmp_common_alignment above -- return
   type, braces, declarations (a0..a3, b0..b3, x, srcp1i), the switch, the
   do0:..do3: labels, pointer advances and the loop are on missing lines.
   The visible pattern: read aligned words from the rounded-down SRCP1,
   keep two consecutive raw words, merge them into one comparison word
   with DBLALIGN (which shifts a register pair according to the low bits
   of srcp1i), and compare against the aligned SRCP2 word.  Read-only
   until the complete source is available.  */
187 memcmp_not_common_alignment (long int srcp1
, long int srcp2
, size_t len
)
194 /* Calculate how to shift a word read at the memory operation
195 aligned srcp1 to make it aligned for comparison. */
/* Keep the original (unaligned) SRCP1 as a pointer: DBLALIGN extracts the
   shuffle amount from its low-order bits.  */
197 srcp1i
= (void *) srcp1
;
199 /* Make SRCP1 aligned by rounding it down to the beginning of the `op_t'
200 it points in the middle of. */
205 default: /* Avoid warning about uninitialized local variables. */
/* Switch entries prime the a1..a3 / b1..b3 registers so the rotating
   four-stage pipeline below starts full.  */
207 a1
= ((op_t
*) srcp1
)[0];
208 a2
= ((op_t
*) srcp1
)[1];
209 b2
= ((op_t
*) srcp2
)[0];
215 a0
= ((op_t
*) srcp1
)[0];
216 a1
= ((op_t
*) srcp1
)[1];
217 b1
= ((op_t
*) srcp2
)[0];
/* Compile-time-guarded len == 0 early exit (see common-alignment case).  */
223 if (OP_T_THRES
<= 3 * OPSIZ
&& len
== 0)
225 a3
= ((op_t
*) srcp1
)[0];
226 a0
= ((op_t
*) srcp1
)[1];
227 b0
= ((op_t
*) srcp2
)[0];
232 a2
= ((op_t
*) srcp1
)[0];
233 a3
= ((op_t
*) srcp1
)[1];
234 b3
= ((op_t
*) srcp2
)[0];
238 if (OP_T_THRES
<= 3 * OPSIZ
&& len
== 0)
/* Main unrolled loop body (loop header not visible): each step loads the
   next raw word, merges the previous two raw words with DBLALIGN into the
   value that corresponds to the unaligned SRCP1 stream, and compares it
   with the aligned SRCP2 word loaded two steps earlier.  */
245 a0
= ((op_t
*) srcp1
)[0];
246 b0
= ((op_t
*) srcp2
)[0];
247 x
= DBLALIGN (a2
, a3
, srcp1i
);
250 if (__glibc_likely (x
!= b3
))
251 return CMP_LT_OR_GT (x
, b3
);
254 a1
= ((op_t
*) srcp1
)[0];
255 b1
= ((op_t
*) srcp2
)[0];
256 x
= DBLALIGN (a3
, a0
, srcp1i
);
259 if (__glibc_likely (x
!= b0
))
260 return CMP_LT_OR_GT (x
, b0
);
263 a2
= ((op_t
*) srcp1
)[0];
264 b2
= ((op_t
*) srcp2
)[0];
265 x
= DBLALIGN (a0
, a1
, srcp1i
);
268 if (__glibc_likely (x
!= b1
))
269 return CMP_LT_OR_GT (x
, b1
);
272 a3
= ((op_t
*) srcp1
)[0];
273 b3
= ((op_t
*) srcp2
)[0];
274 x
= DBLALIGN (a1
, a2
, srcp1i
);
277 if (__glibc_likely (x
!= b2
))
278 return CMP_LT_OR_GT (x
, b2
);
/* Final drain after the loop: merge and test the last pair in flight.
   (The do0 label and nearby lines are missing from this view.)  */
284 /* This is the right position for do0. Please don't move
287 x
= DBLALIGN (a2
, a3
, srcp1i
);
288 if (__glibc_likely (x
!= b3
))
289 return CMP_LT_OR_GT (x
, b3
);
/* Public entry point (expands to memcmp inside glibc, see MEMCMP above).
   NOTE(review): fragmentary -- the return type, braces, declarations
   (a0, b0, res), the `res = a0 - b0' / `return res' / `return 0' lines,
   the loop decrements and the `else' of the alignment dispatch are on
   lines missing from this extraction.  Visible strategy: (1) byte-compare
   until SRCP2 is op_t-aligned, (2) dispatch on SRCP1's alignment to the
   word-at-a-time helper, (3) byte-compare the < OPSIZ tail.  */
294 MEMCMP (const void *s1
, const void *s2
, size_t len
)
/* Work on the addresses as integers so alignment arithmetic is direct.  */
298 long int srcp1
= (long int) s1
;
299 long int srcp2
= (long int) s2
;
/* Only take the word-compare path when there are enough bytes to be
   worth the alignment preamble.  */
302 if (len
>= OP_T_THRES
)
304 /* There are at least some bytes to compare. No need to test
305 for LEN == 0 in this alignment loop. */
306 while (srcp2
% OPSIZ
!= 0)
308 a0
= ((byte
*) srcp1
)[0];
309 b0
= ((byte
*) srcp2
)[0];
/* First differing byte decides the result.  */
313 if (__glibc_likely (res
!= 0))
318 /* SRCP2 is now aligned for memory operations on `op_t'.
319 SRCP1 alignment determines if we can do a simple,
320 aligned compare or need to shuffle bits. */
322 if (srcp1
% OPSIZ
== 0)
323 res
= memcmp_common_alignment (srcp1
, srcp2
, len
/ OPSIZ
);
/* (else branch -- SRCP1 misaligned relative to op_t)  */
325 res
= memcmp_not_common_alignment (srcp1
, srcp2
, len
/ OPSIZ
);
329 /* Number of bytes remaining in the interval [0..OPSIZ-1]. */
/* Skip past the full words the helpers consumed; len & -OPSIZ rounds
   len down to a multiple of OPSIZ.  */
330 srcp1
+= len
& -OPSIZ
;
331 srcp2
+= len
& -OPSIZ
;
335 /* There are just a few bytes to compare. Use byte memory operations. */
338 a0
= ((byte
*) srcp1
)[0];
339 b0
= ((byte
*) srcp2
)[0];
343 if (__glibc_likely (res
!= 0))
/* glibc symbol plumbing: internal hidden alias, and bcmp as a weak alias
   of memcmp.  */
350 libc_hidden_builtin_def(memcmp
)
353 weak_alias (memcmp
, bcmp
)