/* Copyright (C) 1991-2018 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
/* Pull in the memcmp prototype whenever <string.h> is available (always
   true inside glibc).  */
#if defined HAVE_STRING_H || defined _LIBC
# include <string.h>
#endif

#undef memcmp

/* Name under which the function is compiled; glibc builds may redefine
   MEMCMP before including this file.  */
#ifndef MEMCMP
# define MEMCMP memcmp
#endif

#ifdef _LIBC

# include <memcopy.h>
# include <endian.h>

# if __BYTE_ORDER == __BIG_ENDIAN
#  define WORDS_BIGENDIAN
# endif

#else /* Not in the GNU C library.  */

# include <sys/types.h>

/* Type to use for aligned memory operations.
   This should normally be the biggest type supported by a single load
   and store.  Must be an unsigned type.  */
# define op_t unsigned long int
# define OPSIZ (sizeof(op_t))

/* Threshold value for when to enter the unrolled loops.  */
# define OP_T_THRES 16

/* Type to use for unaligned operations.  */
typedef unsigned char byte;

#endif /* In the GNU C library.  */

/* Provide the appropriate builtins to shift two registers based on
   the alignment of a pointer held in a third register, and to reverse
   the bytes in a word.  */
#define DBLALIGN __insn_dblalign
#define REVBYTES __insn_revbytes

/* Word-level "which is bigger" once inequality is known.  On big-endian
   the natural word compare matches byte order; on little-endian the
   bytes must be reversed first.  */
#ifdef WORDS_BIGENDIAN
# define CMP_LT_OR_GT(a, b) ((a) > (b) ? 1 : -1)
#else
# define CMP_LT_OR_GT(a, b) (REVBYTES(a) > REVBYTES(b) ? 1 : -1)
#endif
/* BE VERY CAREFUL IF YOU CHANGE THIS CODE!  */

/* The strategy of this memcmp is:

   1. Compare bytes until one of the block pointers is aligned.

   2. Compare using memcmp_common_alignment or
      memcmp_not_common_alignment, regarding the alignment of the other
      block after the initial byte operations.  The maximum number of
      full words (of type op_t) are compared in this way.

   3. Compare the few remaining bytes.  */
84 static int memcmp_common_alignment (long, long, size_t) __THROW
;
86 /* memcmp_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN `op_t'
87 objects (not LEN bytes!). Both SRCP1 and SRCP2 should be aligned for
88 memory operations on `op_t's. */
90 memcmp_common_alignment (long int srcp1
, long int srcp2
, size_t len
)
97 default: /* Avoid warning about uninitialized local variables. */
99 a0
= ((op_t
*) srcp1
)[0];
100 b0
= ((op_t
*) srcp2
)[0];
106 a1
= ((op_t
*) srcp1
)[0];
107 b1
= ((op_t
*) srcp2
)[0];
113 if (OP_T_THRES
<= 3 * OPSIZ
&& len
== 0)
115 a0
= ((op_t
*) srcp1
)[0];
116 b0
= ((op_t
*) srcp2
)[0];
121 a1
= ((op_t
*) srcp1
)[0];
122 b1
= ((op_t
*) srcp2
)[0];
126 if (OP_T_THRES
<= 3 * OPSIZ
&& len
== 0)
133 a0
= ((op_t
*) srcp1
)[0];
134 b0
= ((op_t
*) srcp2
)[0];
137 if (__glibc_likely (a1
!= b1
))
138 return CMP_LT_OR_GT (a1
, b1
);
141 a1
= ((op_t
*) srcp1
)[0];
142 b1
= ((op_t
*) srcp2
)[0];
145 if (__glibc_likely (a0
!= b0
))
146 return CMP_LT_OR_GT (a0
, b0
);
149 a0
= ((op_t
*) srcp1
)[0];
150 b0
= ((op_t
*) srcp2
)[0];
153 if (__glibc_likely (a1
!= b1
))
154 return CMP_LT_OR_GT (a1
, b1
);
157 a1
= ((op_t
*) srcp1
)[0];
158 b1
= ((op_t
*) srcp2
)[0];
161 if (__glibc_likely (a0
!= b0
))
162 return CMP_LT_OR_GT (a0
, b0
);
168 /* This is the right position for do0. Please don't move
171 if (__glibc_likely (a1
!= b1
))
172 return CMP_LT_OR_GT (a1
, b1
);
176 static int memcmp_not_common_alignment (long, long, size_t) __THROW
;
178 /* memcmp_not_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN
179 `op_t' objects (not LEN bytes!). SRCP2 should be aligned for memory
180 operations on `op_t', but SRCP1 *should be unaligned*. */
182 memcmp_not_common_alignment (long int srcp1
, long int srcp2
, size_t len
)
189 /* Calculate how to shift a word read at the memory operation
190 aligned srcp1 to make it aligned for comparison. */
192 srcp1i
= (void *) srcp1
;
194 /* Make SRCP1 aligned by rounding it down to the beginning of the `op_t'
195 it points in the middle of. */
200 default: /* Avoid warning about uninitialized local variables. */
202 a1
= ((op_t
*) srcp1
)[0];
203 a2
= ((op_t
*) srcp1
)[1];
204 b2
= ((op_t
*) srcp2
)[0];
210 a0
= ((op_t
*) srcp1
)[0];
211 a1
= ((op_t
*) srcp1
)[1];
212 b1
= ((op_t
*) srcp2
)[0];
218 if (OP_T_THRES
<= 3 * OPSIZ
&& len
== 0)
220 a3
= ((op_t
*) srcp1
)[0];
221 a0
= ((op_t
*) srcp1
)[1];
222 b0
= ((op_t
*) srcp2
)[0];
227 a2
= ((op_t
*) srcp1
)[0];
228 a3
= ((op_t
*) srcp1
)[1];
229 b3
= ((op_t
*) srcp2
)[0];
233 if (OP_T_THRES
<= 3 * OPSIZ
&& len
== 0)
240 a0
= ((op_t
*) srcp1
)[0];
241 b0
= ((op_t
*) srcp2
)[0];
242 x
= DBLALIGN (a2
, a3
, srcp1i
);
245 if (__glibc_likely (x
!= b3
))
246 return CMP_LT_OR_GT (x
, b3
);
249 a1
= ((op_t
*) srcp1
)[0];
250 b1
= ((op_t
*) srcp2
)[0];
251 x
= DBLALIGN (a3
, a0
, srcp1i
);
254 if (__glibc_likely (x
!= b0
))
255 return CMP_LT_OR_GT (x
, b0
);
258 a2
= ((op_t
*) srcp1
)[0];
259 b2
= ((op_t
*) srcp2
)[0];
260 x
= DBLALIGN (a0
, a1
, srcp1i
);
263 if (__glibc_likely (x
!= b1
))
264 return CMP_LT_OR_GT (x
, b1
);
267 a3
= ((op_t
*) srcp1
)[0];
268 b3
= ((op_t
*) srcp2
)[0];
269 x
= DBLALIGN (a1
, a2
, srcp1i
);
272 if (__glibc_likely (x
!= b2
))
273 return CMP_LT_OR_GT (x
, b2
);
279 /* This is the right position for do0. Please don't move
282 x
= DBLALIGN (a2
, a3
, srcp1i
);
283 if (__glibc_likely (x
!= b3
))
284 return CMP_LT_OR_GT (x
, b3
);
289 MEMCMP (const void *s1
, const void *s2
, size_t len
)
293 long int srcp1
= (long int) s1
;
294 long int srcp2
= (long int) s2
;
297 if (len
>= OP_T_THRES
)
299 /* There are at least some bytes to compare. No need to test
300 for LEN == 0 in this alignment loop. */
301 while (srcp2
% OPSIZ
!= 0)
303 a0
= ((byte
*) srcp1
)[0];
304 b0
= ((byte
*) srcp2
)[0];
308 if (__glibc_likely (res
!= 0))
313 /* SRCP2 is now aligned for memory operations on `op_t'.
314 SRCP1 alignment determines if we can do a simple,
315 aligned compare or need to shuffle bits. */
317 if (srcp1
% OPSIZ
== 0)
318 res
= memcmp_common_alignment (srcp1
, srcp2
, len
/ OPSIZ
);
320 res
= memcmp_not_common_alignment (srcp1
, srcp2
, len
/ OPSIZ
);
324 /* Number of bytes remaining in the interval [0..OPSIZ-1]. */
325 srcp1
+= len
& -OPSIZ
;
326 srcp2
+= len
& -OPSIZ
;
330 /* There are just a few bytes to compare. Use byte memory operations. */
333 a0
= ((byte
*) srcp1
)[0];
334 b0
= ((byte
*) srcp2
)[0];
338 if (__glibc_likely (res
!= 0))
345 libc_hidden_builtin_def(memcmp
)
348 weak_alias (memcmp
, bcmp
)