glibc source file: sysdeps/tile/memcmp.c (TILE-Gx optimized memcmp).
1 /* Copyright (C) 1991-2018 Free Software Foundation, Inc.
2 This file is part of the GNU C Library.
4 The GNU C Library is free software; you can redistribute it and/or
5 modify it under the terms of the GNU Lesser General Public
6 License as published by the Free Software Foundation; either
7 version 2.1 of the License, or (at your option) any later version.
9 The GNU C Library is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 Lesser General Public License for more details.
14 You should have received a copy of the GNU Lesser General Public
15 License along with the GNU C Library; if not, see
16 <http://www.gnu.org/licenses/>. */
18 #ifdef HAVE_CONFIG_H
19 # include "config.h"
20 #endif
22 #if defined HAVE_STRING_H || defined _LIBC
23 # include <string.h>
24 #endif
26 #undef memcmp
28 #ifndef MEMCMP
29 # define MEMCMP memcmp
30 #endif
32 #ifdef _LIBC
34 # include <memcopy.h>
35 # include <endian.h>
37 # if __BYTE_ORDER == __BIG_ENDIAN
38 # define WORDS_BIGENDIAN
39 # endif
41 #else /* Not in the GNU C library. */
43 # include <sys/types.h>
45 /* Type to use for aligned memory operations.
46 This should normally be the biggest type supported by a single load
47 and store. Must be an unsigned type. */
48 # define op_t unsigned long int
49 # define OPSIZ (sizeof(op_t))
51 /* Threshold value for when to enter the unrolled loops. */
52 # define OP_T_THRES 16
54 /* Type to use for unaligned operations. */
55 typedef unsigned char byte;
57 #endif /* In the GNU C library. */
59 /* Provide the appropriate builtins to shift two registers based on
60 the alignment of a pointer held in a third register, and to reverse
61 the bytes in a word. */
62 #define DBLALIGN __insn_dblalign
63 #define REVBYTES __insn_revbytes
65 #ifdef WORDS_BIGENDIAN
66 # define CMP_LT_OR_GT(a, b) ((a) > (b) ? 1 : -1)
67 #else
68 # define CMP_LT_OR_GT(a, b) (REVBYTES(a) > REVBYTES(b) ? 1 : -1)
69 #endif
71 /* BE VERY CAREFUL IF YOU CHANGE THIS CODE! */
73 /* The strategy of this memcmp is:
75 1. Compare bytes until one of the block pointers is aligned.
77 2. Compare using memcmp_common_alignment or
78 memcmp_not_common_alignment, regarding the alignment of the other
79 block after the initial byte operations. The maximum number of
80 full words (of type op_t) are compared in this way.
82 3. Compare the few remaining bytes. */
84 static int memcmp_common_alignment (long, long, size_t) __THROW;
86 /* memcmp_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN `op_t'
87 objects (not LEN bytes!). Both SRCP1 and SRCP2 should be aligned for
88 memory operations on `op_t's. */
89 static int
90 memcmp_common_alignment (long int srcp1, long int srcp2, size_t len)
92 op_t a0, a1;
93 op_t b0, b1;
95 switch (len % 4)
97 default: /* Avoid warning about uninitialized local variables. */
98 case 2:
99 a0 = ((op_t *) srcp1)[0];
100 b0 = ((op_t *) srcp2)[0];
101 srcp1 += OPSIZ;
102 srcp2 += OPSIZ;
103 len += 2;
104 goto do1;
105 case 3:
106 a1 = ((op_t *) srcp1)[0];
107 b1 = ((op_t *) srcp2)[0];
108 srcp1 += OPSIZ;
109 srcp2 += OPSIZ;
110 len += 1;
111 goto do2;
112 case 0:
113 if (OP_T_THRES <= 3 * OPSIZ && len == 0)
114 return 0;
115 a0 = ((op_t *) srcp1)[0];
116 b0 = ((op_t *) srcp2)[0];
117 srcp1 += OPSIZ;
118 srcp2 += OPSIZ;
119 goto do3;
120 case 1:
121 a1 = ((op_t *) srcp1)[0];
122 b1 = ((op_t *) srcp2)[0];
123 srcp1 += OPSIZ;
124 srcp2 += OPSIZ;
125 len -= 1;
126 if (OP_T_THRES <= 3 * OPSIZ && len == 0)
127 goto do0;
128 /* Fall through. */
133 a0 = ((op_t *) srcp1)[0];
134 b0 = ((op_t *) srcp2)[0];
135 srcp1 += OPSIZ;
136 srcp2 += OPSIZ;
137 if (__glibc_likely (a1 != b1))
138 return CMP_LT_OR_GT (a1, b1);
140 do3:
141 a1 = ((op_t *) srcp1)[0];
142 b1 = ((op_t *) srcp2)[0];
143 srcp1 += OPSIZ;
144 srcp2 += OPSIZ;
145 if (__glibc_likely (a0 != b0))
146 return CMP_LT_OR_GT (a0, b0);
148 do2:
149 a0 = ((op_t *) srcp1)[0];
150 b0 = ((op_t *) srcp2)[0];
151 srcp1 += OPSIZ;
152 srcp2 += OPSIZ;
153 if (__glibc_likely (a1 != b1))
154 return CMP_LT_OR_GT (a1, b1);
156 do1:
157 a1 = ((op_t *) srcp1)[0];
158 b1 = ((op_t *) srcp2)[0];
159 srcp1 += OPSIZ;
160 srcp2 += OPSIZ;
161 if (__glibc_likely (a0 != b0))
162 return CMP_LT_OR_GT (a0, b0);
164 len -= 4;
166 while (len != 0);
168 /* This is the right position for do0. Please don't move
169 it into the loop. */
170 do0:
171 if (__glibc_likely (a1 != b1))
172 return CMP_LT_OR_GT (a1, b1);
173 return 0;
176 static int memcmp_not_common_alignment (long, long, size_t) __THROW;
178 /* memcmp_not_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN
179 `op_t' objects (not LEN bytes!). SRCP2 should be aligned for memory
180 operations on `op_t', but SRCP1 *should be unaligned*. */
181 static int
182 memcmp_not_common_alignment (long int srcp1, long int srcp2, size_t len)
184 void * srcp1i;
185 op_t a0, a1, a2, a3;
186 op_t b0, b1, b2, b3;
187 op_t x;
189 /* Calculate how to shift a word read at the memory operation
190 aligned srcp1 to make it aligned for comparison. */
192 srcp1i = (void *) srcp1;
194 /* Make SRCP1 aligned by rounding it down to the beginning of the `op_t'
195 it points in the middle of. */
196 srcp1 &= -OPSIZ;
198 switch (len % 4)
200 default: /* Avoid warning about uninitialized local variables. */
201 case 2:
202 a1 = ((op_t *) srcp1)[0];
203 a2 = ((op_t *) srcp1)[1];
204 b2 = ((op_t *) srcp2)[0];
205 srcp1 += 2 * OPSIZ;
206 srcp2 += 1 * OPSIZ;
207 len += 2;
208 goto do1;
209 case 3:
210 a0 = ((op_t *) srcp1)[0];
211 a1 = ((op_t *) srcp1)[1];
212 b1 = ((op_t *) srcp2)[0];
213 srcp1 += 2 * OPSIZ;
214 srcp2 += 1 * OPSIZ;
215 len += 1;
216 goto do2;
217 case 0:
218 if (OP_T_THRES <= 3 * OPSIZ && len == 0)
219 return 0;
220 a3 = ((op_t *) srcp1)[0];
221 a0 = ((op_t *) srcp1)[1];
222 b0 = ((op_t *) srcp2)[0];
223 srcp1 += 2 * OPSIZ;
224 srcp2 += 1 * OPSIZ;
225 goto do3;
226 case 1:
227 a2 = ((op_t *) srcp1)[0];
228 a3 = ((op_t *) srcp1)[1];
229 b3 = ((op_t *) srcp2)[0];
230 srcp1 += 2 * OPSIZ;
231 srcp2 += 1 * OPSIZ;
232 len -= 1;
233 if (OP_T_THRES <= 3 * OPSIZ && len == 0)
234 goto do0;
235 /* Fall through. */
240 a0 = ((op_t *) srcp1)[0];
241 b0 = ((op_t *) srcp2)[0];
242 x = DBLALIGN (a2, a3, srcp1i);
243 srcp1 += OPSIZ;
244 srcp2 += OPSIZ;
245 if (__glibc_likely (x != b3))
246 return CMP_LT_OR_GT (x, b3);
248 do3:
249 a1 = ((op_t *) srcp1)[0];
250 b1 = ((op_t *) srcp2)[0];
251 x = DBLALIGN (a3, a0, srcp1i);
252 srcp1 += OPSIZ;
253 srcp2 += OPSIZ;
254 if (__glibc_likely (x != b0))
255 return CMP_LT_OR_GT (x, b0);
257 do2:
258 a2 = ((op_t *) srcp1)[0];
259 b2 = ((op_t *) srcp2)[0];
260 x = DBLALIGN (a0, a1, srcp1i);
261 srcp1 += OPSIZ;
262 srcp2 += OPSIZ;
263 if (__glibc_likely (x != b1))
264 return CMP_LT_OR_GT (x, b1);
266 do1:
267 a3 = ((op_t *) srcp1)[0];
268 b3 = ((op_t *) srcp2)[0];
269 x = DBLALIGN (a1, a2, srcp1i);
270 srcp1 += OPSIZ;
271 srcp2 += OPSIZ;
272 if (__glibc_likely (x != b2))
273 return CMP_LT_OR_GT (x, b2);
275 len -= 4;
277 while (len != 0);
279 /* This is the right position for do0. Please don't move
280 it into the loop. */
281 do0:
282 x = DBLALIGN (a2, a3, srcp1i);
283 if (__glibc_likely (x != b3))
284 return CMP_LT_OR_GT (x, b3);
285 return 0;
289 MEMCMP (const void *s1, const void *s2, size_t len)
291 op_t a0;
292 op_t b0;
293 long int srcp1 = (long int) s1;
294 long int srcp2 = (long int) s2;
295 int res;
297 if (len >= OP_T_THRES)
299 /* There are at least some bytes to compare. No need to test
300 for LEN == 0 in this alignment loop. */
301 while (srcp2 % OPSIZ != 0)
303 a0 = ((byte *) srcp1)[0];
304 b0 = ((byte *) srcp2)[0];
305 srcp1 += 1;
306 srcp2 += 1;
307 res = a0 - b0;
308 if (__glibc_likely (res != 0))
309 return res;
310 len -= 1;
313 /* SRCP2 is now aligned for memory operations on `op_t'.
314 SRCP1 alignment determines if we can do a simple,
315 aligned compare or need to shuffle bits. */
317 if (srcp1 % OPSIZ == 0)
318 res = memcmp_common_alignment (srcp1, srcp2, len / OPSIZ);
319 else
320 res = memcmp_not_common_alignment (srcp1, srcp2, len / OPSIZ);
321 if (res != 0)
322 return res;
324 /* Number of bytes remaining in the interval [0..OPSIZ-1]. */
325 srcp1 += len & -OPSIZ;
326 srcp2 += len & -OPSIZ;
327 len %= OPSIZ;
330 /* There are just a few bytes to compare. Use byte memory operations. */
331 while (len != 0)
333 a0 = ((byte *) srcp1)[0];
334 b0 = ((byte *) srcp2)[0];
335 srcp1 += 1;
336 srcp2 += 1;
337 res = a0 - b0;
338 if (__glibc_likely (res != 0))
339 return res;
340 len -= 1;
343 return 0;
345 libc_hidden_builtin_def(memcmp)
346 #ifdef weak_alias
347 # undef bcmp
348 weak_alias (memcmp, bcmp)
349 #endif