/* glibc: sysdeps/tile/memcmp.c */
/* Copyright (C) 1991-2017 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif

#if defined HAVE_STRING_H || defined _LIBC
# include <string.h>
#endif

#undef memcmp

#ifndef MEMCMP
# define MEMCMP memcmp
#endif

#ifdef _LIBC

# include <memcopy.h>
# include <endian.h>

# if __BYTE_ORDER == __BIG_ENDIAN
#  define WORDS_BIGENDIAN
# endif

#else /* Not in the GNU C library.  */

# include <sys/types.h>

/* Type to use for aligned memory operations.
   This should normally be the biggest type supported by a single load
   and store.  Must be an unsigned type.  */
# define op_t unsigned long int
# define OPSIZ (sizeof (op_t))

/* Threshold value for when to enter the unrolled loops.  */
# define OP_T_THRES 16

/* Type to use for unaligned operations.  */
typedef unsigned char byte;

#endif /* In the GNU C library.  */

/* Provide the appropriate builtins to shift two registers based on
   the alignment of a pointer held in a third register, and to reverse
   the bytes in a word.  */
#ifdef __tilegx__
#define DBLALIGN __insn_dblalign
#define REVBYTES __insn_revbytes
#else
#define DBLALIGN __insn_dword_align
#define REVBYTES __insn_bytex
#endif

/* Words are compared as unsigned integers.  On a big-endian machine the
   most significant byte is already in the high bits, so a plain compare
   gives memcmp ordering; on little-endian we must byte-reverse first.  */
#ifdef WORDS_BIGENDIAN
# define CMP_LT_OR_GT(a, b) ((a) > (b) ? 1 : -1)
#else
# define CMP_LT_OR_GT(a, b) (REVBYTES(a) > REVBYTES(b) ? 1 : -1)
#endif

/* BE VERY CAREFUL IF YOU CHANGE THIS CODE!  */

/* The strategy of this memcmp is:

   1. Compare bytes until one of the block pointers is aligned.

   2. Compare using memcmp_common_alignment or
      memcmp_not_common_alignment, regarding the alignment of the other
      block after the initial byte operations.  The maximum number of
      full words (of type op_t) are compared in this way.

   3. Compare the few remaining bytes.  */
89 static int memcmp_common_alignment (long, long, size_t) __THROW;
91 /* memcmp_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN `op_t'
92 objects (not LEN bytes!). Both SRCP1 and SRCP2 should be aligned for
93 memory operations on `op_t's. */
94 static int
95 memcmp_common_alignment (long int srcp1, long int srcp2, size_t len)
97 op_t a0, a1;
98 op_t b0, b1;
100 switch (len % 4)
102 default: /* Avoid warning about uninitialized local variables. */
103 case 2:
104 a0 = ((op_t *) srcp1)[0];
105 b0 = ((op_t *) srcp2)[0];
106 srcp1 += OPSIZ;
107 srcp2 += OPSIZ;
108 len += 2;
109 goto do1;
110 case 3:
111 a1 = ((op_t *) srcp1)[0];
112 b1 = ((op_t *) srcp2)[0];
113 srcp1 += OPSIZ;
114 srcp2 += OPSIZ;
115 len += 1;
116 goto do2;
117 case 0:
118 if (OP_T_THRES <= 3 * OPSIZ && len == 0)
119 return 0;
120 a0 = ((op_t *) srcp1)[0];
121 b0 = ((op_t *) srcp2)[0];
122 srcp1 += OPSIZ;
123 srcp2 += OPSIZ;
124 goto do3;
125 case 1:
126 a1 = ((op_t *) srcp1)[0];
127 b1 = ((op_t *) srcp2)[0];
128 srcp1 += OPSIZ;
129 srcp2 += OPSIZ;
130 len -= 1;
131 if (OP_T_THRES <= 3 * OPSIZ && len == 0)
132 goto do0;
133 /* Fall through. */
138 a0 = ((op_t *) srcp1)[0];
139 b0 = ((op_t *) srcp2)[0];
140 srcp1 += OPSIZ;
141 srcp2 += OPSIZ;
142 if (__glibc_likely (a1 != b1))
143 return CMP_LT_OR_GT (a1, b1);
145 do3:
146 a1 = ((op_t *) srcp1)[0];
147 b1 = ((op_t *) srcp2)[0];
148 srcp1 += OPSIZ;
149 srcp2 += OPSIZ;
150 if (__glibc_likely (a0 != b0))
151 return CMP_LT_OR_GT (a0, b0);
153 do2:
154 a0 = ((op_t *) srcp1)[0];
155 b0 = ((op_t *) srcp2)[0];
156 srcp1 += OPSIZ;
157 srcp2 += OPSIZ;
158 if (__glibc_likely (a1 != b1))
159 return CMP_LT_OR_GT (a1, b1);
161 do1:
162 a1 = ((op_t *) srcp1)[0];
163 b1 = ((op_t *) srcp2)[0];
164 srcp1 += OPSIZ;
165 srcp2 += OPSIZ;
166 if (__glibc_likely (a0 != b0))
167 return CMP_LT_OR_GT (a0, b0);
169 len -= 4;
171 while (len != 0);
173 /* This is the right position for do0. Please don't move
174 it into the loop. */
175 do0:
176 if (__glibc_likely (a1 != b1))
177 return CMP_LT_OR_GT (a1, b1);
178 return 0;
181 static int memcmp_not_common_alignment (long, long, size_t) __THROW;
183 /* memcmp_not_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN
184 `op_t' objects (not LEN bytes!). SRCP2 should be aligned for memory
185 operations on `op_t', but SRCP1 *should be unaligned*. */
186 static int
187 memcmp_not_common_alignment (long int srcp1, long int srcp2, size_t len)
189 void * srcp1i;
190 op_t a0, a1, a2, a3;
191 op_t b0, b1, b2, b3;
192 op_t x;
194 /* Calculate how to shift a word read at the memory operation
195 aligned srcp1 to make it aligned for comparison. */
197 srcp1i = (void *) srcp1;
199 /* Make SRCP1 aligned by rounding it down to the beginning of the `op_t'
200 it points in the middle of. */
201 srcp1 &= -OPSIZ;
203 switch (len % 4)
205 default: /* Avoid warning about uninitialized local variables. */
206 case 2:
207 a1 = ((op_t *) srcp1)[0];
208 a2 = ((op_t *) srcp1)[1];
209 b2 = ((op_t *) srcp2)[0];
210 srcp1 += 2 * OPSIZ;
211 srcp2 += 1 * OPSIZ;
212 len += 2;
213 goto do1;
214 case 3:
215 a0 = ((op_t *) srcp1)[0];
216 a1 = ((op_t *) srcp1)[1];
217 b1 = ((op_t *) srcp2)[0];
218 srcp1 += 2 * OPSIZ;
219 srcp2 += 1 * OPSIZ;
220 len += 1;
221 goto do2;
222 case 0:
223 if (OP_T_THRES <= 3 * OPSIZ && len == 0)
224 return 0;
225 a3 = ((op_t *) srcp1)[0];
226 a0 = ((op_t *) srcp1)[1];
227 b0 = ((op_t *) srcp2)[0];
228 srcp1 += 2 * OPSIZ;
229 srcp2 += 1 * OPSIZ;
230 goto do3;
231 case 1:
232 a2 = ((op_t *) srcp1)[0];
233 a3 = ((op_t *) srcp1)[1];
234 b3 = ((op_t *) srcp2)[0];
235 srcp1 += 2 * OPSIZ;
236 srcp2 += 1 * OPSIZ;
237 len -= 1;
238 if (OP_T_THRES <= 3 * OPSIZ && len == 0)
239 goto do0;
240 /* Fall through. */
245 a0 = ((op_t *) srcp1)[0];
246 b0 = ((op_t *) srcp2)[0];
247 x = DBLALIGN (a2, a3, srcp1i);
248 srcp1 += OPSIZ;
249 srcp2 += OPSIZ;
250 if (__glibc_likely (x != b3))
251 return CMP_LT_OR_GT (x, b3);
253 do3:
254 a1 = ((op_t *) srcp1)[0];
255 b1 = ((op_t *) srcp2)[0];
256 x = DBLALIGN (a3, a0, srcp1i);
257 srcp1 += OPSIZ;
258 srcp2 += OPSIZ;
259 if (__glibc_likely (x != b0))
260 return CMP_LT_OR_GT (x, b0);
262 do2:
263 a2 = ((op_t *) srcp1)[0];
264 b2 = ((op_t *) srcp2)[0];
265 x = DBLALIGN (a0, a1, srcp1i);
266 srcp1 += OPSIZ;
267 srcp2 += OPSIZ;
268 if (__glibc_likely (x != b1))
269 return CMP_LT_OR_GT (x, b1);
271 do1:
272 a3 = ((op_t *) srcp1)[0];
273 b3 = ((op_t *) srcp2)[0];
274 x = DBLALIGN (a1, a2, srcp1i);
275 srcp1 += OPSIZ;
276 srcp2 += OPSIZ;
277 if (__glibc_likely (x != b2))
278 return CMP_LT_OR_GT (x, b2);
280 len -= 4;
282 while (len != 0);
284 /* This is the right position for do0. Please don't move
285 it into the loop. */
286 do0:
287 x = DBLALIGN (a2, a3, srcp1i);
288 if (__glibc_likely (x != b3))
289 return CMP_LT_OR_GT (x, b3);
290 return 0;
294 MEMCMP (const void *s1, const void *s2, size_t len)
296 op_t a0;
297 op_t b0;
298 long int srcp1 = (long int) s1;
299 long int srcp2 = (long int) s2;
300 int res;
302 if (len >= OP_T_THRES)
304 /* There are at least some bytes to compare. No need to test
305 for LEN == 0 in this alignment loop. */
306 while (srcp2 % OPSIZ != 0)
308 a0 = ((byte *) srcp1)[0];
309 b0 = ((byte *) srcp2)[0];
310 srcp1 += 1;
311 srcp2 += 1;
312 res = a0 - b0;
313 if (__glibc_likely (res != 0))
314 return res;
315 len -= 1;
318 /* SRCP2 is now aligned for memory operations on `op_t'.
319 SRCP1 alignment determines if we can do a simple,
320 aligned compare or need to shuffle bits. */
322 if (srcp1 % OPSIZ == 0)
323 res = memcmp_common_alignment (srcp1, srcp2, len / OPSIZ);
324 else
325 res = memcmp_not_common_alignment (srcp1, srcp2, len / OPSIZ);
326 if (res != 0)
327 return res;
329 /* Number of bytes remaining in the interval [0..OPSIZ-1]. */
330 srcp1 += len & -OPSIZ;
331 srcp2 += len & -OPSIZ;
332 len %= OPSIZ;
335 /* There are just a few bytes to compare. Use byte memory operations. */
336 while (len != 0)
338 a0 = ((byte *) srcp1)[0];
339 b0 = ((byte *) srcp2)[0];
340 srcp1 += 1;
341 srcp2 += 1;
342 res = a0 - b0;
343 if (__glibc_likely (res != 0))
344 return res;
345 len -= 1;
348 return 0;
350 libc_hidden_builtin_def(memcmp)
351 #ifdef weak_alias
352 # undef bcmp
353 weak_alias (memcmp, bcmp)
354 #endif