[glibc.git] / crypt / crypt.c
/*
 * UFC-crypt: ultra fast crypt(3) implementation
 *
 * Copyright (C) 1991-2015 Free Software Foundation, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; see the file COPYING.LIB.  If not,
 * see <http://www.gnu.org/licenses/>.
 *
 * @(#)crypt.c 2.25 12/20/96
 *
 * Semiportable C version
 *
 */

#include "crypt-private.h"
#ifdef _UFC_32_

/*
 * 32 bit version
 */

#define SBA(sb, v) (*(long32*)((char*)(sb)+(v)))

void
_ufc_doit_r(itr, __data, res)
     ufc_long itr, *res;
     struct crypt_data * __restrict __data;
{
  int i;
  long32 s, *k;
  long32 *sb01 = (long32*)__data->sb0;
  long32 *sb23 = (long32*)__data->sb2;
  long32 l1, l2, r1, r2;

  l1 = (long32)res[0]; l2 = (long32)res[1];
  r1 = (long32)res[2]; r2 = (long32)res[3];

  while(itr--) {
    k = (long32*)__data->keysched;
    /* Eight iterations of two DES rounds each: the full 16 rounds.  */
    for(i=8; i--; ) {
      s = *k++ ^ r1;
      l1 ^= SBA(sb01, s & 0xffff); l2 ^= SBA(sb01, (s & 0xffff)+4);
      l1 ^= SBA(sb01, s >>= 16  ); l2 ^= SBA(sb01, (s         )+4);
      s = *k++ ^ r2;
      l1 ^= SBA(sb23, s & 0xffff); l2 ^= SBA(sb23, (s & 0xffff)+4);
      l1 ^= SBA(sb23, s >>= 16  ); l2 ^= SBA(sb23, (s         )+4);

      s = *k++ ^ l1;
      r1 ^= SBA(sb01, s & 0xffff); r2 ^= SBA(sb01, (s & 0xffff)+4);
      r1 ^= SBA(sb01, s >>= 16  ); r2 ^= SBA(sb01, (s         )+4);
      s = *k++ ^ l2;
      r1 ^= SBA(sb23, s & 0xffff); r2 ^= SBA(sb23, (s & 0xffff)+4);
      r1 ^= SBA(sb23, s >>= 16  ); r2 ^= SBA(sb23, (s         )+4);
    }
    /* Swap the two halves before the next iteration.  */
    s=l1; l1=r1; r1=s; s=l2; l2=r2; r2=s;
  }
  res[0] = l1; res[1] = l2; res[2] = r1; res[3] = r2;
}

#endif
#ifdef _UFC_64_

/*
 * 64 bit version
 */

#define SBA(sb, v) (*(long64*)((char*)(sb)+(v)))

void
_ufc_doit_r(itr, __data, res)
     ufc_long itr, *res;
     struct crypt_data * __restrict __data;
{
  int i;
  long64 l, r, s, *k;
  long64 *sb01 = (long64*)__data->sb0;
  long64 *sb23 = (long64*)__data->sb2;

  l = (((long64)res[0]) << 32) | ((long64)res[1]);
  r = (((long64)res[2]) << 32) | ((long64)res[3]);

  while(itr--) {
    k = (long64*)__data->keysched;
    /* Eight iterations of two DES rounds each: the full 16 rounds.  */
    for(i=8; i--; ) {
      s = *k++ ^ r;
      l ^= SBA(sb23, (s       ) & 0xffff);
      l ^= SBA(sb23, (s >>= 16) & 0xffff);
      l ^= SBA(sb01, (s >>= 16) & 0xffff);
      l ^= SBA(sb01, (s >>= 16)         );

      s = *k++ ^ l;
      r ^= SBA(sb23, (s       ) & 0xffff);
      r ^= SBA(sb23, (s >>= 16) & 0xffff);
      r ^= SBA(sb01, (s >>= 16) & 0xffff);
      r ^= SBA(sb01, (s >>= 16)         );
    }
    /* Swap the two halves before the next iteration.  */
    s=l; l=r; r=s;
  }
  res[0] = l >> 32; res[1] = l & 0xffffffff;
  res[2] = r >> 32; res[3] = r & 0xffffffff;
}

#endif
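
/*
 * Illustration only -- not part of the original file.  The inner loops
 * above are driven through glibc's public crypt_r() interface declared
 * in <crypt.h> (a GNU extension, hence _GNU_SOURCE).  A minimal
 * standalone sketch of a caller, assuming a glibc system and linking
 * with -lcrypt, is kept under "#if 0" so it is never compiled into
 * this unit; "hello" and the salt "ab" are placeholder values.
 */
#if 0
#define _GNU_SOURCE
#include <crypt.h>
#include <stdio.h>
#include <string.h>

int
main (void)
{
  struct crypt_data data;
  /* The crypt_data object must be zeroed (initialized == 0) before its
     first use; afterwards it can be reused for further calls.  */
  memset (&data, 0, sizeof data);

  /* A two-character salt ("ab") selects the classic DES-based scheme;
     the result is a 13-character string starting with the salt, or
     NULL on failure.  */
  char *hash = crypt_r ("hello", "ab", &data);
  if (hash != NULL)
    puts (hash);

  return 0;
}
#endif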