/*
 * UFC-crypt: ultra fast crypt(3) implementation
 *
 * Copyright (C) 1991-2017 Free Software Foundation, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; see the file COPYING.LIB.  If not,
 * see <http://www.gnu.org/licenses/>.
 *
 * @(#)crypt.c  2.25 12/20/96
 *
 * Semiportable C version
 *
 */
#include "crypt-private.h"

#ifdef _UFC_32_

/*
 * 32 bit version
 */

#define SBA(sb, v) (*(long32*)((char*)(sb)+(v)))
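/*
 * SBA selects an S-box entry by *byte* offset rather than by element
 * index: the 16-bit pieces of s extracted below are already scaled
 * byte offsets into the combined sb0/sb2 tables (built during setup,
 * in crypt_util.c), so the macro needs only a char* addition and a
 * long32 load.
 */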
void
_ufc_doit_r (ufc_long itr, struct crypt_data * __restrict __data,
             ufc_long *res)
{
  int i;
  long32 s, *k;
  long32 *sb01 = (long32*)__data->sb0;
  long32 *sb23 = (long32*)__data->sb2;
  long32 l1, l2, r1, r2;

  l1 = (long32)res[0]; l2 = (long32)res[1];
  r1 = (long32)res[2]; r2 = (long32)res[3];
  while(itr--) {
    k = (long32*)__data->keysched;
    for(i=8; i--; ) {
      /* Each pass through this loop performs two DES rounds: the
         first pair of key words updates the left half from the right,
         the second updates the right half from the new left.  Eight
         passes give the full 16 rounds.  */
      s = *k++ ^ r1;
      l1 ^= SBA(sb01, s & 0xffff); l2 ^= SBA(sb01, (s & 0xffff)+4);
      l1 ^= SBA(sb01, s >>= 16  ); l2 ^= SBA(sb01, (s         )+4);
      s = *k++ ^ r2;
      l1 ^= SBA(sb23, s & 0xffff); l2 ^= SBA(sb23, (s & 0xffff)+4);
      l1 ^= SBA(sb23, s >>= 16  ); l2 ^= SBA(sb23, (s         )+4);

      s = *k++ ^ l1;
      r1 ^= SBA(sb01, s & 0xffff); r2 ^= SBA(sb01, (s & 0xffff)+4);
      r1 ^= SBA(sb01, s >>= 16  ); r2 ^= SBA(sb01, (s         )+4);
      s = *k++ ^ l2;
      r1 ^= SBA(sb23, s & 0xffff); r2 ^= SBA(sb23, (s & 0xffff)+4);
      r1 ^= SBA(sb23, s >>= 16  ); r2 ^= SBA(sb23, (s         )+4);
    }
    /* Swap the halves between iterations.  */
    s=l1; l1=r1; r1=s; s=l2; l2=r2; r2=s;
  }
  res[0] = l1; res[1] = l2; res[2] = r1; res[3] = r2;
}
#endif
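/*
 * Exactly one of _UFC_32_ and _UFC_64_ is expected to be defined by
 * the build's word-size configuration (pulled in through
 * crypt-private.h), so only one of the two _ufc_doit_r definitions in
 * this file is ever compiled.
 */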
#ifdef _UFC_64_

/*
 * 64 bit version
 */

#define SBA(sb, v) (*(long64*)((char*)(sb)+(v)))
void
_ufc_doit_r (ufc_long itr, struct crypt_data * __restrict __data,
             ufc_long *res)
{
  int i;
  long64 l, r, s, *k;
  long64 *sb01 = (long64*)__data->sb0;
  long64 *sb23 = (long64*)__data->sb2;

  l = (((long64)res[0]) << 32) | ((long64)res[1]);
  r = (((long64)res[2]) << 32) | ((long64)res[3]);
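  /*
   * Unlike the 32-bit variant, which keeps each 64-bit half as a pair
   * of long32 words, this version packs res[0..3] into two long64
   * halves, so every table lookup below updates a whole half at once.
   */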
  while(itr--) {
    k = (long64*)__data->keysched;
    for(i=8; i--; ) {
      /* As in the 32-bit variant, each pass performs two DES rounds;
         here a single long64 key word covers a whole round.  */
      s = *k++ ^ r;
      l ^= SBA(sb23, (s       ) & 0xffff);
      l ^= SBA(sb23, (s >>= 16) & 0xffff);
      l ^= SBA(sb01, (s >>= 16) & 0xffff);
      l ^= SBA(sb01, (s >>= 16)         );

      s = *k++ ^ l;
      r ^= SBA(sb23, (s       ) & 0xffff);
      r ^= SBA(sb23, (s >>= 16) & 0xffff);
      r ^= SBA(sb01, (s >>= 16) & 0xffff);
      r ^= SBA(sb01, (s >>= 16)         );
    }
    /* Swap the halves between iterations.  */
    s=l; l=r; r=s;
  }
  /* Unpack the two 64-bit halves back into four 32-bit words.  */
  res[0] = l >> 32; res[1] = l & 0xffffffff;
  res[2] = r >> 32; res[3] = r & 0xffffffff;
}
#endif
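/*
 * Minimal usage sketch (illustrative only; not part of this file).
 * Callers normally reach _ufc_doit_r through the public crypt_r
 * interface, which fills in the key schedule and salt-dependent
 * tables of struct crypt_data before the inner loop runs:
 *
 *   #define _GNU_SOURCE
 *   #include <crypt.h>
 *
 *   struct crypt_data data;
 *   data.initialized = 0;
 *   char *hash = crypt_r ("phrase", "ab", &data);
 *
 * The traditional crypt(3) algorithm performs 25 DES encryptions of a
 * zero block, so on that path itr is 25.
 */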