// SPDX-License-Identifier: GPL-2.0
/*
 * Speck: a lightweight block cipher
 *
 * Copyright (c) 2018 Google, Inc
 *
 * Speck has 10 variants, including 5 block sizes. For now we only implement
 * the variants Speck128/128, Speck128/192, Speck128/256, Speck64/96, and
 * Speck64/128. Speck${B}/${K} denotes the variant with a block size of B bits
 * and a key size of K bits. The Speck128 variants are believed to be the most
 * secure variants, and they use the same block size and key sizes as AES. The
 * Speck64 variants are less secure, but on 32-bit processors are usually
 * faster. The remaining variants (Speck32, Speck48, and Speck96) are even less
 * secure and/or not as well suited for implementation on either 32-bit or
 * 64-bit processors, so are omitted.
 *
 * Reference: "The Simon and Speck Families of Lightweight Block Ciphers"
 * https://eprint.iacr.org/2013/404.pdf
 *
 * In a correspondence, the Speck designers have also clarified that the words
 * should be interpreted in little-endian format, and the words should be
 * ordered such that the first word of each block is 'y' rather than 'x', and
 * the first key word (rather than the last) becomes the first round key.
 */
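
/*
 * Concretely, this means a 16-byte Speck128 block is loaded as two
 * little-endian 64-bit words, y from bytes 0..7 and x from bytes 8..15, and
 * the encrypted (y, x) pair is stored back in the same order; see
 * crypto_speck128_encrypt() below.
 */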

#include <asm/unaligned.h>
#include <crypto/speck.h>
#include <linux/bitops.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
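
/* Speck128 */

/*
 * One Speck128 round in ARX form, on 64-bit words:
 *
 *	x = (ror64(x, 8) + y) ^ k;
 *	y = rol64(y, 3) ^ x;
 *
 * speck128_unround() applies the inverse steps in reverse order, so decryption
 * simply walks the round keys backwards.
 */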

static __always_inline void speck128_round(u64 *x, u64 *y, u64 k)
{
	*x = ror64(*x, 8);
	*x += *y;
	*x ^= k;
	*y = rol64(*y, 3);
	*y ^= *x;
}

static __always_inline void speck128_unround(u64 *x, u64 *y, u64 k)
{
	*y ^= *x;
	*y = ror64(*y, 3);
	*x ^= k;
	*x -= *y;
	*x = rol64(*x, 8);
}

void crypto_speck128_encrypt(const struct speck128_tfm_ctx *ctx,
			     u8 *out, const u8 *in)
{
	u64 y = get_unaligned_le64(in);
	u64 x = get_unaligned_le64(in + 8);
	int i;

	for (i = 0; i < ctx->nrounds; i++)
		speck128_round(&x, &y, ctx->round_keys[i]);

	put_unaligned_le64(y, out);
	put_unaligned_le64(x, out + 8);
}
EXPORT_SYMBOL_GPL(crypto_speck128_encrypt);

static void speck128_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	crypto_speck128_encrypt(crypto_tfm_ctx(tfm), out, in);
}

void crypto_speck128_decrypt(const struct speck128_tfm_ctx *ctx,
			     u8 *out, const u8 *in)
{
	u64 y = get_unaligned_le64(in);
	u64 x = get_unaligned_le64(in + 8);
	int i;

	for (i = ctx->nrounds - 1; i >= 0; i--)
		speck128_unround(&x, &y, ctx->round_keys[i]);

	put_unaligned_le64(y, out);
	put_unaligned_le64(x, out + 8);
}
EXPORT_SYMBOL_GPL(crypto_speck128_decrypt);

static void speck128_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	crypto_speck128_decrypt(crypto_tfm_ctx(tfm), out, in);
}
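
/*
 * The key schedule reuses the round function itself: 'k' is the running round
 * key, and l[] holds the remaining key words, which are mixed in one per
 * round with the round index serving as the round-key input.
 */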

int crypto_speck128_setkey(struct speck128_tfm_ctx *ctx, const u8 *key,
			   unsigned int keylen)
{
	u64 l[3];
	u64 k;
	int i;

	switch (keylen) {
	case SPECK128_128_KEY_SIZE:
		k = get_unaligned_le64(key);
		l[0] = get_unaligned_le64(key + 8);
		ctx->nrounds = SPECK128_128_NROUNDS;
		for (i = 0; i < ctx->nrounds; i++) {
			ctx->round_keys[i] = k;
			speck128_round(&l[0], &k, i);
		}
		break;
	case SPECK128_192_KEY_SIZE:
		k = get_unaligned_le64(key);
		l[0] = get_unaligned_le64(key + 8);
		l[1] = get_unaligned_le64(key + 16);
		ctx->nrounds = SPECK128_192_NROUNDS;
		for (i = 0; i < ctx->nrounds; i++) {
			ctx->round_keys[i] = k;
			speck128_round(&l[i % 2], &k, i);
		}
		break;
	case SPECK128_256_KEY_SIZE:
		k = get_unaligned_le64(key);
		l[0] = get_unaligned_le64(key + 8);
		l[1] = get_unaligned_le64(key + 16);
		l[2] = get_unaligned_le64(key + 24);
		ctx->nrounds = SPECK128_256_NROUNDS;
		for (i = 0; i < ctx->nrounds; i++) {
			ctx->round_keys[i] = k;
			speck128_round(&l[i % 3], &k, i);
		}
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_speck128_setkey);

static int speck128_setkey(struct crypto_tfm *tfm, const u8 *key,
			   unsigned int keylen)
{
	return crypto_speck128_setkey(crypto_tfm_ctx(tfm), key, keylen);
}
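
/* Speck64 */

/*
 * The Speck64 variants have the same structure as Speck128 but operate on
 * 32-bit words, with the same rotation amounts (8 and 3).
 */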

static __always_inline void speck64_round(u32 *x, u32 *y, u32 k)
{
	*x = ror32(*x, 8);
	*x += *y;
	*x ^= k;
	*y = rol32(*y, 3);
	*y ^= *x;
}

static __always_inline void speck64_unround(u32 *x, u32 *y, u32 k)
{
	*y ^= *x;
	*y = ror32(*y, 3);
	*x ^= k;
	*x -= *y;
	*x = rol32(*x, 8);
}

void crypto_speck64_encrypt(const struct speck64_tfm_ctx *ctx,
			    u8 *out, const u8 *in)
{
	u32 y = get_unaligned_le32(in);
	u32 x = get_unaligned_le32(in + 4);
	int i;

	for (i = 0; i < ctx->nrounds; i++)
		speck64_round(&x, &y, ctx->round_keys[i]);

	put_unaligned_le32(y, out);
	put_unaligned_le32(x, out + 4);
}
EXPORT_SYMBOL_GPL(crypto_speck64_encrypt);

static void speck64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	crypto_speck64_encrypt(crypto_tfm_ctx(tfm), out, in);
}

void crypto_speck64_decrypt(const struct speck64_tfm_ctx *ctx,
			    u8 *out, const u8 *in)
{
	u32 y = get_unaligned_le32(in);
	u32 x = get_unaligned_le32(in + 4);
	int i;

	for (i = ctx->nrounds - 1; i >= 0; i--)
		speck64_unround(&x, &y, ctx->round_keys[i]);

	put_unaligned_le32(y, out);
	put_unaligned_le32(x, out + 4);
}
EXPORT_SYMBOL_GPL(crypto_speck64_decrypt);

static void speck64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	crypto_speck64_decrypt(crypto_tfm_ctx(tfm), out, in);
}

int crypto_speck64_setkey(struct speck64_tfm_ctx *ctx, const u8 *key,
			  unsigned int keylen)
{
	u32 l[3];
	u32 k;
	int i;

	switch (keylen) {
	case SPECK64_96_KEY_SIZE:
		k = get_unaligned_le32(key);
		l[0] = get_unaligned_le32(key + 4);
		l[1] = get_unaligned_le32(key + 8);
		ctx->nrounds = SPECK64_96_NROUNDS;
		for (i = 0; i < ctx->nrounds; i++) {
			ctx->round_keys[i] = k;
			speck64_round(&l[i % 2], &k, i);
		}
		break;
	case SPECK64_128_KEY_SIZE:
		k = get_unaligned_le32(key);
		l[0] = get_unaligned_le32(key + 4);
		l[1] = get_unaligned_le32(key + 8);
		l[2] = get_unaligned_le32(key + 12);
		ctx->nrounds = SPECK64_128_NROUNDS;
		for (i = 0; i < ctx->nrounds; i++) {
			ctx->round_keys[i] = k;
			speck64_round(&l[i % 3], &k, i);
		}
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_speck64_setkey);

static int speck64_setkey(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen)
{
	return crypto_speck64_setkey(crypto_tfm_ctx(tfm), key, keylen);
}

/* Algorithm definitions */

static struct crypto_alg speck_algs[] = {
	{
		.cra_name		= "speck128",
		.cra_driver_name	= "speck128-generic",
		.cra_priority		= 100,
		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
		.cra_blocksize		= SPECK128_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct speck128_tfm_ctx),
		.cra_module		= THIS_MODULE,
		.cra_u			= {
			.cipher = {
				.cia_min_keysize	= SPECK128_128_KEY_SIZE,
				.cia_max_keysize	= SPECK128_256_KEY_SIZE,
				.cia_setkey		= speck128_setkey,
				.cia_encrypt		= speck128_encrypt,
				.cia_decrypt		= speck128_decrypt
			}
		}
	}, {
		.cra_name		= "speck64",
		.cra_driver_name	= "speck64-generic",
		.cra_priority		= 100,
		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
		.cra_blocksize		= SPECK64_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct speck64_tfm_ctx),
		.cra_module		= THIS_MODULE,
		.cra_u			= {
			.cipher = {
				.cia_min_keysize	= SPECK64_96_KEY_SIZE,
				.cia_max_keysize	= SPECK64_128_KEY_SIZE,
				.cia_setkey		= speck64_setkey,
				.cia_encrypt		= speck64_encrypt,
				.cia_decrypt		= speck64_decrypt
			}
		}
	}
};
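
/*
 * Usage sketch (illustrative only, not part of this module): once registered,
 * these ciphers can be driven through the kernel's single-block cipher API,
 * where key/src/dst are caller-provided buffers:
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("speck128", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_cipher_setkey(tfm, key, SPECK128_128_KEY_SIZE);
 *		crypto_cipher_encrypt_one(tfm, dst, src);
 *		crypto_cipher_decrypt_one(tfm, src, dst);
 *		crypto_free_cipher(tfm);
 *	}
 */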

static int __init speck_module_init(void)
{
	return crypto_register_algs(speck_algs, ARRAY_SIZE(speck_algs));
}

static void __exit speck_module_exit(void)
{
	crypto_unregister_algs(speck_algs, ARRAY_SIZE(speck_algs));
}

module_init(speck_module_init);
module_exit(speck_module_exit);

MODULE_DESCRIPTION("Speck block cipher (generic)");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
MODULE_ALIAS_CRYPTO("speck128");
MODULE_ALIAS_CRYPTO("speck128-generic");
MODULE_ALIAS_CRYPTO("speck64");
MODULE_ALIAS_CRYPTO("speck64-generic");