/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * This file provides API support for AEAD algorithms.
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <crypto/internal/geniv.h>
#include <crypto/internal/rng.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <linux/compiler.h>
#include <net/netlink.h>

#include "internal.h"
static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	unsigned long alignmask = crypto_aead_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = crypto_aead_alg(tfm)->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;
}
int crypto_aead_setkey(struct crypto_aead *tfm,
		       const u8 *key, unsigned int keylen)
{
	unsigned long alignmask = crypto_aead_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = setkey_unaligned(tfm, key, keylen);
	else
		err = crypto_aead_alg(tfm)->setkey(tfm, key, keylen);

	if (err)
		return err;

	crypto_aead_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_aead_setkey);
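
/*
 * Illustrative sketch (not part of the original source): programming the key
 * on a transform that is assumed to have been allocated elsewhere with
 * crypto_alloc_aead().  The 16-byte all-zero key is purely an example value.
 * Note that setkey_unaligned() above transparently bounces a misaligned key
 * through an aligned scratch buffer, so callers need not align the key
 * themselves.
 *
 *	static const u8 example_key[16];	// example only
 *	int err;
 *
 *	err = crypto_aead_setkey(tfm, example_key, sizeof(example_key));
 *	if (err)
 *		pr_err("setkey failed: %d\n", err);
 */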
int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	int err;

	if (authsize > crypto_aead_maxauthsize(tfm))
		return -EINVAL;

	if (crypto_aead_alg(tfm)->setauthsize) {
		err = crypto_aead_alg(tfm)->setauthsize(tfm, authsize);
		if (err)
			return err;
	}

	tfm->authsize = authsize;
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);
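
/*
 * Illustrative sketch (not part of the original source): shrinking the
 * authentication tag on an already allocated transform.  The 12-byte tag
 * length is an example; it must not exceed crypto_aead_maxauthsize(tfm),
 * and the algorithm's own ->setauthsize() hook may reject sizes it does
 * not support.
 *
 *	int err = crypto_aead_setauthsize(tfm, 12);
 *
 *	if (err)
 *		pr_err("authsize 12 not supported: %d\n", err);
 */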
static void crypto_aead_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);

	alg->exit(aead);
}

static int crypto_aead_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);

	crypto_aead_set_flags(aead, CRYPTO_TFM_NEED_KEY);

	aead->authsize = alg->maxauthsize;

	if (alg->exit)
		aead->base.exit = crypto_aead_exit_tfm;

	if (alg->init)
		return alg->init(aead);

	return 0;
}
#ifdef CONFIG_NET
static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_aead raead;
	struct aead_alg *aead = container_of(alg, struct aead_alg, base);

	strncpy(raead.type, "aead", sizeof(raead.type));
	strncpy(raead.geniv, "<none>", sizeof(raead.geniv));

	raead.blocksize = alg->cra_blocksize;
	raead.maxauthsize = aead->maxauthsize;
	raead.ivsize = aead->ivsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
		    sizeof(struct crypto_report_aead), &raead))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct aead_alg *aead = container_of(alg, struct aead_alg, base);

	seq_printf(m, "type         : aead\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "ivsize       : %u\n", aead->ivsize);
	seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
	seq_printf(m, "geniv        : <none>\n");
}
static void crypto_aead_free_instance(struct crypto_instance *inst)
{
	struct aead_instance *aead = aead_instance(inst);

	if (!aead->free) {
		inst->tmpl->free(inst);
		return;
	}

	aead->free(aead);
}
static const struct crypto_type crypto_aead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
	.free = crypto_aead_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_aead_show,
#endif
	.report = crypto_aead_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};
static int aead_geniv_setkey(struct crypto_aead *tfm,
			     const u8 *key, unsigned int keylen)
{
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);

	return crypto_aead_setkey(ctx->child, key, keylen);
}

static int aead_geniv_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(ctx->child, authsize);
}
struct aead_instance *aead_geniv_alloc(struct crypto_template *tmpl,
				       struct rtattr **tb, u32 type, u32 mask)
{
	const char *name;
	struct crypto_aead_spawn *spawn;
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct aead_alg *alg;
	unsigned int ivsize;
	unsigned int maxauthsize;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return ERR_CAST(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = aead_instance_ctx(inst);

	/* Ignore async algorithms if necessary. */
	mask |= crypto_requires_sync(algt->type, algt->mask);

	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, name, type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	ivsize = crypto_aead_alg_ivsize(alg);
	maxauthsize = crypto_aead_alg_maxauthsize(alg);

	err = -EINVAL;
	if (ivsize < sizeof(u64))
		goto err_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "%s(%s)", tmpl->name, alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_alg;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "%s(%s)", tmpl->name, alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);

	inst->alg.setkey = aead_geniv_setkey;
	inst->alg.setauthsize = aead_geniv_setauthsize;

	inst->alg.ivsize = ivsize;
	inst->alg.maxauthsize = maxauthsize;

out:
	return inst;

err_drop_alg:
	crypto_drop_aead(spawn);
err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}
EXPORT_SYMBOL_GPL(aead_geniv_alloc);
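
/*
 * Illustrative sketch (not part of the original source): how an IV-generator
 * template's ->create() callback might build on aead_geniv_alloc().  The
 * names example_geniv_create/encrypt/decrypt are hypothetical; a real
 * template (seqiv, for instance) supplies its own request handlers before
 * registering the instance.
 *
 *	static int example_geniv_create(struct crypto_template *tmpl,
 *					struct rtattr **tb)
 *	{
 *		struct aead_instance *inst;
 *		int err;
 *
 *		inst = aead_geniv_alloc(tmpl, tb, 0, 0);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.init = aead_init_geniv;
 *		inst->alg.exit = aead_exit_geniv;
 *		inst->alg.encrypt = example_geniv_encrypt;	// hypothetical
 *		inst->alg.decrypt = example_geniv_decrypt;	// hypothetical
 *		inst->free = aead_geniv_free;
 *
 *		err = aead_register_instance(tmpl, inst);
 *		if (err)
 *			aead_geniv_free(inst);
 *		return err;
 *	}
 */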
void aead_geniv_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(aead_geniv_free);
int aead_init_geniv(struct crypto_aead *aead)
{
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(aead);
	struct aead_instance *inst = aead_alg_instance(aead);
	struct crypto_aead *child;
	int err;

	spin_lock_init(&ctx->lock);

	err = crypto_get_default_rng();
	if (err)
		goto out;

	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
				   crypto_aead_ivsize(aead));
	crypto_put_default_rng();
	if (err)
		goto out;

	ctx->sknull = crypto_get_default_null_skcipher();
	err = PTR_ERR(ctx->sknull);
	if (IS_ERR(ctx->sknull))
		goto out;

	child = crypto_spawn_aead(aead_instance_ctx(inst));
	err = PTR_ERR(child);
	if (IS_ERR(child))
		goto drop_null;

	ctx->child = child;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(child) +
				      sizeof(struct aead_request));

	err = 0;

out:
	return err;

drop_null:
	crypto_put_default_null_skcipher();
	goto out;
}
EXPORT_SYMBOL_GPL(aead_init_geniv);
void aead_exit_geniv(struct crypto_aead *tfm)
{
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
	crypto_put_default_null_skcipher();
}
EXPORT_SYMBOL_GPL(aead_exit_geniv);
int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
		     u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_aead_type;
	return crypto_grab_spawn(&spawn->base, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_aead);
struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_aead_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_aead);
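
/*
 * Illustrative sketch (not part of the original source): a full encryption
 * round trip through the AEAD API.  "gcm(aes)" and all buffer sizes are
 * example choices; a real caller would use its own algorithm name, key, IV
 * and scatterlists, and would normally set a completion callback for async
 * transforms.
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	struct scatterlist sg;
 *	u8 iv[12] = { 0 };			// example IV
 *	static const u8 key[16];		// example key
 *	static u8 buf[16 + 16];			// plaintext + room for the tag
 *	int err;
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_aead_setkey(tfm, key, sizeof(key));
 *	if (err)
 *		goto out_free_tfm;
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out_free_tfm;
 *	}
 *
 *	sg_init_one(&sg, buf, sizeof(buf));
 *	aead_request_set_ad(req, 0);		// no associated data here
 *	aead_request_set_crypt(req, &sg, &sg, 16, iv);
 *	err = crypto_aead_encrypt(req);		// async tfms may return
 *						// -EINPROGRESS to a callback
 *
 *	aead_request_free(req);
 * out_free_tfm:
 *	crypto_free_aead(tfm);
 *	return err;
 */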
static int aead_prepare_alg(struct aead_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (max3(alg->maxauthsize, alg->ivsize, alg->chunksize) >
	    PAGE_SIZE / 8)
		return -EINVAL;

	if (!alg->chunksize)
		alg->chunksize = base->cra_blocksize;

	base->cra_type = &crypto_aead_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_AEAD;

	return 0;
}
int crypto_register_aead(struct aead_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = aead_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_aead);
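
/*
 * Illustrative sketch (not part of the original source): the shape of a
 * driver-side registration.  Every name and handler below (example_*) is
 * hypothetical; the size fields are what aead_prepare_alg() above validates
 * (ivsize, maxauthsize and chunksize against PAGE_SIZE / 8).
 *
 *	static struct aead_alg example_alg = {
 *		.setkey		= example_aead_setkey,		// hypothetical
 *		.setauthsize	= example_aead_setauthsize,	// hypothetical
 *		.encrypt	= example_aead_encrypt,		// hypothetical
 *		.decrypt	= example_aead_decrypt,		// hypothetical
 *		.ivsize		= 12,
 *		.maxauthsize	= 16,
 *		.base = {
 *			.cra_name	 = "example(cipher)",
 *			.cra_driver_name = "example-driver",
 *			.cra_priority	 = 100,
 *			.cra_blocksize	 = 1,
 *			.cra_ctxsize	 = sizeof(struct example_ctx),
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	static int __init example_init(void)
 *	{
 *		return crypto_register_aead(&example_alg);
 *	}
 */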
void crypto_unregister_aead(struct aead_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_aead);
int crypto_register_aeads(struct aead_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_aead(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_aead(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_aeads);
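
/*
 * Illustrative sketch (not part of the original source): batch registration
 * of several variants with automatic rollback on failure, matching the loop
 * above.  example_algs[] and its module hooks are hypothetical.
 *
 *	static struct aead_alg example_algs[2];	// filled in as in the
 *						// single-alg sketch above
 *
 *	static int __init example_init(void)
 *	{
 *		return crypto_register_aeads(example_algs,
 *					     ARRAY_SIZE(example_algs));
 *	}
 *
 *	static void __exit example_exit(void)
 *	{
 *		crypto_unregister_aeads(example_algs,
 *					ARRAY_SIZE(example_algs));
 *	}
 */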
void crypto_unregister_aeads(struct aead_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_aead(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_aeads);
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst)
{
	int err;

	err = aead_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, aead_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(aead_register_instance);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");