arch/s390/crypto/sha256_s390.c
/*
 * Cryptographic API.
 *
 * s390 implementation of the SHA256 Secure Hash Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005,2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * Derived from "crypto/sha256_generic.c"
 * and "arch/s390/crypto/sha1_s390.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <linux/init.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <crypto/sha.h>

#include "crypt_s390.h"
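
/*
 * Per-transform hash context: "count" is the total number of message
 * bytes seen so far, "state" holds the eight 32-bit chaining values, and
 * "buf" is sized to two blocks so that the final padding, which may spill
 * into a second 64-byte block, always fits.
 */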
struct s390_sha256_ctx {
	u64 count;		/* message length */
	u32 state[8];
	u8 buf[2 * SHA256_BLOCK_SIZE];
};
static void sha256_init(struct crypto_tfm *tfm)
{
	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->state[0] = SHA256_H0;
	sctx->state[1] = SHA256_H1;
	sctx->state[2] = SHA256_H2;
	sctx->state[3] = SHA256_H3;
	sctx->state[4] = SHA256_H4;
	sctx->state[5] = SHA256_H5;
	sctx->state[6] = SHA256_H6;
	sctx->state[7] = SHA256_H7;
	sctx->count = 0;
}
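
/*
 * The CPACF KIMD (compute intermediate message digest) operation only
 * processes data in multiples of the 64-byte block size, so partial input
 * is buffered in the context and complete blocks are handed to the
 * hardware in a single call.
 */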
static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
			  unsigned int len)
{
	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned int index;
	int ret;

	/* how much is already in the buffer? */
	index = sctx->count & 0x3f;

	sctx->count += len;

	if ((index + len) < SHA256_BLOCK_SIZE)
		goto store;
	/* process one stored block */
	if (index) {
		memcpy(sctx->buf + index, data, SHA256_BLOCK_SIZE - index);
		ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf,
				      SHA256_BLOCK_SIZE);
		BUG_ON(ret != SHA256_BLOCK_SIZE);
		data += SHA256_BLOCK_SIZE - index;
		len -= SHA256_BLOCK_SIZE - index;
		/* any remaining tail now belongs at the start of the buffer */
		index = 0;
	}
	/* process as many blocks as possible */
	if (len >= SHA256_BLOCK_SIZE) {
		ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, data,
				      len & ~(SHA256_BLOCK_SIZE - 1));
		BUG_ON(ret != (len & ~(SHA256_BLOCK_SIZE - 1)));
		data += ret;
		len -= ret;
	}

store:
	/* anything left? */
	if (len)
		memcpy(sctx->buf + index, data, len);
}
/* Add padding and return the message digest */
static void sha256_final(struct crypto_tfm *tfm, u8 *out)
{
	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
	u64 bits;
	unsigned int index, end;
	int ret;

	/* must perform manual padding */
	index = sctx->count & 0x3f;
	end = (index < 56) ? SHA256_BLOCK_SIZE : (2 * SHA256_BLOCK_SIZE);
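	/*
	 * One pad byte (0x80) plus the 8-byte length must still fit behind
	 * the buffered tail; with 56 or more bytes already in the block the
	 * padding spills over and two blocks are processed at the end.
	 */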
	/* start pad with 1 */
	sctx->buf[index] = 0x80;

	/* pad with zeros */
	index++;
	memset(sctx->buf + index, 0x00, end - index - 8);

	/* append message length */
	bits = sctx->count * 8;
	memcpy(sctx->buf + end - 8, &bits, sizeof(bits));
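	/*
	 * s390 is big-endian, so the 64-bit bit count can be copied verbatim
	 * and already matches the big-endian length field SHA-256 requires.
	 */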
	ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf, end);
	BUG_ON(ret != end);

	/* copy digest to out */
	memcpy(out, sctx->state, SHA256_DIGEST_SIZE);

	/* wipe context */
	memset(sctx, 0, sizeof *sctx);
}
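
/*
 * Register as an old-style digest algorithm. CRYPT_S390_PRIORITY is
 * higher than the priority of the generic C implementation, so the
 * crypto API prefers this driver for "sha256" whenever it is loaded.
 */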
static struct crypto_alg alg = {
	.cra_name	= "sha256",
	.cra_driver_name = "sha256-s390",
	.cra_priority	= CRYPT_S390_PRIORITY,
	.cra_flags	= CRYPTO_ALG_TYPE_DIGEST,
	.cra_blocksize	= SHA256_BLOCK_SIZE,
	.cra_ctxsize	= sizeof(struct s390_sha256_ctx),
	.cra_module	= THIS_MODULE,
	.cra_list	= LIST_HEAD_INIT(alg.cra_list),
	.cra_u		= { .digest = {
		.dia_digestsize	= SHA256_DIGEST_SIZE,
		.dia_init	= sha256_init,
		.dia_update	= sha256_update,
		.dia_final	= sha256_final } }
};
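
/*
 * Only register the algorithm if the machine's CPACF facility actually
 * provides the SHA-256 function of KIMD; otherwise loading fails with
 * -EOPNOTSUPP and the generic implementation remains in use.
 */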
static int init(void)
{
	if (!crypt_s390_func_available(KIMD_SHA_256))
		return -EOPNOTSUPP;

	return crypto_register_alg(&alg);
}
static void __exit fini(void)
{
	crypto_unregister_alg(&alg);
}

module_init(init);
module_exit(fini);

MODULE_ALIAS("sha256");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm");