#include <linux/ceph/ceph_debug.h>

#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/key-type.h>
#include <linux/sched/mm.h>

#include <keys/ceph-type.h>
#include <keys/user-type.h>
#include <linux/ceph/decode.h>
#include "crypto.h"

/*
 * Set ->key and ->tfm. The rest of the key should be filled in before
 * this function is called.
 */
static int set_secret(struct ceph_crypto_key *key, void *buf)
{
	unsigned int noio_flag;
	int ret;

	key->key = NULL;
	key->tfm = NULL;

	switch (key->type) {
	case CEPH_CRYPTO_NONE:
		return 0; /* nothing to do */
	case CEPH_CRYPTO_AES:
		break;
	default:
		return -ENOTSUPP;
	}

	WARN_ON(!key->len);
	key->key = kmemdup(buf, key->len, GFP_NOIO);
	if (!key->key) {
		ret = -ENOMEM;
		goto fail;
	}

	/* crypto_alloc_skcipher() allocates with GFP_KERNEL */
	noio_flag = memalloc_noio_save();
	key->tfm = crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	memalloc_noio_restore(noio_flag);
	if (IS_ERR(key->tfm)) {
		ret = PTR_ERR(key->tfm);
		key->tfm = NULL;
		goto fail;
	}

	ret = crypto_skcipher_setkey(key->tfm, key->key, key->len);
	if (ret)
		goto fail;

	return 0;

fail:
	ceph_crypto_key_destroy(key);
	return ret;
}

int ceph_crypto_key_clone(struct ceph_crypto_key *dst,
			  const struct ceph_crypto_key *src)
{
	memcpy(dst, src, sizeof(struct ceph_crypto_key));
	return set_secret(dst, src->key);
}

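/*
 * Wire format handled by the encode/decode helpers below, as implied by
 * the code itself: a little-endian u16 type, the raw bytes of the
 * 'created' timestamp, a little-endian u16 key length, and then the key
 * bytes themselves.
 */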
int ceph_crypto_key_encode(struct ceph_crypto_key *key, void **p, void *end)
{
	if (*p + sizeof(u16) + sizeof(key->created) +
	    sizeof(u16) + key->len > end)
		return -ERANGE;
	ceph_encode_16(p, key->type);
	ceph_encode_copy(p, &key->created, sizeof(key->created));
	ceph_encode_16(p, key->len);
	ceph_encode_copy(p, key->key, key->len);
	return 0;
}

int ceph_crypto_key_decode(struct ceph_crypto_key *key, void **p, void *end)
{
	int ret;

	ceph_decode_need(p, end, 2*sizeof(u16) + sizeof(key->created), bad);
	key->type = ceph_decode_16(p);
	ceph_decode_copy(p, &key->created, sizeof(key->created));
	key->len = ceph_decode_16(p);
	ceph_decode_need(p, end, key->len, bad);
	ret = set_secret(key, *p);
	*p += key->len;
	return ret;

bad:
	dout("failed to decode crypto key\n");
	return -EINVAL;
}

int ceph_crypto_key_unarmor(struct ceph_crypto_key *key, const char *inkey)
{
	int inlen = strlen(inkey);
	int blen = inlen * 3 / 4;
	void *buf, *p;
	int ret;

	dout("crypto_key_unarmor %s\n", inkey);
	buf = kmalloc(blen, GFP_NOFS);
	if (!buf)
		return -ENOMEM;
	blen = ceph_unarmor(buf, inkey, inkey+inlen);
	if (blen < 0) {
		kfree(buf);
		return blen;
	}

	p = buf;
	ret = ceph_crypto_key_decode(key, &p, p + blen);
	kfree(buf);
	if (ret)
		return ret;
	dout("crypto_key_unarmor key %p type %d len %d\n", key,
	     key->type, key->len);
	return 0;
}

void ceph_crypto_key_destroy(struct ceph_crypto_key *key)
{
	if (key) {
		kfree(key->key);
		key->key = NULL;
		crypto_free_skcipher(key->tfm);
		key->tfm = NULL;
	}
}

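/*
 * AES here is CBC with a single, well-known IV: every encryption and
 * decryption below starts from the same CEPH_AES_IV constant, so no
 * per-message IV is exchanged with the peer.
 */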
static const u8 *aes_iv = (u8 *)CEPH_AES_IV;

/*
 * Should be used for buffers allocated with ceph_kvmalloc().
 * Currently these are encrypt out-buffer (ceph_buffer) and decrypt
 * in-buffer (msg front).
 *
 * Dispose of @sgt with teardown_sgtable().
 *
 * @prealloc_sg is to avoid memory allocation inside sg_alloc_table()
 * in cases where a single sg is sufficient. No attempt to reduce the
 * number of sgs by squeezing physically contiguous pages together is
 * made though, for simplicity.
 */
static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg,
			 const void *buf, unsigned int buf_len)
{
	struct scatterlist *sg;
	const bool is_vmalloc = is_vmalloc_addr(buf);
	unsigned int off = offset_in_page(buf);
	unsigned int chunk_cnt = 1;
	unsigned int chunk_len = PAGE_ALIGN(off + buf_len);
	int i;
	int ret;

	if (buf_len == 0) {
		memset(sgt, 0, sizeof(*sgt));
		return -EINVAL;
	}

	if (is_vmalloc) {
		chunk_cnt = chunk_len >> PAGE_SHIFT;
		chunk_len = PAGE_SIZE;
	}

	if (chunk_cnt > 1) {
		ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);
		if (ret)
			return ret;
	} else {
		WARN_ON(chunk_cnt != 1);
		sg_init_table(prealloc_sg, 1);
		sgt->sgl = prealloc_sg;
		sgt->nents = sgt->orig_nents = 1;
	}

	for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {
		struct page *page;
		unsigned int len = min(chunk_len - off, buf_len);

		if (is_vmalloc)
			page = vmalloc_to_page(buf);
		else
			page = virt_to_page(buf);

		sg_set_page(sg, page, len, off);

		off = 0;
		buf += len;
		buf_len -= len;
	}
	WARN_ON(buf_len != 0);

	return 0;
}

static void teardown_sgtable(struct sg_table *sgt)
{
	if (sgt->orig_nents > 1)
		sg_free_table(sgt);
}

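/*
 * In-place AES-CBC transform of @buf. On encrypt, @in_len plaintext
 * bytes are padded up to the next AES block boundary (each pad byte
 * holds the pad length, PKCS#7-style), so @buf must have room for
 * in_len rounded up to a multiple of AES_BLOCK_SIZE. On decrypt, the
 * last decrypted byte gives the pad length to strip, and *pout_len
 * receives the resulting payload length.
 */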
static int ceph_aes_crypt(const struct ceph_crypto_key *key, bool encrypt,
			  void *buf, int buf_len, int in_len, int *pout_len)
{
	SKCIPHER_REQUEST_ON_STACK(req, key->tfm);
	struct sg_table sgt;
	struct scatterlist prealloc_sg;
	char iv[AES_BLOCK_SIZE] __aligned(8);
	int pad_byte = AES_BLOCK_SIZE - (in_len & (AES_BLOCK_SIZE - 1));
	int crypt_len = encrypt ? in_len + pad_byte : in_len;
	int ret;

	WARN_ON(crypt_len > buf_len);
	if (encrypt)
		memset(buf + in_len, pad_byte, pad_byte);
	ret = setup_sgtable(&sgt, &prealloc_sg, buf, crypt_len);
	if (ret)
		return ret;

	memcpy(iv, aes_iv, AES_BLOCK_SIZE);
	skcipher_request_set_tfm(req, key->tfm);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, sgt.sgl, sgt.sgl, crypt_len, iv);

	/*
	print_hex_dump(KERN_ERR, "key: ", DUMP_PREFIX_NONE, 16, 1,
		       key->key, key->len, 1);
	print_hex_dump(KERN_ERR, " in: ", DUMP_PREFIX_NONE, 16, 1,
		       buf, crypt_len, 1);
	*/
	if (encrypt)
		ret = crypto_skcipher_encrypt(req);
	else
		ret = crypto_skcipher_decrypt(req);
	skcipher_request_zero(req);
	if (ret) {
		pr_err("%s %scrypt failed: %d\n", __func__,
		       encrypt ? "en" : "de", ret);
		goto out_sgt;
	}
	/*
	print_hex_dump(KERN_ERR, "out: ", DUMP_PREFIX_NONE, 16, 1,
		       buf, crypt_len, 1);
	*/

	if (encrypt) {
		*pout_len = crypt_len;
	} else {
		pad_byte = *(char *)(buf + in_len - 1);
		if (pad_byte > 0 && pad_byte <= AES_BLOCK_SIZE &&
		    in_len >= pad_byte) {
			*pout_len = in_len - pad_byte;
		} else {
			pr_err("%s got bad padding %d on in_len %d\n",
			       __func__, pad_byte, in_len);
			ret = -EPERM;
			goto out_sgt;
		}
	}

out_sgt:
	teardown_sgtable(&sgt);
	return ret;
}

int ceph_crypt(const struct ceph_crypto_key *key, bool encrypt,
	       void *buf, int buf_len, int in_len, int *pout_len)
{
	switch (key->type) {
	case CEPH_CRYPTO_NONE:
		*pout_len = in_len;
		return 0;
	case CEPH_CRYPTO_AES:
		return ceph_aes_crypt(key, encrypt, buf, buf_len, in_len,
				      pout_len);
	default:
		return -ENOTSUPP;
	}
}

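/*
 * Kernel keyring glue: these callbacks let a Ceph secret be handed to
 * the kernel as a key of type "ceph" (e.g. via add_key(2)), with the
 * payload parsed by ceph_crypto_key_decode() above.
 */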
static int ceph_key_preparse(struct key_preparsed_payload *prep)
{
	struct ceph_crypto_key *ckey;
	size_t datalen = prep->datalen;
	int ret;
	void *p;

	ret = -EINVAL;
	if (datalen <= 0 || datalen > 32767 || !prep->data)
		goto err;

	ret = -ENOMEM;
	ckey = kmalloc(sizeof(*ckey), GFP_KERNEL);
	if (!ckey)
		goto err;

	/* TODO ceph_crypto_key_decode should really take const input */
	p = (void *)prep->data;
	ret = ceph_crypto_key_decode(ckey, &p, (char*)prep->data+datalen);
	if (ret < 0)
		goto err_ckey;

	prep->payload.data[0] = ckey;
	prep->quotalen = datalen;
	return 0;

err_ckey:
	kfree(ckey);
err:
	return ret;
}

static void ceph_key_free_preparse(struct key_preparsed_payload *prep)
{
	struct ceph_crypto_key *ckey = prep->payload.data[0];
	ceph_crypto_key_destroy(ckey);
	kfree(ckey);
}

static void ceph_key_destroy(struct key *key)
{
	struct ceph_crypto_key *ckey = key->payload.data[0];

	ceph_crypto_key_destroy(ckey);
	kfree(ckey);
}

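/*
 * The "ceph" key type itself; ceph_crypto_init()/ceph_crypto_shutdown()
 * below register and unregister it with the keyring subsystem.
 */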
struct key_type key_type_ceph = {
	.name		= "ceph",
	.preparse	= ceph_key_preparse,
	.free_preparse	= ceph_key_free_preparse,
	.instantiate	= generic_key_instantiate,
	.destroy	= ceph_key_destroy,
};

int ceph_crypto_init(void) {
	return register_key_type(&key_type_ceph);
}

void ceph_crypto_shutdown(void) {
	unregister_key_type(&key_type_ceph);
}