/*
 * AMD Cryptographic Coprocessor (CCP) AES CMAC crypto API support
 *
 * Copyright (C) 2013 Advanced Micro Devices, Inc.
 *
 * Author: Tom Lendacky <thomas.lendacky@amd.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
13 #include <linux/module.h>
14 #include <linux/sched.h>
15 #include <linux/delay.h>
16 #include <linux/scatterlist.h>
17 #include <linux/crypto.h>
18 #include <crypto/algapi.h>
19 #include <crypto/aes.h>
20 #include <crypto/hash.h>
21 #include <crypto/internal/hash.h>
22 #include <crypto/scatterwalk.h>
24 #include "ccp-crypto.h"
27 static int ccp_aes_cmac_complete(struct crypto_async_request
*async_req
,
30 struct ahash_request
*req
= ahash_request_cast(async_req
);
31 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
32 struct ccp_aes_cmac_req_ctx
*rctx
= ahash_request_ctx(req
);
33 unsigned int digest_size
= crypto_ahash_digestsize(tfm
);
39 /* Save remaining data to buffer */
40 scatterwalk_map_and_copy(rctx
->buf
, rctx
->cmd
.u
.aes
.src
,
41 rctx
->hash_cnt
, rctx
->hash_rem
, 0);
42 rctx
->buf_count
= rctx
->hash_rem
;
46 memcpy(req
->result
, rctx
->iv
, digest_size
);
49 sg_free_table(&rctx
->data_sg
);
54 static int ccp_do_cmac_update(struct ahash_request
*req
, unsigned int nbytes
,
57 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
58 struct ccp_ctx
*ctx
= crypto_ahash_ctx(tfm
);
59 struct ccp_aes_cmac_req_ctx
*rctx
= ahash_request_ctx(req
);
60 struct scatterlist
*sg
, *cmac_key_sg
= NULL
;
61 unsigned int block_size
=
62 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm
));
63 unsigned int len
, need_pad
, sg_count
;
66 if (!ctx
->u
.aes
.key_len
)
72 if (!final
&& ((nbytes
+ rctx
->buf_count
) <= block_size
)) {
73 scatterwalk_map_and_copy(rctx
->buf
+ rctx
->buf_count
, req
->src
,
75 rctx
->buf_count
+= nbytes
;
80 len
= rctx
->buf_count
+ nbytes
;
83 rctx
->hash_cnt
= final
? len
: len
& ~(block_size
- 1);
84 rctx
->hash_rem
= final
? 0 : len
& (block_size
- 1);
85 if (!final
&& (rctx
->hash_cnt
== len
)) {
86 /* CCP can't do zero length final, so keep some data around */
87 rctx
->hash_cnt
-= block_size
;
88 rctx
->hash_rem
= block_size
;
91 if (final
&& (rctx
->null_msg
|| (len
& (block_size
- 1))))
96 sg_init_one(&rctx
->iv_sg
, rctx
->iv
, sizeof(rctx
->iv
));
98 /* Build the data scatterlist table - allocate enough entries for all
99 * possible data pieces (buffer, input data, padding)
101 sg_count
= (nbytes
) ? sg_nents(req
->src
) + 2 : 2;
102 ret
= sg_alloc_table(&rctx
->data_sg
, sg_count
, GFP_KERNEL
);
107 if (rctx
->buf_count
) {
108 sg_init_one(&rctx
->buf_sg
, rctx
->buf
, rctx
->buf_count
);
109 sg
= ccp_crypto_sg_table_add(&rctx
->data_sg
, &rctx
->buf_sg
);
113 sg
= ccp_crypto_sg_table_add(&rctx
->data_sg
, req
->src
);
116 int pad_length
= block_size
- (len
& (block_size
- 1));
118 rctx
->hash_cnt
+= pad_length
;
120 memset(rctx
->pad
, 0, sizeof(rctx
->pad
));
122 sg_init_one(&rctx
->pad_sg
, rctx
->pad
, pad_length
);
123 sg
= ccp_crypto_sg_table_add(&rctx
->data_sg
, &rctx
->pad_sg
);
128 /* Initialize the K1/K2 scatterlist */
130 cmac_key_sg
= (need_pad
) ? &ctx
->u
.aes
.k2_sg
133 memset(&rctx
->cmd
, 0, sizeof(rctx
->cmd
));
134 INIT_LIST_HEAD(&rctx
->cmd
.entry
);
135 rctx
->cmd
.engine
= CCP_ENGINE_AES
;
136 rctx
->cmd
.u
.aes
.type
= ctx
->u
.aes
.type
;
137 rctx
->cmd
.u
.aes
.mode
= ctx
->u
.aes
.mode
;
138 rctx
->cmd
.u
.aes
.action
= CCP_AES_ACTION_ENCRYPT
;
139 rctx
->cmd
.u
.aes
.key
= &ctx
->u
.aes
.key_sg
;
140 rctx
->cmd
.u
.aes
.key_len
= ctx
->u
.aes
.key_len
;
141 rctx
->cmd
.u
.aes
.iv
= &rctx
->iv_sg
;
142 rctx
->cmd
.u
.aes
.iv_len
= AES_BLOCK_SIZE
;
143 rctx
->cmd
.u
.aes
.src
= (sg
) ? rctx
->data_sg
.sgl
: NULL
;
144 rctx
->cmd
.u
.aes
.src_len
= rctx
->hash_cnt
;
145 rctx
->cmd
.u
.aes
.dst
= NULL
;
146 rctx
->cmd
.u
.aes
.cmac_key
= cmac_key_sg
;
147 rctx
->cmd
.u
.aes
.cmac_key_len
= ctx
->u
.aes
.kn_len
;
148 rctx
->cmd
.u
.aes
.cmac_final
= final
;
150 ret
= ccp_crypto_enqueue_request(&req
->base
, &rctx
->cmd
);
155 static int ccp_aes_cmac_init(struct ahash_request
*req
)
157 struct ccp_aes_cmac_req_ctx
*rctx
= ahash_request_ctx(req
);
159 memset(rctx
, 0, sizeof(*rctx
));
166 static int ccp_aes_cmac_update(struct ahash_request
*req
)
168 return ccp_do_cmac_update(req
, req
->nbytes
, 0);
/* Final with no new data: flush buffered data and produce the MAC. */
static int ccp_aes_cmac_final(struct ahash_request *req)
{
	return ccp_do_cmac_update(req, 0, 1);
}
176 static int ccp_aes_cmac_finup(struct ahash_request
*req
)
178 return ccp_do_cmac_update(req
, req
->nbytes
, 1);
181 static int ccp_aes_cmac_digest(struct ahash_request
*req
)
185 ret
= ccp_aes_cmac_init(req
);
189 return ccp_do_cmac_update(req
, req
->nbytes
, 1);
192 static int ccp_aes_cmac_setkey(struct crypto_ahash
*tfm
, const u8
*key
,
193 unsigned int key_len
)
195 struct ccp_ctx
*ctx
= crypto_tfm_ctx(crypto_ahash_tfm(tfm
));
196 struct ccp_crypto_ahash_alg
*alg
=
197 ccp_crypto_ahash_alg(crypto_ahash_tfm(tfm
));
198 u64 k0_hi
, k0_lo
, k1_hi
, k1_lo
, k2_hi
, k2_lo
;
199 u64 rb_hi
= 0x00, rb_lo
= 0x87;
204 case AES_KEYSIZE_128
:
205 ctx
->u
.aes
.type
= CCP_AES_TYPE_128
;
207 case AES_KEYSIZE_192
:
208 ctx
->u
.aes
.type
= CCP_AES_TYPE_192
;
210 case AES_KEYSIZE_256
:
211 ctx
->u
.aes
.type
= CCP_AES_TYPE_256
;
214 crypto_ahash_set_flags(tfm
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
217 ctx
->u
.aes
.mode
= alg
->mode
;
219 /* Set to zero until complete */
220 ctx
->u
.aes
.key_len
= 0;
222 /* Set the key for the AES cipher used to generate the keys */
223 ret
= crypto_cipher_setkey(ctx
->u
.aes
.tfm_cipher
, key
, key_len
);
227 /* Encrypt a block of zeroes - use key area in context */
228 memset(ctx
->u
.aes
.key
, 0, sizeof(ctx
->u
.aes
.key
));
229 crypto_cipher_encrypt_one(ctx
->u
.aes
.tfm_cipher
, ctx
->u
.aes
.key
,
232 /* Generate K1 and K2 */
233 k0_hi
= be64_to_cpu(*((__be64
*)ctx
->u
.aes
.key
));
234 k0_lo
= be64_to_cpu(*((__be64
*)ctx
->u
.aes
.key
+ 1));
236 k1_hi
= (k0_hi
<< 1) | (k0_lo
>> 63);
238 if (ctx
->u
.aes
.key
[0] & 0x80) {
242 gk
= (__be64
*)ctx
->u
.aes
.k1
;
243 *gk
= cpu_to_be64(k1_hi
);
245 *gk
= cpu_to_be64(k1_lo
);
247 k2_hi
= (k1_hi
<< 1) | (k1_lo
>> 63);
249 if (ctx
->u
.aes
.k1
[0] & 0x80) {
253 gk
= (__be64
*)ctx
->u
.aes
.k2
;
254 *gk
= cpu_to_be64(k2_hi
);
256 *gk
= cpu_to_be64(k2_lo
);
258 ctx
->u
.aes
.kn_len
= sizeof(ctx
->u
.aes
.k1
);
259 sg_init_one(&ctx
->u
.aes
.k1_sg
, ctx
->u
.aes
.k1
, sizeof(ctx
->u
.aes
.k1
));
260 sg_init_one(&ctx
->u
.aes
.k2_sg
, ctx
->u
.aes
.k2
, sizeof(ctx
->u
.aes
.k2
));
262 /* Save the supplied key */
263 memset(ctx
->u
.aes
.key
, 0, sizeof(ctx
->u
.aes
.key
));
264 memcpy(ctx
->u
.aes
.key
, key
, key_len
);
265 ctx
->u
.aes
.key_len
= key_len
;
266 sg_init_one(&ctx
->u
.aes
.key_sg
, ctx
->u
.aes
.key
, key_len
);
271 static int ccp_aes_cmac_cra_init(struct crypto_tfm
*tfm
)
273 struct ccp_ctx
*ctx
= crypto_tfm_ctx(tfm
);
274 struct crypto_ahash
*ahash
= __crypto_ahash_cast(tfm
);
275 struct crypto_cipher
*cipher_tfm
;
277 ctx
->complete
= ccp_aes_cmac_complete
;
278 ctx
->u
.aes
.key_len
= 0;
280 crypto_ahash_set_reqsize(ahash
, sizeof(struct ccp_aes_cmac_req_ctx
));
282 cipher_tfm
= crypto_alloc_cipher("aes", 0,
283 CRYPTO_ALG_ASYNC
| CRYPTO_ALG_NEED_FALLBACK
);
284 if (IS_ERR(cipher_tfm
)) {
285 pr_warn("could not load aes cipher driver\n");
286 return PTR_ERR(cipher_tfm
);
288 ctx
->u
.aes
.tfm_cipher
= cipher_tfm
;
293 static void ccp_aes_cmac_cra_exit(struct crypto_tfm
*tfm
)
295 struct ccp_ctx
*ctx
= crypto_tfm_ctx(tfm
);
297 if (ctx
->u
.aes
.tfm_cipher
)
298 crypto_free_cipher(ctx
->u
.aes
.tfm_cipher
);
299 ctx
->u
.aes
.tfm_cipher
= NULL
;
302 int ccp_register_aes_cmac_algs(struct list_head
*head
)
304 struct ccp_crypto_ahash_alg
*ccp_alg
;
305 struct ahash_alg
*alg
;
306 struct hash_alg_common
*halg
;
307 struct crypto_alg
*base
;
310 ccp_alg
= kzalloc(sizeof(*ccp_alg
), GFP_KERNEL
);
314 INIT_LIST_HEAD(&ccp_alg
->entry
);
315 ccp_alg
->mode
= CCP_AES_MODE_CMAC
;
318 alg
->init
= ccp_aes_cmac_init
;
319 alg
->update
= ccp_aes_cmac_update
;
320 alg
->final
= ccp_aes_cmac_final
;
321 alg
->finup
= ccp_aes_cmac_finup
;
322 alg
->digest
= ccp_aes_cmac_digest
;
323 alg
->setkey
= ccp_aes_cmac_setkey
;
326 halg
->digestsize
= AES_BLOCK_SIZE
;
329 snprintf(base
->cra_name
, CRYPTO_MAX_ALG_NAME
, "cmac(aes)");
330 snprintf(base
->cra_driver_name
, CRYPTO_MAX_ALG_NAME
, "cmac-aes-ccp");
331 base
->cra_flags
= CRYPTO_ALG_TYPE_AHASH
| CRYPTO_ALG_ASYNC
|
332 CRYPTO_ALG_KERN_DRIVER_ONLY
|
333 CRYPTO_ALG_NEED_FALLBACK
;
334 base
->cra_blocksize
= AES_BLOCK_SIZE
;
335 base
->cra_ctxsize
= sizeof(struct ccp_ctx
);
336 base
->cra_priority
= CCP_CRA_PRIORITY
;
337 base
->cra_type
= &crypto_ahash_type
;
338 base
->cra_init
= ccp_aes_cmac_cra_init
;
339 base
->cra_exit
= ccp_aes_cmac_cra_exit
;
340 base
->cra_module
= THIS_MODULE
;
342 ret
= crypto_register_ahash(alg
);
344 pr_err("%s ahash algorithm registration error (%d)\n",
345 base
->cra_name
, ret
);
350 list_add(&ccp_alg
->entry
, head
);