/*
 * AMD Cryptographic Coprocessor (CCP) AES crypto API support
 *
 * Copyright (C) 2013 Advanced Micro Devices, Inc.
 *
 * Author: Tom Lendacky <thomas.lendacky@amd.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
13 #include <linux/module.h>
14 #include <linux/sched.h>
15 #include <linux/delay.h>
16 #include <linux/scatterlist.h>
17 #include <linux/crypto.h>
18 #include <crypto/algapi.h>
19 #include <crypto/aes.h>
20 #include <crypto/ctr.h>
21 #include <crypto/scatterwalk.h>
23 #include "ccp-crypto.h"
26 static int ccp_aes_complete(struct crypto_async_request
*async_req
, int ret
)
28 struct ablkcipher_request
*req
= ablkcipher_request_cast(async_req
);
29 struct ccp_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
30 struct ccp_aes_req_ctx
*rctx
= ablkcipher_request_ctx(req
);
35 if (ctx
->u
.aes
.mode
!= CCP_AES_MODE_ECB
)
36 memcpy(req
->info
, rctx
->iv
, AES_BLOCK_SIZE
);
41 static int ccp_aes_setkey(struct crypto_ablkcipher
*tfm
, const u8
*key
,
44 struct ccp_ctx
*ctx
= crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm
));
45 struct ccp_crypto_ablkcipher_alg
*alg
=
46 ccp_crypto_ablkcipher_alg(crypto_ablkcipher_tfm(tfm
));
50 ctx
->u
.aes
.type
= CCP_AES_TYPE_128
;
53 ctx
->u
.aes
.type
= CCP_AES_TYPE_192
;
56 ctx
->u
.aes
.type
= CCP_AES_TYPE_256
;
59 crypto_ablkcipher_set_flags(tfm
, CRYPTO_TFM_RES_BAD_KEY_LEN
);
62 ctx
->u
.aes
.mode
= alg
->mode
;
63 ctx
->u
.aes
.key_len
= key_len
;
65 memcpy(ctx
->u
.aes
.key
, key
, key_len
);
66 sg_init_one(&ctx
->u
.aes
.key_sg
, ctx
->u
.aes
.key
, key_len
);
71 static int ccp_aes_crypt(struct ablkcipher_request
*req
, bool encrypt
)
73 struct ccp_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
74 struct ccp_aes_req_ctx
*rctx
= ablkcipher_request_ctx(req
);
75 struct scatterlist
*iv_sg
= NULL
;
76 unsigned int iv_len
= 0;
79 if (!ctx
->u
.aes
.key_len
) {
80 pr_err("AES key not set\n");
84 if (((ctx
->u
.aes
.mode
== CCP_AES_MODE_ECB
) ||
85 (ctx
->u
.aes
.mode
== CCP_AES_MODE_CBC
) ||
86 (ctx
->u
.aes
.mode
== CCP_AES_MODE_CFB
)) &&
87 (req
->nbytes
& (AES_BLOCK_SIZE
- 1))) {
88 pr_err("AES request size is not a multiple of the block size\n");
92 if (ctx
->u
.aes
.mode
!= CCP_AES_MODE_ECB
) {
94 pr_err("AES IV not supplied");
98 memcpy(rctx
->iv
, req
->info
, AES_BLOCK_SIZE
);
100 iv_len
= AES_BLOCK_SIZE
;
101 sg_init_one(iv_sg
, rctx
->iv
, iv_len
);
104 memset(&rctx
->cmd
, 0, sizeof(rctx
->cmd
));
105 INIT_LIST_HEAD(&rctx
->cmd
.entry
);
106 rctx
->cmd
.engine
= CCP_ENGINE_AES
;
107 rctx
->cmd
.u
.aes
.type
= ctx
->u
.aes
.type
;
108 rctx
->cmd
.u
.aes
.mode
= ctx
->u
.aes
.mode
;
109 rctx
->cmd
.u
.aes
.action
=
110 (encrypt
) ? CCP_AES_ACTION_ENCRYPT
: CCP_AES_ACTION_DECRYPT
;
111 rctx
->cmd
.u
.aes
.key
= &ctx
->u
.aes
.key_sg
;
112 rctx
->cmd
.u
.aes
.key_len
= ctx
->u
.aes
.key_len
;
113 rctx
->cmd
.u
.aes
.iv
= iv_sg
;
114 rctx
->cmd
.u
.aes
.iv_len
= iv_len
;
115 rctx
->cmd
.u
.aes
.src
= req
->src
;
116 rctx
->cmd
.u
.aes
.src_len
= req
->nbytes
;
117 rctx
->cmd
.u
.aes
.dst
= req
->dst
;
119 ret
= ccp_crypto_enqueue_request(&req
->base
, &rctx
->cmd
);
124 static int ccp_aes_encrypt(struct ablkcipher_request
*req
)
126 return ccp_aes_crypt(req
, true);
129 static int ccp_aes_decrypt(struct ablkcipher_request
*req
)
131 return ccp_aes_crypt(req
, false);
134 static int ccp_aes_cra_init(struct crypto_tfm
*tfm
)
136 struct ccp_ctx
*ctx
= crypto_tfm_ctx(tfm
);
138 ctx
->complete
= ccp_aes_complete
;
139 ctx
->u
.aes
.key_len
= 0;
141 tfm
->crt_ablkcipher
.reqsize
= sizeof(struct ccp_aes_req_ctx
);
/* Per-tfm teardown: nothing to release. */
static void ccp_aes_cra_exit(struct crypto_tfm *tfm)
{
}
150 static int ccp_aes_rfc3686_complete(struct crypto_async_request
*async_req
,
153 struct ablkcipher_request
*req
= ablkcipher_request_cast(async_req
);
154 struct ccp_aes_req_ctx
*rctx
= ablkcipher_request_ctx(req
);
156 /* Restore the original pointer */
157 req
->info
= rctx
->rfc3686_info
;
159 return ccp_aes_complete(async_req
, ret
);
162 static int ccp_aes_rfc3686_setkey(struct crypto_ablkcipher
*tfm
, const u8
*key
,
163 unsigned int key_len
)
165 struct ccp_ctx
*ctx
= crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm
));
167 if (key_len
< CTR_RFC3686_NONCE_SIZE
)
170 key_len
-= CTR_RFC3686_NONCE_SIZE
;
171 memcpy(ctx
->u
.aes
.nonce
, key
+ key_len
, CTR_RFC3686_NONCE_SIZE
);
173 return ccp_aes_setkey(tfm
, key
, key_len
);
176 static int ccp_aes_rfc3686_crypt(struct ablkcipher_request
*req
, bool encrypt
)
178 struct ccp_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
179 struct ccp_aes_req_ctx
*rctx
= ablkcipher_request_ctx(req
);
182 /* Initialize the CTR block */
183 iv
= rctx
->rfc3686_iv
;
184 memcpy(iv
, ctx
->u
.aes
.nonce
, CTR_RFC3686_NONCE_SIZE
);
186 iv
+= CTR_RFC3686_NONCE_SIZE
;
187 memcpy(iv
, req
->info
, CTR_RFC3686_IV_SIZE
);
189 iv
+= CTR_RFC3686_IV_SIZE
;
190 *(__be32
*)iv
= cpu_to_be32(1);
192 /* Point to the new IV */
193 rctx
->rfc3686_info
= req
->info
;
194 req
->info
= rctx
->rfc3686_iv
;
196 return ccp_aes_crypt(req
, encrypt
);
199 static int ccp_aes_rfc3686_encrypt(struct ablkcipher_request
*req
)
201 return ccp_aes_rfc3686_crypt(req
, true);
204 static int ccp_aes_rfc3686_decrypt(struct ablkcipher_request
*req
)
206 return ccp_aes_rfc3686_crypt(req
, false);
209 static int ccp_aes_rfc3686_cra_init(struct crypto_tfm
*tfm
)
211 struct ccp_ctx
*ctx
= crypto_tfm_ctx(tfm
);
213 ctx
->complete
= ccp_aes_rfc3686_complete
;
214 ctx
->u
.aes
.key_len
= 0;
216 tfm
->crt_ablkcipher
.reqsize
= sizeof(struct ccp_aes_req_ctx
);
/* Per-tfm teardown for the RFC 3686 variant: nothing to release. */
static void ccp_aes_rfc3686_cra_exit(struct crypto_tfm *tfm)
{
}
225 static struct crypto_alg ccp_aes_defaults
= {
226 .cra_flags
= CRYPTO_ALG_TYPE_ABLKCIPHER
|
228 CRYPTO_ALG_KERN_DRIVER_ONLY
|
229 CRYPTO_ALG_NEED_FALLBACK
,
230 .cra_blocksize
= AES_BLOCK_SIZE
,
231 .cra_ctxsize
= sizeof(struct ccp_ctx
),
232 .cra_priority
= CCP_CRA_PRIORITY
,
233 .cra_type
= &crypto_ablkcipher_type
,
234 .cra_init
= ccp_aes_cra_init
,
235 .cra_exit
= ccp_aes_cra_exit
,
236 .cra_module
= THIS_MODULE
,
238 .setkey
= ccp_aes_setkey
,
239 .encrypt
= ccp_aes_encrypt
,
240 .decrypt
= ccp_aes_decrypt
,
241 .min_keysize
= AES_MIN_KEY_SIZE
,
242 .max_keysize
= AES_MAX_KEY_SIZE
,
246 static struct crypto_alg ccp_aes_rfc3686_defaults
= {
247 .cra_flags
= CRYPTO_ALG_TYPE_ABLKCIPHER
|
249 CRYPTO_ALG_KERN_DRIVER_ONLY
|
250 CRYPTO_ALG_NEED_FALLBACK
,
251 .cra_blocksize
= CTR_RFC3686_BLOCK_SIZE
,
252 .cra_ctxsize
= sizeof(struct ccp_ctx
),
253 .cra_priority
= CCP_CRA_PRIORITY
,
254 .cra_type
= &crypto_ablkcipher_type
,
255 .cra_init
= ccp_aes_rfc3686_cra_init
,
256 .cra_exit
= ccp_aes_rfc3686_cra_exit
,
257 .cra_module
= THIS_MODULE
,
259 .setkey
= ccp_aes_rfc3686_setkey
,
260 .encrypt
= ccp_aes_rfc3686_encrypt
,
261 .decrypt
= ccp_aes_rfc3686_decrypt
,
262 .min_keysize
= AES_MIN_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
263 .max_keysize
= AES_MAX_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
268 enum ccp_aes_mode mode
;
270 const char *driver_name
;
271 unsigned int blocksize
;
273 struct crypto_alg
*alg_defaults
;
276 static struct ccp_aes_def aes_algs
[] = {
278 .mode
= CCP_AES_MODE_ECB
,
280 .driver_name
= "ecb-aes-ccp",
281 .blocksize
= AES_BLOCK_SIZE
,
283 .alg_defaults
= &ccp_aes_defaults
,
286 .mode
= CCP_AES_MODE_CBC
,
288 .driver_name
= "cbc-aes-ccp",
289 .blocksize
= AES_BLOCK_SIZE
,
290 .ivsize
= AES_BLOCK_SIZE
,
291 .alg_defaults
= &ccp_aes_defaults
,
294 .mode
= CCP_AES_MODE_CFB
,
296 .driver_name
= "cfb-aes-ccp",
297 .blocksize
= AES_BLOCK_SIZE
,
298 .ivsize
= AES_BLOCK_SIZE
,
299 .alg_defaults
= &ccp_aes_defaults
,
302 .mode
= CCP_AES_MODE_OFB
,
304 .driver_name
= "ofb-aes-ccp",
306 .ivsize
= AES_BLOCK_SIZE
,
307 .alg_defaults
= &ccp_aes_defaults
,
310 .mode
= CCP_AES_MODE_CTR
,
312 .driver_name
= "ctr-aes-ccp",
314 .ivsize
= AES_BLOCK_SIZE
,
315 .alg_defaults
= &ccp_aes_defaults
,
318 .mode
= CCP_AES_MODE_CTR
,
319 .name
= "rfc3686(ctr(aes))",
320 .driver_name
= "rfc3686-ctr-aes-ccp",
322 .ivsize
= CTR_RFC3686_IV_SIZE
,
323 .alg_defaults
= &ccp_aes_rfc3686_defaults
,
327 static int ccp_register_aes_alg(struct list_head
*head
,
328 const struct ccp_aes_def
*def
)
330 struct ccp_crypto_ablkcipher_alg
*ccp_alg
;
331 struct crypto_alg
*alg
;
334 ccp_alg
= kzalloc(sizeof(*ccp_alg
), GFP_KERNEL
);
338 INIT_LIST_HEAD(&ccp_alg
->entry
);
340 ccp_alg
->mode
= def
->mode
;
342 /* Copy the defaults and override as necessary */
344 memcpy(alg
, def
->alg_defaults
, sizeof(*alg
));
345 snprintf(alg
->cra_name
, CRYPTO_MAX_ALG_NAME
, "%s", def
->name
);
346 snprintf(alg
->cra_driver_name
, CRYPTO_MAX_ALG_NAME
, "%s",
348 alg
->cra_blocksize
= def
->blocksize
;
349 alg
->cra_ablkcipher
.ivsize
= def
->ivsize
;
351 ret
= crypto_register_alg(alg
);
353 pr_err("%s ablkcipher algorithm registration error (%d)\n",
359 list_add(&ccp_alg
->entry
, head
);
364 int ccp_register_aes_algs(struct list_head
*head
)
368 for (i
= 0; i
< ARRAY_SIZE(aes_algs
); i
++) {
369 ret
= ccp_register_aes_alg(head
, &aes_algs
[i
]);