/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005,2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"
#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

static char keylen_flag;
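/*
 * Per-tfm context: the hardware operates on the raw key (no software key
 * expansion), so only the key itself, the chaining value and the CPACF
 * function codes selected at setkey time are kept. The fallback tfm is
 * used when the machine lacks hardware support for the given key length.
 */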
struct s390_aes_ctx {
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};
/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback
 * is required, or a negative number in case the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}
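/*
 * Forward the key to the software fallback cipher, mirroring the
 * CRYPTO_TFM_REQ_* flags into the fallback tfm beforehand and copying any
 * CRYPTO_TFM_RES_* result flags back on failure, so key-policy behaviour
 * is indistinguishable from a native setkey.
 */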
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}
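/*
 * Single-block en-/decryption: dispatch to the KM (cipher message)
 * instruction for the key length selected at setkey time, or to the
 * software fallback if the hardware cannot handle this key size.
 */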
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}
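/*
 * cra_init: allocate the software fallback for the plain "aes" cipher.
 * Passing CRYPTO_ALG_NEED_FALLBACK in the mask ensures the allocation
 * cannot resolve to this driver itself.
 */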
static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}
static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}
static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_init		=	fallback_init_cip,
	.cra_exit		=	fallback_exit_cip,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	aes_encrypt,
			.cia_decrypt		=	aes_decrypt,
		}
	}
};
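/*
 * Blkcipher counterpart of setkey_fallback_cip(): propagate the request
 * flags to the fallback blkcipher, set the key there, and reflect any
 * result flags back into the parent tfm.
 */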
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
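/*
 * The fallback helpers temporarily swap desc->tfm to the fallback
 * blkcipher for the duration of the call and restore it afterwards, so
 * the caller's descriptor (including the IV it carries) is reused as-is.
 */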
static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
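/*
 * Walk the scatterlists and feed the mapped segments to the KM
 * instruction. Only multiples of AES_BLOCK_SIZE are passed to the
 * hardware; the remainder of a segment is handed back to
 * blkcipher_walk_done(), which carries it over into the next iteration.
 */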
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}
static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}
static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}
static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}
static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}
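/*
 * The blkcipher algorithms register under a distinct cra_driver_name and
 * with CRYPT_S390_COMPOSITE_PRIORITY, so the crypto API prefers them over
 * lower-priority generic implementations of "ecb(aes)" and "cbc(aes)".
 */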
static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.setkey			=	ecb_aes_set_key,
			.encrypt		=	ecb_aes_encrypt,
			.decrypt		=	ecb_aes_decrypt,
		}
	}
};
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
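/*
 * For KMC (cipher message with chaining) the parameter block consists of
 * the chaining value followed by the key, which is why iv[] and key[] are
 * laid out back-to-back in struct s390_aes_ctx and &sctx->iv is passed as
 * param. The updated chaining value is copied back to walk->iv when the
 * walk completes.
 */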
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}
static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}
static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}
static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_aes_set_key,
			.encrypt		=	cbc_aes_encrypt,
			.decrypt		=	cbc_aes_decrypt,
		}
	}
};
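/*
 * Module init: probe the CPACF facility for each AES key length, record
 * the result in keylen_flag, then register the cipher and the ECB/CBC
 * blkcipher algorithms, unwinding the registrations already done if a
 * later one fails.
 */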
static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}
static void __exit aes_s390_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}
module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");