/*
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005,2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *	      Sebastian Siewior <sebastian@breakpoint.cc> (SW fallback)
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"
#define AES_KEYLEN_128	1
#define AES_KEYLEN_192	2
#define AES_KEYLEN_256	4

static char keylen_flag = 0;
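/*
 * Per-tfm context. 'iv' is deliberately placed directly in front of 'key':
 * the CBC path hands &sctx->iv to the KMC instruction, whose parameter
 * block is the chaining value followed by the key. The fallback union
 * holds the software tfm used when the machine lacks the key length.
 */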
struct s390_aes_ctx {
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};
/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback
 * is required, or a negative number if the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}
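/*
 * Key the software fallback cipher, forwarding the request flags to the
 * fallback tfm and copying its result flags back into the caller's tfm.
 */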
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
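/*
 * setkey for the plain cipher: hardware-supported key lengths are copied
 * into the context, anything else is handed to the software fallback.
 */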
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}
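/*
 * Single-block operations use the KM (cipher message) instruction; the
 * function code picks the AES variant that matches the keyed length.
 */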
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}
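/*
 * Allocate the fallback when the tfm is instantiated. Putting
 * CRYPTO_ALG_NEED_FALLBACK into the mask excludes algorithms that
 * themselves need a fallback, i.e. this driver cannot end up as its own
 * fallback.
 */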
static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(sctx->fallback.cip)) {
		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
		return PTR_ERR(sctx->fallback.cip);
	}
	return 0;
}
static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}
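/*
 * Plain "aes" single-block cipher. CRYPT_S390_PRIORITY outranks
 * aes-generic, so a plain request resolves to this driver, e.g. (a
 * minimal usage sketch, error handling omitted):
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *	crypto_cipher_setkey(tfm, key, AES_MIN_KEY_SIZE);
 *	crypto_cipher_encrypt_one(tfm, dst, src);
 *	crypto_free_cipher(tfm);
 */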
static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_priority		= CRYPT_S390_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
		}
	}
};
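/* Flag round trip as in setkey_fallback_cip, for the blkcipher fallback. */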
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
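/*
 * The blkcipher fallback is invoked by temporarily swapping desc->tfm to
 * the software tfm and restoring it afterwards, keeping the caller's
 * descriptor intact.
 */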
static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
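/*
 * Walk the scatterlists and feed only whole multiples of AES_BLOCK_SIZE
 * to the hardware; any tail is returned to the walk and reappears in the
 * next iteration.
 */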
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}
static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}
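/* blkcipher counterpart of fallback_init_cip/fallback_exit_cip. */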
static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(sctx->fallback.blk)) {
		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
		return PTR_ERR(sctx->fallback.blk);
	}
	return 0;
}
static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}
static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ecb_aes_set_key,
			.encrypt	= ecb_aes_encrypt,
			.decrypt	= ecb_aes_decrypt,
		}
	}
};
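/* Like ecb_aes_set_key, but selects the chaining (KMC) function codes. */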
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}
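/*
 * CBC uses KMC (cipher message with chaining). KMC updates the chaining
 * value in the parameter block as it goes, so the IV is copied in from
 * walk->iv before the loop and written back afterwards.
 */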
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}
static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}
static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_aes_set_key,
			.encrypt	= cbc_aes_encrypt,
			.decrypt	= cbc_aes_decrypt,
		}
	}
};
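/*
 * Probe which AES function codes the machine provides and register the
 * algorithms; registration is unwound in reverse order on failure.
 */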
static int __init aes_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		printk(KERN_INFO
		       "aes_s390: hardware acceleration only available for "
		       "128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}
static void __exit aes_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}
module_init(aes_init);
module_exit(aes_fini);

MODULE_ALIAS("aes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");