/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2005,2007
 * Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * Derived from "crypto/aes.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_MIN_KEY_SIZE	16
#define AES_MAX_KEY_SIZE	32

/* data block size for all key lengths */
#define AES_BLOCK_SIZE		16

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

/* bitmask of AES_KEYLEN_* flags, filled in by aes_init() according to
 * which AES key lengths the hardware actually supports */
static char keylen_flag = 0;

struct s390_aes_ctx {
	/*
	 * iv must stay directly in front of key: for the chaining (KMC)
	 * functions the hardware parameter block is the chaining value
	 * followed by the key, so &sctx->iv serves as the whole block.
	 */
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
};

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	/* reject key lengths the hardware cannot handle */
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			goto fail;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			goto fail;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			goto fail;
		break;
	default:
		goto fail;
	}

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
fail:
	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	aes_encrypt,
			.cia_decrypt		=	aes_decrypt,
		}
	}
};

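/*
 * Illustrative sketch only, not part of this driver: how a kernel-side
 * user would typically reach the cipher above through the generic crypto
 * API. The function name and the all-zero key/plaintext are invented for
 * the example; the crypto_cipher_* calls are the standard single-block
 * interface from <linux/crypto.h>.
 */
#if 0	/* example only */
static int example_aes_one_block(void)
{
	struct crypto_cipher *tfm;
	u8 key[16] = { 0 };		/* 128-bit key (hypothetical) */
	u8 in[AES_BLOCK_SIZE] = { 0 };
	u8 out[AES_BLOCK_SIZE];
	int ret;

	/* "aes" usually resolves to aes-s390 here due to cra_priority */
	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (!ret)
		crypto_cipher_encrypt_one(tfm, out, in);

	crypto_free_cipher(tfm);
	return ret;
}
#endif
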
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		/* hand any trailing partial block back to the walker */
		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	AES_MIN_KEY_SIZE,
			.max_keysize	=	AES_MAX_KEY_SIZE,
			.setkey		=	ecb_aes_set_key,
			.encrypt	=	ecb_aes_encrypt,
			.decrypt	=	ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	/* load the chaining value from the walk IV into the first
	 * AES_BLOCK_SIZE bytes of the parameter block */
	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));

	/* hand the updated chaining value back to the walk */
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize	=	AES_MIN_KEY_SIZE,
			.max_keysize	=	AES_MAX_KEY_SIZE,
			.ivsize		=	AES_BLOCK_SIZE,
			.setkey		=	cbc_aes_set_key,
			.encrypt	=	cbc_aes_encrypt,
			.decrypt	=	cbc_aes_decrypt,
		}
	}
};

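/*
 * Illustrative sketch only, not part of this driver: driving "cbc(aes)"
 * through the synchronous blkcipher interface of this kernel generation.
 * The function name, key, IV and buffer are invented for the example; a
 * real caller would also need <linux/scatterlist.h> for sg_init_one().
 */
#if 0	/* example only */
static int example_cbc_aes(void)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	u8 key[16] = { 0 };			/* hypothetical key */
	u8 iv[AES_BLOCK_SIZE] = { 0 };		/* hypothetical IV */
	u8 buf[AES_BLOCK_SIZE * 4] = { 0 };
	int ret;

	/* mask out CRYPTO_ALG_ASYNC to request a synchronous tfm */
	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	crypto_blkcipher_setkey(tfm, key, sizeof(key));
	crypto_blkcipher_set_iv(tfm, iv, sizeof(iv));

	desc.tfm = tfm;
	desc.flags = 0;
	sg_init_one(&sg, buf, sizeof(buf));
	/* in-place CBC encryption of four complete blocks */
	ret = crypto_blkcipher_encrypt(&desc, &sg, &sg, sizeof(buf));

	crypto_free_blkcipher(tfm);
	return ret;
}
#endif
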
static int __init aes_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		printk(KERN_INFO
		       "aes_s390: hardware acceleration only available for "
		       "128 bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_init);
module_exit(aes_fini);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");