arch/x86/crypto/aesni-intel_glue.c
/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in aesni-intel_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <asm/i387.h>
#include <asm/aes.h>

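/*
 * The generic mode templates may be built in, built as modules, or
 * absent entirely, so test both CONFIG_FOO and CONFIG_FOO_MODULE
 * before exposing the corresponding AES-NI backed algorithm.
 */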
#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

struct async_aes_ctx {
        struct cryptd_ablkcipher *cryptd_tfm;
};

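/*
 * The AES-NI assembly routines use SSE instructions and expect the
 * expanded key schedule to be 16-byte aligned. AES_BLOCK_MASK rounds a
 * length down to a whole number of 16-byte AES blocks.
 */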
#define AESNI_ALIGN     16
#define AES_BLOCK_MASK  (~(AES_BLOCK_SIZE-1))

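/* Prototypes for the routines implemented in assembly (aesni-intel_asm.S). */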
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
                             unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);

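/*
 * In interrupt context with CR0.TS clear, the interrupted task's FPU
 * state is live in the registers and kernel_fpu_begin() would clobber
 * it. When this returns true, callers fall back to the generic x86
 * software implementation instead of using the SSE-based routines.
 */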
static inline int kernel_fpu_using(void)
{
        if (in_interrupt() && !(read_cr0() & X86_CR0_TS))
                return 1;
        return 0;
}

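/*
 * The crypto API only guarantees crypto_tfm_ctx_alignment(); if that is
 * weaker than AESNI_ALIGN, round the raw context pointer up to a
 * 16-byte boundary inside the over-sized cra_ctxsize allocation.
 */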
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
        unsigned long addr = (unsigned long)raw_ctx;
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

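/*
 * Key expansion: use the AES-NI assembly routine when the FPU is
 * usable, otherwise fall back to the generic C key expansion.
 */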
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
                              const u8 *in_key, unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
        u32 *flags = &tfm->crt_flags;
        int err;

        if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        if (kernel_fpu_using())
                err = crypto_aes_expand_key(ctx, in_key, key_len);
        else {
                kernel_fpu_begin();
                err = aesni_set_key(ctx, in_key, key_len);
                kernel_fpu_end();
        }

        return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (kernel_fpu_using())
                crypto_aes_encrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_enc(ctx, dst, src);
                kernel_fpu_end();
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (kernel_fpu_using())
                crypto_aes_decrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_dec(ctx, dst, src);
                kernel_fpu_end();
        }
}

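/*
 * Synchronous single-block cipher. Priority 300 outranks the generic C
 * implementation, so plain "aes" lookups resolve here when AES-NI is
 * present. cra_ctxsize includes AESNI_ALIGN-1 bytes of slack so
 * aes_ctx() can align the key schedule.
 */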
static struct crypto_alg aesni_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-aesni",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(aesni_alg.cra_list),
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = aes_encrypt,
                        .cia_decrypt            = aes_decrypt
                }
        }
};

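/*
 * Internal cipher used behind the fpu() template paths below. The "__"
 * prefix and priority 0 keep it from being selected by ordinary "aes"
 * lookups; its callers are expected to have already entered a
 * kernel_fpu_begin() section, so no fallback check is done here.
 */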
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_dec(ctx, dst, src);
}

static struct crypto_alg __aesni_alg = {
        .cra_name               = "__aes-aesni",
        .cra_driver_name        = "__driver-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(__aesni_alg.cra_list),
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = __aes_encrypt,
                        .cia_decrypt            = __aes_decrypt
                }
        }
};

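/*
 * ECB/CBC helpers: blkcipher_walk maps the scatterlists chunk by chunk;
 * each pass handles the whole blocks in a chunk and reports any partial
 * remainder back through blkcipher_walk_done(). The FPU section spans
 * the whole walk so the SSE state is saved and restored only once.
 */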
static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static struct crypto_alg blk_ecb_alg = {
        .cra_name               = "__ecb-aes-aesni",
        .cra_driver_name        = "__driver-ecb-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
};

static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static struct crypto_alg blk_cbc_alg = {
        .cra_name               = "__cbc-aes-aesni",
        .cra_driver_name        = "__driver-cbc-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
};

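/*
 * Async wrappers. setkey propagates the request flags to the underlying
 * cryptd child transform and copies any result flags back to the caller.
 */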
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
                        unsigned int key_len)
{
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
        int err;

        crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
                                    & CRYPTO_TFM_REQ_MASK);
        err = crypto_ablkcipher_setkey(child, key, key_len);
        crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
                                    & CRYPTO_TFM_RES_MASK);
        return err;
}

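/*
 * If the FPU is not usable here, bounce the request to the cryptd
 * worker thread, which runs in process context; otherwise run the
 * underlying blkcipher synchronously on the caller's CPU.
 */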
static int ablk_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (kernel_fpu_using()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_encrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->encrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}

static int ablk_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (kernel_fpu_using()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_decrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->decrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}

static void ablk_exit(struct crypto_tfm *tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_free_ablkcipher(ctx->cryptd_tfm);
}

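/*
 * reqsize reserves room for a nested request, so the cryptd path above
 * can build its sub-request in the caller's request memory instead of
 * allocating.
 */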
static void ablk_init_common(struct crypto_tfm *tfm,
                             struct cryptd_ablkcipher *cryptd_tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->cryptd_tfm = cryptd_tfm;
        tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
                crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}

static int ablk_ecb_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_ecb_alg = {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
        .cra_init               = ablk_ecb_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_cbc_alg = {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
        .cra_init               = ablk_cbc_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};

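/*
 * The remaining modes reuse the generic ctr/lrw/pcbc/xts templates over
 * the internal "__driver-aes-aesni" cipher, wrapped in the fpu()
 * template, which brackets the whole operation with
 * kernel_fpu_begin()/kernel_fpu_end().
 */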
#ifdef HAS_CTR
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(ctr(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_ctr_alg = {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
        .cra_init               = ablk_ctr_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                        .geniv          = "chainiv",
                },
        },
};
#endif

#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_lrw_alg = {
        .cra_name               = "lrw(aes)",
        .cra_driver_name        = "lrw-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
        .cra_init               = ablk_lrw_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_pcbc_alg = {
        .cra_name               = "pcbc(aes)",
        .cra_driver_name        = "pcbc-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
        .cra_init               = ablk_pcbc_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif

#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_xts_alg = {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_xts_alg.cra_list),
        .cra_init               = ablk_xts_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                        .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif

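/*
 * Register the synchronous ciphers first, then the "__" driver-only
 * blkciphers, then the async wrappers; on failure, unregister in
 * reverse order via the goto ladder below.
 */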
static int __init aesni_init(void)
{
        int err;

        if (!cpu_has_aes) {
                printk(KERN_ERR "Intel AES-NI instructions are not detected.\n");
                return -ENODEV;
        }
        if ((err = crypto_register_alg(&aesni_alg)))
                goto aes_err;
        if ((err = crypto_register_alg(&__aesni_alg)))
                goto __aes_err;
        if ((err = crypto_register_alg(&blk_ecb_alg)))
                goto blk_ecb_err;
        if ((err = crypto_register_alg(&blk_cbc_alg)))
                goto blk_cbc_err;
        if ((err = crypto_register_alg(&ablk_ecb_alg)))
                goto ablk_ecb_err;
        if ((err = crypto_register_alg(&ablk_cbc_alg)))
                goto ablk_cbc_err;
#ifdef HAS_CTR
        if ((err = crypto_register_alg(&ablk_ctr_alg)))
                goto ablk_ctr_err;
#endif
#ifdef HAS_LRW
        if ((err = crypto_register_alg(&ablk_lrw_alg)))
                goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
        if ((err = crypto_register_alg(&ablk_pcbc_alg)))
                goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
        if ((err = crypto_register_alg(&ablk_xts_alg)))
                goto ablk_xts_err;
#endif

        return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
#endif
        crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
        crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
        crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
        crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
        crypto_unregister_alg(&__aesni_alg);
__aes_err:
        crypto_unregister_alg(&aesni_alg);
aes_err:
        return err;
}

static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
        crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_ctr_alg);
#endif
        crypto_unregister_alg(&ablk_cbc_alg);
        crypto_unregister_alg(&ablk_ecb_alg);
        crypto_unregister_alg(&blk_cbc_alg);
        crypto_unregister_alg(&blk_ecb_alg);
        crypto_unregister_alg(&__aesni_alg);
        crypto_unregister_alg(&aesni_alg);
}

module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, optimized with Intel AES-NI instructions");
MODULE_LICENSE("GPL");
MODULE_ALIAS("aes");