/*
 * TEA, XTEA, and XETA crypto algorithms
 *
 * The TEA and Xtended TEA algorithms were developed by David Wheeler
 * and Roger Needham at the Computer Laboratory of Cambridge University.
 *
 * Due to the order of evaluation in XTEA many people have incorrectly
 * implemented it. XETA (XTEA in the wrong order), exists for
 * compatibility with these implementations.
 *
 * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/init.h>
#include <linux/module.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <asm/scatterlist.h>
28 #define TEA_KEY_SIZE 16
29 #define TEA_BLOCK_SIZE 8
31 #define TEA_DELTA 0x9e3779b9
33 #define XTEA_KEY_SIZE 16
34 #define XTEA_BLOCK_SIZE 8
35 #define XTEA_ROUNDS 32
36 #define XTEA_DELTA 0x9e3779b9
38 #define u32_in(x) le32_to_cpu(*(const __le32 *)(x))
39 #define u32_out(to, from) (*(__le32 *)(to) = cpu_to_le32(from))
49 static int tea_setkey(void *ctx_arg, const u8 *in_key,
50 unsigned int key_len, u32 *flags)
53 struct tea_ctx *ctx = ctx_arg;
57 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
61 ctx->KEY[0] = u32_in (in_key);
62 ctx->KEY[1] = u32_in (in_key + 4);
63 ctx->KEY[2] = u32_in (in_key + 8);
64 ctx->KEY[3] = u32_in (in_key + 12);
70 static void tea_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
75 struct tea_ctx *ctx = ctx_arg;
89 y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
90 z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
97 static void tea_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
102 struct tea_ctx *ctx = ctx_arg;
105 z = u32_in (src + 4);
112 sum = TEA_DELTA << 5;
117 z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
118 y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
123 u32_out (dst + 4, z);
127 static int xtea_setkey(void *ctx_arg, const u8 *in_key,
128 unsigned int key_len, u32 *flags)
131 struct xtea_ctx *ctx = ctx_arg;
135 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
139 ctx->KEY[0] = u32_in (in_key);
140 ctx->KEY[1] = u32_in (in_key + 4);
141 ctx->KEY[2] = u32_in (in_key + 8);
142 ctx->KEY[3] = u32_in (in_key + 12);
148 static void xtea_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
152 u32 limit = XTEA_DELTA * XTEA_ROUNDS;
154 struct xtea_ctx *ctx = ctx_arg;
157 z = u32_in (src + 4);
159 while (sum != limit) {
160 y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum&3]);
162 z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 &3]);
166 u32_out (dst + 4, z);
170 static void xtea_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
174 struct tea_ctx *ctx = ctx_arg;
177 z = u32_in (src + 4);
179 sum = XTEA_DELTA * XTEA_ROUNDS;
182 z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 & 3]);
184 y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
188 u32_out (dst + 4, z);
193 static void xeta_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
197 u32 limit = XTEA_DELTA * XTEA_ROUNDS;
199 struct xtea_ctx *ctx = ctx_arg;
202 z = u32_in (src + 4);
204 while (sum != limit) {
205 y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum&3];
207 z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 &3];
211 u32_out (dst + 4, z);
215 static void xeta_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
219 struct tea_ctx *ctx = ctx_arg;
222 z = u32_in (src + 4);
224 sum = XTEA_DELTA * XTEA_ROUNDS;
227 z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 & 3];
229 y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
233 u32_out (dst + 4, z);
237 static struct crypto_alg tea_alg = {
239 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
240 .cra_blocksize = TEA_BLOCK_SIZE,
241 .cra_ctxsize = sizeof (struct tea_ctx),
242 .cra_module = THIS_MODULE,
243 .cra_list = LIST_HEAD_INIT(tea_alg.cra_list),
244 .cra_u = { .cipher = {
245 .cia_min_keysize = TEA_KEY_SIZE,
246 .cia_max_keysize = TEA_KEY_SIZE,
247 .cia_setkey = tea_setkey,
248 .cia_encrypt = tea_encrypt,
249 .cia_decrypt = tea_decrypt } }
252 static struct crypto_alg xtea_alg = {
254 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
255 .cra_blocksize = XTEA_BLOCK_SIZE,
256 .cra_ctxsize = sizeof (struct xtea_ctx),
257 .cra_module = THIS_MODULE,
258 .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list),
259 .cra_u = { .cipher = {
260 .cia_min_keysize = XTEA_KEY_SIZE,
261 .cia_max_keysize = XTEA_KEY_SIZE,
262 .cia_setkey = xtea_setkey,
263 .cia_encrypt = xtea_encrypt,
264 .cia_decrypt = xtea_decrypt } }
267 static struct crypto_alg xeta_alg = {
269 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
270 .cra_blocksize = XTEA_BLOCK_SIZE,
271 .cra_ctxsize = sizeof (struct xtea_ctx),
272 .cra_module = THIS_MODULE,
273 .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list),
274 .cra_u = { .cipher = {
275 .cia_min_keysize = XTEA_KEY_SIZE,
276 .cia_max_keysize = XTEA_KEY_SIZE,
277 .cia_setkey = xtea_setkey,
278 .cia_encrypt = xeta_encrypt,
279 .cia_decrypt = xeta_decrypt } }
282 static int __init init(void)
286 ret = crypto_register_alg(&tea_alg);
290 ret = crypto_register_alg(&xtea_alg);
292 crypto_unregister_alg(&tea_alg);
296 ret = crypto_register_alg(&xeta_alg);
298 crypto_unregister_alg(&tea_alg);
299 crypto_unregister_alg(&xtea_alg);
307 static void __exit fini(void)
309 crypto_unregister_alg(&tea_alg);
310 crypto_unregister_alg(&xtea_alg);
311 crypto_unregister_alg(&xeta_alg);
314 MODULE_ALIAS("xtea");
315 MODULE_ALIAS("xeta");
320 MODULE_LICENSE("GPL");
321 MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");