/* serpent_avx_glue.c */
  1. /*
  2. * Glue Code for AVX assembler versions of Serpent Cipher
  3. *
  4. * Copyright (C) 2012 Johannes Goetzfried
  5. * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
  6. *
  7. * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
  8. *
  9. * This program is free software; you can redistribute it and/or modify
  10. * it under the terms of the GNU General Public License as published by
  11. * the Free Software Foundation; either version 2 of the License, or
  12. * (at your option) any later version.
  13. *
  14. * This program is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17. * GNU General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU General Public License
  20. * along with this program; if not, write to the Free Software
  21. * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
  22. * USA
  23. *
  24. */
  25. #include <linux/module.h>
  26. #include <linux/types.h>
  27. #include <linux/crypto.h>
  28. #include <linux/err.h>
  29. #include <crypto/algapi.h>
  30. #include <crypto/internal/simd.h>
  31. #include <crypto/serpent.h>
  32. #include <crypto/xts.h>
  33. #include <asm/crypto/glue_helper.h>
  34. #include <asm/crypto/serpent-avx.h>
/*
 * 8-way parallel cipher functions, implemented in the companion AVX
 * assembler file.  Exported so that other serpent glue modules (e.g. the
 * AVX2 variant) can reuse them as fallbacks.
 */
asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);

asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);

asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);

/* CTR and XTS variants additionally take the IV/counter block. */
asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);

asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);

asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);
  54. void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
  55. {
  56. be128 ctrblk;
  57. le128_to_be128(&ctrblk, iv);
  58. le128_inc(iv);
  59. __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
  60. u128_xor(dst, src, (u128 *)&ctrblk);
  61. }
  62. EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);
/* XTS encryption, single-block fallback path used by the glue helper. */
void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_encrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_enc);
/* XTS decryption, single-block fallback path used by the glue helper. */
void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_decrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_dec);
/* skcipher setkey wrapper: expand the key into this tfm's serpent context. */
static int serpent_setkey_skcipher(struct crypto_skcipher *tfm,
				   const u8 *key, unsigned int keylen)
{
	return __serpent_setkey(crypto_skcipher_ctx(tfm), key, keylen);
}
  80. int xts_serpent_setkey(struct crypto_skcipher *tfm, const u8 *key,
  81. unsigned int keylen)
  82. {
  83. struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
  84. int err;
  85. err = xts_verify_key(tfm, key, keylen);
  86. if (err)
  87. return err;
  88. /* first half of xts-key is for crypt */
  89. err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
  90. if (err)
  91. return err;
  92. /* second half of xts-key is for tweak */
  93. return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
  94. }
  95. EXPORT_SYMBOL_GPL(xts_serpent_setkey);
/*
 * ECB encryption dispatch table: entries ordered widest-first (8-way AVX,
 * then one block at a time).  NOTE(review): .fpu_blocks_limit presumably
 * gates FPU use on having this many blocks available — glue_helper semantics.
 */
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};
/* CTR dispatch table: 8-way AVX, falling back to the one-block helper. */
static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
	} }
};
/* XTS encryption dispatch table: 8-way AVX, then single-block fallback. */
static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
	} }
};
/* ECB decryption dispatch table: 8-way AVX, then single-block fallback. */
static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};
/*
 * CBC decryption dispatch table.  Only decryption parallelizes in CBC;
 * encryption is handled separately with the serial one-block cipher.
 */
static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};
/* XTS decryption dispatch table: 8-way AVX, then single-block fallback. */
static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
	} }
};
/* skcipher .encrypt entry for ECB: dispatch via the encryption glue table. */
static int ecb_encrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&serpent_enc, req);
}
/* skcipher .decrypt entry for ECB: dispatch via the decryption glue table. */
static int ecb_decrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&serpent_dec, req);
}
/*
 * skcipher .encrypt entry for CBC.  CBC encryption is inherently serial
 * (each block chains on the previous ciphertext), so it always uses the
 * plain one-block cipher rather than a dispatch table.
 */
static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__serpent_encrypt),
					   req);
}
/* skcipher .decrypt entry for CBC: decryption can run 8 blocks in parallel. */
static int cbc_decrypt(struct skcipher_request *req)
{
	return glue_cbc_decrypt_req_128bit(&serpent_dec_cbc, req);
}
/* CTR is symmetric: the same routine serves both .encrypt and .decrypt. */
static int ctr_crypt(struct skcipher_request *req)
{
	return glue_ctr_req_128bit(&serpent_ctr, req);
}
/* skcipher .encrypt entry for XTS: tweak context first, then data context. */
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&serpent_enc_xts, req,
				   XTS_TWEAK_CAST(__serpent_encrypt),
				   &ctx->tweak_ctx, &ctx->crypt_ctx);
}
/*
 * skcipher .decrypt entry for XTS.  Note the tweak function is
 * __serpent_encrypt even on the decryption path: XTS always *encrypts*
 * the tweak block, for both directions.
 */
static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&serpent_dec_xts, req,
				   XTS_TWEAK_CAST(__serpent_encrypt),
				   &ctx->tweak_ctx, &ctx->crypt_ctx);
}
/*
 * Internal skcipher algorithms.  The "__" name prefix and the
 * CRYPTO_ALG_INTERNAL flag keep these from being instantiated directly;
 * serpent_init() registers SIMD wrappers around them via
 * simd_register_skciphers_compat().
 */
static struct skcipher_alg serpent_algs[] = {
	{
		.base.cra_name		= "__ecb(serpent)",
		.base.cra_driver_name	= "__ecb-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(serpent)",
		.base.cra_driver_name	= "__cbc-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "__ctr(serpent)",
		.base.cra_driver_name	= "__ctr-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		/* CTR acts as a stream cipher: blocksize 1. */
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.chunksize		= SERPENT_BLOCK_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}, {
		.base.cra_name		= "__xts(serpent)",
		.base.cra_driver_name	= "__xts-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_xts_ctx),
		.base.cra_module	= THIS_MODULE,
		/* XTS carries two keys (data + tweak), hence 2x. */
		.min_keysize		= 2 * SERPENT_MIN_KEY_SIZE,
		.max_keysize		= 2 * SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.setkey			= xts_serpent_setkey,
		.encrypt		= xts_encrypt,
		.decrypt		= xts_decrypt,
	},
};
/* Per-algorithm handles for the SIMD wrappers created at init time. */
static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];
/*
 * Module init: require SSE and YMM (AVX) xstate support from the CPU/OS,
 * then register the internal algorithms together with their SIMD wrappers.
 * Returns 0 on success, -ENODEV if the needed features are missing.
 */
static int __init serpent_init(void)
{
	const char *feature_name;

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(serpent_algs,
					      ARRAY_SIZE(serpent_algs),
					      serpent_simd_algs);
}
/* Module exit: unregister both the internal algorithms and SIMD wrappers. */
static void __exit serpent_exit(void)
{
	simd_unregister_skciphers(serpent_algs, ARRAY_SIZE(serpent_algs),
				  serpent_simd_algs);
}
/* Module hookup and metadata. */
module_init(serpent_init);
module_exit(serpent_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("serpent");