/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"
/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
        "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
        "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
        "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
        "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
        "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
        "lzo", "cts", "zlib", NULL
};
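/*
 * Time 'enc' (encrypt/decrypt) operations on the given scatterlist for
 * roughly 'sec' seconds using the coarse jiffies clock, then report the
 * number of completed operations and the total bytes processed.
 */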
static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
                               struct scatterlist *sg, int blen, int sec)
{
        unsigned long start, end;

        for (start = jiffies, end = start + sec * HZ, bcount = 0;
             time_before(jiffies, end); bcount++) {
                if (enc)
                        ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
                else
                        ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
        }

        printk("%d operations in %d seconds (%ld bytes)\n",
               bcount, sec, (long)bcount * blen);
        return 0;
}
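/*
 * Cycle-accurate variant: after a short warm-up, time eight operations
 * with get_cycles() and report the rounded average cost per operation.
 */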
static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
                              struct scatterlist *sg, int blen)
{
        unsigned long cycles = 0;

        /* Warm-up run. */
        for (i = 0; i < 4; i++) {
                if (enc)
                        ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
                else
                        ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
        }

        /* The real thing. */
        for (i = 0; i < 8; i++) {
                start = get_cycles();
                if (enc)
                        ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
                else
                        ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
                end = get_cycles();

                cycles += end - start;
        }

        printk("1 operation in %lu cycles (%d bytes)\n",
               (cycles + 4) / 8, blen);
static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
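/*
 * Block cipher speed test: for each key size and each block size above,
 * set up the key, IV and a scatterlist over tvmem, then time the cipher
 * either for 'sec' seconds (jiffies) or in CPU cycles when sec == 0.
 */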
static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
                              struct cipher_speed_template *template,
                              unsigned int tcount, u8 *keysize)
{
        unsigned int ret, i, j, iv_len;
        const char *key;
        char iv[128];
        struct crypto_blkcipher *tfm;
        struct blkcipher_desc desc;

        printk("\ntesting speed of %s %s\n", algo, e);

        tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);

        if (IS_ERR(tfm)) {
                printk("failed to load transform for %s: %ld\n", algo,
                       PTR_ERR(tfm));
                return;
        }

        b_size = block_sizes;

        struct scatterlist sg[TVMEMSIZE];

        if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
                printk("template (%u) too big for "
                       "tvmem (%lu)\n", *keysize + *b_size,
                       TVMEMSIZE * PAGE_SIZE);
        }

        printk("test %u (%d bit key, %d byte blocks): ", i,
               *keysize * 8, *b_size);

        memset(tvmem[0], 0xff, PAGE_SIZE);

        /* set key, plain text and IV */
        for (j = 0; j < tcount; j++) {
                if (template[j].klen == *keysize) {
                        key = template[j].key;
                        break;
                }
        }

        ret = crypto_blkcipher_setkey(tfm, key, *keysize);
        if (ret) {
                printk("setkey() failed flags=%x\n",
                       crypto_blkcipher_get_flags(tfm));
        }

        sg_init_table(sg, TVMEMSIZE);
        sg_set_buf(sg, tvmem[0] + *keysize,
                   PAGE_SIZE - *keysize);
        for (j = 1; j < TVMEMSIZE; j++) {
                sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
                memset(tvmem[j], 0xff, PAGE_SIZE);
        }

        iv_len = crypto_blkcipher_ivsize(tfm);
        if (iv_len) {
                memset(&iv, 0xff, iv_len);
                crypto_blkcipher_set_iv(tfm, iv, iv_len);
        }

        if (sec)
                ret = test_cipher_jiffies(&desc, enc, sg,
                                          *b_size, sec);
        else
                ret = test_cipher_cycles(&desc, enc, sg,
                                         *b_size);

        if (ret)
                printk("%s() failed flags=%x\n", e, desc.flags);

        crypto_free_blkcipher(tfm);
}
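/*
 * Hash throughput helper: run crypto_hash_digest() over the scatterlist
 * for 'sec' seconds and report operations and bytes per second.
 */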
static int test_hash_jiffies_digest(struct hash_desc *desc,
                                    struct scatterlist *sg, int blen,
                                    char *out, int sec)
{
        unsigned long start, end;

        for (start = jiffies, end = start + sec * HZ, bcount = 0;
             time_before(jiffies, end); bcount++) {
                ret = crypto_hash_digest(desc, sg, blen, out);
        }

        printk("%6u opers/sec, %9lu bytes/sec\n",
               bcount / sec, ((long)bcount * blen) / sec);
        return 0;
}
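/*
 * As above, but feed the data in 'plen'-byte updates; when plen == blen
 * the one-shot digest path is used instead.
 */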
static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
                             int blen, int plen, char *out, int sec)
{
        unsigned long start, end;

        if (plen == blen)
                return test_hash_jiffies_digest(desc, sg, blen, out, sec);

        for (start = jiffies, end = start + sec * HZ, bcount = 0;
             time_before(jiffies, end); bcount++) {
                ret = crypto_hash_init(desc);
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = crypto_hash_update(desc, sg, plen);
                }
                /* we assume there is enough space in 'out' for the result */
                ret = crypto_hash_final(desc, out);
        }

        printk("%6u opers/sec, %9lu bytes/sec\n",
               bcount / sec, ((long)bcount * blen) / sec);
        return 0;
}
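/*
 * Cycle-count version of the one-shot digest measurement: warm up, then
 * average eight crypto_hash_digest() calls.
 */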
static int test_hash_cycles_digest(struct hash_desc *desc,
                                   struct scatterlist *sg, int blen, char *out)
{
        unsigned long cycles = 0;

        /* Warm-up run. */
        for (i = 0; i < 4; i++) {
                ret = crypto_hash_digest(desc, sg, blen, out);
        }

        /* The real thing. */
        for (i = 0; i < 8; i++) {
                start = get_cycles();
                ret = crypto_hash_digest(desc, sg, blen, out);
                end = get_cycles();

                cycles += end - start;
        }

        printk("%6lu cycles/operation, %4lu cycles/byte\n",
               cycles / 8, cycles / (8 * blen));
        return 0;
}
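/*
 * Cycle-count version of the chunked (init/update/final) measurement.
 */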
static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
                            int blen, int plen, char *out)
{
        unsigned long cycles = 0;

        if (plen == blen)
                return test_hash_cycles_digest(desc, sg, blen, out);

        /* Warm-up run. */
        for (i = 0; i < 4; i++) {
                ret = crypto_hash_init(desc);
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = crypto_hash_update(desc, sg, plen);
                }
                ret = crypto_hash_final(desc, out);
        }

        /* The real thing. */
        for (i = 0; i < 8; i++) {
                start = get_cycles();

                ret = crypto_hash_init(desc);
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = crypto_hash_update(desc, sg, plen);
                }
                ret = crypto_hash_final(desc, out);

                end = get_cycles();

                cycles += end - start;
        }

        printk("%6lu cycles/operation, %4lu cycles/byte\n",
               cycles / 8, cycles / (8 * blen));
        return 0;
}
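/*
 * Fill the scatterlist with TVMEMSIZE pages of 0xff test data.
 */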
static void test_hash_sg_init(struct scatterlist *sg)
{
        int i;

        sg_init_table(sg, TVMEMSIZE);
        for (i = 0; i < TVMEMSIZE; i++) {
                sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
                memset(tvmem[i], 0xff, PAGE_SIZE);
        }
}
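/*
 * Synchronous hash speed test: for every (blen, plen) pair in the speed
 * template, hash blen bytes in plen-sized updates and report throughput.
 */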
static void test_hash_speed(const char *algo, unsigned int sec,
                            struct hash_speed *speed)
{
        struct scatterlist sg[TVMEMSIZE];
        struct crypto_hash *tfm;
        struct hash_desc desc;
        static char output[1024];

        printk(KERN_INFO "\ntesting speed of %s\n", algo);

        tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);

        if (IS_ERR(tfm)) {
                printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
                       PTR_ERR(tfm));
                return;
        }

        if (crypto_hash_digestsize(tfm) > sizeof(output)) {
                printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
                       crypto_hash_digestsize(tfm), sizeof(output));
        }

        test_hash_sg_init(sg);
        for (i = 0; speed[i].blen != 0; i++) {
                if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
                        printk(KERN_ERR
                               "template (%u) too big for tvmem (%lu)\n",
                               speed[i].blen, TVMEMSIZE * PAGE_SIZE);
                }

                crypto_hash_setkey(tfm, tvmem[0], speed[i].klen);

                printk(KERN_INFO "test%3u "
                       "(%5u byte blocks,%5u bytes per update,%4u updates): ",
                       i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

                if (sec)
                        ret = test_hash_jiffies(&desc, sg, speed[i].blen,
                                                speed[i].plen, output, sec);
                else
                        ret = test_hash_cycles(&desc, sg, speed[i].blen,
                                               speed[i].plen, output);

                if (ret) {
                        printk(KERN_ERR "hashing failed ret=%d\n", ret);
                        break;
                }
        }

        crypto_free_hash(tfm);
}
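/*
 * Completion plumbing for the async (ahash) tests: the request callback
 * wakes the waiting thread once the operation has really finished, i.e.
 * not for -EINPROGRESS notifications.
 */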
struct tcrypt_result {
        struct completion completion;
};

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
        struct tcrypt_result *res = req->data;

        if (err == -EINPROGRESS)
                return;

        complete(&res->completion);
}
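/*
 * If the async call returned -EINPROGRESS/-EBUSY, sleep until the
 * completion callback fires, then reset the completion for reuse.
 */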
static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
        if (ret == -EINPROGRESS || ret == -EBUSY) {
                struct tcrypt_result *tr = req->base.data;

                ret = wait_for_completion_interruptible(&tr->completion);
                INIT_COMPLETION(tr->completion);
        }
        return ret;
}
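/*
 * The following helpers mirror the synchronous hash measurements above,
 * but drive the asynchronous ahash API via do_one_ahash_op().
 */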
static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
                                     char *out, int sec)
{
        unsigned long start, end;

        for (start = jiffies, end = start + sec * HZ, bcount = 0;
             time_before(jiffies, end); bcount++) {
                ret = do_one_ahash_op(req, crypto_ahash_digest(req));
        }

        printk("%6u opers/sec, %9lu bytes/sec\n",
               bcount / sec, ((long)bcount * blen) / sec);
        return 0;
}
static int test_ahash_jiffies(struct ahash_request *req, int blen,
                              int plen, char *out, int sec)
{
        unsigned long start, end;

        if (plen == blen)
                return test_ahash_jiffies_digest(req, blen, out, sec);

        for (start = jiffies, end = start + sec * HZ, bcount = 0;
             time_before(jiffies, end); bcount++) {
                ret = crypto_ahash_init(req);
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = do_one_ahash_op(req, crypto_ahash_update(req));
                }
                /* we assume there is enough space in 'out' for the result */
                ret = do_one_ahash_op(req, crypto_ahash_final(req));
        }

        pr_cont("%6u opers/sec, %9lu bytes/sec\n",
                bcount / sec, ((long)bcount * blen) / sec);
        return 0;
}
static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
                                    char *out)
{
        unsigned long cycles = 0;

        /* Warm-up run. */
        for (i = 0; i < 4; i++) {
                ret = do_one_ahash_op(req, crypto_ahash_digest(req));
        }

        /* The real thing. */
        for (i = 0; i < 8; i++) {
                start = get_cycles();
                ret = do_one_ahash_op(req, crypto_ahash_digest(req));
                end = get_cycles();

                cycles += end - start;
        }

        pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
                cycles / 8, cycles / (8 * blen));
        return 0;
}
static int test_ahash_cycles(struct ahash_request *req, int blen,
                             int plen, char *out)
{
        unsigned long cycles = 0;

        if (plen == blen)
                return test_ahash_cycles_digest(req, blen, out);

        /* Warm-up run. */
        for (i = 0; i < 4; i++) {
                ret = crypto_ahash_init(req);
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = do_one_ahash_op(req, crypto_ahash_update(req));
                }
                ret = do_one_ahash_op(req, crypto_ahash_final(req));
        }

        /* The real thing. */
        for (i = 0; i < 8; i++) {
                start = get_cycles();

                ret = crypto_ahash_init(req);
                for (pcount = 0; pcount < blen; pcount += plen) {
                        ret = do_one_ahash_op(req, crypto_ahash_update(req));
                }
                ret = do_one_ahash_op(req, crypto_ahash_final(req));

                end = get_cycles();

                cycles += end - start;
        }

        pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
                cycles / 8, cycles / (8 * blen));
        return 0;
}
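/*
 * Asynchronous hash speed test: allocate an ahash transform and request,
 * register the completion callback, then run the speed template.
 */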
static void test_ahash_speed(const char *algo, unsigned int sec,
                             struct hash_speed *speed)
{
        struct scatterlist sg[TVMEMSIZE];
        struct tcrypt_result tresult;
        struct ahash_request *req;
        struct crypto_ahash *tfm;
        static char output[1024];

        printk(KERN_INFO "\ntesting speed of async %s\n", algo);

        tfm = crypto_alloc_ahash(algo, 0, 0);
        if (IS_ERR(tfm)) {
                pr_err("failed to load transform for %s: %ld\n",
                       algo, PTR_ERR(tfm));
                return;
        }

        if (crypto_ahash_digestsize(tfm) > sizeof(output)) {
                pr_err("digestsize(%u) > outputbuffer(%zu)\n",
                       crypto_ahash_digestsize(tfm), sizeof(output));
        }

        test_hash_sg_init(sg);
        req = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                pr_err("ahash request allocation failure\n");
        }

        init_completion(&tresult.completion);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   tcrypt_complete, &tresult);

        for (i = 0; speed[i].blen != 0; i++) {
                if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
                        pr_err("template (%u) too big for tvmem (%lu)\n",
                               speed[i].blen, TVMEMSIZE * PAGE_SIZE);
                        break;
                }

                pr_info("test%3u "
                        "(%5u byte blocks,%5u bytes per update,%4u updates): ",
                        i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

                ahash_request_set_crypt(req, sg, output, speed[i].plen);

                if (sec)
                        ret = test_ahash_jiffies(req, speed[i].blen,
                                                 speed[i].plen, output, sec);
                else
                        ret = test_ahash_cycles(req, speed[i].blen,
                                                speed[i].plen, output);

                if (ret) {
                        pr_err("hashing failed ret=%d\n", ret);
                        break;
                }
        }

        ahash_request_free(req);

        crypto_free_ahash(tfm);
}
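/*
 * Report which of the algorithms in check[] are currently available.
 */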
static void test_available(void)
{
        char **name = check;

        while (*name) {
                printk("alg %s ", *name);
                printk(crypto_has_alg(*name, 0, 0) ?
                       "found\n" : "not found\n");
                name++;
        }
}
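/*
 * Run the self-test for one algorithm; in FIPS mode, algorithms that are
 * not FIPS-approved report -EINVAL and are not counted as failures.
 */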
static inline int tcrypt_test(const char *alg)
{
        ret = alg_test(alg, alg, 0, 0);
        /* non-fips algs return -EINVAL in fips mode */
        if (fips_enabled && ret == -EINVAL)
                ret = 0;
        return ret;
}
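/*
 * Dispatch on the 'mode' module parameter: 0 runs every test, low values
 * select individual correctness tests via tcrypt_test(), the 200 range
 * selects cipher speed tests, the 300 range synchronous hash speed tests
 * and the 400 range async hash speed tests.
 */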
static int do_test(int m)
{
        for (i = 1; i < 200; i++)
                ret += do_test(i);
        ret += tcrypt_test("md5");
        ret += tcrypt_test("sha1");
        ret += tcrypt_test("ecb(des)");
        ret += tcrypt_test("cbc(des)");
        ret += tcrypt_test("ecb(des3_ede)");
        ret += tcrypt_test("cbc(des3_ede)");
        ret += tcrypt_test("md4");
        ret += tcrypt_test("sha256");
        ret += tcrypt_test("ecb(blowfish)");
        ret += tcrypt_test("cbc(blowfish)");
        ret += tcrypt_test("ecb(twofish)");
        ret += tcrypt_test("cbc(twofish)");
        ret += tcrypt_test("ecb(serpent)");
        ret += tcrypt_test("ecb(aes)");
        ret += tcrypt_test("cbc(aes)");
        ret += tcrypt_test("lrw(aes)");
        ret += tcrypt_test("xts(aes)");
        ret += tcrypt_test("ctr(aes)");
        ret += tcrypt_test("rfc3686(ctr(aes))");
        ret += tcrypt_test("sha384");
        ret += tcrypt_test("sha512");
        ret += tcrypt_test("deflate");
        ret += tcrypt_test("ecb(cast5)");
        ret += tcrypt_test("ecb(cast6)");
        ret += tcrypt_test("ecb(arc4)");
        ret += tcrypt_test("michael_mic");
        ret += tcrypt_test("crc32c");
        ret += tcrypt_test("ecb(tea)");
        ret += tcrypt_test("ecb(xtea)");
        ret += tcrypt_test("ecb(khazad)");
        ret += tcrypt_test("wp512");
        ret += tcrypt_test("wp384");
        ret += tcrypt_test("wp256");
        ret += tcrypt_test("ecb(tnepres)");
        ret += tcrypt_test("ecb(anubis)");
        ret += tcrypt_test("cbc(anubis)");
        ret += tcrypt_test("tgr192");
        ret += tcrypt_test("tgr160");
        ret += tcrypt_test("tgr128");
        ret += tcrypt_test("ecb(xeta)");
        ret += tcrypt_test("pcbc(fcrypt)");
        ret += tcrypt_test("ecb(camellia)");
        ret += tcrypt_test("cbc(camellia)");
        ret += tcrypt_test("sha224");
        ret += tcrypt_test("salsa20");
        ret += tcrypt_test("gcm(aes)");
        ret += tcrypt_test("lzo");
        ret += tcrypt_test("ccm(aes)");
        ret += tcrypt_test("cts(cbc(aes))");
        ret += tcrypt_test("rmd128");
        ret += tcrypt_test("rmd160");
        ret += tcrypt_test("rmd256");
        ret += tcrypt_test("rmd320");
        ret += tcrypt_test("ecb(seed)");
        ret += tcrypt_test("zlib");
        ret += tcrypt_test("rfc4309(ccm(aes))");
        ret += tcrypt_test("hmac(md5)");
        ret += tcrypt_test("hmac(sha1)");
        ret += tcrypt_test("hmac(sha256)");
        ret += tcrypt_test("hmac(sha384)");
        ret += tcrypt_test("hmac(sha512)");
        ret += tcrypt_test("hmac(sha224)");
        ret += tcrypt_test("xcbc(aes)");
        ret += tcrypt_test("hmac(rmd128)");
        ret += tcrypt_test("hmac(rmd160)");
        ret += tcrypt_test("vmac(aes)");
        ret += tcrypt_test("ansi_cprng");
        ret += tcrypt_test("rfc4106(gcm(aes))");
        test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
                          speed_template_16_24_32);
        test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
                          speed_template_16_24_32);
        test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
                          speed_template_16_24_32);
        test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
                          speed_template_16_24_32);
        test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
                          speed_template_32_40_48);
        test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
                          speed_template_32_40_48);
        test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
                          speed_template_32_48_64);
        test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
                          speed_template_32_48_64);

        test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
                          des3_speed_template, DES3_SPEED_VECTORS,
                          speed_template_24);
        test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
                          des3_speed_template, DES3_SPEED_VECTORS,
                          speed_template_24);
        test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
                          des3_speed_template, DES3_SPEED_VECTORS,
                          speed_template_24);
        test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
                          des3_speed_template, DES3_SPEED_VECTORS,
                          speed_template_24);

        test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
                          speed_template_16_24_32);
        test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
                          speed_template_16_24_32);
        test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
                          speed_template_16_24_32);
        test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
                          speed_template_16_24_32);

        test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
                          speed_template_8_32);
        test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
                          speed_template_8_32);
        test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
                          speed_template_8_32);
        test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
                          speed_template_8_32);

        test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
                          speed_template_8);
        test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
                          speed_template_8);
        test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
                          speed_template_8);
        test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
                          speed_template_8);

        test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
                          speed_template_16_24_32);
        test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
                          speed_template_16_24_32);
        test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
                          speed_template_16_24_32);
        test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
                          speed_template_16_24_32);

        test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
                          speed_template_16_32);
        test_hash_speed("md4", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("md5", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("sha1", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("sha256", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("sha384", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("sha512", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("wp256", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("wp384", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("wp512", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("tgr128", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("tgr160", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("tgr192", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("sha224", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("rmd128", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("rmd160", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("rmd256", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("rmd320", sec, generic_hash_speed_template);
        if (mode > 300 && mode < 400) break;

        test_hash_speed("ghash-generic", sec, hash_speed_template_16);
        if (mode > 300 && mode < 400) break;
        test_ahash_speed("md4", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("md5", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("sha1", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("sha256", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("sha384", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("sha512", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("wp256", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("wp384", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("wp512", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("tgr128", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("tgr160", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("tgr192", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("sha224", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("rmd128", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("rmd160", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("rmd256", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;

        test_ahash_speed("rmd320", sec, generic_hash_speed_template);
        if (mode > 400 && mode < 500) break;
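/*
 * When the 'alg' parameter is given, just probe for the algorithm
 * instead of running the test vectors.
 */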
static int do_alg_test(const char *alg, u32 type, u32 mask)
{
        return crypto_has_alg(alg, type, mask ?: CRYPTO_ALG_TYPE_MASK) ?
               0 : -ENOENT;
}
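/*
 * Module init: allocate the tvmem test pages, run either the single-alg
 * probe or the selected tests, and report overall success or failure.
 */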
static int __init tcrypt_mod_init(void)
{
        for (i = 0; i < TVMEMSIZE; i++) {
                tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
        }

        if (alg)
                err = do_alg_test(alg, type, mask);
        else
                err = do_test(mode);

        if (err)
                printk(KERN_ERR "tcrypt: one or more tests failed!\n");

        /* We intentionally return -EAGAIN so the module is not kept loaded,
         * unless we're running in fips mode. It does all its work from
         * init() and doesn't offer any runtime functionality, but in
         * the fips case, checking for a successful load is helpful.
         * => we don't need it in memory, do we?
         */

        for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
                free_page((unsigned long)tvmem[i]);

        return err;
}
/*
 * If an init function is provided, an exit function must also be provided
 * to allow module unload.
 */
static void __exit tcrypt_mod_fini(void) { }

module_init(tcrypt_mod_init);
module_exit(tcrypt_mod_fini);
module_param(alg, charp, 0);
module_param(type, uint, 0);
module_param(mask, uint, 0);
module_param(mode, int, 0);
module_param(sec, uint, 0);
MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
                      "(defaults to zero which uses CPU cycles instead)");
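/*
 * Typical usage (a sketch; the exact mode numbers depend on the cases in
 * do_test() above):
 *
 *   modprobe tcrypt                    # run all correctness self-tests
 *   modprobe tcrypt mode=200 sec=1     # AES cipher speed tests, 1 s each
 *   modprobe tcrypt mode=303           # sha1 speed test, cycle counts
 *   modprobe tcrypt alg="cbc(aes)"     # probe for an implementation
 *
 * Because tcrypt_mod_init() deliberately returns -EAGAIN (outside fips
 * mode), modprobe is expected to report an error even when every test
 * passes; the results appear in the kernel log.
 */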
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Quick & dirty crypto testing module");
MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");