2 * Quick & dirty crypto testing module.
4 * This will only exist until we have a better testing mechanism
5 * (e.g. a char device).
7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
8 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
9 * Copyright (c) 2007 Nokia Siemens Networks
11 * Updated RFC4106 AES-GCM testing.
12 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
13 * Adrian Hoban <adrian.hoban@intel.com>
14 * Gabriele Paoloni <gabriele.paoloni@intel.com>
15 * Tadeusz Struk (tadeusz.struk@intel.com)
16 * Copyright (c) 2010, Intel Corporation.
18 * This program is free software; you can redistribute it and/or modify it
19 * under the terms of the GNU General Public License as published by the Free
20 * Software Foundation; either version 2 of the License, or (at your option)
25 #include <crypto/hash.h>
26 #include <linux/err.h>
27 #include <linux/init.h>
28 #include <linux/gfp.h>
29 #include <linux/module.h>
30 #include <linux/scatterlist.h>
31 #include <linux/string.h>
32 #include <linux/moduleparam.h>
33 #include <linux/jiffies.h>
34 #include <linux/timex.h>
35 #include <linux/interrupt.h>
/* NOTE(review): the three lines below are comment fragments; the original
 * comment delimiters were lost in extraction. */
40 * Need slab memory for testing (size in number of pages).
45 * Used by test_cipher_speed()
51 * Used by test_cipher_speed()
/* Speed-test duration in seconds; 0 means measure CPU cycles instead
 * (see MODULE_PARM_DESC(sec, ...) at the bottom of this file). */
53 static unsigned int sec;
/* Module parameter: name of a single algorithm to probe; NULL selects the
 * mode-driven test table in do_test(). */
55 static char *alg = NULL;
/* Scratch pages (TVMEMSIZE of them) holding key/plaintext/hash input for
 * the speed tests; filled with 0xff before each run. */
59 static char *tvmem[TVMEMSIZE];
/* NULL-terminated list of algorithm names probed by test_available()
 * via crypto_has_alg(). */
61 static char *check[] = {
62 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
63 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
64 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
65 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
66 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
67 "lzo", "cts", "zlib", NULL
70 static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
71 struct scatterlist *sg, int blen, int sec)
73 unsigned long start, end;
77 for (start = jiffies, end = start + sec * HZ, bcount = 0;
78 time_before(jiffies, end); bcount++) {
80 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
82 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
88 printk("%d operations in %d seconds (%ld bytes)\n",
89 bcount, sec, (long)bcount * blen);
93 static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
94 struct scatterlist *sg, int blen)
96 unsigned long cycles = 0;
104 for (i = 0; i < 4; i++) {
106 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
108 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
114 /* The real thing. */
115 for (i = 0; i < 8; i++) {
118 start = get_cycles();
120 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
122 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
128 cycles += end - start;
136 printk("1 operation in %lu cycles (%d bytes)\n",
137 (cycles + 4) / 8, blen);
/* Buffer sizes (bytes) exercised by test_cipher_speed(); 0 terminates. */
142 static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
/*
 * Measure encrypt/decrypt speed of blkcipher @algo over every key size in
 * @keysize (0-terminated) and every entry of block_sizes[].  For each
 * combination it sets the key (from @template when a matching klen exists,
 * otherwise the 0xff-filled tvmem), builds a scatterlist over tvmem, and
 * runs either test_cipher_jiffies() (sec != 0) or test_cipher_cycles().
 *
 * NOTE(review): many lines of this function (braces, loop headers, local
 * declarations, early-exit gotos) were lost in extraction; the code below
 * is annotated as-is rather than reconstructed.
 */
144 static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
145 struct cipher_speed_template *template,
146 unsigned int tcount, u8 *keysize)
148 unsigned int ret, i, j, iv_len;
151 struct crypto_blkcipher *tfm;
152 struct blkcipher_desc desc;
161 printk("\ntesting speed of %s %s\n", algo, e)
163 tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);
/* Allocation failure is reported and the function bails out. */
166 printk("failed to load transform for %s: %ld\n", algo,
176 b_size = block_sizes;
178 struct scatterlist sg[TVMEMSIZE];
/* Key + data must fit in the tvmem scratch pages. */
180 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
181 printk("template (%u) too big for "
182 "tvmem (%lu)\n", *keysize + *b_size,
183 TVMEMSIZE * PAGE_SIZE);
187 printk("test %u (%d bit key, %d byte blocks): ", i,
188 *keysize * 8, *b_size);
190 memset(tvmem[0], 0xff, PAGE_SIZE);
192 /* set key, plain text and IV */
/* Prefer a template key whose length matches the current keysize. */
194 for (j = 0; j < tcount; j++) {
195 if (template[j].klen == *keysize) {
196 key = template[j].key;
201 ret = crypto_blkcipher_setkey(tfm, key, *keysize);
203 printk("setkey() failed flags=%x\n",
204 crypto_blkcipher_get_flags(tfm));
/* First sg entry skips the key bytes placed at the start of tvmem[0]. */
208 sg_init_table(sg, TVMEMSIZE);
209 sg_set_buf(sg, tvmem[0] + *keysize,
210 PAGE_SIZE - *keysize);
211 for (j = 1; j < TVMEMSIZE; j++) {
212 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
213 memset (tvmem[j], 0xff, PAGE_SIZE);
/* All-0xff IV of whatever length the transform requires. */
216 iv_len = crypto_blkcipher_ivsize(tfm);
218 memset(&iv, 0xff, iv_len);
219 crypto_blkcipher_set_iv(tfm, iv, iv_len);
/* sec != 0 selects wall-clock measurement, otherwise cycle counting. */
223 ret = test_cipher_jiffies(&desc, enc, sg,
226 ret = test_cipher_cycles(&desc, enc, sg,
230 printk("%s() failed flags=%x\n", e, desc.flags);
240 crypto_free_blkcipher(tfm);
243 static int test_hash_jiffies_digest(struct hash_desc *desc,
244 struct scatterlist *sg, int blen,
247 unsigned long start, end;
251 for (start = jiffies, end = start + sec * HZ, bcount = 0;
252 time_before(jiffies, end); bcount++) {
253 ret = crypto_hash_digest(desc, sg, blen, out);
258 printk("%6u opers/sec, %9lu bytes/sec\n",
259 bcount / sec, ((long)bcount * blen) / sec);
264 static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
265 int blen, int plen, char *out, int sec)
267 unsigned long start, end;
272 return test_hash_jiffies_digest(desc, sg, blen, out, sec);
274 for (start = jiffies, end = start + sec * HZ, bcount = 0;
275 time_before(jiffies, end); bcount++) {
276 ret = crypto_hash_init(desc);
279 for (pcount = 0; pcount < blen; pcount += plen) {
280 ret = crypto_hash_update(desc, sg, plen);
284 /* we assume there is enough space in 'out' for the result */
285 ret = crypto_hash_final(desc, out);
290 printk("%6u opers/sec, %9lu bytes/sec\n",
291 bcount / sec, ((long)bcount * blen) / sec);
296 static int test_hash_cycles_digest(struct hash_desc *desc,
297 struct scatterlist *sg, int blen, char *out)
299 unsigned long cycles = 0;
307 for (i = 0; i < 4; i++) {
308 ret = crypto_hash_digest(desc, sg, blen, out);
313 /* The real thing. */
314 for (i = 0; i < 8; i++) {
317 start = get_cycles();
319 ret = crypto_hash_digest(desc, sg, blen, out);
325 cycles += end - start;
335 printk("%6lu cycles/operation, %4lu cycles/byte\n",
336 cycles / 8, cycles / (8 * blen));
341 static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
342 int blen, int plen, char *out)
344 unsigned long cycles = 0;
349 return test_hash_cycles_digest(desc, sg, blen, out);
355 for (i = 0; i < 4; i++) {
356 ret = crypto_hash_init(desc);
359 for (pcount = 0; pcount < blen; pcount += plen) {
360 ret = crypto_hash_update(desc, sg, plen);
364 ret = crypto_hash_final(desc, out);
369 /* The real thing. */
370 for (i = 0; i < 8; i++) {
373 start = get_cycles();
375 ret = crypto_hash_init(desc);
378 for (pcount = 0; pcount < blen; pcount += plen) {
379 ret = crypto_hash_update(desc, sg, plen);
383 ret = crypto_hash_final(desc, out);
389 cycles += end - start;
399 printk("%6lu cycles/operation, %4lu cycles/byte\n",
400 cycles / 8, cycles / (8 * blen));
405 static void test_hash_sg_init(struct scatterlist *sg)
409 sg_init_table(sg, TVMEMSIZE);
410 for (i = 0; i < TVMEMSIZE; i++) {
411 sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
412 memset(tvmem[i], 0xff, PAGE_SIZE);
/*
 * Measure synchronous hash speed for @algo over every entry of the
 * @speed template (terminated by blen == 0), using either wall-clock
 * (sec != 0, test_hash_jiffies) or cycle-count (test_hash_cycles)
 * measurement.  Digest output goes to a static 1 KiB buffer, which is
 * size-checked against the transform's digestsize up front.
 *
 * NOTE(review): several lines (braces, locals, error-exit paths) were
 * lost in extraction; annotated as-is rather than reconstructed.
 */
416 static void test_hash_speed(const char *algo, unsigned int sec,
417 struct hash_speed *speed)
419 struct scatterlist sg[TVMEMSIZE];
420 struct crypto_hash *tfm;
421 struct hash_desc desc;
422 static char output[1024];
426 printk(KERN_INFO "\ntesting speed of %s\n", algo);
428 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
431 printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
/* Refuse to run if the digest would overflow the static output buffer. */
439 if (crypto_hash_digestsize(tfm) > sizeof(output)) {
440 printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
441 crypto_hash_digestsize(tfm), sizeof(output));
445 test_hash_sg_init(sg);
446 for (i = 0; speed[i].blen != 0; i++) {
447 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
449 "template (%u) too big for tvmem (%lu)\n",
450 speed[i].blen, TVMEMSIZE * PAGE_SIZE);
/* Keyed hashes get a throwaway key from the 0xff-filled tvmem. */
455 crypto_hash_setkey(tfm, tvmem[0], speed[i].klen);
457 printk(KERN_INFO "test%3u "
458 "(%5u byte blocks,%5u bytes per update,%4u updates): ",
459 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
462 ret = test_hash_jiffies(&desc, sg, speed[i].blen,
463 speed[i].plen, output, sec);
465 ret = test_hash_cycles(&desc, sg, speed[i].blen,
466 speed[i].plen, output);
469 printk(KERN_ERR "hashing failed ret=%d\n", ret);
475 crypto_free_hash(tfm);
478 struct tcrypt_result {
479 struct completion completion;
483 static void tcrypt_complete(struct crypto_async_request *req, int err)
485 struct tcrypt_result *res = req->data;
487 if (err == -EINPROGRESS)
491 complete(&res->completion);
494 static inline int do_one_ahash_op(struct ahash_request *req, int ret)
496 if (ret == -EINPROGRESS || ret == -EBUSY) {
497 struct tcrypt_result *tr = req->base.data;
499 ret = wait_for_completion_interruptible(&tr->completion);
502 INIT_COMPLETION(tr->completion);
507 static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
510 unsigned long start, end;
514 for (start = jiffies, end = start + sec * HZ, bcount = 0;
515 time_before(jiffies, end); bcount++) {
516 ret = do_one_ahash_op(req, crypto_ahash_digest(req));
521 printk("%6u opers/sec, %9lu bytes/sec\n",
522 bcount / sec, ((long)bcount * blen) / sec);
527 static int test_ahash_jiffies(struct ahash_request *req, int blen,
528 int plen, char *out, int sec)
530 unsigned long start, end;
535 return test_ahash_jiffies_digest(req, blen, out, sec);
537 for (start = jiffies, end = start + sec * HZ, bcount = 0;
538 time_before(jiffies, end); bcount++) {
539 ret = crypto_ahash_init(req);
542 for (pcount = 0; pcount < blen; pcount += plen) {
543 ret = do_one_ahash_op(req, crypto_ahash_update(req));
547 /* we assume there is enough space in 'out' for the result */
548 ret = do_one_ahash_op(req, crypto_ahash_final(req));
553 pr_cont("%6u opers/sec, %9lu bytes/sec\n",
554 bcount / sec, ((long)bcount * blen) / sec);
559 static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
562 unsigned long cycles = 0;
566 for (i = 0; i < 4; i++) {
567 ret = do_one_ahash_op(req, crypto_ahash_digest(req));
572 /* The real thing. */
573 for (i = 0; i < 8; i++) {
576 start = get_cycles();
578 ret = do_one_ahash_op(req, crypto_ahash_digest(req));
584 cycles += end - start;
591 pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
592 cycles / 8, cycles / (8 * blen));
597 static int test_ahash_cycles(struct ahash_request *req, int blen,
600 unsigned long cycles = 0;
604 return test_ahash_cycles_digest(req, blen, out);
607 for (i = 0; i < 4; i++) {
608 ret = crypto_ahash_init(req);
611 for (pcount = 0; pcount < blen; pcount += plen) {
612 ret = do_one_ahash_op(req, crypto_ahash_update(req));
616 ret = do_one_ahash_op(req, crypto_ahash_final(req));
621 /* The real thing. */
622 for (i = 0; i < 8; i++) {
625 start = get_cycles();
627 ret = crypto_ahash_init(req);
630 for (pcount = 0; pcount < blen; pcount += plen) {
631 ret = do_one_ahash_op(req, crypto_ahash_update(req));
635 ret = do_one_ahash_op(req, crypto_ahash_final(req));
641 cycles += end - start;
648 pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
649 cycles / 8, cycles / (8 * blen));
/*
 * Measure asynchronous hash speed for @algo over the @speed template
 * (terminated by blen == 0).  Allocates an ahash tfm and request, wires
 * up the tcrypt_complete() callback with a tcrypt_result, then runs
 * either test_ahash_jiffies() (sec != 0) or test_ahash_cycles() per
 * template entry.  Digest output uses a static 1 KiB buffer, size-checked
 * against the transform's digestsize.
 *
 * NOTE(review): several lines (braces, locals, cleanup gotos) were lost
 * in extraction; annotated as-is rather than reconstructed.
 */
654 static void test_ahash_speed(const char *algo, unsigned int sec,
655 struct hash_speed *speed)
657 struct scatterlist sg[TVMEMSIZE];
658 struct tcrypt_result tresult;
659 struct ahash_request *req;
660 struct crypto_ahash *tfm;
661 static char output[1024];
664 printk(KERN_INFO "\ntesting speed of async %s\n", algo);
666 tfm = crypto_alloc_ahash(algo, 0, 0);
668 pr_err("failed to load transform for %s: %ld\n",
/* Refuse to run if the digest would overflow the static output buffer. */
673 if (crypto_ahash_digestsize(tfm) > sizeof(output)) {
674 pr_err("digestsize(%u) > outputbuffer(%zu)\n",
675 crypto_ahash_digestsize(tfm), sizeof(output));
679 test_hash_sg_init(sg);
680 req = ahash_request_alloc(tfm, GFP_KERNEL);
682 pr_err("ahash request allocation failure\n");
/* Completion/callback pair lets do_one_ahash_op() wait for async ops. */
686 init_completion(&tresult.completion);
687 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
688 tcrypt_complete, &tresult);
690 for (i = 0; speed[i].blen != 0; i++) {
691 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
692 pr_err("template (%u) too big for tvmem (%lu)\n",
693 speed[i].blen, TVMEMSIZE * PAGE_SIZE);
698 "(%5u byte blocks,%5u bytes per update,%4u updates): ",
699 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
701 ahash_request_set_crypt(req, sg, output, speed[i].plen);
704 ret = test_ahash_jiffies(req, speed[i].blen,
705 speed[i].plen, output, sec);
707 ret = test_ahash_cycles(req, speed[i].blen,
708 speed[i].plen, output);
711 pr_err("hashing failed ret=%d\n", ret);
716 ahash_request_free(req);
719 crypto_free_ahash(tfm);
722 static void test_available(void)
727 printk("alg %s ", *name);
728 printk(crypto_has_alg(*name, 0, 0) ?
729 "found\n" : "not found\n");
734 static inline int tcrypt_test(const char *alg)
738 ret = alg_test(alg, alg, 0, 0);
739 /* non-fips algs return -EINVAL in fips mode */
740 if (fips_enabled && ret == -EINVAL)
/*
 * Dispatch on the test mode @m: correctness self-tests via tcrypt_test()
 * (accumulating failures into ret), cipher speed tests (modes ~200),
 * sync hash speed tests (modes 300-399) and async hash speed tests
 * (modes 400-499).
 *
 * NOTE(review): every `case` label, the `switch` statement itself, local
 * declarations and closing braces were lost in extraction — only the
 * case bodies survive below.  Annotated as-is rather than reconstructed.
 * The `mode > 300/400` break checks reference the module-level `mode`
 * parameter (declared outside this excerpt), which is how the upstream
 * fall-through "run all" modes (399/499) work — presumably intentional;
 * verify against the full file.
 */
745 static int do_test(int m)
/* Mode 0: recurse over all numbered test modes. */
752 for (i = 1; i < 200; i++)
757 ret += tcrypt_test("md5");
761 ret += tcrypt_test("sha1");
765 ret += tcrypt_test("ecb(des)");
766 ret += tcrypt_test("cbc(des)");
770 ret += tcrypt_test("ecb(des3_ede)");
771 ret += tcrypt_test("cbc(des3_ede)");
775 ret += tcrypt_test("md4");
779 ret += tcrypt_test("sha256");
783 ret += tcrypt_test("ecb(blowfish)");
784 ret += tcrypt_test("cbc(blowfish)");
785 ret += tcrypt_test("ctr(blowfish)");
789 ret += tcrypt_test("ecb(twofish)");
790 ret += tcrypt_test("cbc(twofish)");
791 ret += tcrypt_test("ctr(twofish)");
795 ret += tcrypt_test("ecb(serpent)");
799 ret += tcrypt_test("ecb(aes)");
800 ret += tcrypt_test("cbc(aes)");
801 ret += tcrypt_test("lrw(aes)");
802 ret += tcrypt_test("xts(aes)");
803 ret += tcrypt_test("ctr(aes)");
804 ret += tcrypt_test("rfc3686(ctr(aes))");
808 ret += tcrypt_test("sha384");
812 ret += tcrypt_test("sha512");
816 ret += tcrypt_test("deflate");
820 ret += tcrypt_test("ecb(cast5)");
824 ret += tcrypt_test("ecb(cast6)");
828 ret += tcrypt_test("ecb(arc4)");
832 ret += tcrypt_test("michael_mic");
836 ret += tcrypt_test("crc32c");
840 ret += tcrypt_test("ecb(tea)");
844 ret += tcrypt_test("ecb(xtea)");
848 ret += tcrypt_test("ecb(khazad)");
852 ret += tcrypt_test("wp512");
856 ret += tcrypt_test("wp384");
860 ret += tcrypt_test("wp256");
864 ret += tcrypt_test("ecb(tnepres)");
868 ret += tcrypt_test("ecb(anubis)");
869 ret += tcrypt_test("cbc(anubis)");
873 ret += tcrypt_test("tgr192");
878 ret += tcrypt_test("tgr160");
882 ret += tcrypt_test("tgr128");
886 ret += tcrypt_test("ecb(xeta)");
890 ret += tcrypt_test("pcbc(fcrypt)");
894 ret += tcrypt_test("ecb(camellia)");
895 ret += tcrypt_test("cbc(camellia)");
898 ret += tcrypt_test("sha224");
902 ret += tcrypt_test("salsa20");
906 ret += tcrypt_test("gcm(aes)");
910 ret += tcrypt_test("lzo");
914 ret += tcrypt_test("ccm(aes)");
918 ret += tcrypt_test("cts(cbc(aes))");
922 ret += tcrypt_test("rmd128");
926 ret += tcrypt_test("rmd160");
930 ret += tcrypt_test("rmd256");
934 ret += tcrypt_test("rmd320");
938 ret += tcrypt_test("ecb(seed)");
942 ret += tcrypt_test("zlib");
946 ret += tcrypt_test("rfc4309(ccm(aes))");
950 ret += tcrypt_test("hmac(md5)");
954 ret += tcrypt_test("hmac(sha1)");
958 ret += tcrypt_test("hmac(sha256)");
962 ret += tcrypt_test("hmac(sha384)");
966 ret += tcrypt_test("hmac(sha512)");
970 ret += tcrypt_test("hmac(sha224)");
974 ret += tcrypt_test("xcbc(aes)");
978 ret += tcrypt_test("hmac(rmd128)");
982 ret += tcrypt_test("hmac(rmd160)");
986 ret += tcrypt_test("vmac(aes)");
990 ret += tcrypt_test("ansi_cprng");
994 ret += tcrypt_test("rfc4106(gcm(aes))");
/* Block-cipher speed test modes follow (AES, 3DES, twofish, blowfish,
 * DES, camellia, salsa20), each run for both directions. */
998 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
999 speed_template_16_24_32);
1000 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1001 speed_template_16_24_32);
1002 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1003 speed_template_16_24_32);
1004 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1005 speed_template_16_24_32);
1006 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
1007 speed_template_32_40_48);
1008 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
1009 speed_template_32_40_48);
1010 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
1011 speed_template_32_48_64);
1012 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
1013 speed_template_32_48_64);
1014 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1015 speed_template_16_24_32);
1016 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1017 speed_template_16_24_32);
1021 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1022 des3_speed_template, DES3_SPEED_VECTORS,
1024 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
1025 des3_speed_template, DES3_SPEED_VECTORS,
1027 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
1028 des3_speed_template, DES3_SPEED_VECTORS,
1030 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
1031 des3_speed_template, DES3_SPEED_VECTORS,
1036 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
1037 speed_template_16_24_32);
1038 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
1039 speed_template_16_24_32);
1040 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
1041 speed_template_16_24_32);
1042 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
1043 speed_template_16_24_32);
1044 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
1045 speed_template_16_24_32);
1046 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
1047 speed_template_16_24_32);
1051 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
1052 speed_template_8_32);
1053 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
1054 speed_template_8_32);
1055 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
1056 speed_template_8_32);
1057 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
1058 speed_template_8_32);
1059 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
1060 speed_template_8_32);
1061 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
1062 speed_template_8_32);
1066 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
1068 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
1070 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
1072 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
1077 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
1078 speed_template_16_24_32);
1079 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
1080 speed_template_16_24_32);
1081 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
1082 speed_template_16_24_32);
1083 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
1084 speed_template_16_24_32);
1088 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
1089 speed_template_16_32);
/* Sync hash speed test modes (300-399); each case falls through until
 * `mode` is an exact single-test value. */
1096 test_hash_speed("md4", sec, generic_hash_speed_template);
1097 if (mode > 300 && mode < 400) break;
1100 test_hash_speed("md5", sec, generic_hash_speed_template);
1101 if (mode > 300 && mode < 400) break;
1104 test_hash_speed("sha1", sec, generic_hash_speed_template);
1105 if (mode > 300 && mode < 400) break;
1108 test_hash_speed("sha256", sec, generic_hash_speed_template);
1109 if (mode > 300 && mode < 400) break;
1112 test_hash_speed("sha384", sec, generic_hash_speed_template);
1113 if (mode > 300 && mode < 400) break;
1116 test_hash_speed("sha512", sec, generic_hash_speed_template);
1117 if (mode > 300 && mode < 400) break;
1120 test_hash_speed("wp256", sec, generic_hash_speed_template);
1121 if (mode > 300 && mode < 400) break;
1124 test_hash_speed("wp384", sec, generic_hash_speed_template);
1125 if (mode > 300 && mode < 400) break;
1128 test_hash_speed("wp512", sec, generic_hash_speed_template);
1129 if (mode > 300 && mode < 400) break;
1132 test_hash_speed("tgr128", sec, generic_hash_speed_template);
1133 if (mode > 300 && mode < 400) break;
1136 test_hash_speed("tgr160", sec, generic_hash_speed_template);
1137 if (mode > 300 && mode < 400) break;
1140 test_hash_speed("tgr192", sec, generic_hash_speed_template);
1141 if (mode > 300 && mode < 400) break;
1144 test_hash_speed("sha224", sec, generic_hash_speed_template);
1145 if (mode > 300 && mode < 400) break;
1148 test_hash_speed("rmd128", sec, generic_hash_speed_template);
1149 if (mode > 300 && mode < 400) break;
1152 test_hash_speed("rmd160", sec, generic_hash_speed_template);
1153 if (mode > 300 && mode < 400) break;
1156 test_hash_speed("rmd256", sec, generic_hash_speed_template);
1157 if (mode > 300 && mode < 400) break;
1160 test_hash_speed("rmd320", sec, generic_hash_speed_template);
1161 if (mode > 300 && mode < 400) break;
1164 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
1165 if (mode > 300 && mode < 400) break;
/* Async hash speed test modes (400-499); same fall-through scheme. */
1174 test_ahash_speed("md4", sec, generic_hash_speed_template);
1175 if (mode > 400 && mode < 500) break;
1178 test_ahash_speed("md5", sec, generic_hash_speed_template);
1179 if (mode > 400 && mode < 500) break;
1182 test_ahash_speed("sha1", sec, generic_hash_speed_template);
1183 if (mode > 400 && mode < 500) break;
1186 test_ahash_speed("sha256", sec, generic_hash_speed_template);
1187 if (mode > 400 && mode < 500) break;
1190 test_ahash_speed("sha384", sec, generic_hash_speed_template);
1191 if (mode > 400 && mode < 500) break;
1194 test_ahash_speed("sha512", sec, generic_hash_speed_template);
1195 if (mode > 400 && mode < 500) break;
1198 test_ahash_speed("wp256", sec, generic_hash_speed_template);
1199 if (mode > 400 && mode < 500) break;
1202 test_ahash_speed("wp384", sec, generic_hash_speed_template);
1203 if (mode > 400 && mode < 500) break;
1206 test_ahash_speed("wp512", sec, generic_hash_speed_template);
1207 if (mode > 400 && mode < 500) break;
1210 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
1211 if (mode > 400 && mode < 500) break;
1214 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
1215 if (mode > 400 && mode < 500) break;
1218 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
1219 if (mode > 400 && mode < 500) break;
1222 test_ahash_speed("sha224", sec, generic_hash_speed_template);
1223 if (mode > 400 && mode < 500) break;
1226 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
1227 if (mode > 400 && mode < 500) break;
1230 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
1231 if (mode > 400 && mode < 500) break;
1234 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
1235 if (mode > 400 && mode < 500) break;
1238 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
1239 if (mode > 400 && mode < 500) break;
1252 static int do_alg_test(const char *alg, u32 type, u32 mask)
1254 return crypto_has_alg(alg, type, mask ?: CRYPTO_ALG_TYPE_MASK) ?
/*
 * Module init: allocate the tvmem scratch pages, then either probe a
 * single algorithm (alg= parameter set) via do_alg_test() or run the
 * mode-selected tests via do_test().  On success it deliberately returns
 * a non-zero value (see the comment below) so the module is not kept
 * loaded, except under FIPS.
 *
 * NOTE(review): allocation-failure handling, the alg/mode selection
 * branches and the return statements were lost in extraction; annotated
 * as-is rather than reconstructed.
 */
1258 static int __init tcrypt_mod_init(void)
1263 for (i = 0; i < TVMEMSIZE; i++) {
1264 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
1270 err = do_alg_test(alg, type, mask);
1272 err = do_test(mode);
1275 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
1279 /* We intentionaly return -EAGAIN to prevent keeping the module,
1280 * unless we're running in fips mode. It does all its work from
1281 * init() and doesn't offer any runtime functionality, but in
1282 * the fips case, checking for a successful load is helpful.
1283 * => we don't need it in the memory, do we?
/* Cleanup: free whatever scratch pages were allocated. */
1290 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
1291 free_page((unsigned long)tvmem[i]);
1297 * If an init function is provided, an exit function must also be provided
1298 * to allow module unload.
/* Intentionally empty: all work happens in tcrypt_mod_init(). */
1300 static void __exit tcrypt_mod_fini(void) { }
/* Register entry/exit points and module parameters.  alg/type/mask select
 * a single-algorithm probe; mode selects a do_test() case; sec selects
 * wall-clock (non-zero) vs cycle-count (zero) speed measurement. */
1302 module_init(tcrypt_mod_init);
1303 module_exit(tcrypt_mod_fini);
1305 module_param(alg, charp, 0);
1306 module_param(type, uint, 0);
1307 module_param(mask, uint, 0);
1308 module_param(mode, int, 0);
1309 module_param(sec, uint, 0);
1310 MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
1311 "(defaults to zero which uses CPU cycles instead)");
1313 MODULE_LICENSE("GPL");
1314 MODULE_DESCRIPTION("Quick & dirty crypto testing module");
1315 MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");