crypto: aesni - replace CTR function pointer with static call
author: Ard Biesheuvel <ardb@kernel.org>
Sat, 16 Jan 2021 16:48:09 +0000 (17:48 +0100)
committer: Herbert Xu <herbert@gondor.apana.org.au>
Fri, 22 Jan 2021 03:58:04 +0000 (14:58 +1100)
Indirect calls are very expensive on x86, so use a static call to set
the system-wide AES-NI/CTR asm helper.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
arch/x86/crypto/aesni-intel_glue.c

index a548fdbc30732cbd2c62d0bb74bff815b3516ffc..d96685457196f9b74dafb7f12fb077e0a759d93d 100644 (file)
@@ -34,6 +34,7 @@
 #include <linux/jump_label.h>
 #include <linux/workqueue.h>
 #include <linux/spinlock.h>
+#include <linux/static_call.h>
 
 
 #define AESNI_ALIGN    16
@@ -107,10 +108,9 @@ asmlinkage void aesni_xts_decrypt(const struct crypto_aes_ctx *ctx, u8 *out,
 
 #ifdef CONFIG_X86_64
 
-static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
-                             const u8 *in, unsigned int len, u8 *iv);
 asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
+DEFINE_STATIC_CALL(aesni_ctr_enc_tfm, aesni_ctr_enc);
 
 /* Scatter / Gather routines, with args similar to above */
 asmlinkage void aesni_gcm_init(void *ctx,
@@ -520,8 +520,10 @@ static int ctr_crypt(struct skcipher_request *req)
 
        kernel_fpu_begin();
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
-               aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
-                                     nbytes & AES_BLOCK_MASK, walk.iv);
+               static_call(aesni_ctr_enc_tfm)(ctx, walk.dst.virt.addr,
+                                              walk.src.virt.addr,
+                                              nbytes & AES_BLOCK_MASK,
+                                              walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
@@ -1160,10 +1162,9 @@ static int __init aesni_init(void)
        } else {
                pr_info("SSE version of gcm_enc/dec engaged.\n");
        }
-       aesni_ctr_enc_tfm = aesni_ctr_enc;
        if (boot_cpu_has(X86_FEATURE_AVX)) {
                /* optimize performance of ctr mode encryption transform */
-               aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
+               static_call_update(aesni_ctr_enc_tfm, aesni_ctr_enc_avx_tfm);
                pr_info("AES CTR mode by8 optimization enabled\n");
        }
 #endif