
[v2,16/19] crypto: x86 - print CPU optimized loaded messages

Message ID 20221012215931.3896-17-elliott@hpe.com
State New
Series [RFC,1/7] rcu: correct CONFIG_EXT_RCU_CPU_STALL_TIMEOUT descriptions

Commit Message

Elliott, Robert (Servers) Oct. 12, 2022, 9:59 p.m. UTC
Print a positive message at the info level if the CPU-optimized module
is loaded, for all modules except the sha modules.
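
The pattern in each hunk below is the same: capture the return value of the
registration call instead of returning it directly, and print a single
info-level line only on success, optionally reporting which CPU features were
enabled. A minimal sketch of that shape (example_mod_init is a placeholder
name; "alg" stands for whatever shash or skcipher the individual file
registers):

	static int __init example_mod_init(void)	/* placeholder name */
	{
		int enabled_avx2 = 0;
		int ret;

		if (boot_cpu_has(X86_FEATURE_AVX2))
			enabled_avx2 = 1;

		ret = crypto_register_shash(&alg);	/* per-file registration call */
		if (!ret)
			pr_info("CPU-optimized crypto module loaded (AVX2=%s)\n",
				enabled_avx2 ? "yes" : "no");
		return ret;
	}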

Signed-off-by: Robert Elliott <elliott@hpe.com>
---
 arch/x86/crypto/aegis128-aesni-glue.c      |  8 +++++--
 arch/x86/crypto/aesni-intel_glue.c         | 22 +++++++++++++------
 arch/x86/crypto/aria_aesni_avx_glue.c      | 13 ++++++++---
 arch/x86/crypto/blake2s-glue.c             | 14 ++++++++++--
 arch/x86/crypto/blowfish_glue.c            |  2 ++
 arch/x86/crypto/camellia_aesni_avx2_glue.c |  6 +++++-
 arch/x86/crypto/camellia_aesni_avx_glue.c  |  6 +++++-
 arch/x86/crypto/camellia_glue.c            |  3 +++
 arch/x86/crypto/cast5_avx_glue.c           |  6 +++++-
 arch/x86/crypto/cast6_avx_glue.c           |  6 +++++-
 arch/x86/crypto/chacha_glue.c              | 17 +++++++++++++--
 arch/x86/crypto/crc32-pclmul_glue.c        |  8 ++++++-
 arch/x86/crypto/crc32c-intel_glue.c        | 15 +++++++++++--
 arch/x86/crypto/crct10dif-pclmul_glue.c    |  7 +++++-
 arch/x86/crypto/curve25519-x86_64.c        | 13 +++++++++--
 arch/x86/crypto/des3_ede_glue.c            |  2 ++
 arch/x86/crypto/ghash-clmulni-intel_glue.c |  1 +
 arch/x86/crypto/nhpoly1305-avx2-glue.c     |  7 +++++-
 arch/x86/crypto/nhpoly1305-sse2-glue.c     |  7 +++++-
 arch/x86/crypto/poly1305_glue.c            | 25 ++++++++++++++++++----
 arch/x86/crypto/polyval-clmulni_glue.c     |  7 +++++-
 arch/x86/crypto/serpent_avx2_glue.c        |  7 ++++--
 arch/x86/crypto/serpent_avx_glue.c         |  6 +++++-
 arch/x86/crypto/serpent_sse2_glue.c        |  7 +++++-
 arch/x86/crypto/sm3_avx_glue.c             |  6 +++++-
 arch/x86/crypto/sm4_aesni_avx2_glue.c      |  6 +++++-
 arch/x86/crypto/sm4_aesni_avx_glue.c       |  7 ++++--
 arch/x86/crypto/twofish_avx_glue.c         | 10 ++++++---
 arch/x86/crypto/twofish_glue.c             |  7 +++++-
 arch/x86/crypto/twofish_glue_3way.c        |  9 ++++++--
 30 files changed, 213 insertions(+), 47 deletions(-)

Comments

Jason A. Donenfeld Oct. 13, 2022, 12:40 a.m. UTC | #1
On Wed, Oct 12, 2022 at 04:59:28PM -0500, Robert Elliott wrote:
> Print a positive message at the info level if the CPU-optimized module
> is loaded, for all modules except the sha modules.

Why!? This is just meaningless clutter. If the admin wants to see what
modules are loaded, he uses `lsmod`.

Also, what's special about sha?

Anyway, please don't do this.

Jason
kernel test robot Oct. 13, 2022, 1:47 p.m. UTC | #2
Hi Robert,

Thank you for the patch! Perhaps something to improve:

[auto build test WARNING on herbert-cryptodev-2.6/master]
[also build test WARNING on herbert-crypto-2.6/master linus/master next-20221012]
[cannot apply to v6.0]
[If your patch is applied to the wrong git tree, kindly drop us a note.
And when submitting patch, we suggest to use '--base' as documented in
https://git-scm.com/docs/git-format-patch#_base_tree_information]

url:    https://github.com/intel-lab-lkp/linux/commits/Robert-Elliott/crypto-tcrypt-test-crc32/20221013-065919
base:   https://git.kernel.org/pub/scm/linux/kernel/git/herbert/cryptodev-2.6.git master
config: x86_64-allyesconfig
compiler: gcc-11 (Debian 11.3.0-8) 11.3.0
reproduce (this is a W=1 build):
        # https://github.com/intel-lab-lkp/linux/commit/15a63fd12ab4d509e54c5db6daf2e8e81fdf0cf5
        git remote add linux-review https://github.com/intel-lab-lkp/linux
        git fetch --no-tags linux-review Robert-Elliott/crypto-tcrypt-test-crc32/20221013-065919
        git checkout 15a63fd12ab4d509e54c5db6daf2e8e81fdf0cf5
        # save the config file
        mkdir build_dir && cp config build_dir/.config
        make W=1 O=build_dir ARCH=x86_64 SHELL=/bin/bash arch/x86/crypto/

If you fix the issue, kindly add following tag where applicable
| Reported-by: kernel test robot <lkp@intel.com>

All warnings (new ones prefixed by >>):

>> arch/x86/crypto/serpent_avx2_glue.c:100:32: warning: 'module_cpu_ids' defined but not used [-Wunused-const-variable=]
     100 | static const struct x86_cpu_id module_cpu_ids[] = {
         |                                ^~~~~~~~~~~~~~
--
>> arch/x86/crypto/aegis128-aesni-glue.c:268:32: warning: 'module_cpu_ids' defined but not used [-Wunused-const-variable=]
     268 | static const struct x86_cpu_id module_cpu_ids[] = {
         |                                ^~~~~~~~~~~~~~
--
>> arch/x86/crypto/sm4_aesni_avx_glue.c:451:32: warning: 'module_cpu_ids' defined but not used [-Wunused-const-variable=]
     451 | static const struct x86_cpu_id module_cpu_ids[] = {
         |                                ^~~~~~~~~~~~~~


vim +/module_cpu_ids +100 arch/x86/crypto/serpent_avx2_glue.c

e16bf974b3d965 Eric Biggers   2018-02-19   99  
385e7cb709ad4a Robert Elliott 2022-10-12 @100  static const struct x86_cpu_id module_cpu_ids[] = {
385e7cb709ad4a Robert Elliott 2022-10-12  101  	X86_MATCH_FEATURE(X86_FEATURE_AVX2, NULL),
385e7cb709ad4a Robert Elliott 2022-10-12  102  	{}
385e7cb709ad4a Robert Elliott 2022-10-12  103  };
385e7cb709ad4a Robert Elliott 2022-10-12  104  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
385e7cb709ad4a Robert Elliott 2022-10-12  105
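
For context, module_cpu_ids serves two purposes: MODULE_DEVICE_TABLE(x86cpu, ...)
exports it so udev can autoload the module on CPUs that advertise the feature,
and x86_match_cpu() is the runtime consumer inside the init function. The
warning fires in serpent_avx2_glue.c, aegis128-aesni-glue.c, and
sm4_aesni_avx_glue.c because those hunks drop the x86_match_cpu() call while
keeping the table. A minimal sketch of the intended pairing (example_init is a
placeholder name):

	#include <linux/module.h>
	#include <asm/cpu_device_id.h>

	static const struct x86_cpu_id module_cpu_ids[] = {
		X86_MATCH_FEATURE(X86_FEATURE_AVX2, NULL),
		{}
	};
	/* generates the modalias entries used for module autoloading */
	MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);

	static int __init example_init(void)	/* placeholder name */
	{
		/*
		 * Runtime consumer of the table; removing this check while
		 * keeping the table is what triggers -Wunused-const-variable.
		 */
		if (!x86_match_cpu(module_cpu_ids))
			return -ENODEV;
		return 0;
	}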
kernel test robot Oct. 13, 2022, 1:48 p.m. UTC | #3
Hi Robert,

Thank you for the patch! Perhaps something to improve:

[auto build test WARNING on herbert-cryptodev-2.6/master]
[also build test WARNING on herbert-crypto-2.6/master linus/master next-20221012]
[cannot apply to v6.0]
[If your patch is applied to the wrong git tree, kindly drop us a note.
And when submitting patch, we suggest to use '--base' as documented in
https://git-scm.com/docs/git-format-patch#_base_tree_information]

url:    https://github.com/intel-lab-lkp/linux/commits/Robert-Elliott/crypto-tcrypt-test-crc32/20221013-065919
base:   https://git.kernel.org/pub/scm/linux/kernel/git/herbert/cryptodev-2.6.git master
config: x86_64-randconfig-m001
compiler: gcc-11 (Debian 11.3.0-8) 11.3.0

If you fix the issue, kindly add following tag where applicable
| Reported-by: kernel test robot <lkp@intel.com>

smatch warnings:
arch/x86/crypto/serpent_avx2_glue.c:113 serpent_avx2_init() warn: inconsistent indenting
arch/x86/crypto/serpent_avx2_glue.c:115 serpent_avx2_init() warn: ignoring unreachable code.
arch/x86/crypto/aegis128-aesni-glue.c:280 crypto_aegis128_aesni_module_init() warn: inconsistent indenting
arch/x86/crypto/aegis128-aesni-glue.c:282 crypto_aegis128_aesni_module_init() warn: ignoring unreachable code.
arch/x86/crypto/sm4_aesni_avx_glue.c:466 sm4_init() warn: inconsistent indenting
arch/x86/crypto/sm4_aesni_avx_glue.c:468 sm4_init() warn: ignoring unreachable code.

vim +113 arch/x86/crypto/serpent_avx2_glue.c

56d76c96a9f3e3 Jussi Kivilinna 2013-04-13  107  
f16a005cde3b1f Randy Dunlap    2022-03-16  108  static int __init serpent_avx2_init(void)
56d76c96a9f3e3 Jussi Kivilinna 2013-04-13  109  {
534ff06e39292b Ingo Molnar     2015-04-28  110  	const char *feature_name;
15a63fd12ab4d5 Robert Elliott  2022-10-12  111  	int ret;
56d76c96a9f3e3 Jussi Kivilinna 2013-04-13  112  
385e7cb709ad4a Robert Elliott  2022-10-12 @113  		return -ENODEV;
385e7cb709ad4a Robert Elliott  2022-10-12  114  
abcfdfe07de75f Borislav Petkov 2016-04-04 @115  	if (!boot_cpu_has(X86_FEATURE_AVX2) || !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
b54b4bbbf5e931 Ingo Molnar     2015-05-22  116  		pr_info("AVX2 instructions are not detected.\n");
b54b4bbbf5e931 Ingo Molnar     2015-05-22  117  		return -ENODEV;
b54b4bbbf5e931 Ingo Molnar     2015-05-22  118  	}
d91cab78133d33 Dave Hansen     2015-09-02  119  	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
d91cab78133d33 Dave Hansen     2015-09-02  120  				&feature_name)) {
534ff06e39292b Ingo Molnar     2015-04-28  121  		pr_info("CPU feature '%s' is not supported.\n", feature_name);
56d76c96a9f3e3 Jussi Kivilinna 2013-04-13  122  		return -ENODEV;
56d76c96a9f3e3 Jussi Kivilinna 2013-04-13  123  	}
56d76c96a9f3e3 Jussi Kivilinna 2013-04-13  124  
15a63fd12ab4d5 Robert Elliott  2022-10-12  125  	ret = simd_register_skciphers_compat(serpent_algs,
e16bf974b3d965 Eric Biggers    2018-02-19  126  					      ARRAY_SIZE(serpent_algs),
e16bf974b3d965 Eric Biggers    2018-02-19  127  					      serpent_simd_algs);
15a63fd12ab4d5 Robert Elliott  2022-10-12  128  	if (!ret)
15a63fd12ab4d5 Robert Elliott  2022-10-12  129  		pr_info("CPU-optimized crypto module loaded\n");
15a63fd12ab4d5 Robert Elliott  2022-10-12  130  	return ret;
56d76c96a9f3e3 Jussi Kivilinna 2013-04-13  131  }
56d76c96a9f3e3 Jussi Kivilinna 2013-04-13  132
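
Putting the smatch findings together: the bare "return -ENODEV;" at line 113
is the orphaned body of the x86_match_cpu() check that the hunk removed, which
is why the indentation looks inconsistent and everything after it is
unreachable. A sketch of how the function was presumably intended to read,
restoring the guard as in the other glue files (all names taken from the
excerpt above):

	static int __init serpent_avx2_init(void)
	{
		const char *feature_name;
		int ret;

		if (!x86_match_cpu(module_cpu_ids))
			return -ENODEV;

		if (!boot_cpu_has(X86_FEATURE_AVX2) || !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
			pr_info("AVX2 instructions are not detected.\n");
			return -ENODEV;
		}
		if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
					&feature_name)) {
			pr_info("CPU feature '%s' is not supported.\n", feature_name);
			return -ENODEV;
		}

		ret = simd_register_skciphers_compat(serpent_algs,
						     ARRAY_SIZE(serpent_algs),
						     serpent_simd_algs);
		if (!ret)
			pr_info("CPU-optimized crypto module loaded\n");
		return ret;
	}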

Patch

diff --git a/arch/x86/crypto/aegis128-aesni-glue.c b/arch/x86/crypto/aegis128-aesni-glue.c
index 122bfd04ee47..e8eaf79ef220 100644
--- a/arch/x86/crypto/aegis128-aesni-glue.c
+++ b/arch/x86/crypto/aegis128-aesni-glue.c
@@ -275,7 +275,8 @@  static struct simd_aead_alg *simd_alg;
 
 static int __init crypto_aegis128_aesni_module_init(void)
 {
-	if (!x86_match_cpu(module_cpu_ids))
+	int ret;
+
 		return -ENODEV;
 
 	if (!boot_cpu_has(X86_FEATURE_XMM2) ||
@@ -283,8 +284,11 @@  static int __init crypto_aegis128_aesni_module_init(void)
 	    !cpu_has_xfeatures(XFEATURE_MASK_SSE, NULL))
 		return -ENODEV;
 
-	return simd_register_aeads_compat(&crypto_aegis128_aesni_alg, 1,
+	ret = simd_register_aeads_compat(&crypto_aegis128_aesni_alg, 1,
 					  &simd_alg);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit crypto_aegis128_aesni_module_exit(void)
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index df93cb44b4eb..56023ba70049 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -1238,25 +1238,28 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 static int __init aesni_init(void)
 {
 	int err;
+	int enabled_gcm_sse = 0;
+	int enabled_gcm_avx = 0;
+	int enabled_gcm_avx2 = 0;
+	int enabled_ctr_avx = 0;
 
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 #ifdef CONFIG_X86_64
 	if (boot_cpu_has(X86_FEATURE_AVX2)) {
-		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
+		enabled_gcm_avx = 1;
+		enabled_gcm_avx2 = 1;
 		static_branch_enable(&gcm_use_avx);
 		static_branch_enable(&gcm_use_avx2);
-	} else
-	if (boot_cpu_has(X86_FEATURE_AVX)) {
-		pr_info("AVX version of gcm_enc/dec engaged.\n");
+	} else if (boot_cpu_has(X86_FEATURE_AVX)) {
+		enabled_gcm_avx = 1;
 		static_branch_enable(&gcm_use_avx);
 	} else {
-		pr_info("SSE version of gcm_enc/dec engaged.\n");
+		enabled_gcm_sse = 1;
 	}
 	if (boot_cpu_has(X86_FEATURE_AVX)) {
-		/* optimize performance of ctr mode encryption transform */
+		enabled_ctr_avx = 1;
 		static_call_update(aesni_ctr_enc_tfm, aesni_ctr_enc_avx_tfm);
-		pr_info("AES CTR mode by8 optimization enabled\n");
 	}
 #endif /* CONFIG_X86_64 */
 
@@ -1283,6 +1286,11 @@  static int __init aesni_init(void)
 		goto unregister_aeads;
 #endif /* CONFIG_X86_64 */
 
+	pr_info("CPU-optimized crypto module loaded (GCM SSE=%s, AVX=%s, AVX2=%s)(CTR AVX=%s)\n",
+		enabled_gcm_sse ? "yes" : "no",
+		enabled_gcm_avx ? "yes" : "no",
+		enabled_gcm_avx2 ? "yes" : "no",
+		enabled_ctr_avx ? "yes" : "no");
 	return 0;
 
 #ifdef CONFIG_X86_64
diff --git a/arch/x86/crypto/aria_aesni_avx_glue.c b/arch/x86/crypto/aria_aesni_avx_glue.c
index 589097728bd1..d58fb995a266 100644
--- a/arch/x86/crypto/aria_aesni_avx_glue.c
+++ b/arch/x86/crypto/aria_aesni_avx_glue.c
@@ -170,6 +170,8 @@  static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];
 static int __init aria_avx_init(void)
 {
 	const char *feature_name;
+	int ret;
+	int enabled_gfni = 0;
 
 	if (!boot_cpu_has(X86_FEATURE_AVX) ||
 	    !boot_cpu_has(X86_FEATURE_AES) ||
@@ -188,15 +190,20 @@  static int __init aria_avx_init(void)
 		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
 		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
 		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
+		enabled_gfni = 1;
 	} else {
 		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
 		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
 		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
 	}
 
-	return simd_register_skciphers_compat(aria_algs,
-					      ARRAY_SIZE(aria_algs),
-					      aria_simd_algs);
+	ret = simd_register_skciphers_compat(aria_algs,
+					     ARRAY_SIZE(aria_algs),
+					     aria_simd_algs);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded (GFNI=%s)\n",
+			enabled_gfni ? "yes" : "no");
+	return ret;
 }
 
 static void __exit aria_avx_exit(void)
diff --git a/arch/x86/crypto/blake2s-glue.c b/arch/x86/crypto/blake2s-glue.c
index ac7fb7a9922b..4f2f385f6674 100644
--- a/arch/x86/crypto/blake2s-glue.c
+++ b/arch/x86/crypto/blake2s-glue.c
@@ -66,11 +66,16 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init blake2s_mod_init(void)
 {
+	int enabled_ssse3 = 0;
+	int enabled_avx512 = 0;
+
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
-	if (boot_cpu_has(X86_FEATURE_SSSE3))
+	if (boot_cpu_has(X86_FEATURE_SSSE3)) {
+		enabled_ssse3 = 1;
 		static_branch_enable(&blake2s_use_ssse3);
+	}
 
 	if (IS_ENABLED(CONFIG_AS_AVX512) &&
 	    boot_cpu_has(X86_FEATURE_AVX) &&
@@ -78,9 +83,14 @@  static int __init blake2s_mod_init(void)
 	    boot_cpu_has(X86_FEATURE_AVX512F) &&
 	    boot_cpu_has(X86_FEATURE_AVX512VL) &&
 	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM |
-			      XFEATURE_MASK_AVX512, NULL))
+			      XFEATURE_MASK_AVX512, NULL)) {
+		enabled_avx512 = 1;
 		static_branch_enable(&blake2s_use_avx512);
+	}
 
+	pr_info("CPU-optimized crypto module loaded (SSSE3=%s, AVX512=%s)\n",
+		enabled_ssse3 ? "yes" : "no",
+		enabled_avx512 ? "yes" : "no");
 	return 0;
 }
 
diff --git a/arch/x86/crypto/blowfish_glue.c b/arch/x86/crypto/blowfish_glue.c
index 5cfcbb91c4ca..27b7aed9a488 100644
--- a/arch/x86/crypto/blowfish_glue.c
+++ b/arch/x86/crypto/blowfish_glue.c
@@ -336,6 +336,8 @@  static int __init blowfish_init(void)
 	if (err)
 		crypto_unregister_alg(&bf_cipher_alg);
 
+	if (!err)
+		pr_info("CPU-optimized crypto module loaded\n");
 	return err;
 }
 
diff --git a/arch/x86/crypto/camellia_aesni_avx2_glue.c b/arch/x86/crypto/camellia_aesni_avx2_glue.c
index 851f2a29963c..e6c4ed1e40d2 100644
--- a/arch/x86/crypto/camellia_aesni_avx2_glue.c
+++ b/arch/x86/crypto/camellia_aesni_avx2_glue.c
@@ -114,6 +114,7 @@  static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];
 static int __init camellia_aesni_init(void)
 {
 	const char *feature_name;
+	int ret;
 
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
@@ -132,9 +133,12 @@  static int __init camellia_aesni_init(void)
 		return -ENODEV;
 	}
 
-	return simd_register_skciphers_compat(camellia_algs,
+	ret = simd_register_skciphers_compat(camellia_algs,
 					      ARRAY_SIZE(camellia_algs),
 					      camellia_simd_algs);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit camellia_aesni_fini(void)
diff --git a/arch/x86/crypto/camellia_aesni_avx_glue.c b/arch/x86/crypto/camellia_aesni_avx_glue.c
index 8846493c92fb..6a9eadf0fe90 100644
--- a/arch/x86/crypto/camellia_aesni_avx_glue.c
+++ b/arch/x86/crypto/camellia_aesni_avx_glue.c
@@ -113,6 +113,7 @@  static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];
 static int __init camellia_aesni_init(void)
 {
 	const char *feature_name;
+	int ret;
 
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
@@ -130,9 +131,12 @@  static int __init camellia_aesni_init(void)
 		return -ENODEV;
 	}
 
-	return simd_register_skciphers_compat(camellia_algs,
+	ret = simd_register_skciphers_compat(camellia_algs,
 					      ARRAY_SIZE(camellia_algs),
 					      camellia_simd_algs);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit camellia_aesni_fini(void)
diff --git a/arch/x86/crypto/camellia_glue.c b/arch/x86/crypto/camellia_glue.c
index 3c14a904af00..94dd2973bb47 100644
--- a/arch/x86/crypto/camellia_glue.c
+++ b/arch/x86/crypto/camellia_glue.c
@@ -1410,6 +1410,9 @@  static int __init camellia_init(void)
 	if (err)
 		crypto_unregister_alg(&camellia_cipher_alg);
 
+	if (!err)
+		pr_info("CPU-optimized crypto module loaded\n");
+
 	return err;
 }
 
diff --git a/arch/x86/crypto/cast5_avx_glue.c b/arch/x86/crypto/cast5_avx_glue.c
index fdeec0849ab5..b5ae17c3ac53 100644
--- a/arch/x86/crypto/cast5_avx_glue.c
+++ b/arch/x86/crypto/cast5_avx_glue.c
@@ -107,6 +107,7 @@  static struct simd_skcipher_alg *cast5_simd_algs[ARRAY_SIZE(cast5_algs)];
 static int __init cast5_init(void)
 {
 	const char *feature_name;
+	int ret;
 
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
@@ -117,9 +118,12 @@  static int __init cast5_init(void)
 		return -ENODEV;
 	}
 
-	return simd_register_skciphers_compat(cast5_algs,
+	ret = simd_register_skciphers_compat(cast5_algs,
 					      ARRAY_SIZE(cast5_algs),
 					      cast5_simd_algs);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit cast5_exit(void)
diff --git a/arch/x86/crypto/cast6_avx_glue.c b/arch/x86/crypto/cast6_avx_glue.c
index 9258082408eb..d1c14a5f80d7 100644
--- a/arch/x86/crypto/cast6_avx_glue.c
+++ b/arch/x86/crypto/cast6_avx_glue.c
@@ -107,6 +107,7 @@  static struct simd_skcipher_alg *cast6_simd_algs[ARRAY_SIZE(cast6_algs)];
 static int __init cast6_init(void)
 {
 	const char *feature_name;
+	int ret;
 
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
@@ -117,9 +118,12 @@  static int __init cast6_init(void)
 		return -ENODEV;
 	}
 
-	return simd_register_skciphers_compat(cast6_algs,
+	ret = simd_register_skciphers_compat(cast6_algs,
 					      ARRAY_SIZE(cast6_algs),
 					      cast6_simd_algs);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit cast6_exit(void)
diff --git a/arch/x86/crypto/chacha_glue.c b/arch/x86/crypto/chacha_glue.c
index 8e5cadc808b4..de424fbe9f0e 100644
--- a/arch/x86/crypto/chacha_glue.c
+++ b/arch/x86/crypto/chacha_glue.c
@@ -289,6 +289,9 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init chacha_simd_mod_init(void)
 {
+	int ret;
+	int enabled_avx2 = 0;
+	int enabled_avx512 = 0;
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
@@ -298,15 +301,25 @@  static int __init chacha_simd_mod_init(void)
 	if (boot_cpu_has(X86_FEATURE_AVX) &&
 	    boot_cpu_has(X86_FEATURE_AVX2) &&
 	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
+		enabled_avx2 = 1;
 		static_branch_enable(&chacha_use_avx2);
 
 		if (IS_ENABLED(CONFIG_AS_AVX512) &&
 		    boot_cpu_has(X86_FEATURE_AVX512VL) &&
-		    boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
+		    boot_cpu_has(X86_FEATURE_AVX512BW)) { /* kmovq */
+			enabled_avx512 = 1;
 			static_branch_enable(&chacha_use_avx512vl);
+		}
 	}
-	return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
+	ret = IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
 		crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded (AVX2=%s, AVX512=%s)\n",
+			enabled_avx2 ? "yes" : "no",
+			enabled_avx512 ? "yes" : "no");
+	else
+		pr_info("CPU-optimized crypto module not loaded\n");
+	return ret;
 }
 
 static void __exit chacha_simd_mod_fini(void)
diff --git a/arch/x86/crypto/crc32-pclmul_glue.c b/arch/x86/crypto/crc32-pclmul_glue.c
index bc2b31b04e05..c56d3d3ab0a0 100644
--- a/arch/x86/crypto/crc32-pclmul_glue.c
+++ b/arch/x86/crypto/crc32-pclmul_glue.c
@@ -190,9 +190,15 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init crc32_pclmul_mod_init(void)
 {
+	int ret;
+
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
-	return crypto_register_shash(&alg);
+
+	ret = crypto_register_shash(&alg);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit crc32_pclmul_mod_fini(void)
diff --git a/arch/x86/crypto/crc32c-intel_glue.c b/arch/x86/crypto/crc32c-intel_glue.c
index ebf530934a3e..c633d303f19b 100644
--- a/arch/x86/crypto/crc32c-intel_glue.c
+++ b/arch/x86/crypto/crc32c-intel_glue.c
@@ -242,16 +242,27 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init crc32c_intel_mod_init(void)
 {
-	if (!x86_match_cpu(module_cpu_ids))
+	int ret;
+	int pcl_enabled = 0;
+
+	if (!x86_match_cpu(module_cpu_ids)) {
+		pr_info("CPU-optimized crypto module not loaded, required CPU feature (SSE4.2) not supported\n");
 		return -ENODEV;
+	}
+
 #ifdef CONFIG_X86_64
 	if (boot_cpu_has(X86_FEATURE_PCLMULQDQ)) {
+		pcl_enabled = 1;
 		alg.update = crc32c_pcl_intel_update;
 		alg.finup = crc32c_pcl_intel_finup;
 		alg.digest = crc32c_pcl_intel_digest;
 	}
 #endif
-	return crypto_register_shash(&alg);
+	ret = crypto_register_shash(&alg);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded (PCLMULQDQ=%s)\n",
+			pcl_enabled ? "yes" : "no");
+	return ret;
 }
 
 static void __exit crc32c_intel_mod_fini(void)
diff --git a/arch/x86/crypto/crct10dif-pclmul_glue.c b/arch/x86/crypto/crct10dif-pclmul_glue.c
index 03e35a1b7677..4476b9af1e61 100644
--- a/arch/x86/crypto/crct10dif-pclmul_glue.c
+++ b/arch/x86/crypto/crct10dif-pclmul_glue.c
@@ -146,10 +146,15 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init crct10dif_intel_mod_init(void)
 {
+	int ret;
+
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
-	return crypto_register_shash(&alg);
+	ret = crypto_register_shash(&alg);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit crct10dif_intel_mod_fini(void)
diff --git a/arch/x86/crypto/curve25519-x86_64.c b/arch/x86/crypto/curve25519-x86_64.c
index f9a1adb0c183..b9289feef375 100644
--- a/arch/x86/crypto/curve25519-x86_64.c
+++ b/arch/x86/crypto/curve25519-x86_64.c
@@ -1709,15 +1709,24 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init curve25519_mod_init(void)
 {
+	int ret;
+	int enabled_adx = 0;
+
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
-	if (boot_cpu_has(X86_FEATURE_BMI2) && boot_cpu_has(X86_FEATURE_ADX))
+	if (boot_cpu_has(X86_FEATURE_BMI2) && boot_cpu_has(X86_FEATURE_ADX)) {
+		enabled_adx = 1;
 		static_branch_enable(&curve25519_use_bmi2_adx);
+	}
 	else
 		return 0;
-	return IS_REACHABLE(CONFIG_CRYPTO_KPP) ?
+	ret = IS_REACHABLE(CONFIG_CRYPTO_KPP) ?
 		crypto_register_kpp(&curve25519_alg) : 0;
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded (ADX=%s)\n",
+			enabled_adx ? "yes" : "no");
+	return ret;
 }
 
 static void __exit curve25519_mod_exit(void)
diff --git a/arch/x86/crypto/des3_ede_glue.c b/arch/x86/crypto/des3_ede_glue.c
index 83e686a6c2f3..7b4dd02007ed 100644
--- a/arch/x86/crypto/des3_ede_glue.c
+++ b/arch/x86/crypto/des3_ede_glue.c
@@ -384,6 +384,8 @@  static int __init des3_ede_x86_init(void)
 	if (err)
 		crypto_unregister_alg(&des3_ede_cipher);
 
+	if (!err)
+		pr_info("CPU-optimized crypto module loaded\n");
 	return err;
 }
 
diff --git a/arch/x86/crypto/ghash-clmulni-intel_glue.c b/arch/x86/crypto/ghash-clmulni-intel_glue.c
index 3ad55144da48..496a410eaff7 100644
--- a/arch/x86/crypto/ghash-clmulni-intel_glue.c
+++ b/arch/x86/crypto/ghash-clmulni-intel_glue.c
@@ -349,6 +349,7 @@  static int __init ghash_pclmulqdqni_mod_init(void)
 	if (err)
 		goto err_shash;
 
+	pr_info("CPU-optimized crypto module loaded\n");
 	return 0;
 
 err_shash:
diff --git a/arch/x86/crypto/nhpoly1305-avx2-glue.c b/arch/x86/crypto/nhpoly1305-avx2-glue.c
index 40f49107e5a9..2dc7b618771f 100644
--- a/arch/x86/crypto/nhpoly1305-avx2-glue.c
+++ b/arch/x86/crypto/nhpoly1305-avx2-glue.c
@@ -68,6 +68,8 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init nhpoly1305_mod_init(void)
 {
+	int ret;
+
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
@@ -75,7 +77,10 @@  static int __init nhpoly1305_mod_init(void)
 	    !boot_cpu_has(X86_FEATURE_OSXSAVE))
 		return -ENODEV;
 
-	return crypto_register_shash(&nhpoly1305_alg);
+	ret = crypto_register_shash(&nhpoly1305_alg);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit nhpoly1305_mod_exit(void)
diff --git a/arch/x86/crypto/nhpoly1305-sse2-glue.c b/arch/x86/crypto/nhpoly1305-sse2-glue.c
index bb40fed92c92..bf0f8ac7afd6 100644
--- a/arch/x86/crypto/nhpoly1305-sse2-glue.c
+++ b/arch/x86/crypto/nhpoly1305-sse2-glue.c
@@ -68,13 +68,18 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init nhpoly1305_mod_init(void)
 {
+	int ret;
+
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
 	if (!boot_cpu_has(X86_FEATURE_XMM2))
 		return -ENODEV;
 
-	return crypto_register_shash(&nhpoly1305_alg);
+	ret = crypto_register_shash(&nhpoly1305_alg);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit nhpoly1305_mod_exit(void)
diff --git a/arch/x86/crypto/poly1305_glue.c b/arch/x86/crypto/poly1305_glue.c
index a2a7cb39cdec..c9ebb6b90d1f 100644
--- a/arch/x86/crypto/poly1305_glue.c
+++ b/arch/x86/crypto/poly1305_glue.c
@@ -273,22 +273,39 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init poly1305_simd_mod_init(void)
 {
+	int ret;
+	int enabled_avx = 0;
+	int enabled_avx2 = 0;
+	int enabled_avx512 = 0;
+
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
 	if (boot_cpu_has(X86_FEATURE_AVX) &&
-	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL))
+	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
+		enabled_avx = 1;
 		static_branch_enable(&poly1305_use_avx);
+	}
 	if (boot_cpu_has(X86_FEATURE_AVX) && boot_cpu_has(X86_FEATURE_AVX2) &&
-	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL))
+	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
+		enabled_avx2 = 1;
 		static_branch_enable(&poly1305_use_avx2);
+	}
 	if (IS_ENABLED(CONFIG_AS_AVX512) && boot_cpu_has(X86_FEATURE_AVX) &&
 	    boot_cpu_has(X86_FEATURE_AVX2) && boot_cpu_has(X86_FEATURE_AVX512F) &&
 	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM | XFEATURE_MASK_AVX512, NULL) &&
 	    /* Skylake downclocks unacceptably much when using zmm, but later generations are fast. */
-	    boot_cpu_data.x86_model != INTEL_FAM6_SKYLAKE_X)
+	    boot_cpu_data.x86_model != INTEL_FAM6_SKYLAKE_X) {
+		enabled_avx512 = 1;
 		static_branch_enable(&poly1305_use_avx512);
-	return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? crypto_register_shash(&alg) : 0;
+	}
+	ret = IS_REACHABLE(CONFIG_CRYPTO_HASH) ? crypto_register_shash(&alg) : 0;
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded (AVX=%s, AVX2=%s, AVX512=%s)\n",
+			enabled_avx ? "yes" : "no",
+			enabled_avx2 ? "yes" : "no",
+			enabled_avx512 ? "yes" : "no");
+	return ret;
 }
 
 static void __exit poly1305_simd_mod_exit(void)
diff --git a/arch/x86/crypto/polyval-clmulni_glue.c b/arch/x86/crypto/polyval-clmulni_glue.c
index 5a345db20ca9..7a3a80085c90 100644
--- a/arch/x86/crypto/polyval-clmulni_glue.c
+++ b/arch/x86/crypto/polyval-clmulni_glue.c
@@ -183,13 +183,18 @@  MODULE_DEVICE_TABLE(x86cpu, pcmul_cpu_id);
 
 static int __init polyval_clmulni_mod_init(void)
 {
+	int ret;
+
 	if (!x86_match_cpu(pcmul_cpu_id))
 		return -ENODEV;
 
 	if (!boot_cpu_has(X86_FEATURE_AVX))
 		return -ENODEV;
 
-	return crypto_register_shash(&polyval_alg);
+	ret = crypto_register_shash(&polyval_alg);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit polyval_clmulni_mod_exit(void)
diff --git a/arch/x86/crypto/serpent_avx2_glue.c b/arch/x86/crypto/serpent_avx2_glue.c
index 5944bf5ead2e..bf59addaf804 100644
--- a/arch/x86/crypto/serpent_avx2_glue.c
+++ b/arch/x86/crypto/serpent_avx2_glue.c
@@ -108,8 +108,8 @@  static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];
 static int __init serpent_avx2_init(void)
 {
 	const char *feature_name;
+	int ret;
 
-	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
 	if (!boot_cpu_has(X86_FEATURE_AVX2) || !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
@@ -122,9 +122,12 @@  static int __init serpent_avx2_init(void)
 		return -ENODEV;
 	}
 
-	return simd_register_skciphers_compat(serpent_algs,
+	ret = simd_register_skciphers_compat(serpent_algs,
 					      ARRAY_SIZE(serpent_algs),
 					      serpent_simd_algs);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit serpent_avx2_fini(void)
diff --git a/arch/x86/crypto/serpent_avx_glue.c b/arch/x86/crypto/serpent_avx_glue.c
index 45713c7a4cb9..7b0c02a61552 100644
--- a/arch/x86/crypto/serpent_avx_glue.c
+++ b/arch/x86/crypto/serpent_avx_glue.c
@@ -114,6 +114,7 @@  static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];
 static int __init serpent_init(void)
 {
 	const char *feature_name;
+	int ret;
 
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
@@ -124,9 +125,12 @@  static int __init serpent_init(void)
 		return -ENODEV;
 	}
 
-	return simd_register_skciphers_compat(serpent_algs,
+	ret = simd_register_skciphers_compat(serpent_algs,
 					      ARRAY_SIZE(serpent_algs),
 					      serpent_simd_algs);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit serpent_exit(void)
diff --git a/arch/x86/crypto/serpent_sse2_glue.c b/arch/x86/crypto/serpent_sse2_glue.c
index d8aa0d3fbf15..f82880ef6f10 100644
--- a/arch/x86/crypto/serpent_sse2_glue.c
+++ b/arch/x86/crypto/serpent_sse2_glue.c
@@ -116,6 +116,8 @@  static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];
 
 static int __init serpent_sse2_init(void)
 {
+	int ret;
+
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
@@ -124,9 +126,12 @@  static int __init serpent_sse2_init(void)
 		return -ENODEV;
 	}
 
-	return simd_register_skciphers_compat(serpent_algs,
+	ret = simd_register_skciphers_compat(serpent_algs,
 					      ARRAY_SIZE(serpent_algs),
 					      serpent_simd_algs);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit serpent_sse2_exit(void)
diff --git a/arch/x86/crypto/sm3_avx_glue.c b/arch/x86/crypto/sm3_avx_glue.c
index 475b9637a06d..532f07b05745 100644
--- a/arch/x86/crypto/sm3_avx_glue.c
+++ b/arch/x86/crypto/sm3_avx_glue.c
@@ -125,6 +125,7 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 static int __init sm3_avx_mod_init(void)
 {
 	const char *feature_name;
+	int ret;
 
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
@@ -145,7 +146,10 @@  static int __init sm3_avx_mod_init(void)
 		return -ENODEV;
 	}
 
-	return crypto_register_shash(&sm3_avx_alg);
+	ret = crypto_register_shash(&sm3_avx_alg);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit sm3_avx_mod_exit(void)
diff --git a/arch/x86/crypto/sm4_aesni_avx2_glue.c b/arch/x86/crypto/sm4_aesni_avx2_glue.c
index 3fe9e170b880..42819ee5d36d 100644
--- a/arch/x86/crypto/sm4_aesni_avx2_glue.c
+++ b/arch/x86/crypto/sm4_aesni_avx2_glue.c
@@ -143,6 +143,7 @@  simd_sm4_aesni_avx2_skciphers[ARRAY_SIZE(sm4_aesni_avx2_skciphers)];
 static int __init sm4_init(void)
 {
 	const char *feature_name;
+	int ret;
 
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
@@ -161,9 +162,12 @@  static int __init sm4_init(void)
 		return -ENODEV;
 	}
 
-	return simd_register_skciphers_compat(sm4_aesni_avx2_skciphers,
+	ret = simd_register_skciphers_compat(sm4_aesni_avx2_skciphers,
 					ARRAY_SIZE(sm4_aesni_avx2_skciphers),
 					simd_sm4_aesni_avx2_skciphers);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit sm4_exit(void)
diff --git a/arch/x86/crypto/sm4_aesni_avx_glue.c b/arch/x86/crypto/sm4_aesni_avx_glue.c
index 14ae012948ae..8a25376d341f 100644
--- a/arch/x86/crypto/sm4_aesni_avx_glue.c
+++ b/arch/x86/crypto/sm4_aesni_avx_glue.c
@@ -461,8 +461,8 @@  simd_sm4_aesni_avx_skciphers[ARRAY_SIZE(sm4_aesni_avx_skciphers)];
 static int __init sm4_init(void)
 {
 	const char *feature_name;
+	int ret;
 
-	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
 	if (!boot_cpu_has(X86_FEATURE_AVX) ||
@@ -478,9 +478,12 @@  static int __init sm4_init(void)
 		return -ENODEV;
 	}
 
-	return simd_register_skciphers_compat(sm4_aesni_avx_skciphers,
+	ret = simd_register_skciphers_compat(sm4_aesni_avx_skciphers,
 					ARRAY_SIZE(sm4_aesni_avx_skciphers),
 					simd_sm4_aesni_avx_skciphers);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit sm4_exit(void)
diff --git a/arch/x86/crypto/twofish_avx_glue.c b/arch/x86/crypto/twofish_avx_glue.c
index 044e4f92e2c0..ccf016bf6ef2 100644
--- a/arch/x86/crypto/twofish_avx_glue.c
+++ b/arch/x86/crypto/twofish_avx_glue.c
@@ -117,6 +117,7 @@  static struct simd_skcipher_alg *twofish_simd_algs[ARRAY_SIZE(twofish_algs)];
 static int __init twofish_init(void)
 {
 	const char *feature_name;
+	int ret;
 
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
@@ -126,9 +127,12 @@  static int __init twofish_init(void)
 		return -ENODEV;
 	}
 
-	return simd_register_skciphers_compat(twofish_algs,
-					      ARRAY_SIZE(twofish_algs),
-					      twofish_simd_algs);
+	ret = simd_register_skciphers_compat(twofish_algs,
+					     ARRAY_SIZE(twofish_algs),
+					     twofish_simd_algs);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit twofish_exit(void)
diff --git a/arch/x86/crypto/twofish_glue.c b/arch/x86/crypto/twofish_glue.c
index 031ed290c755..5756b9cab982 100644
--- a/arch/x86/crypto/twofish_glue.c
+++ b/arch/x86/crypto/twofish_glue.c
@@ -92,10 +92,15 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init twofish_glue_init(void)
 {
+	int ret;
+
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
-	return crypto_register_alg(&alg);
+	ret = crypto_register_alg(&alg);
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit twofish_glue_fini(void)
diff --git a/arch/x86/crypto/twofish_glue_3way.c b/arch/x86/crypto/twofish_glue_3way.c
index 7e2a18e3abe7..2fde637b40c8 100644
--- a/arch/x86/crypto/twofish_glue_3way.c
+++ b/arch/x86/crypto/twofish_glue_3way.c
@@ -151,6 +151,8 @@  MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);
 
 static int __init twofish_3way_init(void)
 {
+	int ret;
+
 	if (!x86_match_cpu(module_cpu_ids))
 		return -ENODEV;
 
@@ -162,8 +164,11 @@  static int __init twofish_3way_init(void)
 		return -ENODEV;
 	}
 
-	return crypto_register_skciphers(tf_skciphers,
-					 ARRAY_SIZE(tf_skciphers));
+	ret = crypto_register_skciphers(tf_skciphers,
+					ARRAY_SIZE(tf_skciphers));
+	if (!ret)
+		pr_info("CPU-optimized crypto module loaded\n");
+	return ret;
 }
 
 static void __exit twofish_3way_fini(void)