
[2/3] crypto: aead/shash - yield at end of operations

Message ID 20221219203733.3063192-3-elliott@hpe.com
State: New
Series: [1/3] crypto: skcipher - always yield at end of walk

Commit Message

Elliott, Robert (Servers) Dec. 19, 2022, 8:37 p.m. UTC
Add crypto_yield() calls at the end of the aead encrypt and decrypt
functions and the shash update, final, finup, and digest functions to
let the scheduler use the CPU after what may have been a long stretch
inside the crypto driver.

This reduces RCU stalls and soft lockups when crypto functions that
lack their own yield calls (e.g., the aligned generic code paths) are
run back-to-back.

Signed-off-by: Robert Elliott <elliott@hpe.com>
---
 crypto/aead.c  |  4 ++++
 crypto/shash.c | 32 ++++++++++++++++++++++++--------
 2 files changed, 28 insertions(+), 8 deletions(-)
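
For reference, crypto_yield() is a thin helper; in kernels of this era
it amounts to a conditional cond_resched(), roughly as sketched below
(a sketch of include/crypto/algapi.h, not part of this patch), so the
calls added here only reschedule when the transform allows sleeping.

	static inline void crypto_yield(u32 flags)
	{
		/* Only give up the CPU if the caller may sleep. */
		if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
			cond_resched();
	}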

Patch

diff --git a/crypto/aead.c b/crypto/aead.c
index 16991095270d..f88378f4d4f5 100644
--- a/crypto/aead.c
+++ b/crypto/aead.c
@@ -93,6 +93,8 @@  int crypto_aead_encrypt(struct aead_request *req)
 	else
 		ret = crypto_aead_alg(aead)->encrypt(req);
 	crypto_stats_aead_encrypt(cryptlen, alg, ret);
+
+	crypto_yield(crypto_aead_get_flags(aead));
 	return ret;
 }
 EXPORT_SYMBOL_GPL(crypto_aead_encrypt);
@@ -112,6 +114,8 @@  int crypto_aead_decrypt(struct aead_request *req)
 	else
 		ret = crypto_aead_alg(aead)->decrypt(req);
 	crypto_stats_aead_decrypt(cryptlen, alg, ret);
+
+	crypto_yield(crypto_aead_get_flags(aead));
 	return ret;
 }
 EXPORT_SYMBOL_GPL(crypto_aead_decrypt);
diff --git a/crypto/shash.c b/crypto/shash.c
index 868b6ba2b3b7..6fea17a50048 100644
--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -114,11 +114,15 @@  int crypto_shash_update(struct shash_desc *desc, const u8 *data,
 	struct crypto_shash *tfm = desc->tfm;
 	struct shash_alg *shash = crypto_shash_alg(tfm);
 	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	int ret;
 
 	if ((unsigned long)data & alignmask)
-		return shash_update_unaligned(desc, data, len);
+		ret = shash_update_unaligned(desc, data, len);
+	else
+		ret = shash->update(desc, data, len);
 
-	return shash->update(desc, data, len);
+	crypto_yield(crypto_shash_get_flags(tfm));
+	return ret;
 }
 EXPORT_SYMBOL_GPL(crypto_shash_update);
 
@@ -155,11 +159,15 @@  int crypto_shash_final(struct shash_desc *desc, u8 *out)
 	struct crypto_shash *tfm = desc->tfm;
 	struct shash_alg *shash = crypto_shash_alg(tfm);
 	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	int ret;
 
 	if ((unsigned long)out & alignmask)
-		return shash_final_unaligned(desc, out);
+		ret = shash_final_unaligned(desc, out);
+	else
+		ret = shash->final(desc, out);
 
-	return shash->final(desc, out);
+	crypto_yield(crypto_shash_get_flags(tfm));
+	return ret;
 }
 EXPORT_SYMBOL_GPL(crypto_shash_final);
 
@@ -176,11 +184,15 @@  int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
 	struct crypto_shash *tfm = desc->tfm;
 	struct shash_alg *shash = crypto_shash_alg(tfm);
 	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	int ret;
 
 	if (((unsigned long)data | (unsigned long)out) & alignmask)
-		return shash_finup_unaligned(desc, data, len, out);
+		ret = shash_finup_unaligned(desc, data, len, out);
+	else
+		ret = shash->finup(desc, data, len, out);
 
-	return shash->finup(desc, data, len, out);
+	crypto_yield(crypto_shash_get_flags(tfm));
+	return ret;
 }
 EXPORT_SYMBOL_GPL(crypto_shash_finup);
 
@@ -197,14 +209,18 @@  int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
 	struct crypto_shash *tfm = desc->tfm;
 	struct shash_alg *shash = crypto_shash_alg(tfm);
 	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	int ret;
 
 	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
 		return -ENOKEY;
 
 	if (((unsigned long)data | (unsigned long)out) & alignmask)
-		return shash_digest_unaligned(desc, data, len, out);
+		ret = shash_digest_unaligned(desc, data, len, out);
+	else
+		ret = shash->digest(desc, data, len, out);
 
-	return shash->digest(desc, data, len, out);
+	crypto_yield(crypto_shash_get_flags(tfm));
+	return ret;
 }
 EXPORT_SYMBOL_GPL(crypto_shash_digest);
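
A usage sketch (illustrative, not part of the patch): the yield paths
above read the tfm flags, so a process-context caller would need
CRYPTO_TFM_REQ_MAY_SLEEP set on the transform for cond_resched() to
actually run. A minimal shash example, with the "sha256" algorithm and
the example_sha256() name chosen only for illustration and error
handling kept short:

	#include <crypto/hash.h>

	static int example_sha256(const u8 *data, unsigned int len, u8 *out)
	{
		struct crypto_shash *tfm;
		int ret;

		tfm = crypto_alloc_shash("sha256", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		/* Let the core cond_resched() at the end of each operation. */
		crypto_shash_set_flags(tfm, CRYPTO_TFM_REQ_MAY_SLEEP);

		{
			SHASH_DESC_ON_STACK(desc, tfm);

			desc->tfm = tfm;
			ret = crypto_shash_digest(desc, data, len, out);
		}

		crypto_free_shash(tfm);
		return ret;
	}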