[PATCH 08/12] crypto: adiantum - Use memcpy_{to,from}_sglist()

Eric Biggers ebiggers at kernel.org
Wed Dec 10 17:18:40 PST 2025


Call the newer, easier-to-read functions memcpy_to_sglist() and
memcpy_from_sglist() directly instead of calling
scatterwalk_map_and_copy().  No change in behavior.
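For reference, the old call encoded the copy direction in its final
"out" argument, while the new helpers encode it in the function name
and argument order.  A minimal sketch of the equivalence, based on the
hunks below (the helpers are declared in include/crypto/scatterwalk.h):

	/* old: direction chosen by the last ("out") argument */
	scatterwalk_map_and_copy(buf, sg, start, nbytes, 1); /* buf -> sg */
	scatterwalk_map_and_copy(buf, sg, start, nbytes, 0); /* sg -> buf */

	/* new: direction is explicit in the name */
	memcpy_to_sglist(sg, start, buf, nbytes);	/* buf -> sg */
	memcpy_from_sglist(buf, sg, start, nbytes);	/* sg -> buf */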

Signed-off-by: Eric Biggers <ebiggers at kernel.org>
---
 crypto/adiantum.c | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/crypto/adiantum.c b/crypto/adiantum.c
index 519e95228ad8..6d882f926ab0 100644
--- a/crypto/adiantum.c
+++ b/crypto/adiantum.c
@@ -425,12 +425,12 @@ static int adiantum_finish(struct skcipher_request *req)
 		kunmap_local(virt);
 	} else {
 		/* Slow path that works for any destination scatterlist */
 		adiantum_hash_message(req, dst, &digest);
 		le128_sub(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);
-		scatterwalk_map_and_copy(&rctx->rbuf.bignum, dst,
-					 bulk_len, sizeof(le128), 1);
+		memcpy_to_sglist(dst, bulk_len, &rctx->rbuf.bignum,
+				 sizeof(le128));
 	}
 	return 0;
 }
 
 static void adiantum_streamcipher_done(void *data, int err)
@@ -475,12 +475,12 @@ static int adiantum_crypt(struct skcipher_request *req, bool enc)
 		memcpy(&rctx->rbuf.bignum, virt + bulk_len, sizeof(le128));
 		kunmap_local(virt);
 	} else {
 		/* Slow path that works for any source scatterlist */
 		adiantum_hash_message(req, src, &digest);
-		scatterwalk_map_and_copy(&rctx->rbuf.bignum, src,
-					 bulk_len, sizeof(le128), 0);
+		memcpy_from_sglist(&rctx->rbuf.bignum, src, bulk_len,
+				   sizeof(le128));
 	}
 	le128_add(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &rctx->header_hash);
 	le128_add(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);
 
 	/* If encrypting, encrypt P_M with the block cipher to get C_M */
-- 
2.52.0



