[PATCH 12/15] crypto: xilinx: Replace zynqmp prefix with xilinx

Harsh Jain h.jain at amd.com
Wed Oct 29 03:21:55 PDT 2025


Replace the zynqmp prefix with xilinx to use a more generic name.

Signed-off-by: Harsh Jain <h.jain at amd.com>
---
 drivers/crypto/xilinx/zynqmp-aes-gcm.c | 130 ++++++++++++-------------
 1 file changed, 64 insertions(+), 66 deletions(-)

diff --git a/drivers/crypto/xilinx/zynqmp-aes-gcm.c b/drivers/crypto/xilinx/zynqmp-aes-gcm.c
index e3e7aef87571..b0ebb4971608 100644
--- a/drivers/crypto/xilinx/zynqmp-aes-gcm.c
+++ b/drivers/crypto/xilinx/zynqmp-aes-gcm.c
@@ -19,10 +19,8 @@
 #include <linux/string.h>
 
 #define ZYNQMP_DMA_BIT_MASK	32U
-
-#define ZYNQMP_AES_KEY_SIZE		AES_KEYSIZE_256
-#define ZYNQMP_AES_AUTH_SIZE		16U
-#define ZYNQMP_AES_BLK_SIZE		1U
+#define XILINX_AES_AUTH_SIZE		16U
+#define XILINX_AES_BLK_SIZE		1U
 #define ZYNQMP_AES_MIN_INPUT_BLK_SIZE	4U
 #define ZYNQMP_AES_WORD_LEN		4U
 
@@ -31,9 +29,9 @@
 #define ZYNQMP_AES_PUF_NOT_PROGRAMMED	0xE300
 #define XILINX_KEY_MAGIC		0x3EA0
 
-enum zynqmp_aead_op {
-	ZYNQMP_AES_DECRYPT = 0,
-	ZYNQMP_AES_ENCRYPT
+enum xilinx_aead_op {
+	XILINX_AES_DECRYPT = 0,
+	XILINX_AES_ENCRYPT
 };
 
 enum zynqmp_aead_keysrc {
@@ -42,7 +40,7 @@ enum zynqmp_aead_keysrc {
 	ZYNQMP_AES_PUF_KEY
 };
 
-struct zynqmp_aead_drv_ctx {
+struct xilinx_aead_drv_ctx {
 	struct aead_engine_alg aead;
 	struct device *dev;
 	struct crypto_engine *engine;
@@ -63,7 +61,7 @@ struct zynqmp_aead_hw_req {
 	u64 keysrc;
 };
 
-struct zynqmp_aead_tfm_ctx {
+struct xilinx_aead_tfm_ctx {
 	struct device *dev;
 	dma_addr_t key_dma_addr;
 	u8 *key;
@@ -74,13 +72,13 @@ struct zynqmp_aead_tfm_ctx {
 };
 
 struct zynqmp_aead_req_ctx {
-	enum zynqmp_aead_op op;
+	enum xilinx_aead_op op;
 };
 
 static int zynqmp_aes_aead_cipher(struct aead_request *req)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct zynqmp_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead);
+	struct xilinx_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead);
 	struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);
 	dma_addr_t dma_addr_data, dma_addr_hw_req;
 	struct device *dev = tfm_ctx->dev;
@@ -92,7 +90,7 @@ static int zynqmp_aes_aead_cipher(struct aead_request *req)
 	void *dmabuf;
 	char *kbuf;
 
-	dma_size = req->cryptlen + ZYNQMP_AES_AUTH_SIZE;
+	dma_size = req->cryptlen + XILINX_AES_AUTH_SIZE;
 	kbuf = kmalloc(dma_size, GFP_KERNEL);
 	if (!kbuf)
 		return -ENOMEM;
@@ -117,10 +115,10 @@ static int zynqmp_aes_aead_cipher(struct aead_request *req)
 	hwreq->keysrc = tfm_ctx->keysrc;
 	hwreq->op = rq_ctx->op;
 
-	if (hwreq->op == ZYNQMP_AES_ENCRYPT)
+	if (hwreq->op == XILINX_AES_ENCRYPT)
 		hwreq->size = data_size;
 	else
-		hwreq->size = data_size - ZYNQMP_AES_AUTH_SIZE;
+		hwreq->size = data_size - XILINX_AES_AUTH_SIZE;
 
 	if (hwreq->keysrc == ZYNQMP_AES_KUP_KEY)
 		hwreq->key = tfm_ctx->key_dma_addr;
@@ -162,10 +160,10 @@ static int zynqmp_aes_aead_cipher(struct aead_request *req)
 			break;
 		}
 	} else {
-		if (hwreq->op == ZYNQMP_AES_ENCRYPT)
+		if (hwreq->op == XILINX_AES_ENCRYPT)
 			data_size = data_size + crypto_aead_authsize(aead);
 		else
-			data_size = data_size - ZYNQMP_AES_AUTH_SIZE;
+			data_size = data_size - XILINX_AES_AUTH_SIZE;
 
 		sg_copy_from_buffer(req->dst, sg_nents(req->dst),
 				    kbuf, data_size);
@@ -181,12 +179,12 @@ static int zynqmp_aes_aead_cipher(struct aead_request *req)
 	return ret;
 }
 
-static int zynqmp_fallback_check(struct zynqmp_aead_tfm_ctx *tfm_ctx,
+static int zynqmp_fallback_check(struct xilinx_aead_tfm_ctx *tfm_ctx,
 				 struct aead_request *req)
 {
 	struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);
 
-	if (tfm_ctx->authsize != ZYNQMP_AES_AUTH_SIZE && rq_ctx->op == ZYNQMP_AES_DECRYPT)
+	if (tfm_ctx->authsize != XILINX_AES_AUTH_SIZE && rq_ctx->op == XILINX_AES_DECRYPT)
 		return 1;
 
 	if (req->assoclen != 0 ||
@@ -199,14 +197,14 @@ static int zynqmp_fallback_check(struct zynqmp_aead_tfm_ctx *tfm_ctx,
 	if ((req->cryptlen % ZYNQMP_AES_WORD_LEN) != 0)
 		return 1;
 
-	if (rq_ctx->op == ZYNQMP_AES_DECRYPT &&
-	    req->cryptlen <= ZYNQMP_AES_AUTH_SIZE)
+	if (rq_ctx->op == XILINX_AES_DECRYPT &&
+	    req->cryptlen <= XILINX_AES_AUTH_SIZE)
 		return 1;
 
 	return 0;
 }
 
-static int zynqmp_handle_aes_req(struct crypto_engine *engine, void *req)
+static int xilinx_handle_aes_req(struct crypto_engine *engine, void *req)
 {
 	struct aead_request *areq =
 				container_of(req, struct aead_request, base);
@@ -224,7 +222,7 @@ static int zynqmp_aes_aead_setkey(struct crypto_aead *aead, const u8 *key,
 				  unsigned int keylen)
 {
 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
-	struct zynqmp_aead_tfm_ctx *tfm_ctx = crypto_tfm_ctx(tfm);
+	struct xilinx_aead_tfm_ctx *tfm_ctx = crypto_tfm_ctx(tfm);
 	struct xilinx_hwkey_info hwkey;
 	unsigned char keysrc;
 	int err;
@@ -244,11 +242,11 @@ static int zynqmp_aes_aead_setkey(struct crypto_aead *aead, const u8 *key,
 		return -EINVAL;
 	}
 
-	if (keylen == ZYNQMP_AES_KEY_SIZE && tfm_ctx->keysrc == ZYNQMP_AES_KUP_KEY) {
+	if (keylen == AES_KEYSIZE_256 && tfm_ctx->keysrc == ZYNQMP_AES_KUP_KEY) {
 		tfm_ctx->keylen = keylen;
 		memcpy(tfm_ctx->key, key, keylen);
 		dma_sync_single_for_device(tfm_ctx->dev, tfm_ctx->key_dma_addr,
-					   ZYNQMP_AES_KEY_SIZE,
+					   AES_KEYSIZE_256,
 					   DMA_TO_DEVICE);
 	} else if (tfm_ctx->keysrc != ZYNQMP_AES_KUP_KEY) {
 		return -EINVAL;
@@ -265,12 +263,12 @@ static int zynqmp_aes_aead_setkey(struct crypto_aead *aead, const u8 *key,
 	return err;
 }
 
-static int zynqmp_aes_aead_setauthsize(struct crypto_aead *aead,
+static int xilinx_aes_aead_setauthsize(struct crypto_aead *aead,
 				       unsigned int authsize)
 {
 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
-	struct zynqmp_aead_tfm_ctx *tfm_ctx =
-			(struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
+	struct xilinx_aead_tfm_ctx *tfm_ctx =
+			(struct xilinx_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
 
 	tfm_ctx->authsize = authsize;
 	return crypto_aead_setauthsize(tfm_ctx->fbk_cipher, authsize);
@@ -281,17 +279,17 @@ static int zynqmp_aes_aead_encrypt(struct aead_request *req)
 	struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);
 	struct aead_request *subreq = aead_request_ctx(req);
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct zynqmp_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead);
+	struct xilinx_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead);
 	struct aead_alg *alg = crypto_aead_alg(aead);
-	struct zynqmp_aead_drv_ctx *drv_ctx;
+	struct xilinx_aead_drv_ctx *drv_ctx;
 	int err;
 
-	drv_ctx = container_of(alg, struct zynqmp_aead_drv_ctx, aead.base);
+	drv_ctx = container_of(alg, struct xilinx_aead_drv_ctx, aead.base);
 	if (tfm_ctx->keysrc == ZYNQMP_AES_KUP_KEY &&
 	    tfm_ctx->keylen == sizeof(struct xilinx_hwkey_info))
 		return -EINVAL;
 
-	rq_ctx->op = ZYNQMP_AES_ENCRYPT;
+	rq_ctx->op = XILINX_AES_ENCRYPT;
 	err = zynqmp_fallback_check(tfm_ctx, req);
 	if (err && tfm_ctx->keysrc != ZYNQMP_AES_KUP_KEY)
 		return -EOPNOTSUPP;
@@ -316,13 +314,13 @@ static int zynqmp_aes_aead_decrypt(struct aead_request *req)
 	struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);
 	struct aead_request *subreq = aead_request_ctx(req);
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct zynqmp_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead);
+	struct xilinx_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead);
 	struct aead_alg *alg = crypto_aead_alg(aead);
-	struct zynqmp_aead_drv_ctx *drv_ctx;
+	struct xilinx_aead_drv_ctx *drv_ctx;
 	int err;
 
-	rq_ctx->op = ZYNQMP_AES_DECRYPT;
-	drv_ctx = container_of(alg, struct zynqmp_aead_drv_ctx, aead.base);
+	rq_ctx->op = XILINX_AES_DECRYPT;
+	drv_ctx = container_of(alg, struct xilinx_aead_drv_ctx, aead.base);
 	if (tfm_ctx->keysrc == ZYNQMP_AES_KUP_KEY &&
 	    tfm_ctx->keylen == sizeof(struct xilinx_hwkey_info))
 		return -EINVAL;
@@ -344,15 +342,15 @@ static int zynqmp_aes_aead_decrypt(struct aead_request *req)
 	return crypto_transfer_aead_request_to_engine(drv_ctx->engine, req);
 }
 
-static int zynqmp_aes_aead_init(struct crypto_aead *aead)
+static int xilinx_aes_aead_init(struct crypto_aead *aead)
 {
 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
-	struct zynqmp_aead_tfm_ctx *tfm_ctx =
-		(struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
-	struct zynqmp_aead_drv_ctx *drv_ctx;
+	struct xilinx_aead_tfm_ctx *tfm_ctx =
+		(struct xilinx_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
+	struct xilinx_aead_drv_ctx *drv_ctx;
 	struct aead_alg *alg = crypto_aead_alg(aead);
 
-	drv_ctx = container_of(alg, struct zynqmp_aead_drv_ctx, aead.base);
+	drv_ctx = container_of(alg, struct xilinx_aead_drv_ctx, aead.base);
 	tfm_ctx->dev = drv_ctx->dev;
 	tfm_ctx->keylen = 0;
 	tfm_ctx->keysrc = ZYNQMP_AES_KUP_KEY;
@@ -366,13 +364,13 @@ static int zynqmp_aes_aead_init(struct crypto_aead *aead)
 		       __func__, drv_ctx->aead.base.base.cra_name);
 		return PTR_ERR(tfm_ctx->fbk_cipher);
 	}
-	tfm_ctx->key = kmalloc(ZYNQMP_AES_KEY_SIZE, GFP_KERNEL);
+	tfm_ctx->key = kmalloc(AES_KEYSIZE_256, GFP_KERNEL);
 	if (!tfm_ctx->key) {
 		crypto_free_aead(tfm_ctx->fbk_cipher);
 		return -ENOMEM;
 	}
 	tfm_ctx->key_dma_addr = dma_map_single(tfm_ctx->dev, tfm_ctx->key,
-					       ZYNQMP_AES_KEY_SIZE,
+					       AES_KEYSIZE_256,
 					       DMA_TO_DEVICE);
 	if (unlikely(dma_mapping_error(tfm_ctx->dev, tfm_ctx->key_dma_addr))) {
 		kfree(tfm_ctx->key);
@@ -387,31 +385,31 @@ static int zynqmp_aes_aead_init(struct crypto_aead *aead)
 	return 0;
 }
 
-static void zynqmp_aes_aead_exit(struct crypto_aead *aead)
+static void xilinx_aes_aead_exit(struct crypto_aead *aead)
 {
 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
-	struct zynqmp_aead_tfm_ctx *tfm_ctx =
-			(struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
+	struct xilinx_aead_tfm_ctx *tfm_ctx =
+			(struct xilinx_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
 
-	dma_unmap_single(tfm_ctx->dev, tfm_ctx->key_dma_addr, ZYNQMP_AES_KEY_SIZE, DMA_TO_DEVICE);
+	dma_unmap_single(tfm_ctx->dev, tfm_ctx->key_dma_addr, AES_KEYSIZE_256, DMA_TO_DEVICE);
 	kfree(tfm_ctx->key);
 	if (tfm_ctx->fbk_cipher) {
 		crypto_free_aead(tfm_ctx->fbk_cipher);
 		tfm_ctx->fbk_cipher = NULL;
 	}
-	memzero_explicit(tfm_ctx, sizeof(struct zynqmp_aead_tfm_ctx));
+	memzero_explicit(tfm_ctx, sizeof(struct xilinx_aead_tfm_ctx));
 }
 
-static struct zynqmp_aead_drv_ctx zynqmp_aes_drv_ctx = {
+static struct xilinx_aead_drv_ctx zynqmp_aes_drv_ctx = {
 	.aead.base = {
 		.setkey		= zynqmp_aes_aead_setkey,
-		.setauthsize	= zynqmp_aes_aead_setauthsize,
+		.setauthsize	= xilinx_aes_aead_setauthsize,
 		.encrypt	= zynqmp_aes_aead_encrypt,
 		.decrypt	= zynqmp_aes_aead_decrypt,
-		.init		= zynqmp_aes_aead_init,
-		.exit		= zynqmp_aes_aead_exit,
+		.init		= xilinx_aes_aead_init,
+		.exit		= xilinx_aes_aead_exit,
 		.ivsize		= GCM_AES_IV_SIZE,
-		.maxauthsize	= ZYNQMP_AES_AUTH_SIZE,
+		.maxauthsize	= XILINX_AES_AUTH_SIZE,
 		.base = {
 		.cra_name		= "gcm(aes)",
 		.cra_driver_name	= "xilinx-zynqmp-aes-gcm",
@@ -421,13 +419,13 @@ static struct zynqmp_aead_drv_ctx zynqmp_aes_drv_ctx = {
 					  CRYPTO_ALG_ALLOCATES_MEMORY |
 					  CRYPTO_ALG_KERN_DRIVER_ONLY |
 					  CRYPTO_ALG_NEED_FALLBACK,
-		.cra_blocksize		= ZYNQMP_AES_BLK_SIZE,
-		.cra_ctxsize		= sizeof(struct zynqmp_aead_tfm_ctx),
+		.cra_blocksize		= XILINX_AES_BLK_SIZE,
+		.cra_ctxsize		= sizeof(struct xilinx_aead_tfm_ctx),
 		.cra_module		= THIS_MODULE,
 		}
 	},
 	.aead.op = {
-		.do_one_request = zynqmp_handle_aes_req,
+		.do_one_request = xilinx_handle_aes_req,
 	},
 };
 
@@ -440,9 +438,9 @@ static struct xlnx_feature aes_feature_map[] = {
 	{ /* sentinel */ }
 };
 
-static int zynqmp_aes_aead_probe(struct platform_device *pdev)
+static int xilinx_aes_aead_probe(struct platform_device *pdev)
 {
-	struct zynqmp_aead_drv_ctx *aes_drv_ctx;
+	struct xilinx_aead_drv_ctx *aes_drv_ctx;
 	struct device *dev = &pdev->dev;
 	int err;
 
@@ -493,18 +491,18 @@ static int zynqmp_aes_aead_probe(struct platform_device *pdev)
 	return err;
 }
 
-static void zynqmp_aes_aead_remove(struct platform_device *pdev)
+static void xilinx_aes_aead_remove(struct platform_device *pdev)
 {
-	struct zynqmp_aead_drv_ctx *aes_drv_ctx;
+	struct xilinx_aead_drv_ctx *aes_drv_ctx;
 
 	aes_drv_ctx = platform_get_drvdata(pdev);
 	crypto_engine_exit(aes_drv_ctx->engine);
 	crypto_engine_unregister_aead(&aes_drv_ctx->aead);
 }
 
-static struct platform_driver zynqmp_aes_driver = {
-	.probe	= zynqmp_aes_aead_probe,
-	.remove = zynqmp_aes_aead_remove,
+static struct platform_driver xilinx_aes_driver = {
+	.probe	= xilinx_aes_aead_probe,
+	.remove = xilinx_aes_aead_remove,
 	.driver = {
 		.name		= "zynqmp-aes",
 	},
@@ -516,15 +514,15 @@ static int __init aes_driver_init(void)
 {
 	int ret;
 
-	ret = platform_driver_register(&zynqmp_aes_driver);
+	ret = platform_driver_register(&xilinx_aes_driver);
 	if (ret)
 		return ret;
 
-	platform_dev = platform_device_register_simple(zynqmp_aes_driver.driver.name,
+	platform_dev = platform_device_register_simple(xilinx_aes_driver.driver.name,
 						       0, NULL, 0);
 	if (IS_ERR(platform_dev)) {
 		ret = PTR_ERR(platform_dev);
-		platform_driver_unregister(&zynqmp_aes_driver);
+		platform_driver_unregister(&xilinx_aes_driver);
 	}
 
 	return ret;
@@ -533,7 +531,7 @@ static int __init aes_driver_init(void)
 static void __exit aes_driver_exit(void)
 {
 	platform_device_unregister(platform_dev);
-	platform_driver_unregister(&zynqmp_aes_driver);
+	platform_driver_unregister(&xilinx_aes_driver);
 }
 
 module_init(aes_driver_init);
-- 
2.49.1




More information about the linux-arm-kernel mailing list