From: Nicolas Toromanoff <nicolas.toromanoff@st.com>
[ Upstream commit 10b89c43a64eb0d236903b79a3bc9d8f6cbfd9c7 ]
Ensure the CRC algorithms are registered only once in the crypto framework when several instances of the CRC device are present.
Update the CRC device list management so that all CRC instances are used, not only the first one.
Fixes: b51dbe90912a ("crypto: stm32 - Support for STM32 CRC32 crypto module")
Signed-off-by: Nicolas Toromanoff <nicolas.toromanoff@st.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Sasha Levin <sashal@kernel.org>
Signed-off-by: Yang Yingliang <yangyingliang@huawei.com>
---
 drivers/crypto/stm32/stm32_crc32.c | 48 ++++++++++++++++++++++--------
 1 file changed, 36 insertions(+), 12 deletions(-)
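For context, here is a minimal standalone userspace sketch (not the driver code itself) of the two patterns the diff below applies: a mutex-protected reference count so the shash algorithms are registered on the first probe and unregistered on the last remove, and a round-robin pick that rotates the device list so every CRC instance gets used. All names in it (struct crc_dev, fake_register(), probe_one(), get_next_crc()) are illustrative stand-ins, not driver symbols.

/*
 * Minimal userspace sketch of the two patterns applied below.  This is
 * NOT the driver code: struct crc_dev, fake_register(), fake_unregister(),
 * probe_one(), remove_one() and get_next_crc() are illustrative stand-ins.
 */
#include <pthread.h>
#include <stdio.h>

struct crc_dev {
	int id;
	struct crc_dev *next;		/* stand-in for the kernel list_head */
};

static struct crc_dev devs[2] = {
	{ .id = 0, .next = &devs[1] },
	{ .id = 1, .next = &devs[0] },
};
static struct crc_dev *head = &devs[0];	/* stand-in for crc_list.dev_list */
static pthread_mutex_t list_lock = PTHREAD_MUTEX_INITIALIZER;

/* Round-robin pick: return the current head and advance it, the same idea
 * as rotating the entry with list_move_tail() under crc_list.lock. */
static struct crc_dev *get_next_crc(void)
{
	struct crc_dev *crc;

	pthread_mutex_lock(&list_lock);
	crc = head;
	if (crc)
		head = crc->next;
	pthread_mutex_unlock(&list_lock);
	return crc;
}

/* Register-once / unregister-on-last, mirroring probe() and remove(). */
static unsigned int refcnt;
static pthread_mutex_t refcnt_lock = PTHREAD_MUTEX_INITIALIZER;

static void fake_register(void)   { puts("algorithms registered"); }
static void fake_unregister(void) { puts("algorithms unregistered"); }

static void probe_one(void)
{
	pthread_mutex_lock(&refcnt_lock);
	if (!refcnt)
		fake_register();	/* only the first instance registers */
	refcnt++;
	pthread_mutex_unlock(&refcnt_lock);
}

static void remove_one(void)
{
	pthread_mutex_lock(&refcnt_lock);
	if (!--refcnt)
		fake_unregister();	/* only the last instance unregisters */
	pthread_mutex_unlock(&refcnt_lock);
}

int main(void)
{
	probe_one();
	probe_one();			/* second device: no double registration */

	for (int i = 0; i < 4; i++)
		printf("request %d handled by crc%d\n", i, get_next_crc()->id);

	remove_one();
	remove_one();			/* last device gone: unregister */
	return 0;
}

With two probed instances this prints a single "algorithms registered" line and alternates crc0/crc1 across requests, which is the behaviour the diff below establishes in the real driver.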
diff --git a/drivers/crypto/stm32/stm32_crc32.c b/drivers/crypto/stm32/stm32_crc32.c
index c5ad83ad2f72..47d31335c2d4 100644
--- a/drivers/crypto/stm32/stm32_crc32.c
+++ b/drivers/crypto/stm32/stm32_crc32.c
@@ -93,16 +93,29 @@ static int stm32_crc_setkey(struct crypto_shash *tfm, const u8 *key,
 	return 0;
 }
 
-static int stm32_crc_init(struct shash_desc *desc)
+static struct stm32_crc *stm32_crc_get_next_crc(void)
 {
-	struct stm32_crc_desc_ctx *ctx = shash_desc_ctx(desc);
-	struct stm32_crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
 	struct stm32_crc *crc;
 
 	spin_lock_bh(&crc_list.lock);
 	crc = list_first_entry(&crc_list.dev_list, struct stm32_crc, list);
+	if (crc)
+		list_move_tail(&crc->list, &crc_list.dev_list);
 	spin_unlock_bh(&crc_list.lock);
 
+	return crc;
+}
+
+static int stm32_crc_init(struct shash_desc *desc)
+{
+	struct stm32_crc_desc_ctx *ctx = shash_desc_ctx(desc);
+	struct stm32_crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
+	struct stm32_crc *crc;
+
+	crc = stm32_crc_get_next_crc();
+	if (!crc)
+		return -ENODEV;
+
 	pm_runtime_get_sync(crc->dev);
 
 	/* Reset, set key, poly and configure in bit reverse mode */
@@ -127,9 +140,9 @@ static int stm32_crc_update(struct shash_desc *desc, const u8 *d8,
 	struct stm32_crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
 	struct stm32_crc *crc;
 
-	spin_lock_bh(&crc_list.lock);
-	crc = list_first_entry(&crc_list.dev_list, struct stm32_crc, list);
-	spin_unlock_bh(&crc_list.lock);
+	crc = stm32_crc_get_next_crc();
+	if (!crc)
+		return -ENODEV;
 
 	pm_runtime_get_sync(crc->dev);
 
@@ -202,6 +215,8 @@ static int stm32_crc_digest(struct shash_desc *desc, const u8 *data,
 	return stm32_crc_init(desc) ?: stm32_crc_finup(desc, data, length, out);
 }
 
+static unsigned int refcnt;
+static DEFINE_MUTEX(refcnt_lock);
 static struct shash_alg algs[] = {
 	/* CRC-32 */
 	{
@@ -294,12 +309,18 @@ static int stm32_crc_probe(struct platform_device *pdev)
 	list_add(&crc->list, &crc_list.dev_list);
 	spin_unlock(&crc_list.lock);
 
-	ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));
-	if (ret) {
-		dev_err(dev, "Failed to register\n");
-		clk_disable_unprepare(crc->clk);
-		return ret;
+	mutex_lock(&refcnt_lock);
+	if (!refcnt) {
+		ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));
+		if (ret) {
+			mutex_unlock(&refcnt_lock);
+			dev_err(dev, "Failed to register\n");
+			clk_disable_unprepare(crc->clk);
+			return ret;
+		}
 	}
+	refcnt++;
+	mutex_unlock(&refcnt_lock);
 
 	dev_info(dev, "Initialized\n");
 
@@ -320,7 +341,10 @@ static int stm32_crc_remove(struct platform_device *pdev)
 	list_del(&crc->list);
 	spin_unlock(&crc_list.lock);
 
-	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
+	mutex_lock(&refcnt_lock);
+	if (!--refcnt)
+		crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
+	mutex_unlock(&refcnt_lock);
 
 	pm_runtime_disable(crc->dev);
 	pm_runtime_put_noidle(crc->dev);