Skip to content

Commit 51bcbe6

Browse files
lin755 authored and gregkh committed
crypto: hisilicon/sec2 - fix for sec spec check
[ Upstream commit f4f353c ]

During encryption and decryption, user requests must be checked first. If specifications that are not supported by the hardware are used, software computing is used for processing instead.

Fixes: 2f072d7 ("crypto: hisilicon - Add aead support on SEC2")
Signed-off-by: Wenkai Lin <[email protected]>
Signed-off-by: Chenghai Huang <[email protected]>
Signed-off-by: Herbert Xu <[email protected]>
Signed-off-by: Sasha Levin <[email protected]>
1 parent 71f4581 commit 51bcbe6

File tree

2 files changed

+39
-63
lines changed

2 files changed

+39
-63
lines changed

drivers/crypto/hisilicon/sec2/sec.h

-1
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,6 @@ struct sec_aead_req {
3737
u8 *a_ivin;
3838
dma_addr_t a_ivin_dma;
3939
struct aead_request *aead_req;
40-
bool fallback;
4140
};
4241

4342
/* SEC request of Crypto */

drivers/crypto/hisilicon/sec2/sec_crypto.c

+39-62
Original file line numberDiff line numberDiff line change
@@ -690,14 +690,10 @@ static int sec_skcipher_fbtfm_init(struct crypto_skcipher *tfm)
690690

691691
c_ctx->fallback = false;
692692

693-
/* Currently, only XTS mode need fallback tfm when using 192bit key */
694-
if (likely(strncmp(alg, "xts", SEC_XTS_NAME_SZ)))
695-
return 0;
696-
697693
c_ctx->fbtfm = crypto_alloc_sync_skcipher(alg, 0,
698694
CRYPTO_ALG_NEED_FALLBACK);
699695
if (IS_ERR(c_ctx->fbtfm)) {
700-
pr_err("failed to alloc xts mode fallback tfm!\n");
696+
pr_err("failed to alloc fallback tfm for %s!\n", alg);
701697
return PTR_ERR(c_ctx->fbtfm);
702698
}
703699

@@ -857,7 +853,7 @@ static int sec_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
857853
}
858854

859855
memcpy(c_ctx->c_key, key, keylen);
860-
if (c_ctx->fallback && c_ctx->fbtfm) {
856+
if (c_ctx->fbtfm) {
861857
ret = crypto_sync_skcipher_setkey(c_ctx->fbtfm, key, keylen);
862858
if (ret) {
863859
dev_err(dev, "failed to set fallback skcipher key!\n");
@@ -1159,8 +1155,10 @@ static int sec_aead_setkey(struct crypto_aead *tfm, const u8 *key,
11591155
}
11601156

11611157
ret = crypto_authenc_extractkeys(&keys, key, keylen);
1162-
if (ret)
1158+
if (ret) {
1159+
dev_err(dev, "sec extract aead keys err!\n");
11631160
goto bad_key;
1161+
}
11641162

11651163
ret = sec_aead_aes_set_key(c_ctx, &keys);
11661164
if (ret) {
@@ -1174,12 +1172,6 @@ static int sec_aead_setkey(struct crypto_aead *tfm, const u8 *key,
11741172
goto bad_key;
11751173
}
11761174

1177-
if (ctx->a_ctx.a_key_len & WORD_MASK) {
1178-
ret = -EINVAL;
1179-
dev_err(dev, "AUTH key length error!\n");
1180-
goto bad_key;
1181-
}
1182-
11831175
ret = sec_aead_fallback_setkey(a_ctx, tfm, key, keylen);
11841176
if (ret) {
11851177
dev_err(dev, "set sec fallback key err!\n");
@@ -1999,8 +1991,7 @@ static int sec_aead_sha512_ctx_init(struct crypto_aead *tfm)
19991991
return sec_aead_ctx_init(tfm, "sha512");
20001992
}
20011993

2002-
static int sec_skcipher_cryptlen_check(struct sec_ctx *ctx,
2003-
struct sec_req *sreq)
1994+
static int sec_skcipher_cryptlen_check(struct sec_ctx *ctx, struct sec_req *sreq)
20041995
{
20051996
u32 cryptlen = sreq->c_req.sk_req->cryptlen;
20061997
struct device *dev = ctx->dev;
@@ -2022,10 +2013,6 @@ static int sec_skcipher_cryptlen_check(struct sec_ctx *ctx,
20222013
}
20232014
break;
20242015
case SEC_CMODE_CTR:
2025-
if (unlikely(ctx->sec->qm.ver < QM_HW_V3)) {
2026-
dev_err(dev, "skcipher HW version error!\n");
2027-
ret = -EINVAL;
2028-
}
20292016
break;
20302017
default:
20312018
ret = -EINVAL;
@@ -2034,17 +2021,21 @@ static int sec_skcipher_cryptlen_check(struct sec_ctx *ctx,
20342021
return ret;
20352022
}
20362023

2037-
static int sec_skcipher_param_check(struct sec_ctx *ctx, struct sec_req *sreq)
2024+
static int sec_skcipher_param_check(struct sec_ctx *ctx,
2025+
struct sec_req *sreq, bool *need_fallback)
20382026
{
20392027
struct skcipher_request *sk_req = sreq->c_req.sk_req;
20402028
struct device *dev = ctx->dev;
20412029
u8 c_alg = ctx->c_ctx.c_alg;
20422030

2043-
if (unlikely(!sk_req->src || !sk_req->dst ||
2044-
sk_req->cryptlen > MAX_INPUT_DATA_LEN)) {
2031+
if (unlikely(!sk_req->src || !sk_req->dst)) {
20452032
dev_err(dev, "skcipher input param error!\n");
20462033
return -EINVAL;
20472034
}
2035+
2036+
if (sk_req->cryptlen > MAX_INPUT_DATA_LEN)
2037+
*need_fallback = true;
2038+
20482039
sreq->c_req.c_len = sk_req->cryptlen;
20492040

20502041
if (ctx->pbuf_supported && sk_req->cryptlen <= SEC_PBUF_SZ)
@@ -2102,6 +2093,7 @@ static int sec_skcipher_crypto(struct skcipher_request *sk_req, bool encrypt)
21022093
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(sk_req);
21032094
struct sec_req *req = skcipher_request_ctx(sk_req);
21042095
struct sec_ctx *ctx = crypto_skcipher_ctx(tfm);
2096+
bool need_fallback = false;
21052097
int ret;
21062098

21072099
if (!sk_req->cryptlen) {
@@ -2115,11 +2107,11 @@ static int sec_skcipher_crypto(struct skcipher_request *sk_req, bool encrypt)
21152107
req->c_req.encrypt = encrypt;
21162108
req->ctx = ctx;
21172109

2118-
ret = sec_skcipher_param_check(ctx, req);
2110+
ret = sec_skcipher_param_check(ctx, req, &need_fallback);
21192111
if (unlikely(ret))
21202112
return -EINVAL;
21212113

2122-
if (unlikely(ctx->c_ctx.fallback))
2114+
if (unlikely(ctx->c_ctx.fallback || need_fallback))
21232115
return sec_skcipher_soft_crypto(ctx, sk_req, encrypt);
21242116

21252117
return ctx->req_op->process(ctx, req);
@@ -2227,52 +2219,35 @@ static int sec_aead_spec_check(struct sec_ctx *ctx, struct sec_req *sreq)
22272219
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
22282220
size_t sz = crypto_aead_authsize(tfm);
22292221
u8 c_mode = ctx->c_ctx.c_mode;
2230-
struct device *dev = ctx->dev;
22312222
int ret;
22322223

2233-
/* Hardware does not handle cases where authsize is not 4 bytes aligned */
2234-
if (c_mode == SEC_CMODE_CBC && (sz & WORD_MASK)) {
2235-
sreq->aead_req.fallback = true;
2224+
if (unlikely(ctx->sec->qm.ver == QM_HW_V2 && !sreq->c_req.c_len))
22362225
return -EINVAL;
2237-
}
22382226

22392227
if (unlikely(req->cryptlen + req->assoclen > MAX_INPUT_DATA_LEN ||
2240-
req->assoclen > SEC_MAX_AAD_LEN)) {
2241-
dev_err(dev, "aead input spec error!\n");
2228+
req->assoclen > SEC_MAX_AAD_LEN))
22422229
return -EINVAL;
2243-
}
22442230

22452231
if (c_mode == SEC_CMODE_CCM) {
2246-
if (unlikely(req->assoclen > SEC_MAX_CCM_AAD_LEN)) {
2247-
dev_err_ratelimited(dev, "CCM input aad parameter is too long!\n");
2232+
if (unlikely(req->assoclen > SEC_MAX_CCM_AAD_LEN))
22482233
return -EINVAL;
2249-
}
2250-
ret = aead_iv_demension_check(req);
2251-
if (ret) {
2252-
dev_err(dev, "aead input iv param error!\n");
2253-
return ret;
2254-
}
2255-
}
22562234

2257-
if (sreq->c_req.encrypt)
2258-
sreq->c_req.c_len = req->cryptlen;
2259-
else
2260-
sreq->c_req.c_len = req->cryptlen - sz;
2261-
if (c_mode == SEC_CMODE_CBC) {
2262-
if (unlikely(sreq->c_req.c_len & (AES_BLOCK_SIZE - 1))) {
2263-
dev_err(dev, "aead crypto length error!\n");
2235+
ret = aead_iv_demension_check(req);
2236+
if (unlikely(ret))
2237+
return -EINVAL;
2238+
} else if (c_mode == SEC_CMODE_CBC) {
2239+
if (unlikely(sz & WORD_MASK))
2240+
return -EINVAL;
2241+
if (unlikely(ctx->a_ctx.a_key_len & WORD_MASK))
22642242
return -EINVAL;
2265-
}
22662243
}
22672244

22682245
return 0;
22692246
}
22702247

2271-
static int sec_aead_param_check(struct sec_ctx *ctx, struct sec_req *sreq)
2248+
static int sec_aead_param_check(struct sec_ctx *ctx, struct sec_req *sreq, bool *need_fallback)
22722249
{
22732250
struct aead_request *req = sreq->aead_req.aead_req;
2274-
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2275-
size_t authsize = crypto_aead_authsize(tfm);
22762251
struct device *dev = ctx->dev;
22772252
u8 c_alg = ctx->c_ctx.c_alg;
22782253

@@ -2281,12 +2256,10 @@ static int sec_aead_param_check(struct sec_ctx *ctx, struct sec_req *sreq)
22812256
return -EINVAL;
22822257
}
22832258

2284-
if (ctx->sec->qm.ver == QM_HW_V2) {
2285-
if (unlikely(!req->cryptlen || (!sreq->c_req.encrypt &&
2286-
req->cryptlen <= authsize))) {
2287-
sreq->aead_req.fallback = true;
2288-
return -EINVAL;
2289-
}
2259+
if (unlikely(ctx->c_ctx.c_mode == SEC_CMODE_CBC &&
2260+
sreq->c_req.c_len & (AES_BLOCK_SIZE - 1))) {
2261+
dev_err(dev, "aead cbc mode input data length error!\n");
2262+
return -EINVAL;
22902263
}
22912264

22922265
/* Support AES or SM4 */
@@ -2295,8 +2268,10 @@ static int sec_aead_param_check(struct sec_ctx *ctx, struct sec_req *sreq)
22952268
return -EINVAL;
22962269
}
22972270

2298-
if (unlikely(sec_aead_spec_check(ctx, sreq)))
2271+
if (unlikely(sec_aead_spec_check(ctx, sreq))) {
2272+
*need_fallback = true;
22992273
return -EINVAL;
2274+
}
23002275

23012276
if (ctx->pbuf_supported && (req->cryptlen + req->assoclen) <=
23022277
SEC_PBUF_SZ)
@@ -2340,17 +2315,19 @@ static int sec_aead_crypto(struct aead_request *a_req, bool encrypt)
23402315
struct crypto_aead *tfm = crypto_aead_reqtfm(a_req);
23412316
struct sec_req *req = aead_request_ctx(a_req);
23422317
struct sec_ctx *ctx = crypto_aead_ctx(tfm);
2318+
size_t sz = crypto_aead_authsize(tfm);
2319+
bool need_fallback = false;
23432320
int ret;
23442321

23452322
req->flag = a_req->base.flags;
23462323
req->aead_req.aead_req = a_req;
23472324
req->c_req.encrypt = encrypt;
23482325
req->ctx = ctx;
2349-
req->aead_req.fallback = false;
2326+
req->c_req.c_len = a_req->cryptlen - (req->c_req.encrypt ? 0 : sz);
23502327

2351-
ret = sec_aead_param_check(ctx, req);
2328+
ret = sec_aead_param_check(ctx, req, &need_fallback);
23522329
if (unlikely(ret)) {
2353-
if (req->aead_req.fallback)
2330+
if (need_fallback)
23542331
return sec_aead_soft_crypto(ctx, a_req, encrypt);
23552332
return -EINVAL;
23562333
}

0 commit comments

Comments (0)