Skip to content

Commit 4e4a088

Browse files
committed
crypto: arm64/sm4-gcm - Fix possible crash in GCM cryption
An often overlooked aspect of the skcipher walker API is that an error is not just indicated by a non-zero return value, but by the fact that walk->nbytes is zero. Thus it is an error to call skcipher_walk_done after getting back walk->nbytes == 0 from the previous interaction with the walker. This is because when walk->nbytes is zero the walker is left in an undefined state and any further calls to it may try to free uninitialised stack memory. The sm4 arm64 gcm code gets this wrong and ends up calling skcipher_walk_done even when walk->nbytes is zero. This patch rewrites the loop in a form that resembles other callers. Reported-by: Tianjia Zhang <[email protected]> Fixes: ae1b83c ("crypto: arm64/sm4 - add CE implementation for GCM mode") Signed-off-by: Herbert Xu <[email protected]> Tested-by: Tianjia Zhang <[email protected]> Signed-off-by: Herbert Xu <[email protected]>
1 parent 0ceb587 commit 4e4a088

File tree

1 file changed

+25
-26
lines changed

1 file changed

+25
-26
lines changed

arch/arm64/crypto/sm4-ce-gcm-glue.c

Lines changed: 25 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -135,72 +135,75 @@ static void gcm_calculate_auth_mac(struct aead_request *req, u8 ghash[])
135135
}
136136

137137
static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk,
138-
struct sm4_gcm_ctx *ctx, u8 ghash[],
138+
u8 ghash[], int err,
139139
void (*sm4_ce_pmull_gcm_crypt)(const u32 *rkey_enc,
140140
u8 *dst, const u8 *src, u8 *iv,
141141
unsigned int nbytes, u8 *ghash,
142142
const u8 *ghash_table, const u8 *lengths))
143143
{
144+
struct crypto_aead *aead = crypto_aead_reqtfm(req);
145+
struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
144146
u8 __aligned(8) iv[SM4_BLOCK_SIZE];
145147
be128 __aligned(8) lengths;
146-
int err;
147148

148149
memset(ghash, 0, SM4_BLOCK_SIZE);
149150

150151
lengths.a = cpu_to_be64(req->assoclen * 8);
151152
lengths.b = cpu_to_be64(walk->total * 8);
152153

153-
memcpy(iv, walk->iv, GCM_IV_SIZE);
154+
memcpy(iv, req->iv, GCM_IV_SIZE);
154155
put_unaligned_be32(2, iv + GCM_IV_SIZE);
155156

156157
kernel_neon_begin();
157158

158159
if (req->assoclen)
159160
gcm_calculate_auth_mac(req, ghash);
160161

161-
do {
162+
while (walk->nbytes) {
162163
unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;
163164
const u8 *src = walk->src.virt.addr;
164165
u8 *dst = walk->dst.virt.addr;
165166

166167
if (walk->nbytes == walk->total) {
167-
tail = 0;
168-
169168
sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
170169
walk->nbytes, ghash,
171170
ctx->ghash_table,
172171
(const u8 *)&lengths);
173-
} else if (walk->nbytes - tail) {
174-
sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
175-
walk->nbytes - tail, ghash,
176-
ctx->ghash_table, NULL);
172+
173+
kernel_neon_end();
174+
175+
return skcipher_walk_done(walk, 0);
177176
}
178177

178+
sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
179+
walk->nbytes - tail, ghash,
180+
ctx->ghash_table, NULL);
181+
179182
kernel_neon_end();
180183

181184
err = skcipher_walk_done(walk, tail);
182-
if (err)
183-
return err;
184-
if (walk->nbytes)
185-
kernel_neon_begin();
186-
} while (walk->nbytes > 0);
187185

188-
return 0;
186+
kernel_neon_begin();
187+
}
188+
189+
sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, NULL, NULL, iv,
190+
walk->nbytes, ghash, ctx->ghash_table,
191+
(const u8 *)&lengths);
192+
193+
kernel_neon_end();
194+
195+
return err;
189196
}
190197

191198
static int gcm_encrypt(struct aead_request *req)
192199
{
193200
struct crypto_aead *aead = crypto_aead_reqtfm(req);
194-
struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
195201
u8 __aligned(8) ghash[SM4_BLOCK_SIZE];
196202
struct skcipher_walk walk;
197203
int err;
198204

199205
err = skcipher_walk_aead_encrypt(&walk, req, false);
200-
if (err)
201-
return err;
202-
203-
err = gcm_crypt(req, &walk, ctx, ghash, sm4_ce_pmull_gcm_enc);
206+
err = gcm_crypt(req, &walk, ghash, err, sm4_ce_pmull_gcm_enc);
204207
if (err)
205208
return err;
206209

@@ -215,17 +218,13 @@ static int gcm_decrypt(struct aead_request *req)
215218
{
216219
struct crypto_aead *aead = crypto_aead_reqtfm(req);
217220
unsigned int authsize = crypto_aead_authsize(aead);
218-
struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
219221
u8 __aligned(8) ghash[SM4_BLOCK_SIZE];
220222
u8 authtag[SM4_BLOCK_SIZE];
221223
struct skcipher_walk walk;
222224
int err;
223225

224226
err = skcipher_walk_aead_decrypt(&walk, req, false);
225-
if (err)
226-
return err;
227-
228-
err = gcm_crypt(req, &walk, ctx, ghash, sm4_ce_pmull_gcm_dec);
227+
err = gcm_crypt(req, &walk, ghash, err, sm4_ce_pmull_gcm_dec);
229228
if (err)
230229
return err;
231230

0 commit comments

Comments
 (0)