 		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
 		crypto_aegis256_update_a(state, &buf);
 	}
 }
 
 static void crypto_aegis256_process_crypt(struct aegis_state *state,
 					   struct aead_request *req,
 					   const struct aegis256_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 
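For orientation, this is how crypto_aegis256_process_crypt() reads once the hunk above is applied. It is a consolidation of the diff rather than a separate implementation; the inline comments are added here to spell out the stride handling.

static void crypto_aegis256_process_crypt(struct aegis_state *state,
					   struct aead_request *req,
					   const struct aegis256_ops *ops)
{
	struct skcipher_walk walk;

	ops->skcipher_walk_init(&walk, req, false);

	while (walk.nbytes) {
		unsigned int nbytes = walk.nbytes;

		/*
		 * On every step except the last, only process a multiple of
		 * the walk stride, so crypt_chunk() always sees whole blocks.
		 */
		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
				 nbytes);

		/*
		 * Report the leftover bytes back to the walk code so they are
		 * re-presented at the start of the next iteration.
		 */
		skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}
}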
 static void crypto_aegis256_final(struct aegis_state *state,
 				  union aegis_block *tag_xor,
 				  u64 assoclen, u64 cryptlen)
 {
 	u64 assocbits = assoclen * 8;
 	u64 cryptbits = cryptlen * 8;