Lines Matching refs:walk

 94	struct skcipher_walk walk;  in __ecb_crypt()  local
 97	err = skcipher_walk_virt(&walk, req, false);  in __ecb_crypt()
 99	while (walk.nbytes >= AES_BLOCK_SIZE) {  in __ecb_crypt()
100		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in __ecb_crypt()
102		if (walk.nbytes < walk.total)  in __ecb_crypt()
104				    walk.stride / AES_BLOCK_SIZE);  in __ecb_crypt()
107		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,  in __ecb_crypt()
110		err = skcipher_walk_done(&walk,  in __ecb_crypt()
111					 walk.nbytes - blocks * AES_BLOCK_SIZE);  in __ecb_crypt()
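Taken together, the __ecb_crypt() matches trace the canonical skcipher_walk loop: map a chunk, round the block count down to a whole stride on every chunk but the last, run the (presumably bit-sliced, aesbs_) helper, then tell the walk layer how many bytes were left unprocessed. A minimal sketch of that loop follows, reconstructed only from the matched lines; the includes, the aesbs_ctx layout, the callback signature and the kernel_neon_begin()/kernel_neon_end() bracketing are assumptions added for completeness, not part of the listing.

#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>

/* Hypothetical context layout; only ->rk and ->rounds are implied by the
 * matched lines. */
struct aesbs_ctx {
	int	rounds;
	u8	rk[13 * (8 * AES_BLOCK_SIZE) + 32];
};

static int __ecb_crypt(struct skcipher_request *req,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* On every chunk except the last, only process whole
		 * strides so the remainder stays block-aligned. */
		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();	/* assumed SIMD bracketing */
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
		   ctx->rounds, blocks);
		kernel_neon_end();

		/* Hand back whatever the stride rounding left over. */
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}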
168	struct skcipher_walk walk;  in cbc_decrypt()  local
171	err = skcipher_walk_virt(&walk, req, false);  in cbc_decrypt()
173	while (walk.nbytes >= AES_BLOCK_SIZE) {  in cbc_decrypt()
174		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in cbc_decrypt()
176		if (walk.nbytes < walk.total)  in cbc_decrypt()
178				    walk.stride / AES_BLOCK_SIZE);  in cbc_decrypt()
181		aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,  in cbc_decrypt()
183				  walk.iv);  in cbc_decrypt()
185		err = skcipher_walk_done(&walk,  in cbc_decrypt()
186					 walk.nbytes - blocks * AES_BLOCK_SIZE);  in cbc_decrypt()
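cbc_decrypt() repeats the same loop shape; what the matches add is that walk.iv is handed straight to aesbs_cbc_decrypt(), so the helper both consumes the chaining value for this chunk and leaves the next IV behind for the following walk step. A hedged sketch of just that call site inside the loop body, assuming the same kernel_neon bracketing as above; the middle arguments (the unmatched line 182) are guessed as a ctx->key.{rk,rounds} layout.

	kernel_neon_begin();
	/* walk.iv carries the CBC chaining value across walk steps; the
	 * helper is expected to update it in place. */
	aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			  ctx->key.rk, ctx->key.rounds, blocks,
			  walk.iv);
	kernel_neon_end();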
239	struct skcipher_walk walk;  in ctr_encrypt()  local
243	err = skcipher_walk_virt(&walk, req, false);  in ctr_encrypt()
245	while (walk.nbytes > 0) {  in ctr_encrypt()
246		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in ctr_encrypt()
247		u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;  in ctr_encrypt()
249		if (walk.nbytes < walk.total) {  in ctr_encrypt()
251				    walk.stride / AES_BLOCK_SIZE);  in ctr_encrypt()
256		aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,  in ctr_encrypt()
257				  ctx->rk, ctx->rounds, blocks, walk.iv, final);  in ctr_encrypt()
261			u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;  in ctr_encrypt()
262			u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;  in ctr_encrypt()
265				       walk.total % AES_BLOCK_SIZE);  in ctr_encrypt()
267			err = skcipher_walk_done(&walk, 0);  in ctr_encrypt()
270		err = skcipher_walk_done(&walk,  in ctr_encrypt()
271					 walk.nbytes - blocks * AES_BLOCK_SIZE);  in ctr_encrypt()
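ctr_encrypt() is the one loop that runs down to walk.nbytes > 0 rather than a whole block, since CTR mode has no length restriction. When walk.total is not block-aligned, final points at a stack scratch block (buf, declared outside the matched lines); the helper is expected to leave the last keystream block there, and crypto_xor_cpy() then XORs it over the partial tail. A sketch of that tail handling, with everything not visible in the matches (the mid-walk final = NULL reset, the break after the tail) marked as assumptions:

	while (walk.nbytes > 0) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;

		if (walk.nbytes < walk.total) {
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);
			final = NULL;	/* assumed: only the last pass does the tail */
		}

		kernel_neon_begin();
		aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->rk, ctx->rounds, blocks, walk.iv, final);
		kernel_neon_end();

		if (final) {
			u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
			u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;

			/* XOR the keystream block left in buf over the
			 * partial final block of input. */
			crypto_xor_cpy(dst, src, final,
				       walk.total % AES_BLOCK_SIZE);

			err = skcipher_walk_done(&walk, 0);
			break;		/* assumed: nothing remains after the tail */
		}
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}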
353	struct skcipher_walk walk;  in __xts_crypt()  local
369	err = skcipher_walk_virt(&walk, req, true);  in __xts_crypt()
373	crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv);  in __xts_crypt()
375	while (walk.nbytes >= AES_BLOCK_SIZE) {  in __xts_crypt()
376		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in __xts_crypt()
379		if (walk.nbytes < walk.total) {  in __xts_crypt()
381				    walk.stride / AES_BLOCK_SIZE);  in __xts_crypt()
386		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk,  in __xts_crypt()
387		   ctx->key.rounds, blocks, walk.iv, reorder_last_tweak);  in __xts_crypt()
389		err = skcipher_walk_done(&walk,  in __xts_crypt()
390					 walk.nbytes - blocks * AES_BLOCK_SIZE);  in __xts_crypt()
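__xts_crypt() differs before the loop: the walk is started with the atomic flag set to true, and the request IV is turned into the initial XTS tweak by encrypting it in place with the separate tweak cipher (crypto_cipher_encrypt_one() on walk.iv, line 373). Inside the loop the helper then receives walk.iv as the running tweak plus a reorder_last_tweak flag, presumably for the ciphertext-stealing path. A sketch of that pre-loop setup and call; the error check, the reorder_last_tweak computation and the kernel_neon bracketing are not in the matched lines and are assumptions here.

	err = skcipher_walk_virt(&walk, req, true);
	if (err)			/* assumed error check */
		return err;

	/* Encrypt the caller-supplied IV with the tweak cipher to obtain the
	 * initial XTS tweak; walk.iv then carries the running tweak from
	 * chunk to chunk. */
	crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total) {
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);
			/* assumed: defer any last-tweak reordering until
			 * the final chunk */
		}

		kernel_neon_begin();
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk,
		   ctx->key.rounds, blocks, walk.iv, reorder_last_tweak);
		kernel_neon_end();

		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}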