Lines matching refs: walk

98 struct skcipher_walk walk; in __ecb_crypt() local
101 err = skcipher_walk_virt(&walk, req, false); in __ecb_crypt()
103 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
104 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
106 if (walk.nbytes < walk.total) in __ecb_crypt()
108 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
111 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, in __ecb_crypt()
114 err = skcipher_walk_done(&walk, in __ecb_crypt()
115 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
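
Read together, the __ecb_crypt() matches trace the standard skcipher_walk loop: map a chunk, round the block count down to the walk stride on every chunk but the last, process it, and report the leftover bytes to skcipher_walk_done(). A minimal sketch of that loop, reconstructed from the lines above; the aesbs_ctx layout, the ctx->rounds field, the kernel_neon_begin()/kernel_neon_end() bracketing and the exact fn prototype are assumptions added for illustration, not part of the matched output.

static int __ecb_crypt(struct skcipher_request *req,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);	/* assumed ctx type */
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* on every chunk but the last, process whole strides only */
		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
		   ctx->rounds, blocks);
		kernel_neon_end();

		/* hand back whatever this chunk left unprocessed */
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

Rounding down to walk.stride on intermediate chunks keeps the bit-sliced core working on full batches; only the final chunk may carry a short remainder.
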
158 struct skcipher_walk walk; in cbc_encrypt() local
161 err = skcipher_walk_virt(&walk, req, false); in cbc_encrypt()
163 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_encrypt()
164 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_encrypt()
168 neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in cbc_encrypt()
170 walk.iv); in cbc_encrypt()
172 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
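
The cbc_encrypt() matches show the same skeleton, but the chunk is handed to neon_aes_cbc_encrypt() (CBC encryption is inherently serial, so batching blocks in the bit-sliced code buys nothing) and the leftover reported to skcipher_walk_done() is simply walk.nbytes % AES_BLOCK_SIZE. A sketch under the same caveats; the aesbs_cbc_ctx name and the ctx->enc / ctx->key.rounds fields are assumptions:

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);	/* assumed */
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* CBC encryption chains block to block, so defer to the
		 * plain NEON implementation for the whole chunk */
		kernel_neon_begin();
		neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				     ctx->enc, ctx->key.rounds, blocks,
				     walk.iv);
		kernel_neon_end();

		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}

	return err;
}
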
181 struct skcipher_walk walk; in cbc_decrypt() local
184 err = skcipher_walk_virt(&walk, req, false); in cbc_decrypt()
186 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
187 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_decrypt()
189 if (walk.nbytes < walk.total) in cbc_decrypt()
191 walk.stride / AES_BLOCK_SIZE); in cbc_decrypt()
194 aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in cbc_decrypt()
196 walk.iv); in cbc_decrypt()
198 err = skcipher_walk_done(&walk, in cbc_decrypt()
199 walk.nbytes - blocks * AES_BLOCK_SIZE); in cbc_decrypt()
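
cbc_decrypt() returns to the __ecb_crypt() shape, since independent ciphertext blocks can be decrypted as a batch, with walk.iv passed through as the chaining value for aesbs_cbc_decrypt(). A sketch with the same assumed context layout (aesbs_cbc_ctx, ctx->key.rk, ctx->key.rounds) and NEON bracketing:

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);	/* assumed */
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->key.rk, ctx->key.rounds, blocks,
				  walk.iv);
		kernel_neon_end();

		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}
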
209 struct skcipher_walk walk; in ctr_encrypt() local
213 err = skcipher_walk_virt(&walk, req, false); in ctr_encrypt()
215 while (walk.nbytes > 0) { in ctr_encrypt()
216 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in ctr_encrypt()
217 u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL; in ctr_encrypt()
219 if (walk.nbytes < walk.total) { in ctr_encrypt()
221 walk.stride / AES_BLOCK_SIZE); in ctr_encrypt()
226 aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ctr_encrypt()
227 ctx->rk, ctx->rounds, blocks, walk.iv, final); in ctr_encrypt()
231 u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE; in ctr_encrypt()
232 u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE; in ctr_encrypt()
235 walk.total % AES_BLOCK_SIZE); in ctr_encrypt()
237 err = skcipher_walk_done(&walk, 0); in ctr_encrypt()
240 err = skcipher_walk_done(&walk, in ctr_encrypt()
241 walk.nbytes - blocks * AES_BLOCK_SIZE); in ctr_encrypt()
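
The ctr_encrypt() matches add the partial-block case: when the request length is not a multiple of AES_BLOCK_SIZE and this is the last chunk, a stack buffer is passed as 'final' so the assembly can leave the last keystream block there, and the tail is then produced by XORing that keystream into the remaining source bytes. A sketch reconstructed from the lines above; the buf declaration, the final = NULL reset on intermediate chunks, the crypto_xor_cpy() call for the tail and the NEON bracketing are assumptions that do not appear in the matched lines:

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);	/* assumed */
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;

		if (walk.nbytes < walk.total) {
			/* not the last chunk: whole strides only, no tail */
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);
			final = NULL;
		}

		kernel_neon_begin();
		aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->rk, ctx->rounds, blocks, walk.iv,
				  final);
		kernel_neon_end();

		if (final) {
			u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
			u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;

			/* XOR the keystream left in 'final' into the tail */
			crypto_xor_cpy(dst, src, final,
				       walk.total % AES_BLOCK_SIZE);

			err = skcipher_walk_done(&walk, 0);
			break;
		}
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}
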
281 struct skcipher_walk walk; in __xts_crypt() local
306 err = skcipher_walk_virt(&walk, req, false); in __xts_crypt()
310 while (walk.nbytes >= AES_BLOCK_SIZE) { in __xts_crypt()
311 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __xts_crypt()
313 if (walk.nbytes < walk.total || walk.nbytes % AES_BLOCK_SIZE) in __xts_crypt()
315 walk.stride / AES_BLOCK_SIZE); in __xts_crypt()
317 out = walk.dst.virt.addr; in __xts_crypt()
318 in = walk.src.virt.addr; in __xts_crypt()
319 nbytes = walk.nbytes; in __xts_crypt()
324 neon_aes_ecb_encrypt(walk.iv, walk.iv, in __xts_crypt()
330 walk.iv); in __xts_crypt()
337 if (walk.nbytes == walk.total && nbytes > 0) in __xts_crypt()
341 err = skcipher_walk_done(&walk, nbytes); in __xts_crypt()
355 err = skcipher_walk_virt(&walk, req, false); in __xts_crypt()
359 out = walk.dst.virt.addr; in __xts_crypt()
360 in = walk.src.virt.addr; in __xts_crypt()
361 nbytes = walk.nbytes; in __xts_crypt()
367 nbytes, ctx->twkey, walk.iv, first ?: 2); in __xts_crypt()
370 nbytes, ctx->twkey, walk.iv, first ?: 2); in __xts_crypt()
373 return skcipher_walk_done(&walk, 0); in __xts_crypt()
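
__xts_crypt() is the most involved user of the walk API in this listing: it makes up to two passes. The first pass encrypts the tweak in place on the first chunk (neon_aes_ecb_encrypt(walk.iv, walk.iv, ...)) and then runs the bulk blocks through fn(); if a partial span is left for ciphertext stealing, a second skcipher_walk_virt() is started over it and finished with neon_aes_xts_encrypt() or neon_aes_xts_decrypt(), where first ?: 2 tells the assembly whether the tweak still has to be derived. The sketch below covers only the bulk loop of the first pass; the aesbs_xts_ctx layout, the fn prototype, the NEON bracketing and the entire subrequest/scatterlist setup for the stolen tail are assumptions or omissions, not shown in the matched lines.

/* bulk loop only; the ciphertext-stealing tail path is omitted here,
 * which is also why the 'encrypt' flag goes unused in this sketch */
static int __xts_crypt(struct skcipher_request *req, bool encrypt,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);	/* assumed */
	struct skcipher_walk walk;
	int first = 1;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *out = walk.dst.virt.addr;
		u8 *in = walk.src.virt.addr;
		int nbytes = walk.nbytes;

		/* leave partial blocks to a later pass */
		if (walk.nbytes < walk.total || walk.nbytes % AES_BLOCK_SIZE)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		if (first)
			/* turn the IV into the first tweak, in place */
			neon_aes_ecb_encrypt(walk.iv, walk.iv, ctx->twkey,
					     ctx->key.rounds, 1);
		first = 0;

		fn(out, in, ctx->key.rk, ctx->key.rounds, blocks, walk.iv);
		kernel_neon_end();

		nbytes -= blocks * AES_BLOCK_SIZE;
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

In the matched lines not reflected above, the function tests walk.nbytes == walk.total && nbytes > 0 to detect a trailing partial span, starts the second walk with another skcipher_walk_virt(), calls neon_aes_xts_encrypt() or neon_aes_xts_decrypt() with ctx->twkey, walk.iv and first ?: 2, and finishes with skcipher_walk_done(&walk, 0).
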