/linux/crypto/

skcipher.c
  170  if (walk->iv != walk->oiv)   in skcipher_walk_done()
  171  memcpy(walk->oiv, walk->iv, walk->ivsize);   in skcipher_walk_done()
  172  if (walk->buffer != walk->page)   in skcipher_walk_done()
  209  if (!err && walk->iv != walk->oiv)   in skcipher_walk_complete()
  210  memcpy(walk->oiv, walk->iv, walk->ivsize);   in skcipher_walk_complete()
  211  if (walk->buffer != walk->page)   in skcipher_walk_complete()
  237  walk->buffer = walk->page;   in skcipher_next_slow()
  309  if (offset_in_page(walk->page) + walk->nbytes + walk->stride >   in skcipher_next_copy()
  313  walk->page += walk->nbytes;   in skcipher_next_copy()
  367  if (unlikely((walk->in.offset | walk->out.offset) & walk->alignmask)) {   in skcipher_walk_next()
  [all …]

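The skcipher.c hits are the walk engine itself: the IV copy-back in skcipher_walk_done() and skcipher_walk_complete(), and the bounce-page handling in skcipher_next_slow()/skcipher_next_copy(). The mode implementations and arch glue listed below drive that machinery through a short loop. A minimal sketch of that consumer side, assuming a hypothetical process_blocks() helper and a made-up DEMO_BLOCK_SIZE in place of a real cipher and its block size:

#include <linux/kernel.h>
#include <linux/string.h>
#include <crypto/internal/skcipher.h>

#define DEMO_BLOCK_SIZE 16  /* placeholder block size */

/* Stand-in for the real per-chunk cipher work; not a kernel API. */
static void process_blocks(u8 *dst, const u8 *src, unsigned int len, u8 *iv)
{
    memcpy(dst, src, len);
}

static int demo_crypt(struct skcipher_request *req)
{
    struct skcipher_walk walk;
    unsigned int nbytes;
    int err;

    /* Map the request's scatterlists for CPU access, one chunk at a time. */
    err = skcipher_walk_virt(&walk, req, false);

    while ((nbytes = walk.nbytes) != 0) {
        /* Handle only whole blocks here; the walker deals with the rest. */
        nbytes = round_down(nbytes, DEMO_BLOCK_SIZE);

        process_blocks(walk.dst.virt.addr, walk.src.virt.addr,
                       nbytes, walk.iv);

        /* Second argument: bytes of this chunk left unprocessed. */
        err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
    }

    return err;
}
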
cfb.c
   51  u8 *iv = walk->iv;   in crypto_cfb_final()
   65  u8 *iv = walk->iv;   in crypto_cfb_encrypt_segment()
   87  u8 *iv = walk->iv;   in crypto_cfb_encrypt_inplace()
  106  struct skcipher_walk walk;   in crypto_cfb_encrypt() local
  113  if (walk.src.virt.addr == walk.dst.virt.addr)   in crypto_cfb_encrypt()
  120  if (walk.nbytes) {   in crypto_cfb_encrypt()
  135  u8 *iv = walk->iv;   in crypto_cfb_decrypt_segment()
  157  u8 * const iv = walk->iv;   in crypto_cfb_decrypt_inplace()
  173  if (walk->src.virt.addr == walk->dst.virt.addr)   in crypto_cfb_decrypt_blocks()
  182  struct skcipher_walk walk;   in crypto_cfb_decrypt() local
  [all …]

cbc.c
   23  u8 *src = walk->src.virt.addr;   in crypto_cbc_encrypt_segment()
   24  u8 *dst = walk->dst.virt.addr;   in crypto_cbc_encrypt_segment()
   27  u8 *iv = walk->iv;   in crypto_cbc_encrypt_segment()
   54  u8 *iv = walk->iv;   in crypto_cbc_encrypt_inplace()
   76  struct skcipher_walk walk;   in crypto_cbc_encrypt() local
   81  while (walk.nbytes) {   in crypto_cbc_encrypt()
   82  if (walk.src.virt.addr == walk.dst.virt.addr)   in crypto_cbc_encrypt()
  102  u8 *iv = walk->iv;   in crypto_cbc_decrypt_segment()
  158  struct skcipher_walk walk;   in crypto_cbc_decrypt() local
  163  while (walk.nbytes) {   in crypto_cbc_decrypt()
  [all …]

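The cbc.c hits show the split the generic modes all make: an out-of-place segment routine (crypto_cbc_encrypt_segment()) versus an in-place one, selected by comparing walk.src.virt.addr with walk.dst.virt.addr inside the while (walk.nbytes) loop. The chaining in the segment case is only a few lines; a sketch of it, with encrypt_one() as a hypothetical stand-in for a single-block cipher call and crypto_xor() from <crypto/algapi.h>:

#include <linux/string.h>
#include <crypto/algapi.h>  /* crypto_xor() */

/* Hypothetical single-block primitive, standing in for the real cipher. */
static void encrypt_one(void *ctx, u8 *dst, const u8 *src)
{
    /* stand-in only */
}

static unsigned int demo_cbc_encrypt_segment(void *ctx, u8 *dst, const u8 *src,
                                             unsigned int nbytes, u8 *iv,
                                             unsigned int bsize)
{
    do {
        crypto_xor(iv, src, bsize);   /* iv ^= plaintext block */
        encrypt_one(ctx, dst, iv);    /* dst = E(iv) */
        memcpy(iv, dst, bsize);       /* chain: next iv = ciphertext */

        src += bsize;
        dst += bsize;
    } while ((nbytes -= bsize) >= bsize);

    return nbytes;  /* leftover bytes, reported back via skcipher_walk_done() */
}
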
pcbc.c
   26  u8 *src = walk->src.virt.addr;   in crypto_pcbc_encrypt_segment()
   27  u8 *dst = walk->dst.virt.addr;   in crypto_pcbc_encrypt_segment()
   28  u8 * const iv = walk->iv;   in crypto_pcbc_encrypt_segment()
   48  u8 *src = walk->src.virt.addr;   in crypto_pcbc_encrypt_inplace()
   49  u8 * const iv = walk->iv;   in crypto_pcbc_encrypt_inplace()
   68  struct skcipher_walk walk;   in crypto_pcbc_encrypt() local
   75  if (walk.src.virt.addr == walk.dst.virt.addr)   in crypto_pcbc_encrypt()
   95  u8 * const iv = walk->iv;   in crypto_pcbc_decrypt_segment()
  116  u8 * const iv = walk->iv;   in crypto_pcbc_decrypt_inplace()
  135  struct skcipher_walk walk;   in crypto_pcbc_decrypt() local
  [all …]

ahash.c
   48  walk->data = kmap_atomic(walk->pg);   in hash_walk_next()
   68  walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);   in hash_walk_new_entry()
   69  walk->offset = offset_in_page(walk->offset);   in hash_walk_new_entry()
   72  if (walk->entrylen > walk->total)   in hash_walk_new_entry()
   73  walk->entrylen = walk->total;   in hash_walk_new_entry()
   74  walk->total -= walk->entrylen;   in hash_walk_new_entry()
   83  walk->data -= walk->offset;   in crypto_hash_walk_done()
   88  walk->offset = ALIGN(walk->offset, alignmask + 1);   in crypto_hash_walk_done()
   93  walk->data += walk->offset;   in crypto_hash_walk_done()
  106  walk->pg++;   in crypto_hash_walk_done()
  [all …]

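ahash.c provides the crypto_hash_walk helpers picked out above: hash_walk_new_entry() latches the next scatterlist element, hash_walk_next() maps the current page, and crypto_hash_walk_done() advances or finishes the walk. The consumer loop, as used by the shash-to-ahash wrappers, is compact; a sketch with demo_update_blocks() as a hypothetical digest-update step:

#include <crypto/internal/hash.h>

/* Hypothetical stand-in for the real block-update routine; 0 on success. */
static int demo_update_blocks(const u8 *data, unsigned int len)
{
    return 0;
}

static int demo_ahash_update(struct ahash_request *req)
{
    struct crypto_hash_walk walk;
    int nbytes;

    for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
         nbytes = crypto_hash_walk_done(&walk, nbytes)) {
        /* walk.data is the mapped chunk, nbytes its length. */
        nbytes = demo_update_blocks(walk.data, nbytes);
    }

    return nbytes;
}
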
ofb.c
   23  struct skcipher_walk walk;   in crypto_ofb_crypt() local
   28  while (walk.nbytes >= bsize) {   in crypto_ofb_crypt()
   29  const u8 *src = walk.src.virt.addr;   in crypto_ofb_crypt()
   30  u8 *dst = walk.dst.virt.addr;   in crypto_ofb_crypt()
   31  u8 * const iv = walk.iv;   in crypto_ofb_crypt()
   32  unsigned int nbytes = walk.nbytes;   in crypto_ofb_crypt()
   44  if (walk.nbytes) {   in crypto_ofb_crypt()
   45  crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);   in crypto_ofb_crypt()
   46  crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, walk.iv,   in crypto_ofb_crypt()
   47  walk.nbytes);   in crypto_ofb_crypt()
  [all …]

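The ofb.c hits at lines 44-47 are the final partial block: once fewer than bsize bytes remain, the IV is encrypted in place one more time and only walk.nbytes bytes of that keystream are XORed into the output. Reassembled from the matched lines and lightly annotated (the surrounding loop and error handling are paraphrased, not quoted):

    /* tail of crypto_ofb_crypt(): walk.nbytes < bsize here */
    if (walk.nbytes) {
        /* advance the keystream: iv = E_k(iv) */
        crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);
        /* dst = src ^ keystream, truncated to the bytes that remain */
        crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, walk.iv,
                       walk.nbytes);
        err = skcipher_walk_done(&walk, 0);  /* nothing left over */
    }
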
/linux/mm/

pagewalk.c
   48  if (walk->no_vma) {   in walk_pte_range()
  113  if (pmd_none(*pmd) || (!walk->vma && !walk->no_vma)) {   in walk_pmd_range()
  144  if (walk->vma) {   in walk_pmd_range()
  174  if (pud_none(*pud) || (!walk->vma && !walk->no_vma)) {   in walk_pud_range()
  197  if (walk->vma)   in walk_pud_range()
  256  if (walk->pgd)   in walk_pgd_range()
  443  if (!walk.mm)   in walk_page_range()
  457  walk.vma = vma;   in walk_page_range()
  474  if (walk.vma || walk.ops->pte_hole)   in walk_page_range()
  520  if (!walk.mm)   in walk_page_vma()
  [all …]

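pagewalk.c is the generic page-table walker: walk_page_range() and walk_page_vma() validate walk.mm and walk.vma, then walk_pgd_range() down to walk_pte_range() invoke the caller-supplied mm_walk_ops callbacks. A minimal caller, with count_pte() and count_present() as made-up names for a toy "count present PTEs" use case:

#include <linux/mm.h>
#include <linux/pagewalk.h>

/* Per-PTE callback; walk->private carries the caller's cookie. */
static int count_pte(pte_t *pte, unsigned long addr, unsigned long next,
                     struct mm_walk *walk)
{
    unsigned long *count = walk->private;

    if (pte_present(*pte))
        (*count)++;
    return 0;  /* a non-zero return aborts the walk */
}

static const struct mm_walk_ops count_ops = {
    .pte_entry = count_pte,
};

/* Count present PTEs in [start, end) of mm (illustrative helper). */
static unsigned long count_present(struct mm_struct *mm,
                                   unsigned long start, unsigned long end)
{
    unsigned long count = 0;

    mmap_read_lock(mm);  /* walk_page_range() expects mmap_lock to be held */
    walk_page_range(mm, start, end, &count_ops, &count);
    mmap_read_unlock(mm);

    return count;
}
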
mapping_dirty_helpers.c
   34  struct mm_walk *walk)   in wp_pte() argument
  126  struct mm_walk *walk)   in wp_clean_pmd_entry() argument
  134  walk->action = ACTION_AGAIN;   in wp_clean_pmd_entry()
  139  walk->action = ACTION_CONTINUE;   in wp_clean_pmd_entry()
  156  struct mm_walk *walk)   in wp_clean_pud_entry() argument
  164  walk->action = ACTION_AGAIN;   in wp_clean_pud_entry()
  170  walk->action = ACTION_CONTINUE;   in wp_clean_pud_entry()
  185  struct mm_walk *walk)   in wp_clean_pre_vma() argument
  193  walk->vma, walk->mm, start, end);   in wp_clean_pre_vma()
  202  inc_tlb_flush_pending(walk->mm);   in wp_clean_pre_vma()
  [all …]

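mapping_dirty_helpers.c shows the walk->action protocol from the callback side: wp_clean_pmd_entry() and wp_clean_pud_entry() do not return errors for entries they cannot handle, they set ACTION_AGAIN (re-read and retry the entry) or ACTION_CONTINUE (skip the subtree) and let the walker act on it. A sketch of a pmd_entry callback using the same mechanism; demo_pmd_entry() is illustrative, and the plain *pmd read is a simplification of the lockless read the real helpers use:

#include <linux/mm.h>
#include <linux/pagewalk.h>

static int demo_pmd_entry(pmd_t *pmd, unsigned long addr, unsigned long next,
                          struct mm_walk *walk)
{
    pmd_t pmdval = *pmd;  /* simplified: the helpers use a lockless read */

    if (pmd_none(pmdval)) {
        /* Nothing here (yet): have the walker re-evaluate the entry. */
        walk->action = ACTION_AGAIN;
        return 0;
    }

    if (pmd_trans_huge(pmdval)) {
        /* Huge entry this callback does not handle: skip its range. */
        walk->action = ACTION_CONTINUE;
        return 0;
    }

    /* Regular page table: descend so pte_entry sees each PTE. */
    walk->action = ACTION_SUBTREE;
    return 0;
}
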
ptdump.c
   18  struct ptdump_state *st = walk->private;   in note_kasan_page_table()
   22  walk->action = ACTION_CONTINUE;   in note_kasan_page_table()
   31  struct ptdump_state *st = walk->private;   in ptdump_pgd_entry()
   37  return note_kasan_page_table(walk, addr);   in ptdump_pgd_entry()
   52  struct ptdump_state *st = walk->private;   in ptdump_p4d_entry()
   58  return note_kasan_page_table(walk, addr);   in ptdump_p4d_entry()
   73  struct ptdump_state *st = walk->private;   in ptdump_pud_entry()
   94  struct ptdump_state *st = walk->private;   in ptdump_pmd_entry()
  113  struct ptdump_state *st = walk->private;   in ptdump_pte_entry()
  125  int depth, struct mm_walk *walk)   in ptdump_hole() argument
  [all …]

hmm.c
   67  struct vm_area_struct *vma = walk->vma;   in hmm_vma_fault()
  161  if (!walk->vma) {   in hmm_vma_walk_hole()
  325  struct mm_walk *walk)   in hmm_vma_walk_pmd() argument
  344  pmd_migration_entry_wait(walk->mm, pmdp);   in hmm_vma_walk_pmd()
  413  struct mm_walk *walk)   in hmm_vma_walk_pud() argument
  426  walk->action = ACTION_CONTINUE;   in hmm_vma_walk_pud()
  464  walk->action = ACTION_SUBTREE;   in hmm_vma_walk_pud()
  477  struct mm_walk *walk)   in hmm_vma_walk_hugetlb_entry() argument
  482  struct vm_area_struct *vma = walk->vma;   in hmm_vma_walk_hugetlb_entry()
  515  struct mm_walk *walk)   in hmm_vma_walk_test() argument
  [all …]

/linux/include/crypto/

scatterwalk.h
   30  unsigned int len = walk->sg->offset + walk->sg->length - walk->offset;   in scatterwalk_pagelen()
   45  walk->offset += nbytes;   in scatterwalk_advance()
   51  return !(walk->offset & alignmask);   in scatterwalk_aligned()
   56  return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);   in scatterwalk_page()
   67  walk->sg = sg;   in scatterwalk_start()
   68  walk->offset = sg->offset;   in scatterwalk_start()
   74  offset_in_page(walk->offset);   in scatterwalk_map()
   83  page = sg_page(walk->sg) + ((walk->offset - 1) >> PAGE_SHIFT);   in scatterwalk_pagedone()
   87  if (more && walk->offset >= walk->sg->offset + walk->sg->length)   in scatterwalk_pagedone()
   88  scatterwalk_start(walk, sg_next(walk->sg));   in scatterwalk_pagedone()
  [all …]

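scatterwalk.h is the low-level scatterlist cursor behind the AEAD glue further down (ccm_calculate_auth_mac(), crypto_aegis128_aesni_process_ad()): scatterwalk_start() latches an sg entry, scatterwalk_map() maps the current page, scatterwalk_advance() moves the offset, and scatterwalk_done()/scatterwalk_pagedone() drop the page and step to sg_next() once it is exhausted. A minimal consumer, with consume() as a hypothetical sink for the walked bytes:

#include <crypto/scatterwalk.h>

/* Hypothetical sink, e.g. feeding bytes into a MAC. */
static void consume(const u8 *data, unsigned int len)
{
}

static void demo_walk_sg(struct scatterlist *sg, unsigned int len)
{
    struct scatter_walk walk;

    scatterwalk_start(&walk, sg);

    while (len) {
        /* Bytes available before the next page or sg-entry boundary. */
        unsigned int n = scatterwalk_clamp(&walk, len);
        u8 *p;

        if (!n) {
            /* Current entry exhausted: move to the next one. */
            scatterwalk_start(&walk, sg_next(walk.sg));
            n = scatterwalk_clamp(&walk, len);
        }

        p = scatterwalk_map(&walk);
        consume(p, n);
        scatterwalk_unmap(p);

        scatterwalk_advance(&walk, n);
        scatterwalk_done(&walk, 0, len - n);

        len -= n;
    }
}
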
/linux/arch/arm/crypto/

aes-ce-glue.c
  179  ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,   in ecb_encrypt()
  199  ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,   in ecb_decrypt()
  460  if (unlikely(tail > 0 && walk.nbytes < walk.total)) {   in xts_encrypt()
  482  if (walk.nbytes < walk.total)   in xts_encrypt()
  490  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in xts_encrypt()
  508  ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,   in xts_encrypt()
  509  ctx->key1.key_enc, rounds, walk.nbytes, walk.iv,   in xts_encrypt()
  532  if (unlikely(tail > 0 && walk.nbytes < walk.total)) {   in xts_decrypt()
  554  if (walk.nbytes < walk.total)   in xts_decrypt()
  562  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in xts_decrypt()
  [all …]

aes-neonbs-glue.c
   94  struct skcipher_walk walk;   in __ecb_crypt() local
  102  if (walk.nbytes < walk.total)   in __ecb_crypt()
  107  fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,   in __ecb_crypt()
  176  if (walk.nbytes < walk.total)   in cbc_decrypt()
  181  aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,   in cbc_decrypt()
  183  walk.iv);   in cbc_decrypt()
  249  if (walk.nbytes < walk.total) {   in ctr_encrypt()
  256  aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,   in ctr_encrypt()
  373  crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv);   in __xts_crypt()
  379  if (walk.nbytes < walk.total) {   in __xts_crypt()
  [all …]

/linux/arch/arm64/crypto/

aes-neonbs-glue.c
  106  if (walk.nbytes < walk.total)   in __ecb_crypt()
  111  fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,   in __ecb_crypt()
  168  neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,   in cbc_encrypt()
  189  if (walk.nbytes < walk.total)   in cbc_decrypt()
  194  aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,   in cbc_decrypt()
  196  walk.iv);   in cbc_decrypt()
  219  if (walk.nbytes < walk.total) {   in ctr_encrypt()
  226  aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,   in ctr_encrypt()
  313  if (walk.nbytes < walk.total || walk.nbytes % AES_BLOCK_SIZE)   in __xts_crypt()
  324  neon_aes_ecb_encrypt(walk.iv, walk.iv,   in __xts_crypt()
  [all …]

aes-glue.c
  187  aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,   in ecb_encrypt()
  207  aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,   in ecb_decrypt()
  324  ctx->key_enc, rounds, walk.nbytes, walk.iv);   in cts_cbc_encrypt()
  381  ctx->key_dec, rounds, walk.nbytes, walk.iv);   in cts_cbc_decrypt()
  482  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in ctr_encrypt()
  504  if (unlikely(tail > 0 && walk.nbytes < walk.total)) {   in xts_encrypt()
  526  if (walk.nbytes < walk.total)   in xts_encrypt()
  534  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in xts_encrypt()
  552  aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,   in xts_encrypt()
  576  if (unlikely(tail > 0 && walk.nbytes < walk.total)) {   in xts_decrypt()
  [all …]

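The arm64 glue files share one structural rule visible in these hits: the SIMD routine (aes_ecb_encrypt(), aesbs_cbc_decrypt(), ...) runs inside a kernel_neon_begin()/kernel_neon_end() window, while skcipher_walk_done() is called outside it because the walker may sleep. A sketch of the ECB-shaped loop under that convention; demo_asm_ecb_encrypt() is a made-up stand-in for the real assembler entry point:

#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

/* Stand-in for the NEON/Crypto Extensions routine implemented in assembly. */
static void demo_asm_ecb_encrypt(u8 *dst, const u8 *src, const u32 *rk,
                                 int rounds, int blocks)
{
}

static int demo_ecb_encrypt(struct skcipher_request *req, const u32 *rk,
                            int rounds)
{
    struct skcipher_walk walk;
    unsigned int blocks;
    int err;

    err = skcipher_walk_virt(&walk, req, false);

    while ((blocks = walk.nbytes / AES_BLOCK_SIZE) != 0) {
        kernel_neon_begin();   /* claim the FP/SIMD register file */
        demo_asm_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                             rk, rounds, blocks);
        kernel_neon_end();     /* release before anything that may sleep */

        err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
    }

    return err;
}
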
aes-ce-ccm-glue.c
  125  scatterwalk_start(&walk, sg_next(walk.sg));   in ccm_calculate_auth_mac()
  175  if (walk.nbytes == walk.total)   in ccm_encrypt()
  178  ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,   in ccm_encrypt()
  182  if (walk.nbytes == walk.total)   in ccm_encrypt()
  187  if (walk.nbytes) {   in ccm_encrypt()
  194  } while (walk.nbytes);   in ccm_encrypt()
  233  if (walk.nbytes == walk.total)   in ccm_decrypt()
  236  ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,   in ccm_decrypt()
  240  if (walk.nbytes == walk.total)   in ccm_decrypt()
  245  if (walk.nbytes) {   in ccm_decrypt()
  [all …]

/linux/arch/x86/crypto/

sm4_aesni_avx_glue.c
   43  struct skcipher_walk walk;   in ecb_do_crypt() local
   97  struct skcipher_walk walk;   in sm4_cbc_encrypt() local
  104  const u8 *iv = walk.iv;   in sm4_cbc_encrypt()
  116  if (iv != walk.iv)   in sm4_cbc_encrypt()
  131  struct skcipher_walk walk;   in sm4_avx_cbc_decrypt() local
  195  struct skcipher_walk walk;   in sm4_cfb_encrypt() local
  203  const u8 *iv = walk.iv;   in sm4_cfb_encrypt()
  215  if (iv != walk.iv)   in sm4_cfb_encrypt()
  219  if (walk.nbytes == walk.total && nbytes > 0) {   in sm4_cfb_encrypt()
  280  if (walk.nbytes == walk.total && nbytes > 0) {   in sm4_avx_cfb_decrypt()
  [all …]

aesni-intel_glue.c
  416  walk.nbytes, walk.iv);   in cts_cbc_encrypt()
  472  walk.nbytes, walk.iv);   in cts_cbc_decrypt()
  516  if (walk.nbytes == walk.total && nbytes > 0) {   in ctr_crypt()
  687  walk.src.virt.addr, walk.nbytes);   in gcmaes_crypt_by_sg()
  690  walk.src.virt.addr, walk.nbytes);   in gcmaes_crypt_by_sg()
  888  walk.dst.virt.addr, walk.src.virt.addr,   in xts_crypt()
  892  walk.dst.virt.addr, walk.src.virt.addr,   in xts_crypt()
  920  walk.dst.virt.addr, walk.src.virt.addr,   in xts_crypt()
  921  walk.nbytes, walk.iv);   in xts_crypt()
  924  walk.dst.virt.addr, walk.src.virt.addr,   in xts_crypt()
  [all …]

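In aesni-intel_glue.c the test at line 516, walk.nbytes == walk.total && nbytes > 0, is the CTR tail: the final chunk ends in a partial block, so keystream for one more counter value is produced into a local buffer and only the trailing bytes are XORed out. Roughly, with demo_encrypt_block() as a hypothetical single-block AES in place of the real AES-NI primitive:

#include <crypto/aes.h>
#include <crypto/algapi.h>   /* crypto_inc(), crypto_xor_cpy() */
#include <crypto/internal/skcipher.h>

/* Hypothetical single-block AES: out = E_k(in). */
static void demo_encrypt_block(u8 *out, const u8 *in)
{
}

/* Handle the final partial block of a CTR walk; nbytes < AES_BLOCK_SIZE. */
static void demo_ctr_tail(struct skcipher_walk *walk, unsigned int nbytes)
{
    u8 keystream[AES_BLOCK_SIZE];

    demo_encrypt_block(keystream, walk->iv);
    crypto_xor_cpy(walk->dst.virt.addr + walk->nbytes - nbytes,
                   walk->src.virt.addr + walk->nbytes - nbytes,
                   keystream, nbytes);
    crypto_inc(walk->iv, AES_BLOCK_SIZE);  /* bump the big-endian counter */
}
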
blowfish_glue.c
   76  struct skcipher_walk walk;   in ecb_crypt() local
   82  while ((nbytes = walk.nbytes)) {   in ecb_crypt()
   83  u8 *wsrc = walk.src.virt.addr;   in ecb_crypt()
   84  u8 *wdst = walk.dst.virt.addr;   in ecb_crypt()
  127  struct skcipher_walk *walk)   in __cbc_encrypt() argument
  133  u64 *iv = (u64 *)walk->iv;   in __cbc_encrypt()
  145  *(u64 *)walk->iv = *iv;   in __cbc_encrypt()
  153  struct skcipher_walk walk;   in cbc_encrypt() local
  224  *dst ^= *(u64 *)walk->iv;   in __cbc_decrypt()
  225  *(u64 *)walk->iv = last_iv;   in __cbc_decrypt()
  [all …]

des3_ede_glue.c
   77  struct skcipher_walk walk;   in ecb_crypt() local
   83  while ((nbytes = walk.nbytes)) {   in ecb_crypt()
   84  u8 *wsrc = walk.src.virt.addr;   in ecb_crypt()
   85  u8 *wdst = walk.dst.virt.addr;   in ecb_crypt()
  135  struct skcipher_walk *walk)   in __cbc_encrypt() argument
  141  u64 *iv = (u64 *)walk->iv;   in __cbc_encrypt()
  153  *(u64 *)walk->iv = *iv;   in __cbc_encrypt()
  161  struct skcipher_walk walk;   in cbc_encrypt() local
  230  *dst ^= *(u64 *)walk->iv;   in __cbc_decrypt()
  231  *(u64 *)walk->iv = last_iv;   in __cbc_decrypt()
  [all …]

aegis128-aesni-glue.c
   73  struct scatter_walk walk;   in crypto_aegis128_aesni_process_ad() local
   77  scatterwalk_start(&walk, sg_src);   in crypto_aegis128_aesni_process_ad()
  107  scatterwalk_advance(&walk, size);   in crypto_aegis128_aesni_process_ad()
  108  scatterwalk_done(&walk, 0, assoclen);   in crypto_aegis128_aesni_process_ad()
  124  walk->src.virt.addr, walk->dst.virt.addr);   in crypto_aegis128_aesni_process_crypt()
  125  skcipher_walk_done(walk, walk->nbytes % AEGIS128_BLOCK_SIZE);   in crypto_aegis128_aesni_process_crypt()
  128  if (walk->nbytes) {   in crypto_aegis128_aesni_process_crypt()
  129  ops->crypt_tail(state, walk->nbytes, walk->src.virt.addr,   in crypto_aegis128_aesni_process_crypt()
  130  walk->dst.virt.addr);   in crypto_aegis128_aesni_process_crypt()
  131  skcipher_walk_done(walk, 0);   in crypto_aegis128_aesni_process_crypt()
  [all …]

/linux/arch/sparc/crypto/

aes_glue.c
  223  struct skcipher_walk walk;   in ecb_encrypt() local
  247  struct skcipher_walk walk;   in ecb_decrypt() local
  272  struct skcipher_walk walk;   in cbc_encrypt() local
  285  walk.iv);   in cbc_encrypt()
  297  struct skcipher_walk walk;   in cbc_decrypt() local
  311  walk.iv);   in cbc_decrypt()
  322  u8 *ctrblk = walk->iv;   in ctr_crypt_final()
  338  struct skcipher_walk walk;   in ctr_crypt() local
  349  walk.dst.virt.addr,   in ctr_crypt()
  351  walk.iv);   in ctr_crypt()
  [all …]

des_glue.c
   99  struct skcipher_walk walk;   in __ecb_crypt() local
  112  des_sparc64_ecb_crypt(walk.src.virt.addr, walk.dst.virt.addr,   in __ecb_crypt()
  140  struct skcipher_walk walk;   in __cbc_crypt() local
  155  walk.dst.virt.addr,   in __cbc_crypt()
  158  walk.iv);   in __cbc_crypt()
  161  walk.dst.virt.addr,   in __cbc_crypt()
  164  walk.iv);   in __cbc_crypt()
  248  struct skcipher_walk walk;   in __ecb3_crypt() local
  264  walk.dst.virt.addr,   in __ecb3_crypt()
  314  walk.iv);   in __cbc3_crypt()
  [all …]

/linux/arch/powerpc/crypto/

aes-spe-glue.c
  196  ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr,   in ppc_ecb_crypt()
  199  ppc_decrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr,   in ppc_ecb_crypt()
  203  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in ppc_ecb_crypt()
  235  ppc_encrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr,   in ppc_cbc_crypt()
  237  walk.iv);   in ppc_cbc_crypt()
  239  ppc_decrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr,   in ppc_cbc_crypt()
  241  walk.iv);   in ppc_cbc_crypt()
  244  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in ppc_cbc_crypt()
  276  ppc_crypt_ctr(walk.dst.virt.addr, walk.src.virt.addr,   in ppc_ctr_crypt()
  280  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);   in ppc_ctr_crypt()
  [all …]

/linux/drivers/atm/

idt77105.c
   86  struct idt77105_priv *walk;   in idt77105_stats_timer_func() local
   91  for (walk = idt77105_all; walk; walk = walk->next) {   in idt77105_stats_timer_func()
   92  dev = walk->dev;   in idt77105_stats_timer_func()
   94  stats = &walk->stats;   in idt77105_stats_timer_func()
  115  struct idt77105_priv *walk;   in idt77105_restart_timer_func() local
  120  for (walk = idt77105_all; walk; walk = walk->next) {   in idt77105_restart_timer_func()
  121  dev = walk->dev;   in idt77105_restart_timer_func()
  323  struct idt77105_priv *walk, *prev;   in idt77105_stop() local
  332  walk != NULL;   in idt77105_stop()
  333  prev = walk, walk = walk->next) {   in idt77105_stop()
  [all …]

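Outside crypto and mm, walk is simply the conventional name for a list cursor: idt77105_stats_timer_func() iterates the idt77105_all singly linked list, and idt77105_stop() carries a prev pointer next to walk so the matching entry can be unlinked. The generic idiom, with the struct, field, and function names (node, key, list_unlink()) made up for illustration:

#include <stddef.h>

struct node {
    struct node *next;
    int key;
};

/* Unlink and return the first node whose key matches, or NULL. */
static struct node *list_unlink(struct node **head, int key)
{
    struct node *walk, *prev;

    for (prev = NULL, walk = *head;
         walk != NULL;
         prev = walk, walk = walk->next) {
        if (walk->key != key)
            continue;
        if (prev)
            prev->next = walk->next;   /* splice out mid-list */
        else
            *head = walk->next;        /* removing the head */
        return walk;
    }

    return NULL;
}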