/linux/arch/x86/kernel/fpu/

  xstate.h
    104  #define XSTATE_XSAVE(st, lmask, hmask, err) \                argument
    124  #define XSTATE_XRESTORE(st, lmask, hmask) \                  argument
    166  u32 hmask = mask >> 32;                        in os_xsave()                   local
    186  u32 hmask = mask >> 32;                        in os_xrstor()                  local
    197  u32 hmask = mask >> 32;                        in os_xrstor_supervisor()       local
    251  u32 hmask;                                     in xsave_to_user_sigframe()     local
    259  hmask = mask >> 32;                            in xsave_to_user_sigframe()
    263  XSTATE_OP(XSAVE, buf, lmask, hmask, err);      in xsave_to_user_sigframe()
    276  u32 hmask = mask >> 32;                        in xrstor_from_user_sigframe()  local
    282  XSTATE_OP(XRSTOR, xstate, lmask, hmask, err);  in xrstor_from_user_sigframe()
    [all …]
  xstate.c
    375  u32 hmask = mask >> 32;                        in os_xrstor_booting()          local
    379  XSTATE_OP(XRSTORS, xstate, lmask, hmask, err); in os_xrstor_booting()
    381  XSTATE_OP(XRSTOR, xstate, lmask, hmask, err);  in os_xrstor_booting()
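In both files the pattern is the same: the XSAVE instruction family takes its 64-bit
requested-feature bitmap in EDX:EAX, so callers split `mask` into a low half (`lmask`)
and a high half (`hmask`) before issuing the instruction. A minimal sketch of that
split, assuming a plain `xsave` memory-operand encoding rather than the kernel's
exception-table-wrapped XSTATE_OP macro:

    #include <stdint.h>

    /* Sketch only: save the requested xstate features into buf.       */
    /* The 64-bit mask reaches XSAVE as EDX:EAX (hmask:lmask).         */
    static inline void xsave_sketch(void *buf, uint64_t mask)
    {
            uint32_t lmask = (uint32_t)mask;         /* low 32 bits  -> EAX */
            uint32_t hmask = (uint32_t)(mask >> 32); /* high 32 bits -> EDX */

            asm volatile("xsave %[st]"
                         : [st] "+m" (*(char *)buf)
                         : "a" (lmask), "d" (hmask)
                         : "memory");
    }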

/linux/net/xfrm/

  xfrm_hash.h
     87  unsigned int hmask)                         in __xfrm_dst_hash()  argument
     98  return (h ^ (h >> 16)) & hmask;             in __xfrm_dst_hash()
    104  unsigned int hmask)                         in __xfrm_src_hash()  argument
    115  return (h ^ (h >> 16)) & hmask;             in __xfrm_src_hash()
    120  unsigned short family, unsigned int hmask)  in __xfrm_spi_hash()  argument
    143  return (index ^ (index >> 8)) & hmask;      in __idx_hash()
    158  return hmask + 1;                           in __sel_hash()
    166  return hmask + 1;                           in __sel_hash()
    172  return h & hmask;                           in __sel_hash()
    178  unsigned int hmask,                         in __addr_hash()      argument
    [all …]
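In every one of these helpers `hmask` is the bucket count minus one (the tables are
sized in powers of two), so `& hmask` stands in for a modulo and the `h ^ (h >> 16)`
fold lets the high bits of the hash influence the low bits that survive the mask.
A self-contained sketch of that idiom, with illustrative names:

    #include <stdint.h>

    /* hmask == table_size - 1, table_size a power of two. */
    static inline unsigned int bucket_of(uint32_t h, unsigned int hmask)
    {
            h ^= h >> 16;        /* fold the high bits down              */
            return h & hmask;    /* cheap replacement for h % table_size */
    }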
  xfrm_policy.c
    485  unsigned int hmask = net->xfrm.policy_bydst[dir].hmask;  in policy_hash_bysel()         local
    493  if (hash == hmask + 1)                                   in policy_hash_bysel()
    505  unsigned int hmask = net->xfrm.policy_bydst[dir].hmask;  in policy_hash_direct()        local
    576  unsigned int hmask = net->xfrm.policy_bydst[dir].hmask;  in xfrm_bydst_resize()         local
    634  unsigned int hmask = net->xfrm.policy_bydst[dir].hmask;  in xfrm_bydst_should_resize()  local
    640  cnt > hmask)                                             in xfrm_bydst_should_resize()
    651  total > hmask)                                           in xfrm_byidx_should_resize()
   1223  unsigned int hmask;                                      in xfrm_hash_rebuild()         local
   1301  hmask = net->xfrm.policy_bydst[dir].hmask;               in xfrm_hash_rebuild()
   4006  hmask = 8 - 1;                                           in xfrm_policy_init()
    [all …]
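The same `hmask` also acts as the capacity indicator for the per-direction policy
hash: it starts out as `8 - 1` in xfrm_policy_init(), and the `*_should_resize()`
checks ask for a grow once the number of entries exceeds `hmask`, i.e. once the
average load per bucket passes one. A hedged sketch of that load-factor test (the
helper name and the doubling rule are mine, inferred from the snippet):

    /* Grow when there are more entries than buckets (hmask + 1 buckets). */
    static int table_should_grow(unsigned int nr_entries, unsigned int hmask)
    {
            return nr_entries > hmask;
    }

    /* A power-of-two resize then simply doubles the mask:               */
    /*     new_hmask = ((hmask + 1) << 1) - 1                            */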

/linux/arch/riscv/mm/

  tlbflush.c
     35  struct cpumask hmask;                              in __sbi_tlb_flush_range()  local
     49  riscv_cpuid_to_hartid_mask(cmask, &hmask);         in __sbi_tlb_flush_range()
     50  sbi_remote_sfence_vma_asid(cpumask_bits(&hmask),   in __sbi_tlb_flush_range()
     59  riscv_cpuid_to_hartid_mask(cmask, &hmask);         in __sbi_tlb_flush_range()
     60  sbi_remote_sfence_vma(cpumask_bits(&hmask),        in __sbi_tlb_flush_range()
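Here `hmask` is not a bit pattern but a `struct cpumask` of hart (hardware thread)
IDs: logical CPU numbers are translated to hart IDs before the mask is handed to the
SBI firmware call that broadcasts the fence. A kernel-context sketch of the pattern,
using only the helpers visible in the snippet (this reflects the older SBI API shown
here, not necessarily current mainline):

    /* Sketch: ask SBI to run SFENCE.VMA on every hart backing cmask. */
    static void remote_flush_sketch(const struct cpumask *cmask,
                                    unsigned long start, unsigned long size)
    {
            struct cpumask hmask;

            /* logical CPU ids -> hart ids understood by the firmware */
            riscv_cpuid_to_hartid_mask(cmask, &hmask);
            sbi_remote_sfence_vma(cpumask_bits(&hmask), start, size);
    }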

/linux/arch/riscv/kvm/

  vcpu_sbi.c
     77  ulong hmask;                                             in kvm_riscv_vcpu_sbi_ecall()  local
    114  hmask = kvm_riscv_vcpu_unpriv_read(vcpu, false, cp->a0,  in kvm_riscv_vcpu_sbi_ecall()
    117  hmask = (1UL << atomic_read(&kvm->online_vcpus)) - 1;    in kvm_riscv_vcpu_sbi_ecall()
    124  for_each_set_bit(i, &hmask, BITS_PER_LONG) {             in kvm_riscv_vcpu_sbi_ecall()
    138  hmask = kvm_riscv_vcpu_unpriv_read(vcpu, false, cp->a0,  in kvm_riscv_vcpu_sbi_ecall()
    141  hmask = (1UL << atomic_read(&kvm->online_vcpus)) - 1;    in kvm_riscv_vcpu_sbi_ecall()
    149  for_each_set_bit(i, &hmask, BITS_PER_LONG) {             in kvm_riscv_vcpu_sbi_ecall()
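In the SBI IPI and remote-fence ecalls `hmask` really is a plain bitmap: it is either
read from guest memory at the address passed in `a0`, or, when the guest asks for all
harts, synthesised as one bit per online vCPU; each set bit then names one target
vCPU. A sketch of the fan-out loop under those assumptions (the callback parameter is
illustrative, not part of the original code):

    /* Sketch: apply an SBI request to every vCPU selected by hmask. */
    static void sbi_for_each_target(struct kvm *kvm, unsigned long hmask,
                                    void (*fn)(struct kvm_vcpu *vcpu))
    {
            struct kvm_vcpu *tmp;
            int i;

            for_each_set_bit(i, &hmask, BITS_PER_LONG) {
                    tmp = kvm_get_vcpu(kvm, i);
                    if (tmp)
                            fn(tmp);
            }
    }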
  vmid.c
     70  struct cpumask hmask;                                  in kvm_riscv_stage2_vmid_update()
    105  riscv_cpuid_to_hartid_mask(cpu_online_mask, &hmask);   in kvm_riscv_stage2_vmid_update()
    106  sbi_remote_hfence_gvma(cpumask_bits(&hmask), 0, 0);    in kvm_riscv_stage2_vmid_update()
  mmu.c
    154  struct cpumask hmask;                                          in stage2_remote_tlb_flush()
    167  riscv_cpuid_to_hartid_mask(cpu_online_mask, &hmask);           in stage2_remote_tlb_flush()
    168  sbi_remote_hfence_gvma_vmid(cpumask_bits(&hmask), addr, size,  in stage2_remote_tlb_flush()

/linux/arch/ia64/lib/

  do_csum.S
     95  #define hmask r16                                                        macro
    141  mov hmask=-1                 // initialize head mask
    169  shl hmask=hmask,tmp2         // build head mask, mask off [0,first1off[
    175  (p8) and hmask=hmask,tmask   // apply tail mask to head mask if 1 word only
    181  and word1[0]=firstval,hmask  // and mask it as appropriate
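The head mask compensates for the word-aligned loads in the checksum loop: bytes that
precede the real start of the buffer in the first 8-byte word have to be zeroed, and
with a little-endian load those stray bytes sit in the low-order bits, so an all-ones
value shifted left by 8 * offset bits clears exactly the range [0, first1off[ named in
the comment. A C restatement of the idea, purely illustrative rather than a
translation of the assembly:

    #include <stdint.h>

    /* Zero the 'offset' (< 8) leading bytes of a little-endian 8-byte word. */
    static inline uint64_t apply_head_mask(uint64_t firstval, unsigned int offset)
    {
            uint64_t hmask = ~0ULL;      /* mov hmask = -1                 */

            hmask <<= 8 * offset;        /* shl hmask = hmask, tmp2        */
            return firstval & hmask;     /* and word1[0] = firstval, hmask */
    }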

/linux/include/linux/

  inetdevice.h
    211  __u32 hmask;                in bad_mask()       local
    214  hmask = ntohl(mask);        in bad_mask()
    215  if (hmask & (hmask+1))      in bad_mask()
    293  __u32 hmask = ntohl(mask);  in inet_mask_len()  local
    294  if (!hmask)                 in inet_mask_len()
    296  return 32 - ffz(~hmask);    in inet_mask_len()
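Two small bit tricks. In bad_mask() the value reaching `hmask` is the host-order
complement of the netmask (the caller inverts `mask` just before the quoted lines), so
a contiguous /n mask complements to 2^k - 1 and `hmask & (hmask + 1)` is non-zero
exactly when a stray bit breaks the run. In inet_mask_len() the prefix length falls
out of the position of the lowest set bit. A userspace sketch of both checks, with
`__builtin_ctz` standing in for the kernel's ffz-on-the-complement:

    #include <stdint.h>
    #include <stdbool.h>

    /* A netmask is contiguous iff its complement has the form 2^k - 1. */
    static bool mask_is_contiguous(uint32_t mask_host_order)
    {
            uint32_t hmask = ~mask_host_order;      /* /24 -> 0x000000ff */

            return (hmask & (hmask + 1)) == 0;
    }

    /* Prefix length: 32 minus the number of trailing zero (host) bits. */
    static int mask_len(uint32_t mask_host_order)
    {
            if (!mask_host_order)
                    return 0;
            return 32 - __builtin_ctz(mask_host_order);
    }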
  pagewalk.h
     50  int (*hugetlb_entry)(pte_t *pte, unsigned long hmask,

/linux/arch/powerpc/mm/book3s32/

  mmu.c
    399  unsigned int hmask = Hash_mask >> (16 - LG_HPTEG_SIZE);        in MMU_init_hw_patch()  local
    418  modify_instruction_site(&patch__hash_page_B, 0xffff, hmask);   in MMU_init_hw_patch()
    419  modify_instruction_site(&patch__hash_page_C, 0xffff, hmask);   in MMU_init_hw_patch()
    427  modify_instruction_site(&patch__flush_hash_B, 0xffff, hmask);  in MMU_init_hw_patch()

/linux/drivers/net/wan/

  fsl_ucc_hdlc.h
     56  __be16 hmask;          member
    105  unsigned short hmask;  member
  fsl_ucc_hdlc.c
    280  iowrite16be(priv->hmask, &priv->ucc_pram->hmask);               in uhdlc_init()
   1209  if (of_property_read_u16(np, "fsl,hmask", &uhdlc_priv->hmask))  in ucc_hdlc_probe()
   1210  uhdlc_priv->hmask = DEFAULT_ADDR_MASK;                          in ucc_hdlc_probe()

/linux/Documentation/devicetree/bindings/soc/fsl/cpm_qe/

  network.txt
    101  - fsl,hmask
    106  fsl,hmask = /bits/ 16 <0x0000>;

/linux/include/net/netns/

  xfrm.h
     16  unsigned int hmask;  member

/linux/mm/

  pagewalk.c
    299  unsigned long hmask = huge_page_mask(h);                 in walk_hugetlb_range()  local
    307  pte = huge_pte_offset(walk->mm, addr & hmask, sz);       in walk_hugetlb_range()
    310  err = ops->hugetlb_entry(pte, hmask, addr, next, walk);  in walk_hugetlb_range()
  hmm.c
    475  static int hmm_vma_walk_hugetlb_entry(pte_t *pte, unsigned long hmask,  in hmm_vma_walk_hugetlb_entry()  argument
    503  pfn = pte_pfn(entry) + ((start & ~hmask) >> PAGE_SHIFT);                in hmm_vma_walk_hugetlb_entry()
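In the hugetlb walk callbacks (here, and in mincore.c and fs/proc/task_mmu.c below)
`hmask` is `huge_page_mask(h)`, i.e. the complement of the huge page size minus one:
`addr & hmask` rounds an address down to the start of its huge page, while
`(addr & ~hmask) >> PAGE_SHIFT` turns the byte offset inside that page into a pfn
offset, so a single 4 KiB frame can be picked out of a 2 MiB or 1 GiB mapping. A small
arithmetic sketch assuming 2 MiB huge pages over a 4 KiB base page:

    #include <stdint.h>

    #define PAGE_SHIFT   12
    #define HPAGE_SIZE   (2UL * 1024 * 1024)      /* assumed huge page size */
    #define HPAGE_MASK   (~(HPAGE_SIZE - 1))      /* the walker's "hmask"   */

    static uint64_t pfn_inside_huge_page(uint64_t head_pfn, uint64_t addr)
    {
            uint64_t hmask = HPAGE_MASK;

            /* addr & hmask  -> huge-page-aligned base of the mapping    */
            /* addr & ~hmask -> byte offset of addr within the huge page */
            return head_pfn + ((addr & ~hmask) >> PAGE_SHIFT);
    }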
  mincore.c
     24  static int mincore_hugetlb(pte_t *pte, unsigned long hmask, unsigned long addr,  in mincore_hugetlb()  argument

/linux/drivers/gpu/drm/tegra/

  drm.h
     60  unsigned int hmask, vmask;  member

/linux/include/media/tpg/

  v4l2-tpg.h
    195  unsigned hmask[TPG_MAX_PLANES];                                 member
    441  return ((x / tpg->hdownsampling[plane]) & tpg->hmask[plane]) *  in tpg_hdiv()
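For the test-pattern generator, `hmask[plane]` appears to be a per-plane alignment
mask applied after horizontal downsampling, so that subsampled chroma planes land on
coordinates their sampling allows. The expression shape below is taken from tpg_hdiv()
above; the concrete mask value (~1 to force an even coordinate) is my assumption, not
something visible in the snippet:

    /* Divide a pixel x coordinate for a (possibly subsampled) plane.  */
    static unsigned tpg_hdiv_sketch(unsigned x, unsigned hdownsampling,
                                    unsigned hmask, unsigned twopixelsize)
    {
            /* e.g. hdownsampling = 2, hmask = ~1U: halve, force even, */
            /* then rescale to a byte offset in the plane.             */
            return ((x / hdownsampling) & hmask) * twopixelsize / 2;
    }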

/linux/fs/proc/

  task_mmu.c
    688  static int smaps_hugetlb_range(pte_t *pte, unsigned long hmask,     in smaps_hugetlb_range()    argument
   1503  static int pagemap_hugetlb_range(pte_t *ptep, unsigned long hmask,  in pagemap_hugetlb_range()  argument
   1529  ((addr & ~hmask) >> PAGE_SHIFT);                                    in pagemap_hugetlb_range()
   1831  static int gather_hugetlb_stats(pte_t *pte, unsigned long hmask,    in gather_hugetlb_stats()   argument
   1851  static int gather_hugetlb_stats(pte_t *pte, unsigned long hmask,    in gather_hugetlb_stats()   argument

/linux/drivers/net/ethernet/netronome/nfp/abm/

  cls.c
     45  if (knode->sel->hoff || knode->sel->hmask) {  in nfp_abm_u32_check_knode()

/linux/drivers/net/ethernet/intel/ixgbe/

  ixgbe_x550.c
   3703  u32 hmask = mask & ~IXGBE_GSSR_TOKEN_SM;           in ixgbe_acquire_swfw_sync_x550em_a()  local
   3709  if (hmask)                                         in ixgbe_acquire_swfw_sync_x550em_a()
   3710  status = ixgbe_acquire_swfw_sync_X540(hw, hmask);  in ixgbe_acquire_swfw_sync_x550em_a()
   3719  if (hmask)                                         in ixgbe_acquire_swfw_sync_x550em_a()
   3720  ixgbe_release_swfw_sync_X540(hw, hmask);           in ixgbe_acquire_swfw_sync_x550em_a()
   3738  u32 hmask = mask & ~IXGBE_GSSR_TOKEN_SM;           in ixgbe_release_swfw_sync_x550em_a()  local
   3743  if (hmask)                                         in ixgbe_release_swfw_sync_x550em_a()
   3744  ixgbe_release_swfw_sync_X540(hw, hmask);           in ixgbe_release_swfw_sync_x550em_a()
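Here the split separates ownership: IXGBE_GSSR_TOKEN_SM is the PHY token that the
x550em_a code manages itself, while `hmask` keeps only the conventional hardware
semaphores that the inherited X540 routines know how to take and drop. A hedged
kernel-context sketch of the acquire side following the calls visible above; the
token-acquire helper is my assumption about the companion call, not shown in the
snippet:

    /* Sketch: take ordinary HW semaphores and the PHY token separately. */
    static int acquire_swfw_sync_sketch(struct ixgbe_hw *hw, u32 mask)
    {
            u32 hmask = mask & ~IXGBE_GSSR_TOKEN_SM;   /* drop the token bit */
            int status = 0;

            if (hmask)
                    status = ixgbe_acquire_swfw_sync_X540(hw, hmask);
            if (status)
                    return status;
            if (!(mask & IXGBE_GSSR_TOKEN_SM))
                    return 0;

            status = ixgbe_get_phy_token(hw);          /* assumed helper name */
            if (status && hmask)
                    ixgbe_release_swfw_sync_X540(hw, hmask);
            return status;
    }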

/linux/tools/include/uapi/linux/

  pkt_cls.h
    182  __be32 hmask;  member