/linux/drivers/clk/qcom/

clk-rcg.c
    106 static u32 md_to_m(struct mn *mn, u32 md) in md_to_m() argument
    132 static u32 mn_to_md(struct mn *mn, u32 m, u32 n, u32 md) in mn_to_md() argument
    149 static u32 ns_m_to_n(struct mn *mn, u32 ns, u32 m) in ns_m_to_n() argument
    156 static u32 reg_to_mnctr_mode(struct mn *mn, u32 val) in reg_to_mnctr_mode() argument
    163 static u32 mn_to_ns(struct mn *mn, u32 m, u32 n, u32 ns) in mn_to_ns() argument
    202 struct mn *mn; in configure_bank() local
    225 mn = &rcg->mn[new_bank]; in configure_bank()
    346 struct mn *mn = &rcg->mn; in clk_rcg_recalc_rate() local
    372 struct mn *mn; in clk_dyn_rcg_recalc_rate() local
    383 mn = &rcg->mn[bank]; in clk_dyn_rcg_recalc_rate()
    [all …]

gcc-msm8660.c
    .mn = {  at lines 103, 154, 205, 256, 307, 358, 409, 460, 509, 558  [all …]

clk-rcg.h
    30 struct mn { struct
    79 struct mn mn; member
    117 struct mn mn[2]; member

gcc-mdm9615.c
    .mn = {  at lines 187, 238, 289, 340, 391, 454, 503, 552, 601, 650  [all …]

gcc-ipq806x.c
    .mn = {  at lines 343, 394, 445, 496, 547, 598, 662, 711, 760, 809  [all …]

gcc-msm8960.c
    .mn = {  at lines 329, 380, 431, 482, 533, 584, 635, 686, 735, 784  [all …]

mmcc-msm8960.c
    .mn = {  at lines 172, 221, 270, 326, 390, 454, 705, 1134, 1424, 1714  [all …]

lcc-ipq806x.c
    .mn = {  at lines 111, 225, 305, 364

lcc-mdm9615.c
    94 .mn = {
    200 .mn = { \
    344 .mn = {
    412 .mn = {

lcc-msm8960.c
    92 .mn = {
    198 .mn = { \
    342 .mn = {
    410 .mn = {
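The clk-rcg hits above are Qualcomm's RCG (root clock generator) M/N counters: the output rate is the parent rate scaled by M/N, and helpers such as mn_to_md()/mn_to_ns() pack values into the MD and NS registers while md_to_m()/ns_m_to_n() decode them again. Below is a minimal sketch of that arithmetic in plain C; struct mn_example and its field layout are illustrative assumptions, not the actual struct mn from clk-rcg.h.

    #include <stdint.h>

    /*
     * Illustrative only: models an M/N counter in the style of the qcom
     * RCG code above.  Field names and shifts are assumptions, not the
     * kernel's struct mn.
     */
    struct mn_example {
        uint8_t m_val_shift;   /* bit position of M in the MD register */
        uint8_t n_val_shift;   /* bit position of the N field in the NS register */
        uint8_t width;         /* width of the M/N fields (small, e.g. 8) */
    };

    /* An M/N counter scales the parent clock: f_out = f_in * M / N. */
    static uint64_t mn_rate(uint64_t parent_hz, uint32_t m, uint32_t n)
    {
        return n ? parent_hz * m / n : parent_hz;
    }

    /* Insert M into its MD-register field, in the spirit of mn_to_md(). */
    static uint32_t mn_example_to_md(const struct mn_example *mn,
                                     uint32_t m, uint32_t md)
    {
        uint32_t mask = ((1u << mn->width) - 1) << mn->m_val_shift;

        md &= ~mask;                          /* clear the old M field */
        md |= (m << mn->m_val_shift) & mask;  /* insert the new value  */
        return md;
    }

In the driver itself, clk_rcg_recalc_rate() decodes M and N back out of the hardware registers with md_to_m() and ns_m_to_n() and applies essentially the same relation.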
/linux/drivers/misc/sgi-gru/

grutlbpurge.c
    209 static int gru_invalidate_range_start(struct mmu_notifier *mn, in gru_invalidate_range_start() argument
    212 struct gru_mm_struct *gms = container_of(mn, struct gru_mm_struct, in gru_invalidate_range_start()
    224 static void gru_invalidate_range_end(struct mmu_notifier *mn, in gru_invalidate_range_end() argument
    227 struct gru_mm_struct *gms = container_of(mn, struct gru_mm_struct, in gru_invalidate_range_end()
    252 static void gru_free_notifier(struct mmu_notifier *mn) in gru_free_notifier() argument
    254 kfree(container_of(mn, struct gru_mm_struct, ms_notifier)); in gru_free_notifier()
    267 struct mmu_notifier *mn; in gru_register_mmu_notifier() local
    269 mn = mmu_notifier_get_locked(&gru_mmuops, current->mm); in gru_register_mmu_notifier()
    270 if (IS_ERR(mn)) in gru_register_mmu_notifier()
    271 return ERR_CAST(mn); in gru_register_mmu_notifier()
    [all …]
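grutlbpurge.c, and several files below (arm-smmu-v3-sva.c, mmu_rb.c, iommu_v2.c, encl.c), follow the same mmu_notifier pattern: embed a struct mmu_notifier in a driver-private structure, register it against an mm, and use container_of() in the callbacks to get back to that structure. A minimal sketch of the pattern follows; struct my_ctx, my_ops and the handler bodies are hypothetical, and callback signatures vary somewhat between kernel versions.

    #include <linux/err.h>
    #include <linux/mmu_notifier.h>
    #include <linux/slab.h>

    /* Hypothetical driver context wrapping the notifier (cf. gru_mm_struct). */
    struct my_ctx {
        struct mmu_notifier mn;   /* embedded notifier handle */
        /* ... driver-private TLB/state tracking ... */
    };

    static int my_invalidate_range_start(struct mmu_notifier *mn,
                                         const struct mmu_notifier_range *range)
    {
        /* Recover the wrapper the same way the drivers above do. */
        struct my_ctx *ctx = container_of(mn, struct my_ctx, mn);

        /* Flush driver-side mappings for [range->start, range->end). */
        (void)ctx;
        return 0;
    }

    static void my_free_notifier(struct mmu_notifier *mn)
    {
        /* Called once the last reference is dropped via mmu_notifier_put(). */
        kfree(container_of(mn, struct my_ctx, mn));
    }

    static const struct mmu_notifier_ops my_ops = {
        .invalidate_range_start = my_invalidate_range_start,
        .free_notifier          = my_free_notifier,
    };

    static struct my_ctx *my_register(struct mm_struct *mm)
    {
        struct my_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
        int ret;

        if (!ctx)
            return ERR_PTR(-ENOMEM);
        ctx->mn.ops = &my_ops;
        ret = mmu_notifier_register(&ctx->mn, mm);  /* as in mmu_rb.c */
        if (ret) {
            kfree(ctx);
            return ERR_PTR(ret);
        }
        return ctx;
    }

The GRU code itself goes through mmu_notifier_get_locked(), which returns an existing registration for the mm (or an ERR_PTR on failure) so that several contexts can share one notifier.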
/linux/drivers/iommu/arm/arm-smmu-v3/

arm-smmu-v3-sva.c
    16 struct mmu_notifier mn; member
    24 #define mn_to_smmu(mn) container_of(mn, struct arm_smmu_mmu_notifier, mn) argument
    184 struct arm_smmu_mmu_notifier *smmu_mn = mn_to_smmu(mn); in arm_smmu_mm_invalidate_range()
    196 struct arm_smmu_mmu_notifier *smmu_mn = mn_to_smmu(mn); in arm_smmu_mm_release()
    220 kfree(mn_to_smmu(mn)); in arm_smmu_mmu_notifier_free()
    239 if (smmu_mn->mn.mm == mm) { in arm_smmu_mmu_notifier_get()
    258 smmu_mn->mn.ops = &arm_smmu_mmu_notifier_ops; in arm_smmu_mmu_notifier_get()
    260 ret = mmu_notifier_register(&smmu_mn->mn, mm); in arm_smmu_mmu_notifier_get()
    275 mmu_notifier_put(&smmu_mn->mn); in arm_smmu_mmu_notifier_get()
    283 struct mm_struct *mm = smmu_mn->mn.mm; in arm_smmu_mmu_notifier_put()
    [all …]
/linux/drivers/infiniband/hw/hfi1/

mmu_rb.c
    58 INIT_HLIST_NODE(&h->mn.hlist); in hfi1_mmu_rb_register()
    60 h->mn.ops = &mn_opts; in hfi1_mmu_rb_register()
    66 ret = mmu_notifier_register(&h->mn, current->mm); in hfi1_mmu_rb_register()
    84 mmu_notifier_unregister(&handler->mn, handler->mn.mm); in hfi1_mmu_rb_unregister()
    117 if (current->mm != handler->mn.mm) in hfi1_mmu_rb_insert()
    172 if (current->mm != handler->mn.mm) in hfi1_mmu_rb_remove_unless_exact()
    197 if (current->mm != handler->mn.mm) in hfi1_mmu_rb_evict()
    233 if (current->mm != handler->mn.mm) in hfi1_mmu_rb_remove()
    246 static int mmu_notifier_range_start(struct mmu_notifier *mn, in mmu_notifier_range_start() argument
    250 container_of(mn, struct mmu_rb_handler, mn); in mmu_notifier_range_start()
/linux/drivers/gpu/drm/nouveau/nvkm/core/

memory.c
    38 nvkm_mm_free(&fb->tags.mm, &tags->mn); in nvkm_memory_tags_put()
    61 if (tags->mn && tags->mn->length != nr) { in nvkm_memory_tags_get()
    77 if (!nvkm_mm_head(&fb->tags.mm, 0, 1, nr, nr, 1, &tags->mn)) { in nvkm_memory_tags_get()
    79 clr(device, tags->mn->offset, tags->mn->length); in nvkm_memory_tags_get()
    90 tags->mn = NULL; in nvkm_memory_tags_get()
/linux/mm/

page_ext.c
    351 struct memory_notify *mn = arg; in page_ext_callback() local
    356 ret = online_page_ext(mn->start_pfn, in page_ext_callback()
    357 mn->nr_pages, mn->status_change_nid); in page_ext_callback()
    360 offline_page_ext(mn->start_pfn, in page_ext_callback()
    361 mn->nr_pages, mn->status_change_nid); in page_ext_callback()
    364 offline_page_ext(mn->start_pfn, in page_ext_callback()
    365 mn->nr_pages, mn->status_change_nid); in page_ext_callback()
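page_ext_callback() above is a memory hotplug notifier: the void *arg it is handed is a struct memory_notify describing the PFN range being onlined or offlined. The sketch below shows the shape of such a callback, assuming registration with hotplug_memory_notifier(); my_online() and my_offline() are placeholder handlers, not kernel functions.

    #include <linux/memory.h>
    #include <linux/notifier.h>

    /* Placeholder handlers for the range described by 'mn'. */
    static int my_online(unsigned long start_pfn, unsigned long nr_pages, int nid)
    {
        return 0;
    }

    static void my_offline(unsigned long start_pfn, unsigned long nr_pages, int nid)
    {
    }

    static int my_memory_callback(struct notifier_block *self,
                                  unsigned long action, void *arg)
    {
        struct memory_notify *mn = arg;
        int ret = 0;

        switch (action) {
        case MEM_GOING_ONLINE:
            ret = my_online(mn->start_pfn, mn->nr_pages,
                            mn->status_change_nid);
            break;
        case MEM_OFFLINE:
        case MEM_CANCEL_ONLINE:
            my_offline(mn->start_pfn, mn->nr_pages,
                       mn->status_change_nid);
            break;
        default:
            break;
        }
        /* Translate -errno into the notifier chain's NOTIFY_* codes. */
        return notifier_from_errno(ret);
    }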
/linux/drivers/net/ethernet/mellanox/mlx5/core/diag/

en_rep_tracepoint.h
    24 TP_fast_assign(const struct mlx5e_neigh *mn = &nhe->m_neigh;
    34 if (mn->family == AF_INET) {
    35 *p32 = mn->dst_ip.v4;
    37 } else if (mn->family == AF_INET6) {
    38 *pin6 = mn->dst_ip.v6;

en_tc_tracepoint.h
    85 TP_fast_assign(const struct mlx5e_neigh *mn = &nhe->m_neigh;
    94 if (mn->family == AF_INET) {
    95 *p32 = mn->dst_ip.v4;
    97 } else if (mn->family == AF_INET6) {
    98 *pin6 = mn->dst_ip.v6;
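Both tracepoints copy the neighbour's destination address out of a family-tagged union: a 4-byte value for AF_INET, a struct in6_addr for AF_INET6. The sketch below mirrors that shape; struct neigh_addr_example is an illustrative stand-in, not the real struct mlx5e_neigh.

    #include <linux/in6.h>
    #include <linux/socket.h>    /* AF_INET, AF_INET6 */
    #include <linux/types.h>

    /* Illustrative stand-in for the family-tagged address in mlx5e_neigh. */
    struct neigh_addr_example {
        int family;
        union {
            __be32          v4;
            struct in6_addr v6;
        } dst_ip;
    };

    /* Copy the address into caller-provided buffers, as TP_fast_assign() does. */
    static void copy_dst_ip(const struct neigh_addr_example *mn,
                            __be32 *p32, struct in6_addr *pin6)
    {
        if (mn->family == AF_INET)
            *p32 = mn->dst_ip.v4;
        else if (mn->family == AF_INET6)
            *pin6 = mn->dst_ip.v6;
    }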
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/fb/

ram.c
    34 struct nvkm_mm_node *mn; member
    45 .mem = vram->mn, in nvkm_vram_map()
    54 return (u64)nvkm_mm_size(nvkm_vram(memory)->mn) << NVKM_RAM_MM_SHIFT; in nvkm_vram_size()
    61 if (!nvkm_mm_contiguous(vram->mn)) in nvkm_vram_addr()
    63 return (u64)nvkm_mm_addr(vram->mn) << NVKM_RAM_MM_SHIFT; in nvkm_vram_addr()
    82 struct nvkm_mm_node *next = vram->mn; in nvkm_vram_dtor()
    130 node = &vram->mn; in nvkm_ram_get()
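In ram.c (and in gk20a.c further down) the mn member is a struct nvkm_mm_node handed out by nouveau's nvkm_mm allocator; its offset and length are kept in allocation units, so byte addresses and sizes are produced by shifting, as in the NVKM_RAM_MM_SHIFT and << 12 expressions above. A tiny worked example with made-up values:

    #include <stdint.h>
    #include <stdio.h>

    /* Minimal stand-in for the offset/length pair kept in an nvkm_mm_node. */
    struct mm_node_example {
        uint32_t offset;   /* in allocation units */
        uint32_t length;   /* in allocation units */
    };

    int main(void)
    {
        /* gk20a.c uses 4 KiB units, i.e. a shift of 12. */
        const unsigned int shift = 12;
        struct mm_node_example mn = { .offset = 0x300, .length = 0x10 };

        uint64_t addr = (uint64_t)mn.offset << shift;   /* 0x300000 */
        uint64_t size = (uint64_t)mn.length << shift;   /* 0x10000  */

        printf("addr=0x%llx size=0x%llx\n",
               (unsigned long long)addr, (unsigned long long)size);
        return 0;
    }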
/linux/drivers/iommu/amd/

iommu_v2.c
    42 struct mmu_notifier mn; /* mmu_notifier handle */ member
    337 mmu_notifier_unregister(&pasid_state->mn, pasid_state->mm); in free_pasid_states()
    356 static struct pasid_state *mn_to_state(struct mmu_notifier *mn) in mn_to_state() argument
    358 return container_of(mn, struct pasid_state, mn); in mn_to_state()
    361 static void mn_invalidate_range(struct mmu_notifier *mn, in mn_invalidate_range() argument
    368 pasid_state = mn_to_state(mn); in mn_invalidate_range()
    386 pasid_state = mn_to_state(mn); in mn_release()
    639 pasid_state->mn.ops = &iommu_mn; in amd_iommu_bind_pasid()
    644 mmu_notifier_register(&pasid_state->mn, mm); in amd_iommu_bind_pasid()
    671 mmu_notifier_unregister(&pasid_state->mn, mm); in amd_iommu_bind_pasid()
    [all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/

gk20a.c
    53 struct nvkm_mm_node *mn; member
    130 return (u64)gk20a_instobj(memory)->mn->offset << 12; in gk20a_instobj_addr()
    136 return (u64)gk20a_instobj(memory)->mn->length << 12; in gk20a_instobj_size()
    288 .mem = node->mn, in gk20a_instobj_map()
    304 dma_free_attrs(dev, (u64)node->base.mn->length << PAGE_SHIFT, in gk20a_instobj_dtor_dma()
    317 struct nvkm_mm_node *r = node->base.mn; in gk20a_instobj_dtor_iommu()
    335 for (i = 0; i < node->base.mn->length; i++) { in gk20a_instobj_dtor_iommu()
    416 node->base.mn = &node->r; in gk20a_instobj_ctor_dma()
    493 node->base.mn = r; in gk20a_instobj_ctor_iommu()
    542 size, align, (u64)node->mn->offset << 12); in gk20a_instobj_new()
/linux/drivers/scsi/qla2xxx/

qla_bsg.c
    1066 if (!mn) { in qla84xx_updatefw()
    1077 mn->entry_count = 1; in qla84xx_updatefw()
    1145 if (!mn) { in qla84xx_mgmt_cmd()
    1152 mn->entry_count = 1; in qla84xx_mgmt_cmd()
    1191 mn->parameter1 = in qla84xx_mgmt_cmd()
    1197 mn->parameter1 = in qla84xx_mgmt_cmd()
    1200 mn->parameter2 = in qla84xx_mgmt_cmd()
    1243 mn->parameter1 = in qla84xx_mgmt_cmd()
    1249 mn->parameter1 = in qla84xx_mgmt_cmd()
    1252 mn->parameter2 = in qla84xx_mgmt_cmd()
    [all …]
/linux/arch/x86/kernel/apic/

x2apic_uv_x.c
    1242 struct mn { struct
    1250 static void get_mn(struct mn *mnp) in get_mn() argument
    1272 struct mn mn; in uv_init_hub_info() local
    1274 get_mn(&mn); in uv_init_hub_info()
    1275 hi->gpa_mask = mn.m_val ? in uv_init_hub_info()
    1276 (1UL << (mn.m_val + mn.n_val)) - 1 : in uv_init_hub_info()
    1279 hi->m_val = mn.m_val; in uv_init_hub_info()
    1280 hi->n_val = mn.n_val; in uv_init_hub_info()
    1281 hi->m_shift = mn.m_shift; in uv_init_hub_info()
    1282 hi->n_lshift = mn.n_lshift ? mn.n_lshift : 0; in uv_init_hub_info()
    [all …]
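x2apic_uv_x.c reads the UV hub's M/N geometry (m_val low address bits for the on-node offset, n_val bits above them for the node id) and builds gpa_mask as (1UL << (m_val + n_val)) - 1. A tiny worked example with made-up values; the real m_val/n_val come from get_mn() reading hub registers, and the fallback branch here is only a placeholder for the expression elided above.

    #include <stdio.h>

    int main(void)
    {
        /* Illustrative values only; get_mn() supplies the real ones. */
        unsigned int m_val = 46;    /* node-offset bits of a global address */
        unsigned int n_val = 4;     /* node-id bits above them */

        unsigned long gpa_mask = m_val ?
            (1UL << (m_val + n_val)) - 1 : ~0UL; /* fallback is a placeholder */

        printf("gpa_mask = 0x%lx\n", gpa_mask);  /* 0x3ffffffffffff */
        return 0;
    }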
/linux/drivers/gpu/drm/radeon/

radeon_mn.c
    49 static bool radeon_mn_invalidate(struct mmu_interval_notifier *mn, in radeon_mn_invalidate() argument
    53 struct radeon_bo *bo = container_of(mn, struct radeon_bo, notifier); in radeon_mn_invalidate()
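radeon_mn.c uses the per-range mmu_interval_notifier API rather than a whole-mm mmu_notifier: each buffer object registers the interval it mirrors, and its .invalidate() callback recovers the object with container_of(), as in the snippet above. A sketch of that pattern follows; struct my_buf, my_buf_track() and the empty eviction step are placeholders, and real users add locking around the teardown.

    #include <linux/mmu_notifier.h>

    /* Hypothetical object tracking one user-space range (cf. radeon_bo). */
    struct my_buf {
        struct mmu_interval_notifier notifier;
        /* ... pinned pages / DMA state for that range ... */
    };

    static bool my_invalidate(struct mmu_interval_notifier *mni,
                              const struct mmu_notifier_range *range,
                              unsigned long cur_seq)
    {
        struct my_buf *buf = container_of(mni, struct my_buf, notifier);

        if (!mmu_notifier_range_blockable(range))
            return false;    /* caller may not sleep; ask it to retry */

        mmu_interval_set_seq(mni, cur_seq);
        /* ... unmap/evict buf's pages for the invalidated range ... */
        (void)buf;
        return true;
    }

    static const struct mmu_interval_notifier_ops my_interval_ops = {
        .invalidate = my_invalidate,
    };

    /* Register the interval [start, start + length) of 'mm' for 'buf'. */
    static int my_buf_track(struct my_buf *buf, struct mm_struct *mm,
                            unsigned long start, unsigned long length)
    {
        return mmu_interval_notifier_insert(&buf->notifier, mm,
                                            start, length, &my_interval_ops);
    }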
/linux/net/sched/

sch_teql.c
    231 struct neighbour *mn; in __teql_resolve() local
    233 mn = __neigh_lookup_errno(n->tbl, n->primary_key, dev); in __teql_resolve()
    235 if (IS_ERR(mn)) in __teql_resolve()
    236 return PTR_ERR(mn); in __teql_resolve()
    237 n = mn; in __teql_resolve()
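__teql_resolve() also illustrates the kernel's ERR_PTR convention seen in grutlbpurge.c above: a pointer-returning function encodes a negative errno in the pointer, and callers test it with IS_ERR() and unpack it with PTR_ERR() (or re-wrap it with ERR_CAST() when returning a pointer of a different type). A minimal sketch with a hypothetical my_lookup()/my_use() pair:

    #include <linux/err.h>
    #include <linux/slab.h>

    struct my_res {
        int id;
    };

    /* Hypothetical lookup that reports failure through the pointer itself. */
    static struct my_res *my_lookup(int id)
    {
        struct my_res *res;

        if (id < 0)
            return ERR_PTR(-EINVAL);    /* encode a negative errno */

        res = kzalloc(sizeof(*res), GFP_KERNEL);
        if (!res)
            return ERR_PTR(-ENOMEM);
        res->id = id;
        return res;
    }

    static int my_use(int id)
    {
        struct my_res *res = my_lookup(id);

        if (IS_ERR(res))
            return PTR_ERR(res);    /* recover the errno, as sch_teql.c does */

        /* ... use res ... */
        kfree(res);
        return 0;
    }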
/linux/arch/x86/kernel/cpu/sgx/

encl.c
    447 static void sgx_mmu_notifier_release(struct mmu_notifier *mn, in sgx_mmu_notifier_release() argument
    450 struct sgx_encl_mm *encl_mm = container_of(mn, struct sgx_encl_mm, mmu_notifier); in sgx_mmu_notifier_release()
    468 mmu_notifier_put(mn); in sgx_mmu_notifier_release()
    472 static void sgx_mmu_notifier_free(struct mmu_notifier *mn) in sgx_mmu_notifier_free() argument
    474 struct sgx_encl_mm *encl_mm = container_of(mn, struct sgx_encl_mm, mmu_notifier); in sgx_mmu_notifier_free()