/linux/include/linux/
  log2.h
    217  #define order_base_2(n) \    (macro definition)
    231  return order_base_2(n) + 1;    in __bits_per()
    232  return order_base_2(n);    in __bits_per()

  hugetlb_cgroup.h
    32   #define HUGETLB_CGROUP_MIN_ORDER order_base_2(__MAX_CGROUP_SUBPAGE_INDEX + 1)

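For reference, order_base_2(n), defined in log2.h above, evaluates to ceil(log2(n)): the smallest order such that (1 << order) >= n, with order_base_2(1) == 0 (the result for n == 0 is not meaningful). The snippet below is a minimal userspace sketch of that behaviour; order_base_2_sketch() is a hypothetical helper written only for illustration, not the kernel's constant-folding macro from log2.h.

#include <stdio.h>

/* Illustration only: smallest order with (1UL << order) >= n, i.e. ceil(log2(n)). */
static unsigned int order_base_2_sketch(unsigned long n)
{
	unsigned int order = 0;

	while ((1UL << order) < n)
		order++;
	return order;
}

int main(void)
{
	/* Expected output: 0 2 3 12 */
	printf("%u %u %u %u\n",
	       order_base_2_sketch(1),    /* one element        -> order 0 */
	       order_base_2_sketch(4),    /* power of two       -> exact log2 */
	       order_base_2_sketch(5),    /* rounded up         -> order 3 (8) */
	       order_base_2_sketch(4096));
	return 0;
}

This rounding-up behaviour is why callers such as percpu-km.c below can pass order_base_2(nr_pages) straight to alloc_pages(): the buddy allocator takes an order, so a non-power-of-two page count is rounded up to the next power of two.
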
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/
  params.c
    78   return MLX5_MPWRQ_LOG_WQE_SZ - order_base_2(linear_frag_sz);    in mlx5e_mpwqe_log_pkts_per_wqe()
    123  log_stride_sz = order_base_2(mlx5e_rx_get_linear_frag_sz(params, xsk));    in mlx5e_rx_mpwqe_is_linear_skb()
    145  return order_base_2(DIV_ROUND_UP(MLX5E_RX_MAX_HEAD, MLX5E_SHAMPO_WQ_BASE_HEAD_ENTRY_SIZE));    in mlx5e_shampo_get_log_hd_entry_size()
    151  return order_base_2(MLX5E_SHAMPO_WQ_RESRV_SIZE / MLX5E_SHAMPO_WQ_BASE_RESRV_SIZE);    in mlx5e_shampo_get_log_rsrv_size()
    160  return order_base_2(DIV_ROUND_UP(resrv_size, params->sw_mtu));    in mlx5e_shampo_get_log_pkt_per_rsrv()
    168  return order_base_2(mlx5e_rx_get_linear_frag_sz(params, xsk));    in mlx5e_mpwqe_get_log_stride_size()
    439  info->log_num_frags = order_base_2(info->num_frags);    in mlx5e_build_rq_frags_info()
    454  return order_base_2(sz);    in mlx5e_get_rqwq_log_stride()
    481  return order_base_2((wqe_size / rsrv_size) * wq_size * (pkt_per_rsrv + 1));    in mlx5e_shampo_get_log_cq_size()
    744  return max_t(u8, MLX5E_PARAMS_MINIMUM_LOG_SQ_SIZE, order_base_2(wqebbs));    in mlx5e_build_icosq_log_wq_sz()

/linux/mm/
  percpu-km.c
    65   pages = alloc_pages(gfp, order_base_2(nr_pages));    in pcpu_create_chunk()
    98   __free_pages(chunk->data, order_base_2(nr_pages));    in pcpu_destroy_chunk()

/linux/drivers/watchdog/
  imgpdc_wdt.c
    120  val |= order_base_2(wdt->wdt_dev.timeout * clk_rate) - 1;    in __pdc_wdt_set_timeout()
    238  if (order_base_2(clk_rate) > PDC_WDT_CONFIG_DELAY_MASK + 1) {    in pdc_wdt_probe()
    243  if (order_base_2(clk_rate) == 0)    in pdc_wdt_probe()

/linux/drivers/gpu/drm/etnaviv/
  etnaviv_cmdbuf.c
    93   order = order_base_2(ALIGN(size, SUBALLOC_GRANULE) / SUBALLOC_GRANULE);    in etnaviv_cmdbuf_init()
    121  int order = order_base_2(ALIGN(cmdbuf->size, SUBALLOC_GRANULE) /    in etnaviv_cmdbuf_free()

/linux/drivers/infiniband/hw/hns/
  hns_roce_alloc.c
    90   buf->trunk_shift = order_base_2(ALIGN(size, PAGE_SIZE));    in hns_roce_buf_alloc()
    93   buf->trunk_shift = order_base_2(ALIGN(page_size, PAGE_SIZE));    in hns_roce_buf_alloc()

/linux/drivers/clk/sunxi/
  clk-sun9i-core.c
    161  _p = order_base_2(DIV_ROUND_UP(req->parent_rate, req->rate));    in sun9i_a80_get_ahb_factors()
    247  req->p = order_base_2(div);    in sun9i_a80_get_apb1_factors()

/linux/drivers/net/ethernet/netronome/nfp/abm/
  cls.c
    127  bits_per_prio = roundup_pow_of_two(order_base_2(abm->num_bands));    in nfp_abm_update_band_map()
    132  base_shift = 8 - order_base_2(abm->num_prios);    in nfp_abm_update_band_map()

  ctrl.c
    319  size = roundup_pow_of_two(order_base_2(abm->num_bands));    in nfp_abm_ctrl_prio_map_size()
    392  abm->dscp_mask = GENMASK(7, 8 - order_base_2(abm->num_prios));    in nfp_abm_ctrl_find_addrs()

/linux/drivers/gpu/drm/amd/amdkfd/
  kfd_mqd_manager_cik.c
    207  m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;    in __update_mqd()
    253  m->sdma_rlc_rb_cntl = order_base_2(q->queue_size / 4)    in update_mqd_sdma()
    341  m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;    in update_mqd_hiq()

  kfd_mqd_manager_v9.c
    224  m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;    in update_mqd()
    253  order_base_2(q->eop_ring_buffer_size / 4) - 1);    in update_mqd()
    392  m->sdmax_rlcx_rb_cntl = order_base_2(q->queue_size / 4)    in update_mqd_sdma()

  kfd_mqd_manager_vi.c
    181  m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;    in __update_mqd()
    213  order_base_2(q->eop_ring_buffer_size / 4) - 1);    in __update_mqd()
    362  m->sdmax_rlcx_rb_cntl = order_base_2(q->queue_size / 4)    in update_mqd_sdma()

/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
  gpfifonv50.c
    69   ilength = order_base_2(args->v0.ilength / 8);    in nv50_fifo_gpfifo_new()

  gpfifog84.c
    69   ilength = order_base_2(args->v0.ilength / 8);    in g84_fifo_gpfifo_new()

/linux/drivers/pci/controller/
  pcie-xilinx.c
    240  hwirq = bitmap_find_free_region(port->msi_map, XILINX_NUM_MSI_IRQS, order_base_2(nr_irqs));    in xilinx_msi_domain_alloc()
    263  bitmap_release_region(port->msi_map, d->hwirq, order_base_2(nr_irqs));    in xilinx_msi_domain_free()

  pcie-iproc-msi.c
    263  order_base_2(msi->nr_cpus * nr_irqs));    in iproc_msi_irq_domain_alloc()
    291  order_base_2(msi->nr_cpus * nr_irqs));    in iproc_msi_irq_domain_free()

/linux/arch/powerpc/kvm/
  book3s_hv_builtin.c
    60   VM_BUG_ON(order_base_2(nr_pages) < KVM_CMA_CHUNK_ORDER - PAGE_SHIFT);    in kvm_alloc_hpt_cma()
    62   return cma_alloc(kvm_cma, nr_pages, order_base_2(HPT_ALIGN_PAGES),    in kvm_alloc_hpt_cma()

/linux/drivers/gpu/drm/nouveau/nvkm/core/
  ramht.c
    153  ramht->bits = order_base_2(ramht->size);    in nvkm_ramht_new()

/linux/drivers/pwm/
  pwm-sl28cpld.c
    138  prescaler = order_base_2(prescaler);    in sl28cpld_pwm_apply()

/linux/drivers/irqchip/
  irq-armada-370-xp.c
    236  order_base_2(nr_irqs));    in armada_370_xp_msi_alloc()
    258  bitmap_release_region(msi_used, d->hwirq, order_base_2(nr_irqs));    in armada_370_xp_msi_free()

/linux/drivers/gpu/drm/
  drm_bufs.c
    254   map->size, order_base_2(map->size), map->handle);    in drm_addmap_core()
    741   order = order_base_2(request->size);    in drm_legacy_addbufs_agp()
    912   order = order_base_2(request->size);    in drm_legacy_addbufs_pci()
    1132  order = order_base_2(request->size);    in drm_legacy_addbufs_sg()
    1426  order = order_base_2(request->size);    in drm_legacy_markbufs()

/linux/drivers/infiniband/sw/rxe/
  rxe_queue.c
    81   q->log2_elem_size = order_base_2(elem_size);    in rxe_queue_init()

/linux/drivers/gpu/drm/amd/amdgpu/
  mes_v10_1.c
    616  (order_base_2(MES_EOP_SIZE / 4) - 1));    in mes_v10_1_mqd_init()
    663  (order_base_2(ring->ring_size / 4) - 1));    in mes_v10_1_mqd_init()
    665  ((order_base_2(AMDGPU_GPU_PAGE_SIZE / 4) - 1) << 8));    in mes_v10_1_mqd_init()

/linux/drivers/net/ethernet/mellanox/mlx5/core/
  en.h
    93   MLX5_MPWRQ_LOG_STRIDE_SZ(mdev, order_base_2(MLX5E_RX_MAX_HEAD))
    112  #define MLX5E_ORDER2_MAX_PACKET_MTU (order_base_2(10 * 1024))
    868  DECLARE_HASHTABLE(qos_tc2node, order_base_2(MLX5E_QOS_MAX_LEAF_NODES));