
Searched refs: order_base_2 (Results 1 – 25 of 125), sorted by relevance


/linux/include/linux/
log2.h
217 #define order_base_2(n) \ macro
231 return order_base_2(n) + 1; in __bits_per()
232 return order_base_2(n); in __bits_per()
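The log2.h hits above are the definition site: order_base_2(n) evaluates to the rounded-up base-2 order of n (order_base_2(0) and order_base_2(1) are 0, order_base_2(5) is 3, order_base_2(4096) is 12), and __bits_per() builds on it. A minimal userland sketch of the same arithmetic follows; the helper name is invented here and is not the kernel macro, which additionally folds to a compile-time constant for constant arguments.

#include <stdio.h>

/* Sketch only: runtime equivalent of what order_base_2() computes. */
static unsigned int order_base_2_sketch(unsigned long n)
{
	unsigned int order = 0;

	while ((1UL << order) < n)	/* smallest order with 2^order >= n */
		order++;
	return order;
}

int main(void)
{
	printf("%u %u %u %u\n",
	       order_base_2_sketch(1),		/* 0  */
	       order_base_2_sketch(5),		/* 3  */
	       order_base_2_sketch(4096),	/* 12 */
	       order_base_2_sketch(4097));	/* 13 */
	return 0;
}
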
/linux/kernel/
bounds.c
22 DEFINE(NR_CPUS_BITS, order_base_2(CONFIG_NR_CPUS)); in main()
26 DEFINE(LRU_GEN_WIDTH, order_base_2(MAX_NR_GENS + 1)); in main()
/linux/mm/
percpu-km.c
65 pages = alloc_pages(gfp, order_base_2(nr_pages)); in pcpu_create_chunk()
98 __free_pages(chunk->data, order_base_2(nr_pages)); in pcpu_destroy_chunk()
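A quick worked example of the rounding the percpu-km.c calls above imply (page count invented for illustration): a chunk needing nr_pages = 3 gives order_base_2(3) = 2, so alloc_pages() hands back a 2^2 = 4-page block, and pcpu_destroy_chunk() must pass the same order to __free_pages(); the unused page is the cost of the buddy allocator's power-of-two granularity.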
/linux/drivers/watchdog/
imgpdc_wdt.c
120 val |= order_base_2(wdt->wdt_dev.timeout * clk_rate) - 1; in __pdc_wdt_set_timeout()
213 if (order_base_2(clk_rate) > PDC_WDT_CONFIG_DELAY_MASK + 1) { in pdc_wdt_probe()
218 if (order_base_2(clk_rate) == 0) in pdc_wdt_probe()
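A hedged worked example for the imgpdc_wdt.c timeout math above, assuming a 32768 Hz watchdog clock and a 2 s timeout (values invented for illustration): timeout * clk_rate = 65536, order_base_2(65536) = 16, so the set_timeout path programs 16 - 1 = 15 into the delay field, while the probe-time checks reject clock rates whose order_base_2() would overflow the field or evaluate to zero.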
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/
params.c
22 u8 req_page_shift = xsk ? order_base_2(xsk->chunk_size) : PAGE_SHIFT; in mlx5e_mpwrq_page_shift()
293 order_base_2(linear_stride_sz); in mlx5e_mpwqe_log_pkts_per_wqe()
369 log_stride_sz = order_base_2(mlx5e_rx_get_linear_stride_sz(mdev, params, xsk, true)); in mlx5e_rx_mpwqe_is_linear_skb()
413 return order_base_2(DIV_ROUND_UP(MLX5E_RX_MAX_HEAD, MLX5E_SHAMPO_WQ_BASE_HEAD_ENTRY_SIZE)); in mlx5e_shampo_get_log_hd_entry_size()
419 return order_base_2(MLX5E_SHAMPO_WQ_RESRV_SIZE / MLX5E_SHAMPO_WQ_BASE_RESRV_SIZE); in mlx5e_shampo_get_log_rsrv_size()
428 return order_base_2(DIV_ROUND_UP(resrv_size, params->sw_mtu)); in mlx5e_shampo_get_log_pkt_per_rsrv()
436 return order_base_2(mlx5e_rx_get_linear_stride_sz(mdev, params, xsk, true)); in mlx5e_mpwqe_get_log_stride_size()
794 info->log_num_frags = order_base_2(info->num_frags); in mlx5e_build_rq_frags_info()
813 return order_base_2(sz); in mlx5e_get_rqwq_log_stride()
840 return order_base_2((wqe_size / rsrv_size) * wq_size * (pkt_per_rsrv + 1)); in mlx5e_shampo_get_log_cq_size()
[all …]
/linux/drivers/gpu/drm/etnaviv/
etnaviv_cmdbuf.c
92 order = order_base_2(ALIGN(size, SUBALLOC_GRANULE) / SUBALLOC_GRANULE); in etnaviv_cmdbuf_init()
120 int order = order_base_2(ALIGN(cmdbuf->size, SUBALLOC_GRANULE) / in etnaviv_cmdbuf_free()
/linux/drivers/infiniband/hw/hns/
hns_roce_alloc.c
89 buf->trunk_shift = order_base_2(ALIGN(size, PAGE_SIZE)); in hns_roce_buf_alloc()
92 buf->trunk_shift = order_base_2(ALIGN(page_size, PAGE_SIZE)); in hns_roce_buf_alloc()
/linux/drivers/clk/sunxi/
clk-sun9i-core.c
161 _p = order_base_2(DIV_ROUND_UP(req->parent_rate, req->rate)); in sun9i_a80_get_ahb_factors()
247 req->p = order_base_2(div); in sun9i_a80_get_apb1_factors()
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_mqd_manager_cik.c
191 m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1; in __update_mqd()
230 m->sdma_rlc_rb_cntl = order_base_2(q->queue_size / 4) in update_mqd_sdma()
351 m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1; in update_mqd_hiq()
kfd_mqd_manager_vi.c
181 m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1; in __update_mqd()
213 order_base_2(q->eop_ring_buffer_size / 4) - 1); in __update_mqd()
364 m->sdmax_rlcx_rb_cntl = order_base_2(q->queue_size / 4) in update_mqd_sdma()
kfd_mqd_manager_v9.c
252 m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1; in update_mqd()
285 min(0xA, order_base_2(q->eop_ring_buffer_size / 4) - 1) : 0; in update_mqd()
482 m->sdmax_rlcx_rb_cntl = order_base_2(q->queue_size / 4) in update_mqd_sdma()
/linux/drivers/net/ethernet/netronome/nfp/abm/
cls.c
127 bits_per_prio = roundup_pow_of_two(order_base_2(abm->num_bands)); in nfp_abm_update_band_map()
132 base_shift = 8 - order_base_2(abm->num_prios); in nfp_abm_update_band_map()
ctrl.c
319 size = roundup_pow_of_two(order_base_2(abm->num_bands)); in nfp_abm_ctrl_prio_map_size()
392 abm->dscp_mask = GENMASK(7, 8 - order_base_2(abm->num_prios)); in nfp_abm_ctrl_find_addrs()
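As a worked illustration of the ctrl.c mask construction above (priority count assumed, not taken from the driver): with abm->num_prios = 8, order_base_2(8) = 3 and GENMASK(7, 8 - 3) = GENMASK(7, 5) = 0xe0, i.e. the mask covers bits 7 down to 5.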
/linux/drivers/net/ethernet/marvell/octeontx2/nic/
qos.h
42 DECLARE_HASHTABLE(qos_hlist, order_base_2(OTX2_QOS_MAX_LEAF_NODES));
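Worth noting about the qos.h hit: order_base_2() expands to a constant expression when its argument is constant, which is what lets it size a hashtable inside DECLARE_HASHTABLE() at compile time. For instance (leaf-node count assumed purely for illustration), OTX2_QOS_MAX_LEAF_NODES = 100 would give order_base_2(100) = 7, i.e. a 128-bucket table.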
/linux/arch/powerpc/kvm/
book3s_hv_builtin.c
60 VM_BUG_ON(order_base_2(nr_pages) < KVM_CMA_CHUNK_ORDER - PAGE_SHIFT); in kvm_alloc_hpt_cma()
62 return cma_alloc(kvm_cma, nr_pages, order_base_2(HPT_ALIGN_PAGES), in kvm_alloc_hpt_cma()
/linux/drivers/pci/controller/
pcie-xilinx.c
234 hwirq = bitmap_find_free_region(pcie->msi_map, XILINX_NUM_MSI_IRQS, order_base_2(nr_irqs)); in xilinx_msi_domain_alloc()
257 bitmap_release_region(pcie->msi_map, d->hwirq, order_base_2(nr_irqs)); in xilinx_msi_domain_free()
pcie-iproc-msi.c
263 order_base_2(msi->nr_cpus * nr_irqs)); in iproc_msi_irq_domain_alloc()
291 order_base_2(msi->nr_cpus * nr_irqs)); in iproc_msi_irq_domain_free()
/linux/drivers/gpu/drm/nouveau/nvkm/core/
ramht.c
153 ramht->bits = order_base_2(ramht->size); in nvkm_ramht_new()
/linux/drivers/pwm/
pwm-sl28cpld.c
143 prescaler = order_base_2(prescaler); in sl28cpld_pwm_apply()
/linux/drivers/infiniband/sw/rxe/
rxe_queue.c
81 q->log2_elem_size = order_base_2(elem_size); in rxe_queue_init()
/linux/drivers/gpu/drm/amd/amdgpu/
cik_ih.c
127 rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4); in cik_ih_irq_init()
si_ih.c
77 rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4); in si_ih_irq_init()
amdgpu_ih.c
48 rb_bufsz = order_base_2(ring_size / 4); in amdgpu_ih_ring_init()
iceland_ih.c
129 rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4); in iceland_ih_irq_init()
/linux/drivers/irqchip/
irq-armada-370-xp.c
290 order_base_2(nr_irqs)); in mpic_msi_alloc()
312 bitmap_release_region(mpic->msi_used, d->hwirq, order_base_2(nr_irqs)); in mpic_msi_free()
