/linux/block/blk-settings.c
    102  min_not_zero(lim->max_hw_zone_append_sectors,  in blk_validate_zoned_limits()
    304  max_hw_sectors = min_not_zero(lim->max_hw_sectors,  in blk_validate_limits()
    697  t->max_sectors = min_not_zero(t->max_sectors, b->max_sectors);  in blk_stack_limits()
    698  t->max_user_sectors = min_not_zero(t->max_user_sectors,  in blk_stack_limits()
    700  t->max_hw_sectors = min_not_zero(t->max_hw_sectors, b->max_hw_sectors);  in blk_stack_limits()
    701  t->max_dev_sectors = min_not_zero(t->max_dev_sectors, b->max_dev_sectors);  in blk_stack_limits()
    707  t->seg_boundary_mask = min_not_zero(t->seg_boundary_mask,  in blk_stack_limits()
    709  t->virt_boundary_mask = min_not_zero(t->virt_boundary_mask,  in blk_stack_limits()
    712  t->max_segments = min_not_zero(t->max_segments, b->max_segments);  in blk_stack_limits()
    713  t->max_discard_segments = min_not_zero(t->max_discard_segments,  in blk_stack_limits()
    [all …]
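Note on the blk_stack_limits() hits: this is the limit-stacking idiom, where a stacked (top) device t inherits each limit from the bottom device b by taking the smaller of the two values, with 0 meaning "not set / unlimited" and therefore never allowed to win the comparison. A minimal sketch of that combining rule, using a hypothetical struct sketch_limits and stack_sketch_limits() rather than the real struct queue_limits / blk_stack_limits():

    /* Sketch only: illustrative stand-in for the stacking rule shown
     * at hits 697-713 above. */
    #include <linux/minmax.h>

    struct sketch_limits {
            unsigned int max_sectors;       /* 0 = unlimited */
            unsigned int max_segments;      /* 0 = unlimited */
    };

    static void stack_sketch_limits(struct sketch_limits *t,
                                    const struct sketch_limits *b)
    {
            /* smaller non-zero value wins; 0 never overrides a real limit */
            t->max_sectors  = min_not_zero(t->max_sectors,  b->max_sectors);
            t->max_segments = min_not_zero(t->max_segments, b->max_segments);
    }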
/linux/tools/testing/selftests/bpf/progs/bpf_dctcp.c
    18   #define min_not_zero(x, y) ({ \  [macro]
    137  alpha -= min_not_zero(alpha, alpha >> dctcp_shift_g);  in BPF_PROG()
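Note on the DCTCP hits (here and in net/ipv4/tcp_dctcp.c below): this is the fixed-point decay step of the marked-fraction EWMA, alpha = (1 - g) * alpha with g = 1/2^dctcp_shift_g (the tcp_dctcp.c default shift is 4, i.e. g = 1/16). min_not_zero() is what lets alpha reach exactly 0: once alpha >> dctcp_shift_g truncates to 0 while alpha is still nonzero, the macro returns alpha itself, so the small residue is cleared instead of persisting. A sketch with invented values:

    /* Sketch of the decay step only, not the full alpha update. */
    u32 alpha = 10;                             /* small leftover value       */
    alpha -= min_not_zero(alpha, alpha >> 4);   /* 10 >> 4 == 0, so the macro */
                                                /* returns 10 and alpha drops */
                                                /* straight to 0              */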
/linux/net/ipv4/tcp_cdg.c
    145  ca->delay_min = min_not_zero(ca->delay_min, ca->rtt.min);  in tcp_cdg_hystart_update()
    324  ca->rtt.min = min_not_zero(ca->rtt.min, sample->rtt_us);  in tcp_cdg_acked()
/linux/net/ipv4/tcp_dctcp.c
    154  alpha -= min_not_zero(alpha, alpha >> dctcp_shift_g);  in dctcp_update_alpha()
/linux/include/linux/dma-direct.h
    133  return end <= min_not_zero(*dev->dma_mask, dev->bus_dma_limit);  in dma_capable()
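Note on the DMA hits (this one and those under /linux/kernel/dma/, /linux/arch/arm/mm/ and /linux/drivers/iommu/ below): dev->bus_dma_limit is 0 when the bus adds no addressing restriction, so the effective limit is min_not_zero(*dev->dma_mask, dev->bus_dma_limit); a plain min() would wrongly collapse to 0. A minimal sketch of the check, with a hypothetical helper name:

    /* Sketch only: sketch_dma_fits() is hypothetical; the real check is
     * dma_capable() at dma-direct.h hit 133 above. */
    #include <linux/device.h>
    #include <linux/minmax.h>
    #include <linux/types.h>

    static bool sketch_dma_fits(struct device *dev, dma_addr_t end)
    {
            /* bus_dma_limit == 0 means "no extra bus restriction", so it
             * must not shadow the real mask */
            return end <= min_not_zero(*dev->dma_mask, dev->bus_dma_limit);
    }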
/linux/include/linux/minmax.h
    172  * min_not_zero - return the minimum that is _not_ zero, unless both are zero
    176  #define min_not_zero(x, y) ({ \
    184  #define min_not_zero(  [global macro]
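For reference, the macro behind all of these hits has historically been defined as sketched below: evaluate each argument once, return the other value if one is zero, otherwise return the smaller. Newer kernels route this through the type-checked min() machinery, so take this as a functional sketch rather than the exact current text of minmax.h:

    #define min_not_zero(x, y) ({                           \
            typeof(x) __x = (x);                            \
            typeof(y) __y = (y);                            \
            __x == 0 ? __y :                                \
                    ((__y == 0) ? __x : min(__x, __y)); })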
/linux/drivers/nvme/target/passthru.c
    105  max_hw_sectors = min_not_zero(pctrl->max_segments << PAGE_SECTORS_SHIFT,  in nvmet_passthru_override_id_ctrl()
    112  max_hw_sectors = min_not_zero(BIO_MAX_VECS << PAGE_SECTORS_SHIFT,  in nvmet_passthru_override_id_ctrl()
/linux/kernel/dma/direct.c
    49   u64 dma_limit = min_not_zero(  in dma_direct_optimal_gfp_mask()
    76   min_not_zero(dev->coherent_dma_mask, dev->bus_dma_limit);  in dma_coherent_ok()
/linux/kernel/dma/contiguous.c
    226  selected_limit = min_not_zero(limit_cmdline, limit);  in dma_contiguous_reserve()
/linux/kernel/dma/mapping.c
    131  return min_not_zero(mask, dev->bus_dma_limit) >=  in dma_go_direct()
    925  if (min_not_zero(dma_get_mask(dev), dev->bus_dma_limit) <  in dma_addressing_limited()
/linux/kernel/dma/swiotlb.c
    1210  phys_limit = min_not_zero(*dev->dma_mask, dev->bus_dma_limit);  in swiotlb_find_slots()
/linux/drivers/nvdimm/region_devs.c
    124  num_flush = min_not_zero(num_flush, nvdimm->num_flush);  in nd_region_activate()
    386  avail = min_not_zero(avail, nd_pmem_max_contiguous_dpa(  in nd_region_allocatable_dpa()
/linux/drivers/net/wwan/t7xx/t7xx_hif_dpmaif_tx.c
    146  real_rel_cnt = min_not_zero(budget, rel_cnt);  in t7xx_dpmaif_tx_release()
/linux/drivers/acpi/arm64/iort.c
    2028  limit = min_not_zero(limit, local_limit);  in acpi_iort_dma_get_max_cpu_address()
    2037  limit = min_not_zero(limit, local_limit);  in acpi_iort_dma_get_max_cpu_address()
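Note: the two iort.c hits are the "fold the smallest non-zero value over a loop" use of the macro, which several other hits (e.g. the nvdimm region_devs.c ones above) follow as well: entries that report no limit contribute 0 and are simply ignored. A generic sketch of that fold, with invented names (limits[], pick_limit()):

    /* Sketch only: fold the smallest non-zero limit over an array;
     * an entry of 0 means "unspecified" and must not shrink the result. */
    #include <linux/minmax.h>
    #include <linux/types.h>

    static u64 pick_limit(const u64 *limits, int n)
    {
            u64 limit = 0;          /* 0 = nothing accumulated yet */
            int i;

            for (i = 0; i < n; i++)
                    limit = min_not_zero(limit, limits[i]);

            return limit;           /* still 0 if no entry set a limit */
    }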
/linux/fs/btrfs/zoned.c
    752   fs_info->max_extent_size = min_not_zero(fs_info->max_extent_size,  in btrfs_check_zoned_mode()
    1401  bg->zone_capacity = min_not_zero(zone_info[0].capacity, zone_info[1].capacity);  in btrfs_load_block_group_dup()
    1447  bg->zone_capacity = min_not_zero(zone_info[0].capacity, zone_info[1].capacity);  in btrfs_load_block_group_raid1()
/linux/drivers/gpu/drm/panfrost/panfrost_mmu.c
    266  *count = min_not_zero(blk_offset, size) / SZ_4K;  in get_pgsize()
/linux/io_uring/kbuf.c
    232  needed = min_not_zero(needed, (size_t) PEEK_MAX_IMPORT);  in io_ring_buffers_peek()
/linux/io_uring/net.c
    579   .max_len = min_not_zero(sr->len, INT_MAX),  in io_send_select_buffer()
    1099  arg.max_len = min_not_zero(sr->len, kmsg->msg.msg_inq);  in io_recv_buf_select()
/linux/drivers/thunderbolt/tunnel.c
    128   spare = min_not_zero(sw->max_dma_credits, dma_credits);  in tb_available_credits()
    1822  credits = min_not_zero(dma_credits, nhi->sw->max_dma_credits);  in tb_tunnel_alloc_dma()
/linux/drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
    136  places[c].lpfn = min_not_zero(places[c].lpfn, visible_pfn);  in amdgpu_bo_placement_from_domain()
/linux/drivers/infiniband/ulp/rtrs/rtrs-clt.c
    1876  clt_path->s.signal_interval = min_not_zero(queue_depth,  in rtrs_rdma_conn_established()
    1893  clt->max_io_size = min_not_zero(clt_path->max_io_size,  in rtrs_rdma_conn_established()
/linux/drivers/block/drbd/drbd_req.c
    1726  et = min_not_zero(dt, ent);  in request_timer_fn()
/linux/arch/arm/mm/dma-mapping.c
    537  u64 mask = min_not_zero(dev->coherent_dma_mask, dev->bus_dma_limit);  in __dma_alloc()
/linux/drivers/iommu/dma-iommu.c
    777  dma_limit = min_not_zero(dma_limit, dev->bus_dma_limit);  in iommu_dma_alloc_iova()
/linux/drivers/target/target_core_spc.c
    555  put_unaligned_be32(min_not_zero(mtl, io_max_blocks), &buf[8]);  in spc_emulate_evpd_b0()