Searched refs:gfp_mask (Results 1 – 25 of 28) sorted by relevance

/freebsd/sys/compat/linuxkpi/common/include/linux/
scatterlist.h
206 sg_kmalloc(unsigned int nents, gfp_t gfp_mask) in sg_kmalloc()
209 return ((void *)__get_free_page(gfp_mask)); in sg_kmalloc()
211 return (kmalloc(nents * sizeof(struct scatterlist), gfp_mask));
266 gfp_t gfp_mask, sg_alloc_fn *alloc_fn) in __sg_alloc_table()
293 sg = alloc_fn(alloc_size, gfp_mask); in __sg_alloc_table()
319 sg_alloc_table(struct sg_table *table, unsigned int nents, gfp_t gfp_mask) in sg_alloc_table()
324 NULL, gfp_mask, sg_kmalloc); in sg_alloc_table()
338 gfp_t gfp_mask) in __sg_alloc_table_from_pages()
344 unsigned int max_segment, gfp_t gfp_mask) in __sg_alloc_table_from_pages()
375 if (__predict_false((rc = sg_alloc_table(sgt, segs, gfp_mask)))) in __sg_alloc_table_from_pages()
203 sg_kmalloc(unsigned int nents, gfp_t gfp_mask) sg_kmalloc() argument
263 __sg_alloc_table(struct sg_table *table, unsigned int nents, unsigned int max_ents, struct scatterlist *first_chunk, gfp_t gfp_mask, sg_alloc_fn *alloc_fn) __sg_alloc_table() argument
316 sg_alloc_table(struct sg_table *table, unsigned int nents, gfp_t gfp_mask) sg_alloc_table() argument
335 __sg_alloc_table_from_pages(struct sg_table *sgt, struct page **pages, unsigned int count, unsigned long off, unsigned long size, unsigned int max_segment, struct scatterlist *prv, unsigned int left_pages, gfp_t gfp_mask) __sg_alloc_table_from_pages() argument
422 sg_alloc_table_from_pages(struct sg_table *sgt, struct page **pages, unsigned int count, unsigned long off, unsigned long size, gfp_t gfp_mask) sg_alloc_table_from_pages() argument
437 sg_alloc_table_from_pages_segment(struct sg_table *sgt, struct page **pages, unsigned int count, unsigned int off, unsigned long size, unsigned int max_segment, gfp_t gfp_mask) sg_alloc_table_from_pages_segment() argument
[all...]
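
The shim above forwards the caller's gfp_mask straight into __get_free_page()/kmalloc(). A minimal usage sketch of the table API (hypothetical caller, not from the tree):

#include <linux/gfp.h>
#include <linux/scatterlist.h>

static int
example_sg_setup(void)
{
	struct sg_table sgt;
	int error;

	/* GFP_KERNEL may sleep; use GFP_ATOMIC from non-sleepable context. */
	error = sg_alloc_table(&sgt, 2, GFP_KERNEL);
	if (error != 0)
		return (error);

	/* ... fill the entries with sg_set_page()/sg_set_buf() ... */

	sg_free_table(&sgt);
	return (0);
}
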
radix-tree.h
52 gfp_t gfp_mask; member
61 { .rnode = NULL, .gfp_mask = mask, .height = 0 };
63 { (root)->rnode = NULL; (root)->gfp_mask = mask; (root)->height = 0; }
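
The mask recorded by these initializers is what the compat implementation (linux_radix.c, further down in the results) later hands to malloc(9) for node allocations. A small illustrative sketch, assuming a sleepable caller; the names are illustrative:

#include <linux/gfp.h>
#include <linux/radix-tree.h>

static struct radix_tree_root example_root;

static int
example_radix_store(void *item)		/* 'item' is a caller-supplied pointer */
{
	/* The mask stored here is reused for every later node allocation. */
	INIT_RADIX_TREE(&example_root, GFP_KERNEL);
	return (radix_tree_insert(&example_root, 10, item));
}
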
idr.h
79 void idr_preload(gfp_t gfp_mask);
84 int idr_pre_get(struct idr *idp, gfp_t gfp_mask);
113 int ida_pre_get(struct ida *ida, gfp_t gfp_mask);
120 gfp_t gfp_mask);
shrinker.h
35 gfp_t gfp_mask; member
mm.h
403 might_alloc(gfp_t gfp_mask __unused) in might_alloc()
/freebsd/sys/dev/mlx4/mlx4_core/
mlx4_icm.c
99 gfp_t gfp_mask, int node) in mlx4_alloc_icm_pages() argument
103 page = alloc_pages_node(node, gfp_mask, order); in mlx4_alloc_icm_pages()
105 page = alloc_pages(gfp_mask, order); in mlx4_alloc_icm_pages()
115 int order, gfp_t gfp_mask) in mlx4_alloc_icm_coherent() argument
118 &sg_dma_address(mem), gfp_mask); in mlx4_alloc_icm_coherent()
129 gfp_t gfp_mask, int coherent) in mlx4_alloc_icm() argument
137 BUG_ON(coherent && (gfp_mask & __GFP_HIGHMEM)); in mlx4_alloc_icm()
140 gfp_mask & ~(__GFP_HIGHMEM | __GFP_NOWARN), in mlx4_alloc_icm()
144 gfp_mask & ~(__GFP_HIGHMEM | __GFP_NOWARN)); in mlx4_alloc_icm()
157 gfp_mask & ~(__GFP_HIGHMEM | in mlx4_alloc_icm()
[all …]
icm.h
70 gfp_t gfp_mask, int coherent);
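
Both ICM allocators in these results (mlx4 here, mthca further down) strip __GFP_HIGHMEM and __GFP_NOWARN from the caller's mask before allocating their bookkeeping structure. A sketch of that pattern with illustrative names, not the driver code itself:

#include <linux/gfp.h>
#include <linux/slab.h>

struct example_icm {			/* stand-in for struct mlx4_icm */
	int	refcount;
};

static struct example_icm *
example_icm_alloc(gfp_t gfp_mask)
{
	/* Keep the caller's reclaim behaviour, but drop flags that only
	 * make sense for the data pages, not this metadata kmalloc(). */
	return (kmalloc(sizeof(struct example_icm),
	    gfp_mask & ~(__GFP_HIGHMEM | __GFP_NOWARN)));
}
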
/freebsd/sys/ofed/drivers/infiniband/core/
ib_sa_query.c
742 static int alloc_mad(struct ib_sa_query *query, gfp_t gfp_mask) in alloc_mad() argument
758 gfp_mask, in alloc_mad()
792 static int send_mad(struct ib_sa_query *query, int timeout_ms, gfp_t gfp_mask) in send_mad() argument
794 bool preload = gfpflags_allow_blocking(gfp_mask); in send_mad()
799 idr_preload(gfp_mask); in send_mad()
900 int timeout_ms, gfp_t gfp_mask, in ib_sa_path_rec_get() argument
920 query = kzalloc(sizeof(*query), gfp_mask); in ib_sa_path_rec_get()
925 ret = alloc_mad(&query->sa_query, gfp_mask); in ib_sa_path_rec_get()
950 ret = send_mad(&query->sa_query, timeout_ms, gfp_mask); in ib_sa_path_rec_get()
1020 int timeout_ms, gfp_t gfp_mask, in ib_sa_service_rec_query() argument
[all …]
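
send_mad() checks gfpflags_allow_blocking() and only preloads the IDR when the mask allows sleeping, then takes the ID under a lock with GFP_NOWAIT. Roughly the following shape; the lock, IDR, and function names are illustrative:

#include <linux/gfp.h>
#include <linux/idr.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(example_idr_lock);
static DEFINE_IDR(example_query_idr);

static int
example_assign_id(void *query, gfp_t gfp_mask)
{
	bool preload = gfpflags_allow_blocking(gfp_mask);
	unsigned long flags;
	int id;

	if (preload)
		idr_preload(gfp_mask);		/* may sleep to refill the cache */
	spin_lock_irqsave(&example_idr_lock, flags);
	id = idr_alloc(&example_query_idr, query, 0, 0, GFP_NOWAIT);
	spin_unlock_irqrestore(&example_idr_lock, flags);
	if (preload)
		idr_preload_end();

	return (id);
}
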
sa.h
58 int timeout_ms, gfp_t gfp_mask,
ib_multicast.c
561 union ib_gid *mgid, gfp_t gfp_mask) in acquire_group() argument
576 group = kzalloc(sizeof *group, gfp_mask); in acquire_group()
613 ib_sa_comp_mask comp_mask, gfp_t gfp_mask, in ib_sa_join_multicast() argument
627 member = kmalloc(sizeof *member, gfp_mask); in ib_sa_join_multicast()
642 &rec->mgid, gfp_mask); in ib_sa_join_multicast()
ib_mad.c
941 size_t mad_size, gfp_t gfp_mask) in alloc_send_rmpp_list() argument
955 seg = kmalloc(sizeof (*seg) + seg_size, gfp_mask); in alloc_send_rmpp_list()
989 gfp_t gfp_mask, in ib_create_send_mad() argument
1020 buf = kzalloc(sizeof *mad_send_wr + size, gfp_mask); in ib_create_send_mad()
1056 ret = alloc_send_rmpp_list(mad_send_wr, mad_size, gfp_mask); in ib_create_send_mad()
/freebsd/sys/compat/linuxkpi/common/src/
linux_radix.c
197 node = malloc(sizeof(*node), M_RADIX, root->gfp_mask | M_ZERO); in radix_tree_insert()
217 node = malloc(sizeof(*node), M_RADIX, root->gfp_mask | M_ZERO); in radix_tree_insert()
249 root->gfp_mask | M_ZERO); in radix_tree_insert()
301 node = malloc(sizeof(*node), M_RADIX, root->gfp_mask | M_ZERO); in radix_tree_store()
321 node = malloc(sizeof(*node), M_RADIX, root->gfp_mask | M_ZERO); in radix_tree_store()
353 root->gfp_mask | M_ZERO); in radix_tree_store()
linux_idr.c
123 idr_preload(gfp_t gfp_mask) in idr_preload() argument
136 cacheval = malloc(sizeof(*cacheval), M_IDR, M_ZERO | gfp_mask); in idr_preload()
362 idr_pre_get(struct idr *idr, gfp_t gfp_mask) in idr_pre_get() argument
377 iln = malloc(sizeof(*il), M_IDR, M_ZERO | gfp_mask); in idr_pre_get()
669 idr_alloc(struct idr *idr, void *ptr, int start, int end, gfp_t gfp_mask) in idr_alloc() argument
680 idr_alloc_cyclic(struct idr *idr, void *ptr, int start, int end, gfp_t gfp_mask) in idr_alloc_cyclic() argument
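
The idr_alloc()/idr_alloc_cyclic() entry points above take the mask directly; with a sleepable mask a caller can allocate an ID in one step. A minimal sketch with illustrative names:

#include <linux/gfp.h>
#include <linux/idr.h>

static DEFINE_IDR(example_ids);

static int
example_get_id(void *ptr)
{
	/* With a sleepable mask the ID can be allocated directly; atomic
	 * callers instead pair GFP_NOWAIT with idr_preload(), as in the
	 * ib_sa_query.c sketch above.  end == 0 means "no upper bound". */
	return (idr_alloc(&example_ids, ptr, 1, 0, GFP_KERNEL));
}
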
linux_xarray.c
351 xa->xa_head.gfp_mask = GFP_NOWAIT; in xa_init_flags()
/freebsd/sys/ofed/include/rdma/
ib_sa.h
332 int timeout_ms, gfp_t gfp_mask,
344 int timeout_ms, gfp_t gfp_mask,
390 ib_sa_comp_mask comp_mask, gfp_t gfp_mask,
455 int timeout_ms, gfp_t gfp_mask,
465 int timeout_ms, gfp_t gfp_mask,
ib_mad.h
806 gfp_t gfp_mask,
/freebsd/sys/dev/mthca/
mthca_memfree.c
106 static int mthca_alloc_icm_pages(struct scatterlist *mem, int order, gfp_t gfp_mask) in mthca_alloc_icm_pages() argument
114 page = alloc_pages(gfp_mask | __GFP_ZERO, order); in mthca_alloc_icm_pages()
123 int order, gfp_t gfp_mask) in mthca_alloc_icm_coherent() argument
126 gfp_mask); in mthca_alloc_icm_coherent()
137 gfp_t gfp_mask, int coherent) in mthca_alloc_icm() argument
145 BUG_ON(coherent && (gfp_mask & __GFP_HIGHMEM)); in mthca_alloc_icm()
147 icm = kmalloc(sizeof *icm, gfp_mask & ~(__GFP_HIGHMEM | __GFP_NOWARN)); in mthca_alloc_icm()
159 gfp_mask & ~(__GFP_HIGHMEM | __GFP_NOWARN)); in mthca_alloc_icm()
175 cur_order, gfp_mask); in mthca_alloc_icm()
178 cur_order, gfp_mask); in mthca_alloc_icm()
mthca_memfree.h
81 gfp_t gfp_mask, int coherent);
mthca_cmd.h
252 gfp_t gfp_mask);
/freebsd/sys/contrib/dev/athk/
main.c
36 gfp_t gfp_mask) in ath_rxbuf_alloc() argument
54 skb = __dev_alloc_skb(len + common->cachelsz - 1, gfp_mask); in ath_rxbuf_alloc()
ath.h
201 gfp_t gfp_mask);
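
ath_rxbuf_alloc() hands the caller's mask to __dev_alloc_skb() and then cache-line aligns the payload. A hypothetical RX-refill caller (not from the tree) might look like:

#include <linux/gfp.h>
#include <linux/skbuff.h>

static struct sk_buff *
example_rx_refill(unsigned int buf_len, unsigned int cachelsz)
{
	struct sk_buff *skb;
	unsigned long off;

	/* The interrupt path cannot sleep, so GFP_ATOMIC is passed down. */
	skb = __dev_alloc_skb(buf_len + cachelsz - 1, GFP_ATOMIC);
	if (skb == NULL)
		return (NULL);

	/* Cache-line align the payload, as ath_rxbuf_alloc() does. */
	off = ((unsigned long)skb->data) % cachelsz;
	if (off != 0)
		skb_reserve(skb, cachelsz - off);

	return (skb);
}
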
/freebsd/sys/contrib/openzfs/config/
kernel-bio.m4
205 dnl # bio_alloc(gfp_t gfp_mask, unsigned short nr_iovecs)
207 … # bio_alloc(struct block_device *bdev, unsigned short nr_vecs, unsigned int opf, gfp_t gfp_mask)
217 gfp_t gfp_mask = 0;
222 struct bio *__attribute__((unused)) allocated = bio_alloc(bdev, nr_iovecs, opf, gfp_mask);
/freebsd/sys/contrib/dev/iwlwifi/pcie/
rx.c
372 gfp_t gfp_mask = priority;
375 gfp_mask |= __GFP_COMP;
395 page = alloc_pages(gfp_mask, trans_pcie->rx_page_order);
404 if (!(gfp_mask & __GFP_NOWARN) && net_ratelimit())
535 gfp_t gfp_mask = GFP_KERNEL;
539 gfp_mask |= __GFP_NOWARN;
558 gfp_mask);
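
The rx.c hits show the mask being built up from a caller-supplied priority plus page-allocator flags before calling alloc_pages(). The general shape, as a sketch with illustrative names:

#include <linux/gfp.h>

static struct page *
example_rx_page_alloc(gfp_t priority, unsigned int order)
{
	gfp_t gfp_mask = priority;

	gfp_mask |= __GFP_NOWARN;		/* failures are handled, do not log */
	if (order > 0)
		gfp_mask |= __GFP_COMP;		/* high-order pages must be compound */

	return (alloc_pages(gfp_mask, order));
}
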
/freebsd/sys/contrib/openzfs/module/os/linux/zfs/
arc_os.c
227 if (sc->gfp_mask & __GFP_FS) in arc_shrinker_scan()
vdev_disk.c
596 vdev_bio_alloc(struct block_device *bdev, gfp_t gfp_mask, in vdev_bio_alloc() argument
602 bio = bio_alloc(bdev, nr_vecs, 0, gfp_mask); in vdev_bio_alloc()
604 bio = bio_alloc(gfp_mask, nr_vecs); in vdev_bio_alloc()
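
vdev_bio_alloc() consumes the result of the kernel-bio.m4 check above, calling whichever bio_alloc() signature the running kernel provides. A sketch of that compat pattern; the HAVE_BIO_ALLOC_4ARG guard is an assumption standing in for whatever symbol the configure test defines:

#include <linux/bio.h>
#include <linux/blkdev.h>

static struct bio *
example_bio_alloc(struct block_device *bdev, gfp_t gfp_mask,
    unsigned short nr_vecs)
{
	struct bio *bio;

	/* HAVE_BIO_ALLOC_4ARG is illustrative; see kernel-bio.m4 above. */
#ifdef HAVE_BIO_ALLOC_4ARG
	bio = bio_alloc(bdev, nr_vecs, 0, gfp_mask);	/* 5.18+ signature */
#else
	bio = bio_alloc(gfp_mask, nr_vecs);		/* legacy signature */
#endif
	return (bio);
}
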
