Lines matching full:flags
83 gfp_t flags = __GFP_ZERO | GFP_ATOMIC; in ioat3_alloc_sed() local
85 sed = kmem_cache_alloc(ioat_sed_cache, flags); in ioat3_alloc_sed()
91 flags, &sed->dma); in ioat3_alloc_sed()
102 dma_addr_t dma_src, size_t len, unsigned long flags) in ioat_dma_prep_memcpy_lock() argument
139 desc->txd.flags = flags; in ioat_dma_prep_memcpy_lock()
141 hw->ctl_f.int_en = !!(flags & DMA_PREP_INTERRUPT); in ioat_dma_prep_memcpy_lock()
142 hw->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in ioat_dma_prep_memcpy_lock()
154 size_t len, unsigned long flags) in __ioat_prep_xor_lock() argument
223 desc->txd.flags = flags; in __ioat_prep_xor_lock()
227 xor->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in __ioat_prep_xor_lock()
231 compl_desc->txd.flags = flags & DMA_PREP_INTERRUPT; in __ioat_prep_xor_lock()
235 hw->ctl_f.int_en = !!(flags & DMA_PREP_INTERRUPT); in __ioat_prep_xor_lock()
246 unsigned int src_cnt, size_t len, unsigned long flags) in ioat_prep_xor() argument
253 return __ioat_prep_xor_lock(chan, NULL, dest, src, src_cnt, len, flags); in ioat_prep_xor()
259 enum sum_check_flags *result, unsigned long flags) in ioat_prep_xor_val() argument
272 src_cnt - 1, len, flags); in ioat_prep_xor_val()
286 dev_dbg(dev, "desc[%d]: (%#llx->%#llx) flags: %#x" in dump_pq_desc_dbg()
291 desc->txd.flags, pq->size, pq->ctl, pq->ctl_f.op, in dump_pq_desc_dbg()
319 dev_dbg(dev, "desc[%d]: (%#llx->%#llx) flags: %#x" in dump_pq16_desc_dbg()
324 desc->txd.flags, pq->size, pq->ctl, in dump_pq16_desc_dbg()
342 size_t len, unsigned long flags) in __ioat_prep_pq_lock() argument
362 BUG_ON(src_cnt + dmaf_continue(flags) < 2); in __ioat_prep_pq_lock()
369 if (src_cnt + dmaf_p_disabled_continue(flags) > 3 || in __ioat_prep_pq_lock()
370 (dmaf_continue(flags) && !dmaf_p_disabled_continue(flags))) { in __ioat_prep_pq_lock()
409 if (dmaf_p_disabled_continue(flags)) in __ioat_prep_pq_lock()
411 else if (dmaf_continue(flags)) { in __ioat_prep_pq_lock()
425 pq->ctl_f.p_disable = !!(flags & DMA_PREP_PQ_DISABLE_P); in __ioat_prep_pq_lock()
426 pq->ctl_f.q_disable = !!(flags & DMA_PREP_PQ_DISABLE_Q); in __ioat_prep_pq_lock()
433 desc->txd.flags = flags; in __ioat_prep_pq_lock()
437 pq->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in __ioat_prep_pq_lock()
441 pq->ctl_f.int_en = !!(flags & DMA_PREP_INTERRUPT); in __ioat_prep_pq_lock()
447 compl_desc->txd.flags = flags & DMA_PREP_INTERRUPT; in __ioat_prep_pq_lock()
451 hw->ctl_f.int_en = !!(flags & DMA_PREP_INTERRUPT); in __ioat_prep_pq_lock()
466 size_t len, unsigned long flags) in __ioat_prep_pq16_lock() argument
522 if (dmaf_p_disabled_continue(flags)) in __ioat_prep_pq16_lock()
524 else if (dmaf_continue(flags)) { in __ioat_prep_pq16_lock()
539 pq->ctl_f.p_disable = !!(flags & DMA_PREP_PQ_DISABLE_P); in __ioat_prep_pq16_lock()
540 pq->ctl_f.q_disable = !!(flags & DMA_PREP_PQ_DISABLE_Q); in __ioat_prep_pq16_lock()
547 desc->txd.flags = flags; in __ioat_prep_pq16_lock()
551 pq->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in __ioat_prep_pq16_lock()
554 pq->ctl_f.int_en = !!(flags & DMA_PREP_INTERRUPT); in __ioat_prep_pq16_lock()
563 static int src_cnt_flags(unsigned int src_cnt, unsigned long flags) in src_cnt_flags() argument
565 if (dmaf_p_disabled_continue(flags)) in src_cnt_flags()
567 else if (dmaf_continue(flags)) in src_cnt_flags()
576 unsigned long flags) in ioat_prep_pq() argument
584 if (flags & DMA_PREP_PQ_DISABLE_P) in ioat_prep_pq()
586 if (flags & DMA_PREP_PQ_DISABLE_Q) in ioat_prep_pq()
592 if ((flags & DMA_PREP_PQ_DISABLE_P) && src_cnt == 1) { in ioat_prep_pq()
596 BUG_ON(flags & DMA_PREP_PQ_DISABLE_Q); in ioat_prep_pq()
602 return src_cnt_flags(src_cnt, flags) > 8 ? in ioat_prep_pq()
605 flags) : in ioat_prep_pq()
607 single_source_coef, len, flags); in ioat_prep_pq()
610 return src_cnt_flags(src_cnt, flags) > 8 ? in ioat_prep_pq()
612 scf, len, flags) : in ioat_prep_pq()
614 scf, len, flags); in ioat_prep_pq()
621 enum sum_check_flags *pqres, unsigned long flags) in ioat_prep_pq_val() argument
629 if (flags & DMA_PREP_PQ_DISABLE_P) in ioat_prep_pq_val()
631 if (flags & DMA_PREP_PQ_DISABLE_Q) in ioat_prep_pq_val()
639 return src_cnt_flags(src_cnt, flags) > 8 ? in ioat_prep_pq_val()
641 flags) : in ioat_prep_pq_val()
643 flags); in ioat_prep_pq_val()
648 unsigned int src_cnt, size_t len, unsigned long flags) in ioat_prep_pqxor() argument
662 flags |= DMA_PREP_PQ_DISABLE_Q; in ioat_prep_pqxor()
665 return src_cnt_flags(src_cnt, flags) > 8 ? in ioat_prep_pqxor()
667 flags) : in ioat_prep_pqxor()
669 flags); in ioat_prep_pqxor()
675 enum sum_check_flags *result, unsigned long flags) in ioat_prep_pqxor_val() argument
694 flags |= DMA_PREP_PQ_DISABLE_Q; in ioat_prep_pqxor_val()
697 return src_cnt_flags(src_cnt, flags) > 8 ? in ioat_prep_pqxor_val()
699 scf, len, flags) : in ioat_prep_pqxor_val()
701 scf, len, flags); in ioat_prep_pqxor_val()
705 ioat_prep_interrupt_lock(struct dma_chan *c, unsigned long flags) in ioat_prep_interrupt_lock() argument
723 hw->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in ioat_prep_interrupt_lock()
729 desc->txd.flags = flags; in ioat_prep_interrupt_lock()
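
A pattern recurs throughout these hits: the dmaengine flags argument is a bitmask, and each 1-bit hardware control field is derived from it with a !!(flags & BIT) test (see hw->ctl_f.int_en, pq->ctl_f.fence, pq->ctl_f.p_disable above). The following is a minimal, self-contained sketch of that decoding idiom only; the flag values and the ctl_f-style bitfield layout here are simplified stand-ins for illustration, not the kernel's actual dmaengine or ioat definitions.

/*
 * Sketch of the flag-decoding idiom seen above, e.g.
 *   hw->ctl_f.int_en = !!(flags & DMA_PREP_INTERRUPT);
 * Flag values and the bitfield layout are stand-ins, not kernel headers.
 */
#include <stdio.h>

#define DMA_PREP_INTERRUPT    (1UL << 0)  /* stand-in value */
#define DMA_PREP_PQ_DISABLE_P (1UL << 2)  /* stand-in value */
#define DMA_PREP_PQ_DISABLE_Q (1UL << 3)  /* stand-in value */
#define DMA_PREP_FENCE        (1UL << 5)  /* stand-in value */

/* Simplified stand-in for a descriptor's ctl_f control bitfield. */
struct ctl_f {
	unsigned int int_en:1;
	unsigned int fence:1;
	unsigned int p_disable:1;
	unsigned int q_disable:1;
};

static void decode_prep_flags(unsigned long flags, struct ctl_f *ctl)
{
	/* !! collapses any set bit down to exactly 0 or 1 for the 1-bit field. */
	ctl->int_en    = !!(flags & DMA_PREP_INTERRUPT);
	ctl->fence     = !!(flags & DMA_PREP_FENCE);
	ctl->p_disable = !!(flags & DMA_PREP_PQ_DISABLE_P);
	ctl->q_disable = !!(flags & DMA_PREP_PQ_DISABLE_Q);
}

int main(void)
{
	struct ctl_f ctl = { 0 };

	decode_prep_flags(DMA_PREP_INTERRUPT | DMA_PREP_FENCE, &ctl);
	printf("int_en=%u fence=%u p_disable=%u q_disable=%u\n",
	       ctl.int_en, ctl.fence, ctl.p_disable, ctl.q_disable);
	return 0;
}

The double negation matters because the flag constants are not bit 0: assigning (flags & DMA_PREP_FENCE) directly to a 1-bit field would truncate to 0 even when the flag is set, whereas !! yields a clean boolean.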