Lines matching full:sg (identifier search hits for the struct ib_sge scatter/gather element in the mlx5 RDMA driver's UMR code, drivers/infiniband/hw/mlx5/umr.c)

552 				     struct ib_sge *sg)  in mlx5r_umr_unmap_free_xlt()  argument
556 dma_unmap_single(ddev, sg->addr, sg->length, DMA_TO_DEVICE); in mlx5r_umr_unmap_free_xlt()
557 mlx5r_umr_free_xlt(xlt, sg->length); in mlx5r_umr_unmap_free_xlt()
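Taken together, the two body hits above are the teardown half of the sg lifecycle: the same ib_sge that recorded the DMA address and length at map time is handed back, so the unmap mirrors dma_map_single() exactly. A minimal sketch of the function, assuming the DMA device is reached through the mlx5 core's PCI device (only the two listed lines are verbatim):

static void mlx5r_umr_unmap_free_xlt(struct mlx5_ib_dev *dev, void *xlt,
				     struct ib_sge *sg)
{
	struct device *ddev = &dev->mdev->pdev->dev;	/* assumed accessor */

	/* sg->addr and sg->length were recorded by mlx5r_umr_create_xlt() */
	dma_unmap_single(ddev, sg->addr, sg->length, DMA_TO_DEVICE);
	mlx5r_umr_free_xlt(xlt, sg->length);
}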
563 static void *mlx5r_umr_create_xlt(struct mlx5_ib_dev *dev, struct ib_sge *sg, in mlx5r_umr_create_xlt() argument
574 sg->length = nents * ent_size; in mlx5r_umr_create_xlt()
575 dma = dma_map_single(ddev, xlt, sg->length, DMA_TO_DEVICE); in mlx5r_umr_create_xlt()
578 mlx5r_umr_free_xlt(xlt, sg->length); in mlx5r_umr_create_xlt()
581 sg->addr = dma; in mlx5r_umr_create_xlt()
582 sg->lkey = dev->umrc.pd->local_dma_lkey; in mlx5r_umr_create_xlt()
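Read in order, the create-side hits give the setup sequence: size the translation buffer (574), DMA-map it (575), free it if the mapping fails (578), then publish the mapping through the ib_sge (581-582). The lkey comes from the UMR protection domain's local_dma_lkey, so the HCA can read the table without a separate memory registration. A sketch with the gaps filled by assumption (the allocation helper's name and signature are guesses; the listed lines are verbatim):

static void *mlx5r_umr_create_xlt(struct mlx5_ib_dev *dev, struct ib_sge *sg,
				  size_t nents, size_t ent_size,
				  unsigned int flags)
{
	struct device *ddev = &dev->mdev->pdev->dev;	/* assumed accessor */
	dma_addr_t dma;
	void *xlt;

	xlt = mlx5r_umr_alloc_xlt(&nents, ent_size, flags);	/* assumed helper */
	if (!xlt)
		return NULL;

	sg->length = nents * ent_size;
	dma = dma_map_single(ddev, xlt, sg->length, DMA_TO_DEVICE);
	if (dma_mapping_error(ddev, dma)) {
		mlx5r_umr_free_xlt(xlt, sg->length);
		return NULL;
	}

	sg->addr = dma;
	sg->lkey = dev->umrc.pd->local_dma_lkey;
	return xlt;
}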
589 unsigned int flags, struct ib_sge *sg) in mlx5r_umr_set_update_xlt_ctrl_seg() argument
598 cpu_to_be16(mlx5r_umr_get_xlt_octo(sg->length)); in mlx5r_umr_set_update_xlt_ctrl_seg()
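The control-segment hit is a unit conversion: UMR control segments describe the translation table in 16-byte octowords rather than bytes, so sg->length goes through mlx5r_umr_get_xlt_octo() and a cpu_to_be16() swap. A sketch with the flag-dependent checks elided:

static void
mlx5r_umr_set_update_xlt_ctrl_seg(struct mlx5_wqe_umr_ctrl_seg *ctrl_seg,
				  unsigned int flags, struct ib_sge *sg)
{
	/* flag handling (lines 590-597) elided */
	ctrl_seg->xlt_octowords =
		cpu_to_be16(mlx5r_umr_get_xlt_octo(sg->length));
}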
617 struct ib_sge *sg) in mlx5r_umr_set_update_xlt_data_seg() argument
619 data_seg->byte_count = cpu_to_be32(sg->length); in mlx5r_umr_set_update_xlt_data_seg()
620 data_seg->lkey = cpu_to_be32(sg->lkey); in mlx5r_umr_set_update_xlt_data_seg()
621 data_seg->addr = cpu_to_be64(sg->addr); in mlx5r_umr_set_update_xlt_data_seg()
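The three consecutive hits at 619-621 appear to be the entire body: the ib_sge is transcribed field-for-field into the WQE's data segment, byte-swapped to the big-endian layout the device consumes (32-bit length and lkey, 64-bit address):

static void
mlx5r_umr_set_update_xlt_data_seg(struct mlx5_wqe_data_seg *data_seg,
				  struct ib_sge *sg)
{
	data_seg->byte_count = cpu_to_be32(sg->length);
	data_seg->lkey = cpu_to_be32(sg->lkey);
	data_seg->addr = cpu_to_be64(sg->addr);
}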
636 struct mlx5_ib_mr *mr, struct ib_sge *sg, in mlx5r_umr_final_update_xlt() argument
663 cpu_to_be16(mlx5r_umr_get_xlt_octo(sg->length)); in mlx5r_umr_final_update_xlt()
664 wqe->data_seg.byte_count = cpu_to_be32(sg->length); in mlx5r_umr_final_update_xlt()
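Before the last WQE of a multi-chunk update goes out, both size fields are re-stamped from sg->length, which the caller has by then shrunk to the aligned size of the final chunk. A fragment-level sketch (the mlx5r_umr_wqe parameter type and the elided mkey/PD updates are assumptions):

static void mlx5r_umr_final_update_xlt(struct mlx5_ib_dev *dev,
				       struct mlx5r_umr_wqe *wqe,
				       struct mlx5_ib_mr *mr, struct ib_sge *sg,
				       unsigned int flags)
{
	/* ... flag-dependent mkey/PD segment updates elided ... */
	wqe->ctrl_seg.xlt_octowords =
		cpu_to_be16(mlx5r_umr_get_xlt_octo(sg->length));
	wqe->data_seg.byte_count = cpu_to_be32(sg->length);
}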
680 struct ib_sge sg; in _mlx5r_umr_update_mr_pas() local
685 entry = mlx5r_umr_create_xlt(dev, &sg, in _mlx5r_umr_update_mr_pas()
691 orig_sg_length = sg.length; in _mlx5r_umr_update_mr_pas()
692 mlx5r_umr_set_update_xlt_ctrl_seg(&wqe.ctrl_seg, flags, &sg); in _mlx5r_umr_update_mr_pas()
703 mlx5r_umr_set_update_xlt_data_seg(&wqe.data_seg, &sg); in _mlx5r_umr_update_mr_pas()
707 if (curr_entry == entry + sg.length) { in _mlx5r_umr_update_mr_pas()
708 dma_sync_single_for_device(ddev, sg.addr, sg.length, in _mlx5r_umr_update_mr_pas()
715 dma_sync_single_for_cpu(ddev, sg.addr, sg.length, in _mlx5r_umr_update_mr_pas()
717 offset += sg.length; in _mlx5r_umr_update_mr_pas()
742 sg.length = ALIGN(final_size, MLX5_UMR_FLEX_ALIGNMENT); in _mlx5r_umr_update_mr_pas()
743 memset(curr_entry, 0, sg.length - final_size); in _mlx5r_umr_update_mr_pas()
744 mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags); in _mlx5r_umr_update_mr_pas()
746 dma_sync_single_for_device(ddev, sg.addr, sg.length, DMA_TO_DEVICE); in _mlx5r_umr_update_mr_pas()
750 sg.length = orig_sg_length; in _mlx5r_umr_update_mr_pas()
751 mlx5r_umr_unmap_free_xlt(dev, entry, &sg); in _mlx5r_umr_update_mr_pas()
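The hits through _mlx5r_umr_update_mr_pas() trace the full chunked-update pattern: create and map one XLT buffer, remember its full length (691), fill it entry by entry, and each time the write cursor reaches entry + sg.length (707) flush the chunk to the device, post a UMR WQE, and hand the buffer back to the CPU (708, 715), bumping the offset (717). The tail chunk is rounded up to MLX5_UMR_FLEX_ALIGNMENT with the slack zeroed (742-743) before the final post (744-746). A condensed sketch under assumed names for the post call, the umem block iterator, and the fill step (only the listed sg manipulations are verbatim):

static int _mlx5r_umr_update_mr_pas(struct mlx5_ib_mr *mr, unsigned int flags)
{
	struct mlx5_ib_dev *dev = mr_to_mdev(mr);	/* assumed accessor */
	struct device *ddev = &dev->mdev->pdev->dev;	/* assumed */
	struct mlx5r_umr_wqe wqe = {};
	size_t orig_sg_length, final_size;
	void *entry, *curr_entry;
	struct ib_sge sg;
	u64 offset = 0;
	int err = 0;

	entry = mlx5r_umr_create_xlt(dev, &sg, mr_nents(mr),	/* hypothetical */
				     sizeof(struct mlx5_mtt), flags);
	if (!entry)
		return -ENOMEM;

	orig_sg_length = sg.length;
	mlx5r_umr_set_update_xlt_ctrl_seg(&wqe.ctrl_seg, flags, &sg);
	mlx5r_umr_set_update_xlt_data_seg(&wqe.data_seg, &sg);

	curr_entry = entry;
	while (mr_next_block(mr)) {	/* hypothetical umem block iterator */
		if (curr_entry == entry + sg.length) {
			/* buffer full: flush it and post one UMR chunk */
			dma_sync_single_for_device(ddev, sg.addr, sg.length,
						   DMA_TO_DEVICE);
			err = mlx5r_umr_post_send_wait(dev, mr->mmkey.key,
						       &wqe, true); /* assumed */
			if (err)
				goto out;
			dma_sync_single_for_cpu(ddev, sg.addr, sg.length,
						DMA_TO_DEVICE);
			offset += sg.length;
			curr_entry = entry;
		}
		curr_entry = fill_one_entry(mr, curr_entry); /* hypothetical */
	}

	/* tail chunk: round up to the UMR alignment, zero the slack */
	final_size = curr_entry - entry;
	sg.length = ALIGN(final_size, MLX5_UMR_FLEX_ALIGNMENT);
	memset(curr_entry, 0, sg.length - final_size);
	mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags);
	dma_sync_single_for_device(ddev, sg.addr, sg.length, DMA_TO_DEVICE);
	err = mlx5r_umr_post_send_wait(dev, mr->mmkey.key, &wqe, true);

out:
	sg.length = orig_sg_length;
	mlx5r_umr_unmap_free_xlt(dev, entry, &sg);
	return err;
}

Note the restore at the end: the final post shrank sg.length, and dma_unmap_single() must be called with the same size that was mapped, hence sg.length = orig_sg_length (750) before the unmap (751).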
798 struct ib_sge sg; in mlx5r_umr_update_xlt() local
818 xlt = mlx5r_umr_create_xlt(dev, &sg, npages, desc_size, flags); in mlx5r_umr_update_xlt()
822 pages_iter = sg.length / desc_size; in mlx5r_umr_update_xlt()
823 orig_sg_length = sg.length; in mlx5r_umr_update_xlt()
832 mlx5r_umr_set_update_xlt_ctrl_seg(&wqe.ctrl_seg, flags, &sg); in mlx5r_umr_update_xlt()
834 mlx5r_umr_set_update_xlt_data_seg(&wqe.data_seg, &sg); in mlx5r_umr_update_xlt()
841 dma_sync_single_for_cpu(ddev, sg.addr, sg.length, in mlx5r_umr_update_xlt()
844 dma_sync_single_for_device(ddev, sg.addr, sg.length, in mlx5r_umr_update_xlt()
846 sg.length = ALIGN(size_to_map, MLX5_UMR_FLEX_ALIGNMENT); in mlx5r_umr_update_xlt()
849 mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags); in mlx5r_umr_update_xlt()
853 sg.length = orig_sg_length; in mlx5r_umr_update_xlt()
854 mlx5r_umr_unmap_free_xlt(dev, xlt, &sg); in mlx5r_umr_update_xlt()
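mlx5r_umr_update_xlt() reuses the same buffer for a batch-indexed variant: sg.length fixes how many descriptors fit per batch (822), each iteration syncs the buffer to the CPU, refills it, and syncs it back (841-844), trims sg.length to the aligned size actually filled (846), flags the last batch through mlx5r_umr_final_update_xlt() (849), and once more restores the original length before unmapping (853-854). A condensed sketch under the same assumptions as above (batching arithmetic and the descriptor-fill step are guesses):

static int mlx5r_umr_update_xlt(struct mlx5_ib_mr *mr, u64 idx, int npages,
				int page_shift, int flags)
{
	struct mlx5_ib_dev *dev = mr_to_mdev(mr);	/* assumed accessor */
	struct device *ddev = &dev->mdev->pdev->dev;	/* assumed */
	size_t desc_size = xlt_desc_size(flags);	/* hypothetical helper */
	struct mlx5r_umr_wqe wqe = {};
	size_t pages_iter, orig_sg_length, size_to_map;
	int pages_mapped, err = 0;
	struct ib_sge sg;
	void *xlt;

	xlt = mlx5r_umr_create_xlt(dev, &sg, npages, desc_size, flags);
	if (!xlt)
		return -ENOMEM;

	pages_iter = sg.length / desc_size;	/* descriptors per batch */
	orig_sg_length = sg.length;

	mlx5r_umr_set_update_xlt_ctrl_seg(&wqe.ctrl_seg, flags, &sg);
	mlx5r_umr_set_update_xlt_data_seg(&wqe.data_seg, &sg);

	for (pages_mapped = 0; pages_mapped < npages && !err;
	     pages_mapped += pages_iter) {
		size_to_map = min(pages_iter,
				  (size_t)(npages - pages_mapped)) * desc_size;

		dma_sync_single_for_cpu(ddev, sg.addr, sg.length,
					DMA_TO_DEVICE);
		/* refill this batch of descriptors in xlt (elided) */
		dma_sync_single_for_device(ddev, sg.addr, sg.length,
					   DMA_TO_DEVICE);

		sg.length = ALIGN(size_to_map, MLX5_UMR_FLEX_ALIGNMENT);
		if (pages_mapped + pages_iter >= npages)
			mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags);
		err = mlx5r_umr_post_send_wait(dev, mr->mmkey.key, &wqe,
					       true);	/* assumed */
	}

	sg.length = orig_sg_length;
	mlx5r_umr_unmap_free_xlt(dev, xlt, &sg);
	return err;
}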