Lines Matching full:sg — identifier hits in the Linux mlx5 RDMA driver's UMR helpers (drivers/infiniband/hw/mlx5/umr.c)
488 struct ib_sge *sg) in mlx5r_umr_unmap_free_xlt() argument
492 dma_unmap_single(ddev, sg->addr, sg->length, DMA_TO_DEVICE); in mlx5r_umr_unmap_free_xlt()
493 mlx5r_umr_free_xlt(xlt, sg->length); in mlx5r_umr_unmap_free_xlt()
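
Note: teardown uses sg->length for both the dma_unmap_single() and the free, so it must see the originally mapped length; both update paths below therefore restore sg.length = orig_sg_length (lines 677 and 758) before calling it.
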
499 static void *mlx5r_umr_create_xlt(struct mlx5_ib_dev *dev, struct ib_sge *sg, in mlx5r_umr_create_xlt() argument
510 sg->length = nents * ent_size; in mlx5r_umr_create_xlt()
511 dma = dma_map_single(ddev, xlt, sg->length, DMA_TO_DEVICE); in mlx5r_umr_create_xlt()
514 mlx5r_umr_free_xlt(xlt, sg->length); in mlx5r_umr_create_xlt()
517 sg->addr = dma; in mlx5r_umr_create_xlt()
518 sg->lkey = dev->umrc.pd->local_dma_lkey; in mlx5r_umr_create_xlt()
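
Taken with mlx5r_umr_unmap_free_xlt() above, these hits trace a simple alloc -> dma_map -> use -> dma_unmap -> free lifecycle keyed off the ib_sge. A minimal userspace sketch of that bookkeeping (calloc() and the stub_map()/stub_unmap() helpers are hypothetical stand-ins for the XLT allocator and the DMA API; this models the pattern, not the driver):

#include <assert.h>
#include <stdint.h>
#include <stdlib.h>

/* Userspace stand-in for struct ib_sge: address, length, key. */
struct sge {
	uint64_t addr;
	uint32_t length;
	uint32_t lkey;
};

/* Hypothetical stubs for dma_map_single()/dma_unmap_single(). */
static uint64_t stub_map(void *buf, size_t len)
{
	(void)len;
	return (uint64_t)(uintptr_t)buf;
}

static void stub_unmap(uint64_t addr, size_t len)
{
	(void)addr;
	(void)len;
}

/* Model of mlx5r_umr_create_xlt(): allocate nents entries of ent_size
 * bytes each, record mapped address/length/lkey in the sge. */
static void *create_xlt(struct sge *sg, size_t nents, size_t ent_size,
			uint32_t local_dma_lkey)
{
	void *xlt = calloc(nents, ent_size);

	if (!xlt)
		return NULL;
	sg->length = nents * ent_size;
	sg->addr = stub_map(xlt, sg->length);
	sg->lkey = local_dma_lkey;
	return xlt;
}

/* Model of mlx5r_umr_unmap_free_xlt(): must be called with the
 * originally mapped length. */
static void unmap_free_xlt(void *xlt, struct sge *sg)
{
	stub_unmap(sg->addr, sg->length);
	free(xlt);
}

int main(void)
{
	struct sge sg;
	void *xlt = create_xlt(&sg, 512, 8, 0x42);
	uint32_t orig_sg_length;

	assert(xlt);
	orig_sg_length = sg.length;	/* both callers save this */
	sg.length = 64;			/* shrunk for a final partial chunk */
	sg.length = orig_sg_length;	/* ...and restored before teardown */
	unmap_free_xlt(xlt, &sg);
	return 0;
}
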
525 unsigned int flags, struct ib_sge *sg) in mlx5r_umr_set_update_xlt_ctrl_seg() argument
534 cpu_to_be16(mlx5r_umr_get_xlt_octo(sg->length)); in mlx5r_umr_set_update_xlt_ctrl_seg()
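
mlx5r_umr_get_xlt_octo() converts sg->length into the 16-byte octoword count the UMR control segment expects; in the driver it first rounds the length up to the 64-byte XLT alignment. A sketch of that conversion, assuming those constant values from umr.h/mlx5_ib.h:

#include <stdint.h>
#include <stdio.h>

#define MLX5_IB_UMR_OCTOWORD	  16	/* bytes per octoword */
#define MLX5_IB_UMR_XLT_ALIGNMENT 64	/* XLT length granularity */
#define ALIGN(x, a) (((x) + (a) - 1) & ~((uint64_t)(a) - 1))

/* Mirror of mlx5r_umr_get_xlt_octo(): round the byte count up to the
 * XLT alignment, then express it in 16-byte octowords. */
static uint64_t get_xlt_octo(uint64_t bytes)
{
	return ALIGN(bytes, MLX5_IB_UMR_XLT_ALIGNMENT) / MLX5_IB_UMR_OCTOWORD;
}

int main(void)
{
	/* A 4096-byte chunk is 256 octowords; 100 bytes round up to 8. */
	printf("%llu %llu\n",
	       (unsigned long long)get_xlt_octo(4096),
	       (unsigned long long)get_xlt_octo(100));
	return 0;
}
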
553 struct ib_sge *sg) in mlx5r_umr_set_update_xlt_data_seg() argument
555 data_seg->byte_count = cpu_to_be32(sg->length); in mlx5r_umr_set_update_xlt_data_seg()
556 data_seg->lkey = cpu_to_be32(sg->lkey); in mlx5r_umr_set_update_xlt_data_seg()
557 data_seg->addr = cpu_to_be64(sg->addr); in mlx5r_umr_set_update_xlt_data_seg()
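
The data segment is the ib_sge repacked into the device's big-endian wire format; the three stores match the byte_count/lkey/addr layout of struct mlx5_wqe_data_seg. A self-contained model of the conversion (the userspace struct mirrors that layout for illustration):

#define _DEFAULT_SOURCE		/* for htobe32()/htobe64() on glibc */
#include <endian.h>
#include <stdint.h>
#include <stdio.h>

struct sge { uint64_t addr; uint32_t length; uint32_t lkey; };

/* Field order mirrors struct mlx5_wqe_data_seg: byte_count, lkey,
 * addr, all big-endian on the wire. */
struct wqe_data_seg {
	uint32_t byte_count;
	uint32_t lkey;
	uint64_t addr;
};

/* Model of mlx5r_umr_set_update_xlt_data_seg(): byte-swap each sge
 * field into the device-visible segment. */
static void set_data_seg(struct wqe_data_seg *seg, const struct sge *sg)
{
	seg->byte_count = htobe32(sg->length);
	seg->lkey = htobe32(sg->lkey);
	seg->addr = htobe64(sg->addr);
}

int main(void)
{
	struct sge sg = { .addr = 0x1000, .length = 4096, .lkey = 0x42 };
	struct wqe_data_seg seg;

	set_data_seg(&seg, &sg);
	printf("byte_count on the wire: %08x\n", (unsigned)seg.byte_count);
	return 0;
}
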
572 struct mlx5_ib_mr *mr, struct ib_sge *sg, in mlx5r_umr_final_update_xlt() argument
599 cpu_to_be16(mlx5r_umr_get_xlt_octo(sg->length)); in mlx5r_umr_final_update_xlt()
600 wqe->data_seg.byte_count = cpu_to_be32(sg->length); in mlx5r_umr_final_update_xlt()
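
Note: for the last, possibly partial chunk the callers first shrink sg.length to the aligned tail size (lines 669 and 751 below), and this helper re-derives both xlt_octowords and byte_count from that shortened length, so the final UMR transfers only the tail.
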
618 struct ib_sge sg; in mlx5r_umr_update_mr_pas() local
626 dev, &sg, ib_umem_num_dma_blocks(mr->umem, 1 << mr->page_shift), in mlx5r_umr_update_mr_pas()
631 orig_sg_length = sg.length; in mlx5r_umr_update_mr_pas()
633 mlx5r_umr_set_update_xlt_ctrl_seg(&wqe.ctrl_seg, flags, &sg); in mlx5r_umr_update_mr_pas()
636 mlx5r_umr_set_update_xlt_data_seg(&wqe.data_seg, &sg); in mlx5r_umr_update_mr_pas()
642 if (cur_mtt == (void *)mtt + sg.length) { in mlx5r_umr_update_mr_pas()
643 dma_sync_single_for_device(ddev, sg.addr, sg.length, in mlx5r_umr_update_mr_pas()
650 dma_sync_single_for_cpu(ddev, sg.addr, sg.length, in mlx5r_umr_update_mr_pas()
652 offset += sg.length; in mlx5r_umr_update_mr_pas()
669 sg.length = ALIGN(final_size, MLX5_UMR_MTT_ALIGNMENT); in mlx5r_umr_update_mr_pas()
670 memset(cur_mtt, 0, sg.length - final_size); in mlx5r_umr_update_mr_pas()
671 mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags); in mlx5r_umr_update_mr_pas()
673 dma_sync_single_for_device(ddev, sg.addr, sg.length, DMA_TO_DEVICE); in mlx5r_umr_update_mr_pas()
677 sg.length = orig_sg_length; in mlx5r_umr_update_mr_pas()
678 mlx5r_umr_unmap_free_xlt(dev, mtt, &sg); in mlx5r_umr_update_mr_pas()
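
The hits above trace the chunked population loop: MTT entries are written into the mapped buffer until it fills (line 642), each full chunk is synced for the device and posted (643), the XLT offset advances (652), and the final partial chunk is aligned up and zero-padded (669-670) before the last post. A userspace sketch of just the chunking and padding arithmetic (buffer size, block count, and the fake page addresses are illustrative; 0x40 is assumed to match the kernel's MLX5_UMR_MTT_ALIGNMENT):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define MLX5_UMR_MTT_ALIGNMENT 0x40	/* tail padding granularity, bytes */
#define ALIGN(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

struct mtt { uint64_t ptag; };		/* one translation entry */

int main(void)
{
	/* Illustrative numbers: a 4 KiB staging buffer, 700 DMA blocks. */
	enum { SG_LENGTH = 4096, NBLOCKS = 700 };
	struct mtt mtt[SG_LENGTH / sizeof(struct mtt)];
	struct mtt *cur_mtt = mtt;
	size_t offset = 0, final_size, padded;

	for (unsigned i = 0; i < NBLOCKS; i++) {
		if ((char *)cur_mtt == (char *)mtt + SG_LENGTH) {
			/* Buffer full: the driver syncs for the device
			 * here, posts a UMR WQE, and bumps the offset. */
			printf("post full chunk at offset %zu\n", offset);
			offset += SG_LENGTH;
			cur_mtt = mtt;
		}
		cur_mtt->ptag = (uint64_t)i << 12;	/* fake block address */
		cur_mtt++;
	}

	/* Final partial chunk: align up and zero the padding entries. */
	final_size = (size_t)((char *)cur_mtt - (char *)mtt);
	padded = ALIGN(final_size, MLX5_UMR_MTT_ALIGNMENT);
	memset(cur_mtt, 0, padded - final_size);
	printf("final chunk: %zu payload bytes, %zu after padding\n",
	       final_size, padded);
	return 0;
}
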
703 struct ib_sge sg; in mlx5r_umr_update_xlt() local
723 xlt = mlx5r_umr_create_xlt(dev, &sg, npages, desc_size, flags); in mlx5r_umr_update_xlt()
727 pages_iter = sg.length / desc_size; in mlx5r_umr_update_xlt()
728 orig_sg_length = sg.length; in mlx5r_umr_update_xlt()
737 mlx5r_umr_set_update_xlt_ctrl_seg(&wqe.ctrl_seg, flags, &sg); in mlx5r_umr_update_xlt()
739 mlx5r_umr_set_update_xlt_data_seg(&wqe.data_seg, &sg); in mlx5r_umr_update_xlt()
746 dma_sync_single_for_cpu(ddev, sg.addr, sg.length, in mlx5r_umr_update_xlt()
749 dma_sync_single_for_device(ddev, sg.addr, sg.length, in mlx5r_umr_update_xlt()
751 sg.length = ALIGN(size_to_map, MLX5_UMR_MTT_ALIGNMENT); in mlx5r_umr_update_xlt()
754 mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags); in mlx5r_umr_update_xlt()
758 sg.length = orig_sg_length; in mlx5r_umr_update_xlt()
759 mlx5r_umr_unmap_free_xlt(dev, xlt, &sg); in mlx5r_umr_update_xlt()
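
mlx5r_umr_update_xlt() reuses the buffer in fixed-size passes instead: pages_iter descriptors fit per pass (line 727), each pass syncs, repopulates, and trims sg.length to the aligned bytes actually used (751), and only the last pass gets the final-WQE treatment (754). A simplified sketch of the resulting chunk plan (descriptor size and page counts are illustrative; the sync/post steps are elided):

#include <stdio.h>

#define MLX5_UMR_MTT_ALIGNMENT 0x40
#define ALIGN(x, a) (((x) + (a) - 1) & ~((unsigned long)(a) - 1))
#define MIN(a, b)   ((a) < (b) ? (a) : (b))

int main(void)
{
	/* Illustrative: 4 KiB buffer, 8-byte descriptors, 1000 pages. */
	unsigned long orig_sg_length = 4096, desc_size = 8;
	unsigned long pages_to_map = 1000, idx = 0;
	unsigned long pages_iter = orig_sg_length / desc_size;	/* 512 */
	unsigned long sg_length;

	for (unsigned long pages_mapped = 0; pages_mapped < pages_to_map;
	     pages_mapped += pages_iter, idx += pages_iter) {
		unsigned long npages = MIN(pages_iter,
					   pages_to_map - pages_mapped);
		int final = pages_mapped + pages_iter >= pages_to_map;

		/* sg.length covers only the bytes used this pass. */
		sg_length = ALIGN(npages * desc_size, MLX5_UMR_MTT_ALIGNMENT);
		printf("post %lu bytes at entry index %lu%s\n",
		       sg_length, idx, final ? " (final WQE)" : "");
	}
	/* Restore the full length before handing back to unmap/free. */
	sg_length = orig_sg_length;
	(void)sg_length;
	return 0;
}
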