/Linux-v5.4/drivers/infiniband/core/ |
D | rw.c |
  87  u32 sg_cnt, u32 offset) in rdma_rw_init_one_mr() argument
  91  u32 nents = min(sg_cnt, pages_per_mr); in rdma_rw_init_one_mr()
  119  u8 port_num, struct scatterlist *sg, u32 sg_cnt, u32 offset, in rdma_rw_init_mr_wrs() argument
  127  ctx->nr_ops = (sg_cnt + pages_per_mr - 1) / pages_per_mr; in rdma_rw_init_mr_wrs()
  136  u32 nents = min(sg_cnt, pages_per_mr); in rdma_rw_init_mr_wrs()
  138  ret = rdma_rw_init_one_mr(qp, port_num, reg, sg, sg_cnt, in rdma_rw_init_mr_wrs()
  168  sg_cnt -= nents; in rdma_rw_init_mr_wrs()
  190  struct scatterlist *sg, u32 sg_cnt, u32 offset, in rdma_rw_init_map_wrs() argument
  198  ctx->nr_ops = DIV_ROUND_UP(sg_cnt, max_sge); in rdma_rw_init_map_wrs()
  200  ctx->map.sges = sge = kcalloc(sg_cnt, sizeof(*sge), GFP_KERNEL); in rdma_rw_init_map_wrs()
  [all …]
|
/Linux-v5.4/drivers/scsi/qla2xxx/ |
D | qla_bsg.c |
  40  bsg_job->request_payload.sg_cnt, DMA_TO_DEVICE); in qla2x00_bsg_sp_free()
  45  bsg_job->reply_payload.sg_cnt, DMA_FROM_DEVICE); in qla2x00_bsg_sp_free()
  48  bsg_job->request_payload.sg_cnt, DMA_TO_DEVICE); in qla2x00_bsg_sp_free()
  51  bsg_job->reply_payload.sg_cnt, DMA_FROM_DEVICE); in qla2x00_bsg_sp_free()
  183  bsg_job->reply_payload.sg_cnt, ha->fcp_prio_cfg, in qla24xx_proc_fcp_prio_cfg_cmd()
  210  bsg_job->request_payload.sg_cnt, ha->fcp_prio_cfg, in qla24xx_proc_fcp_prio_cfg_cmd()
  288  if (bsg_job->request_payload.sg_cnt > 1 || in qla2x00_process_els()
  289  bsg_job->reply_payload.sg_cnt > 1) { in qla2x00_process_els()
  293  bsg_job->request_payload.sg_cnt, in qla2x00_process_els()
  294  bsg_job->reply_payload.sg_cnt); in qla2x00_process_els()
  [all …]
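In the bsg handlers above, request_payload and reply_payload are scatter-gather lists, and sg_cnt is the entry count handed to dma_map_sg()/dma_unmap_sg() and to the sg_copy_*() helpers. A hedged sketch of the common copy-out idiom follows; the buffer name and length are illustrative, not qla2xxx's own code:

    #include <linux/bsg-lib.h>
    #include <linux/scatterlist.h>

    /* Sketch: copy a driver-owned buffer into the bsg reply payload
     * and record how many bytes the reply actually carries.
     */
    static void example_bsg_copy_reply(struct bsg_job *job,
                                       const void *buf, size_t len)
    {
            struct bsg_buffer *reply = &job->reply_payload;

            job->reply_payload_rcv_len =
                    sg_copy_from_buffer(reply->sg_list, reply->sg_cnt,
                                        buf, len);
    }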
|
/Linux-v5.4/include/rdma/ |
D | rw.h |
  46  struct scatterlist *sg, u32 sg_cnt, u32 sg_offset,
  49  struct scatterlist *sg, u32 sg_cnt,
  53  u8 port_num, struct scatterlist *sg, u32 sg_cnt,
  58  u8 port_num, struct scatterlist *sg, u32 sg_cnt,
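The prototypes above are the RDMA R/W API entry points that consume a scatterlist plus its sg_cnt. A minimal, hedged sketch of how an upper-layer protocol might drive them (init, post, destroy), assuming the qp, port, cqe and the remote address/rkey come from the caller:

    #include <rdma/rw.h>
    #include <linux/scatterlist.h>
    #include <linux/dma-direction.h>

    /* Sketch only: perform one RDMA READ described by sgl/sg_cnt.
     * Every parameter is assumed to be set up elsewhere by the ULP.
     */
    static int example_rdma_read(struct ib_qp *qp, u8 port_num,
                                 struct scatterlist *sgl, u32 sg_cnt,
                                 u64 remote_addr, u32 rkey, struct ib_cqe *cqe)
    {
            struct rdma_rw_ctx ctx;
            int ret;

            /* Maps the scatterlist and prepares the work requests. */
            ret = rdma_rw_ctx_init(&ctx, qp, port_num, sgl, sg_cnt, 0,
                                   remote_addr, rkey, DMA_FROM_DEVICE);
            if (ret < 0)
                    return ret;

            /* Posts the prepared WR chain; completion arrives via cqe. */
            ret = rdma_rw_ctx_post(&ctx, qp, port_num, cqe, NULL);
            if (ret)
                    rdma_rw_ctx_destroy(&ctx, qp, port_num, sgl, sg_cnt,
                                        DMA_FROM_DEVICE);
            /* On success, rdma_rw_ctx_destroy() runs in the completion path. */
            return ret;
    }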
|
/Linux-v5.4/drivers/scsi/snic/ |
D | snic_fwint.h |
  108  __le16 sg_cnt; member
  115  u16 sg_cnt, ulong ctx) in snic_io_hdr_enc() argument
  122  hdr->sg_cnt = cpu_to_le16(sg_cnt); in snic_io_hdr_enc()
  194  __le16 sg_cnt; member
  268  __le16 sg_cnt; /* Number of SG Elements */ member
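The snic firmware header stores sg_cnt in little-endian (__le16), so snic_io_hdr_enc() converts the CPU-order count with cpu_to_le16() before the descriptor reaches hardware. A self-contained sketch of that encode step, using a made-up header layout rather than snic's real snic_io_hdr:

    #include <linux/types.h>
    #include <asm/byteorder.h>

    /* Illustrative header only; field names and sizes are not snic's. */
    struct example_io_hdr {
            u8      type;
            u8      status;
            __le32  cmnd_id;
            __le16  sg_cnt;
    };

    static void example_io_hdr_enc(struct example_io_hdr *hdr,
                                   u8 type, u32 cmnd_id, u16 sg_cnt)
    {
            hdr->type = type;
            hdr->status = 0;
            hdr->cmnd_id = cpu_to_le32(cmnd_id);
            /* keep the wire format little-endian regardless of host CPU */
            hdr->sg_cnt = cpu_to_le16(sg_cnt);
    }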
|
D | snic_io.c |
  234  snic_req_init(struct snic *snic, int sg_cnt) in snic_req_init() argument
  239  typ = (sg_cnt <= SNIC_REQ_CACHE_DFLT_SGL) ? in snic_req_init()
  260  if (sg_cnt == 0) in snic_req_init()
  263  rqi->req_len += (sg_cnt * sizeof(struct snic_sg_desc)); in snic_req_init()
  265  if (sg_cnt > atomic64_read(&snic->s_stats.io.max_sgl)) in snic_req_init()
  266  atomic64_set(&snic->s_stats.io.max_sgl, sg_cnt); in snic_req_init()
  268  SNIC_BUG_ON(sg_cnt > SNIC_MAX_SG_DESC_CNT); in snic_req_init()
  269  atomic64_inc(&snic->s_stats.io.sgl_cnt[sg_cnt - 1]); in snic_req_init()
  545  fn, line, req->hdr.cmnd_id, req->hdr.sg_cnt, req->hdr.status, in snic_dump_desc()
|
D | snic_res.h |
  30  u32 data_len, u16 sg_cnt, ulong sgl_addr, in snic_icmnd_init() argument
  33  snic_io_hdr_enc(&req->hdr, SNIC_REQ_ICMND, 0, cmnd_id, host_id, sg_cnt, in snic_icmnd_init()
|
D | snic_io.h | 110 snic_req_init(struct snic *, int sg_cnt);
|
D | snic_scsi.c |
  166  int sg_cnt) in snic_queue_icmnd_req() argument
  176  if (sg_cnt) { in snic_queue_icmnd_req()
  180  for_each_sg(scsi_sglist(sc), sg, sg_cnt, i) { in snic_queue_icmnd_req()
  218  sg_cnt, in snic_queue_icmnd_req()
  245  int sg_cnt = 0; in snic_issue_scsi_req() local
  254  sg_cnt = scsi_dma_map(sc); in snic_issue_scsi_req()
  255  if (sg_cnt < 0) { in snic_issue_scsi_req()
  257  sc->cmnd[0], sg_cnt, CMD_STATE(sc)); in snic_issue_scsi_req()
  265  rqi = snic_req_init(snic, sg_cnt); in snic_issue_scsi_req()
  284  ret = snic_queue_icmnd_req(snic, rqi, sc, sg_cnt); in snic_issue_scsi_req()
  [all …]
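snic_issue_scsi_req() follows the standard SCSI LLD pattern: scsi_dma_map() maps the command's scatterlist and returns the mapped entry count (negative on failure), and that count is what for_each_sg() walks when building hardware SG descriptors. A sketch of the generic shape of that pattern, with the descriptor programming left as a placeholder:

    #include <scsi/scsi_cmnd.h>
    #include <linux/scatterlist.h>

    /* Sketch of the scsi_dma_map()/for_each_sg() idiom; real drivers
     * keep the mapping until command completion rather than unmapping
     * in this function.
     */
    static int example_map_scsi_data(struct scsi_cmnd *sc)
    {
            struct scatterlist *sg;
            int sg_cnt, i;

            sg_cnt = scsi_dma_map(sc);      /* < 0 on error, 0 if no data */
            if (sg_cnt < 0)
                    return sg_cnt;

            for_each_sg(scsi_sglist(sc), sg, sg_cnt, i) {
                    dma_addr_t addr = sg_dma_address(sg);
                    unsigned int len = sg_dma_len(sg);

                    /* ...fill one hardware SG descriptor with addr/len... */
                    (void)addr;
                    (void)len;
            }

            return 0;
    }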
|
/Linux-v5.4/drivers/nvme/target/ |
D | io-cmd-bdev.c |
  147  int sg_cnt = req->sg_cnt; in nvmet_bdev_execute_rw() local
  153  if (!req->sg_cnt) { in nvmet_bdev_execute_rw()
  177  bio = bio_alloc(GFP_KERNEL, min(sg_cnt, BIO_MAX_PAGES)); in nvmet_bdev_execute_rw()
  185  for_each_sg(req->sg, sg, req->sg_cnt, i) { in nvmet_bdev_execute_rw()
  190  bio = bio_alloc(GFP_KERNEL, min(sg_cnt, BIO_MAX_PAGES)); in nvmet_bdev_execute_rw()
  200  sg_cnt--; in nvmet_bdev_execute_rw()
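nvmet_bdev_execute_rw() converts req->sg/req->sg_cnt into block I/O: each bio is allocated with at most BIO_MAX_PAGES vectors, and when bio_add_page() cannot take the next segment a new bio is chained and the previous one submitted. A trimmed sketch of that loop, with bi_end_io setup and error handling omitted:

    #include <linux/bio.h>
    #include <linux/blkdev.h>
    #include <linux/scatterlist.h>

    /* Hedged sketch: map a scatterlist onto chained bios.  bdev, sector
     * and op are assumed inputs from the caller.
     */
    static void example_sg_to_bios(struct block_device *bdev, sector_t sector,
                                   unsigned int op, struct scatterlist *sgl,
                                   int sg_cnt)
    {
            struct scatterlist *sg;
            struct bio *bio;
            int i;

            bio = bio_alloc(GFP_KERNEL, min(sg_cnt, BIO_MAX_PAGES));
            bio_set_dev(bio, bdev);
            bio->bi_iter.bi_sector = sector;
            bio->bi_opf = op;

            for_each_sg(sgl, sg, sg_cnt, i) {
                    /* Chain a fresh bio whenever the current one is full. */
                    while (bio_add_page(bio, sg_page(sg), sg->length,
                                        sg->offset) != sg->length) {
                            struct bio *prev = bio;

                            bio = bio_alloc(GFP_KERNEL,
                                            min(sg_cnt, BIO_MAX_PAGES));
                            bio_set_dev(bio, bdev);
                            bio->bi_iter.bi_sector = sector;
                            bio->bi_opf = op;
                            bio_chain(bio, prev);
                            submit_bio(prev);
                    }
                    sector += sg->length >> 9;
                    sg_cnt--;
            }
            submit_bio(bio);
    }

The sg_cnt decrement only shrinks the vector estimate for later bio_alloc() calls; iteration itself is driven by for_each_sg().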
|
D | io-cmd-file.c |
  136  ssize_t nr_bvec = req->sg_cnt; in nvmet_file_execute_io()
  155  for_each_sg(req->sg, sg, req->sg_cnt, i) { in nvmet_file_execute_io()
  233  ssize_t nr_bvec = req->sg_cnt; in nvmet_file_execute_rw()
  235  if (!req->sg_cnt || !nr_bvec) { in nvmet_file_execute_rw()
|
D | core.c |
  91  if (sg_pcopy_from_buffer(req->sg, req->sg_cnt, buf, len, off) != len) { in nvmet_copy_to_sgl()
  100  if (sg_pcopy_to_buffer(req->sg, req->sg_cnt, buf, len, off) != len) { in nvmet_copy_from_sgl()
  109  if (sg_zero_buffer(req->sg, req->sg_cnt, len, off) != len) { in nvmet_zero_sgl()
  866  req->sg_cnt = 0; in nvmet_req_init()
  954  req->sg = pci_p2pmem_alloc_sgl(p2p_dev, &req->sg_cnt, in nvmet_req_alloc_sgl()
  968  req->sg = sgl_alloc(req->transfer_len, GFP_KERNEL, &req->sg_cnt); in nvmet_req_alloc_sgl()
  984  req->sg_cnt = 0; in nvmet_req_free_sgl()
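The sg_pcopy_*() and sg_zero_buffer() helpers return the number of bytes actually processed, so nvmet's wrappers treat any return other than len as a too-short SGL and fail the request. A minimal sketch of that wrapper idiom, returning -EFAULT here whereas nvmet returns an NVMe status code:

    #include <linux/scatterlist.h>
    #include <linux/errno.h>

    /* Sketch: copy len bytes from buf into the SGL at byte offset off,
     * treating a short copy as an error.
     */
    static int example_copy_to_sgl(struct scatterlist *sg, unsigned int sg_cnt,
                                   off_t off, const void *buf, size_t len)
    {
            if (sg_pcopy_from_buffer(sg, sg_cnt, buf, len, off) != len)
                    return -EFAULT;
            return 0;
    }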
|
D | fc.c |
  1898  fcpreq->sg_cnt = 0; in nvmet_fc_transfer_fcp_data()
  1900  fcpreq->sg_cnt < tgtport->max_sg_cnt && in nvmet_fc_transfer_fcp_data()
  1902  fcpreq->sg_cnt++; in nvmet_fc_transfer_fcp_data()
  1906  if (tlen < remaininglen && fcpreq->sg_cnt == 0) { in nvmet_fc_transfer_fcp_data()
  1907  fcpreq->sg_cnt++; in nvmet_fc_transfer_fcp_data()
  2219  fod->req.sg_cnt = fod->data_sg_cnt; in nvmet_fc_handle_fcp_rqst()
|
/Linux-v5.4/drivers/staging/rts5208/ |
D | rtsx_transport.c |
  323  int sg_cnt, i, resid; in rtsx_transfer_sglist_adma_partial() local
  358  sg_cnt = dma_map_sg(&rtsx->pci->dev, sg, num_sg, dma_dir); in rtsx_transfer_sglist_adma_partial()
  371  for (i = *index; i < sg_cnt; i++) { in rtsx_transfer_sglist_adma_partial()
  397  if ((i == sg_cnt - 1) || !resid) in rtsx_transfer_sglist_adma_partial()
  527  int sg_cnt, j; in rtsx_transfer_sglist_adma() local
  530  sg_cnt = buf_cnt % (HOST_SG_TBL_BUF_LEN / 8); in rtsx_transfer_sglist_adma()
  532  sg_cnt = HOST_SG_TBL_BUF_LEN / 8; in rtsx_transfer_sglist_adma()
  535  for (j = 0; j < sg_cnt; j++) { in rtsx_transfer_sglist_adma()
  544  if (j == (sg_cnt - 1)) in rtsx_transfer_sglist_adma()
  585  sg_ptr += sg_cnt; in rtsx_transfer_sglist_adma()
  [all …]
|
/Linux-v5.4/drivers/net/wireless/broadcom/brcm80211/brcmfmac/ |
D | bcmsdh.c |
  335  struct mmc_command *mc, int sg_cnt, int req_sz, in mmc_submit_one() argument
  342  md->sg_len = sg_cnt; in mmc_submit_one()
  382  unsigned int req_sz, func_blk_sz, sg_cnt, sg_data_sz, pkt_offset; in brcmf_sdiod_sglist_rw() local
  446  sg_cnt = 0; in brcmf_sdiod_sglist_rw()
  459  sg_cnt++; in brcmf_sdiod_sglist_rw()
  464  if (req_sz >= max_req_sz || sg_cnt >= max_seg_cnt) { in brcmf_sdiod_sglist_rw()
  466  sg_cnt, req_sz, func_blk_sz, in brcmf_sdiod_sglist_rw()
  471  sg_cnt = 0; in brcmf_sdiod_sglist_rw()
  476  if (sg_cnt) in brcmf_sdiod_sglist_rw()
  478  sg_cnt, req_sz, func_blk_sz, in brcmf_sdiod_sglist_rw()
  [all …]
|
/Linux-v5.4/drivers/scsi/smartpqi/ |
D | smartpqi_sas_transport.c |
  489  job->reply_payload.sg_cnt, &parameters->request, in pqi_build_csmi_smp_passthru_buffer()
  500  job->reply_payload.sg_cnt, &smp_buf->parameters.response, in pqi_build_sas_smp_handler_reply()
  537  if (job->request_payload.sg_cnt > 1 || job->reply_payload.sg_cnt > 1) { in pqi_sas_smp_handler()
|
/Linux-v5.4/drivers/scsi/ufs/ |
D | ufs_bsg.c |
  72  job->request_payload.sg_cnt, descp, in ufs_bsg_alloc_desc_buffer()
  148  job->request_payload.sg_cnt, in ufs_bsg_request()
|
/Linux-v5.4/include/linux/ |
D | nvme-fc-driver.h |
  144  int sg_cnt; member
  617  int sg_cnt; member
|
D | bsg-lib.h | 25 int sg_cnt; member
|
/Linux-v5.4/drivers/i2c/busses/ |
D | i2c-qup.c |
  226  unsigned int sg_cnt; member
  674  ret = qup_sg_set_buf(&qup->brx.sg[qup->brx.sg_cnt++], in qup_i2c_bam_make_desc()
  681  ret = qup_sg_set_buf(&qup->brx.sg[qup->brx.sg_cnt++], in qup_i2c_bam_make_desc()
  691  ret = qup_sg_set_buf(&qup->btx.sg[qup->btx.sg_cnt++], in qup_i2c_bam_make_desc()
  705  ret = qup_sg_set_buf(&qup->btx.sg[qup->btx.sg_cnt++], in qup_i2c_bam_make_desc()
  712  ret = qup_sg_set_buf(&qup->btx.sg[qup->btx.sg_cnt++], in qup_i2c_bam_make_desc()
  733  u32 tx_cnt = qup->btx.sg_cnt, rx_cnt = qup->brx.sg_cnt; in qup_i2c_bam_schedule_desc()
  835  qup->btx.sg_cnt = 0; in qup_i2c_bam_clear_tag_buffers()
  836  qup->brx.sg_cnt = 0; in qup_i2c_bam_clear_tag_buffers()
  885  if (qup->btx.sg_cnt > qup->max_xfer_sg_len || in qup_i2c_bam_xfer()
  [all …]
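Here qup->btx.sg_cnt and qup->brx.sg_cnt are running indexes into per-direction scatterlists that receive one entry per tag or data buffer before the lists are handed to the BAM DMA engine. A simplified sketch of that append step; this is only an analogue of the pattern, not the driver's actual qup_sg_set_buf() helper:

    #include <linux/scatterlist.h>
    #include <linux/errno.h>

    /* Sketch: append one buffer to a scatterlist and advance the
     * caller's running sg_cnt, bounded by the table size.
     */
    static int example_sg_append(struct scatterlist *sg_table, unsigned int max,
                                 unsigned int *sg_cnt, void *buf,
                                 unsigned int len)
    {
            if (*sg_cnt >= max)
                    return -EINVAL;

            sg_set_buf(&sg_table[(*sg_cnt)++], buf, len);
            return 0;
    }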
|
/Linux-v5.4/drivers/scsi/qla4xxx/ |
D | ql4_bsg.c |
  64  bsg_job->reply_payload.sg_cnt, in qla4xxx_read_flash()
  124  bsg_job->request_payload.sg_cnt, flash, length); in qla4xxx_update_flash()
  188  bsg_job->reply_payload.sg_cnt, in qla4xxx_get_acb_state()
  259  bsg_job->reply_payload.sg_cnt, in qla4xxx_read_nvram()
  323  bsg_job->request_payload.sg_cnt, nvram, len); in qla4xxx_update_nvram()
  437  bsg_job->reply_payload.sg_cnt, in qla4xxx_bsg_get_acb()
|
/Linux-v5.4/drivers/scsi/libsas/ |
D | sas_host_smp.c |
  242  job->request_payload.sg_cnt, req_data, in sas_smp_host_handler()
  344  job->reply_payload.sg_cnt, resp_data, in sas_smp_host_handler()
|
/Linux-v5.4/drivers/target/tcm_fc/ |
D | tcm_fc.h | 118 u32 sg_cnt; /* No. of item in scatterlist */ member
|
/Linux-v5.4/drivers/mmc/host/ |
D | wmt-sdmmc.c |
  571  int sg_cnt; in wmt_mci_request() local
  620  sg_cnt = dma_map_sg(mmc_dev(mmc), req->data->sg, in wmt_mci_request()
  626  sg_cnt = dma_map_sg(mmc_dev(mmc), req->data->sg, in wmt_mci_request()
  636  for_each_sg(req->data->sg, sg, sg_cnt, i) { in wmt_mci_request()
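wmt_mci_request() illustrates a rule that applies to every dma_map_sg() caller in this list: iterate over the count dma_map_sg() returns, not the original nents, because the IOMMU may merge segments and report fewer mapped entries. A short sketch of that rule:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>
    #include <linux/errno.h>

    /* Sketch: walk only the entries dma_map_sg() actually mapped, then
     * unmap with the original nents as the DMA API requires.  Returns
     * the total number of mapped bytes, or -ENOMEM on mapping failure.
     */
    static int example_map_and_walk(struct device *dev, struct scatterlist *sgl,
                                    int nents, enum dma_data_direction dir)
    {
            struct scatterlist *sg;
            unsigned int total = 0;
            int sg_cnt, i;

            sg_cnt = dma_map_sg(dev, sgl, nents, dir);
            if (!sg_cnt)
                    return -ENOMEM;

            /* sg_cnt may be smaller than nents if segments were merged. */
            for_each_sg(sgl, sg, sg_cnt, i)
                    total += sg_dma_len(sg);

            dma_unmap_sg(dev, sgl, nents, dir);
            return total;
    }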
|
/Linux-v5.4/drivers/scsi/lpfc/ |
D | lpfc_nvme.c |
  852  if (nCmd->sg_cnt) in lpfc_nvme_adj_fcp_sgls()
  1214  if (nCmd->sg_cnt) { in lpfc_nvme_prep_io_cmd()
  1330  if (nCmd->sg_cnt) { in lpfc_nvme_prep_io_dma()
  1338  lpfc_ncmd->seg_cnt = nCmd->sg_cnt; in lpfc_nvme_prep_io_dma()
  1356  nseg = nCmd->sg_cnt; in lpfc_nvme_prep_io_dma()
  1467  nCmd->sg_cnt, nCmd->payload_length; in lpfc_nvme_prep_io_dma()
  1598  if (!lpfc_queue_info->qidx && !pnvme_fcreq->sg_cnt) { in lpfc_nvme_fcp_io_submit()
  1724  if (lpfc_ncmd->nvmeCmd->sg_cnt) { in lpfc_nvme_fcp_io_submit()
|
/Linux-v5.4/drivers/scsi/bfa/ |
D | bfad_im.c |
  1213  int sg_cnt = 0; in bfad_im_queuecommand_lck() local
  1232  sg_cnt = scsi_dma_map(cmnd); in bfad_im_queuecommand_lck()
  1233  if (sg_cnt < 0) in bfad_im_queuecommand_lck()
  1255  itnim->bfa_itnim, sg_cnt); in bfad_im_queuecommand_lck()
|