Searched refs:seg_cnt (Results 1 – 11 of 11) sorted by relevance
2792  int seg_cnt;  in qla1280_64bit_start_scsi() local
2799  seg_cnt = scsi_dma_map(cmd);  in qla1280_64bit_start_scsi()
2800  if (seg_cnt > 0) {  in qla1280_64bit_start_scsi()
2801  if (seg_cnt > 2) {  in qla1280_64bit_start_scsi()
2802  req_cnt += (seg_cnt - 2) / 5;  in qla1280_64bit_start_scsi()
2803  if ((seg_cnt - 2) % 5)  in qla1280_64bit_start_scsi()
2806  } else if (seg_cnt < 0) {  in qla1280_64bit_start_scsi()
2822  ha->req_q_cnt, seg_cnt);  in qla1280_64bit_start_scsi()
2890  pkt->dseg_count = cpu_to_le16(seg_cnt);  in qla1280_64bit_start_scsi()
2895  if (seg_cnt) { /* If data transfer. */  in qla1280_64bit_start_scsi()
[all …]
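The qla1280_64bit_start_scsi() hits above show the driver mapping the command's scatter-gather list with scsi_dma_map() (the seg_cnt < 0 branch handles a mapping failure) and then sizing the request ring from the segment count. A minimal standalone sketch of that sizing arithmetic follows, assuming only what the matched lines show: the first request entry carries 2 data segments and each continuation carries 5 more. The helper name and loop are illustrative, not driver code.

/* Standalone sketch, not driver code: request-entry sizing as suggested by
 * the matched lines (first entry holds 2 data segments, each continuation
 * holds 5 more). */
#include <stdio.h>

static int req_entries_needed(int seg_cnt)
{
	int req_cnt = 1;                        /* the command entry itself */

	if (seg_cnt > 2) {
		req_cnt += (seg_cnt - 2) / 5;   /* full continuation entries */
		if ((seg_cnt - 2) % 5)
			req_cnt++;              /* partially filled last one */
	}
	return req_cnt;
}

int main(void)
{
	for (int seg_cnt = 1; seg_cnt <= 13; seg_cnt++)
		printf("seg_cnt=%2d -> %d request entries\n",
		       seg_cnt, req_entries_needed(seg_cnt));
	return 0;
}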
311  unsigned short seg_cnt;  in blk_recount_segments() local
315  seg_cnt = bio_segments(bio);  in blk_recount_segments()
317  seg_cnt = bio->bi_vcnt;  in blk_recount_segments()
320  (seg_cnt < queue_max_segments(q)))  in blk_recount_segments()
321  bio->bi_phys_segments = seg_cnt;  in blk_recount_segments()
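The blk_recount_segments() hits show a shortcut: take a cheap per-bio segment estimate (bio_segments() or bi_vcnt) and cache it in bi_phys_segments when it is already below queue_max_segments(). A minimal sketch of that fast path, with illustrative names (MAX_SEGMENTS, full_recount) standing in for the real queue limit and the full recount:

/* Standalone sketch with illustrative names; only the "cache the cheap
 * estimate when it is under the queue limit" shortcut mirrors the hits. */
#include <stdio.h>

#define MAX_SEGMENTS 128        /* stand-in for queue_max_segments(q) */

static unsigned short full_recount(unsigned short vcnt)
{
	return vcnt;            /* placeholder for the expensive merge walk */
}

static unsigned short recount_segments(unsigned short estimate)
{
	if (estimate < MAX_SEGMENTS)
		return estimate;        /* cheap estimate is safe to cache */
	return full_recount(estimate);
}

int main(void)
{
	printf("%u\n", recount_segments(12));
	printf("%u\n", recount_segments(300));
	return 0;
}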
1089  psb->seg_cnt = 0;  in lpfc_release_scsi_buf_s3()
1115  psb->seg_cnt = 0;  in lpfc_release_scsi_buf_s4()
1202  lpfc_cmd->seg_cnt = nseg;  in lpfc_scsi_prep_dma_buf_s3()
1203  if (lpfc_cmd->seg_cnt > phba->cfg_sg_seg_cnt) {  in lpfc_scsi_prep_dma_buf_s3()
1208  lpfc_cmd->seg_cnt);  in lpfc_scsi_prep_dma_buf_s3()
1209  lpfc_cmd->seg_cnt = 0;  in lpfc_scsi_prep_dma_buf_s3()
2754  lpfc_cmd->seg_cnt = datasegcnt;  in lpfc_bg_scsi_prep_dma_buf_s3()
2757  if (lpfc_cmd->seg_cnt > phba->cfg_sg_seg_cnt)  in lpfc_bg_scsi_prep_dma_buf_s3()
2766  if ((lpfc_cmd->seg_cnt + 2) > phba->cfg_total_seg_cnt)  in lpfc_bg_scsi_prep_dma_buf_s3()
2811  lpfc_cmd->seg_cnt = 0;  in lpfc_bg_scsi_prep_dma_buf_s3()
[all …]
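The lpfc prep/release hits outline a common pattern: record the mapped segment count in seg_cnt, reject the command when it exceeds the configured limit (cfg_sg_seg_cnt), and zero seg_cnt on the failure path and on release so a later cleanup cannot unmap twice (the changelog entries in the last result describe the same fix). A standalone, illustrative sketch of that bookkeeping, with made-up names (io_buf, fake_map_sg, MAX_SEG) in place of the driver's:

/* Standalone sketch with made-up names; only the seg_cnt bookkeeping
 * (store, check against a limit, zero on failure and on release) mirrors
 * the matched lines. */
#include <stdio.h>

#define MAX_SEG 64                      /* stand-in for phba->cfg_sg_seg_cnt */

struct io_buf {
	int seg_cnt;                    /* segments returned by the map step */
};

static int fake_map_sg(int requested)  /* pretend DMA-map step */
{
	return requested;
}

static int prep_dma_buf(struct io_buf *buf, int nseg)
{
	buf->seg_cnt = fake_map_sg(nseg);
	if (buf->seg_cnt > MAX_SEG) {
		buf->seg_cnt = 0;       /* so release cannot unmap twice */
		return -1;
	}
	return 0;
}

static void release_buf(struct io_buf *buf)
{
	if (buf->seg_cnt) {
		/* a real driver would unmap the scatterlist here */
		buf->seg_cnt = 0;
	}
}

int main(void)
{
	struct io_buf buf;
	int ret;

	ret = prep_dma_buf(&buf, 8);
	printf("prep 8:   ret=%d seg_cnt=%d\n", ret, buf.seg_cnt);
	release_buf(&buf);

	ret = prep_dma_buf(&buf, 128);
	printf("prep 128: ret=%d seg_cnt=%d\n", ret, buf.seg_cnt);
	return 0;
}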
97   uint32_t seg_cnt; /* Number of scatter-gather segments returned by  member
148  uint32_t seg_cnt; /* Number of scatter-gather segments returned by  member
1334  lpfc_ncmd->seg_cnt = nCmd->sg_cnt;  in lpfc_nvme_prep_io_dma()
1335  if (lpfc_ncmd->seg_cnt > lpfc_nvme_template.max_sgl_segments) {  in lpfc_nvme_prep_io_dma()
1341  lpfc_ncmd->seg_cnt);  in lpfc_nvme_prep_io_dma()
1342  lpfc_ncmd->seg_cnt = 0;  in lpfc_nvme_prep_io_dma()
1359  lpfc_ncmd->seg_cnt = 0;  in lpfc_nvme_prep_io_dma()
2317  int seg_cnt, seg = 0;  in ql_send_map() local
2320  seg_cnt = tx_cb->seg_count;  in ql_send_map()
2342  if (seg_cnt == 1) {  in ql_send_map()
2358  if ((seg == 2 && seg_cnt > 3) ||  in ql_send_map()
2359  (seg == 7 && seg_cnt > 8) ||  in ql_send_map()
2360  (seg == 12 && seg_cnt > 13) ||  in ql_send_map()
2361  (seg == 17 && seg_cnt > 18)) {  in ql_send_map()
2424  if ((seg == 2 && seg_cnt > 3) ||  in ql_send_map()
2425  (seg == 7 && seg_cnt > 8) ||  in ql_send_map()
2426  (seg == 12 && seg_cnt > 13) ||  in ql_send_map()
[all …]
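The ql_send_map() hits repeat one test in two places: when the current segment index sits at the last usable slot of the current descriptor list (2, 7, 12, 17) and more segments remain than still fit, the driver moves on to a fresh list. A standalone sketch of just that threshold test; the helper name and driving loop are illustrative, the slot numbers come from the hits:

/* Standalone sketch of the repeated chaining test from the hits above. */
#include <stdbool.h>
#include <stdio.h>

static bool last_slot_and_more_to_map(int seg, int seg_cnt)
{
	return (seg == 2 && seg_cnt > 3) ||
	       (seg == 7 && seg_cnt > 8) ||
	       (seg == 12 && seg_cnt > 13) ||
	       (seg == 17 && seg_cnt > 18);
}

int main(void)
{
	int seg_cnt = 10;

	for (int seg = 0; seg < seg_cnt; seg++)
		printf("seg=%2d chain to new list: %s\n", seg,
		       last_slot_and_more_to_map(seg, seg_cnt) ? "yes" : "no");
	return 0;
}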
2333  prm->seg_cnt = pci_map_sg(cmd->qpair->pdev, cmd->sg,  in qlt_pci_map_calc_cnt()
2335  if (unlikely(prm->seg_cnt == 0))  in qlt_pci_map_calc_cnt()
2345  if (prm->seg_cnt > QLA_TGT_DATASEGS_PER_CMD_24XX)  in qlt_pci_map_calc_cnt()
2346  prm->req_cnt += DIV_ROUND_UP(prm->seg_cnt -  in qlt_pci_map_calc_cnt()
2353  prm->seg_cnt = DIV_ROUND_UP(cmd->bufflen, cmd->blk_sz);  in qlt_pci_map_calc_cnt()
2354  prm->tot_dsds = prm->seg_cnt;  in qlt_pci_map_calc_cnt()
2356  prm->tot_dsds = prm->seg_cnt;  in qlt_pci_map_calc_cnt()
2543  while (prm->seg_cnt > 0) {  in qlt_load_cont_data_segments()
2565  cnt < QLA_TGT_DATASEGS_PER_CONT_24XX && prm->seg_cnt;  in qlt_load_cont_data_segments()
2566  cnt++, prm->seg_cnt--) {  in qlt_load_cont_data_segments()
[all …]
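The qlt_pci_map_calc_cnt() hits compute the same kind of request-entry count as the qla1280 block above, but with DIV_ROUND_UP(): extra entries are needed for whatever part of seg_cnt does not fit in the command entry itself. A standalone sketch of that calculation; the per-entry capacities below are illustrative placeholders, not the QLA_TGT_DATASEGS_PER_CMD_24XX / QLA_TGT_DATASEGS_PER_CONT_24XX values:

/* Standalone sketch; capacities are placeholders, only the DIV_ROUND_UP
 * sizing of continuation entries mirrors the matched lines. */
#include <stdio.h>

#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

#define DSEGS_PER_CMD	3	/* segments the command entry itself holds */
#define DSEGS_PER_CONT	5	/* segments each continuation entry holds */

static int calc_req_cnt(int seg_cnt)
{
	int req_cnt = 1;	/* the command entry */

	if (seg_cnt > DSEGS_PER_CMD)
		req_cnt += DIV_ROUND_UP(seg_cnt - DSEGS_PER_CMD,
					DSEGS_PER_CONT);
	return req_cnt;
}

int main(void)
{
	for (int seg_cnt = 0; seg_cnt <= 14; seg_cnt += 2)
		printf("seg_cnt=%2d -> req_cnt=%d\n", seg_cnt,
		       calc_req_cnt(seg_cnt));
	return 0;
}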
984  int seg_cnt;  member
107  u8 seg_cnt;  member
737  tx_msghdr->seg_cnt = 1;  in brcmf_msgbuf_txflow()
522  * Zero out seg_cnt in prep_io failure path to prevent double sg
690  * Save seg_cnt from dma_map_sg. Save scatter-gather start address
691    and pass back to dma_unmap_sg in error with seg_cnt.