Lines matching refs: qc

167 #define HSDEV_FROM_QC(qc)	((struct sata_dwc_device *)(qc)->ap->host->private_data)  argument
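The HSDEV_FROM_QC() macro above walks the libata object graph: every queued command carries its port (qc->ap), the port carries the controller (ap->host), and host->private_data is whatever the driver stored at probe time. A minimal sketch of how such an accessor is typically used; the function name is illustrative, and the struct sata_dwc_device type is only what the cast in the macro implies:

/* Sketch: resolving driver state from a queued command via the macro.
 * Assumes host->private_data was set to a struct sata_dwc_device at
 * probe time, as the cast in HSDEV_FROM_QC() suggests. */
static void sketch_use_hsdev(struct ata_queued_cmd *qc)
{
	struct sata_dwc_device *hsdev = HSDEV_FROM_QC(qc);

	(void)hsdev;	/* controller-wide driver state is now available */
}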
184 static void sata_dwc_bmdma_start_by_tag(struct ata_queued_cmd *qc, u8 tag);
185 static int sata_dwc_qc_complete(struct ata_port *ap, struct ata_queued_cmd *qc,
363 static struct dma_async_tx_descriptor *dma_dwc_xfer_setup(struct ata_queued_cmd *qc) in dma_dwc_xfer_setup() argument
365 struct ata_port *ap = qc->ap; in dma_dwc_xfer_setup()
371 if (qc->dma_dir == DMA_DEV_TO_MEM) { in dma_dwc_xfer_setup()
379 sconf.direction = qc->dma_dir; in dma_dwc_xfer_setup()
388 desc = dmaengine_prep_slave_sg(hsdevp->chan, qc->sg, qc->n_elem, in dma_dwc_xfer_setup()
389 qc->dma_dir, in dma_dwc_xfer_setup()
399 qc->sg, qc->n_elem, &hsdev->dmadr); in dma_dwc_xfer_setup()
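The dma_dwc_xfer_setup() lines above follow the standard dmaengine slave pattern: configure the channel for the transfer direction, then prepare one descriptor over the command's scatterlist. A minimal sketch of that pattern, assuming the channel and FIFO address are obtained elsewhere; it mirrors the listing in passing qc->dma_dir straight through to the dmaengine calls, and the flags are illustrative rather than the driver's exact settings:

#include <linux/dmaengine.h>
#include <linux/libata.h>

/* Sketch of the slave-sg preparation pattern shown in dma_dwc_xfer_setup():
 * point the peripheral side at the controller FIFO, apply the channel
 * config, then build a descriptor covering the qc's scatterlist. */
static struct dma_async_tx_descriptor *
sketch_prep_qc_dma(struct dma_chan *chan, struct ata_queued_cmd *qc,
		   dma_addr_t fifo_addr)
{
	struct dma_slave_config sconf = { };
	struct dma_async_tx_descriptor *desc;

	if (qc->dma_dir == DMA_DEV_TO_MEM)
		sconf.src_addr = fifo_addr;	/* device -> memory: read from FIFO */
	else
		sconf.dst_addr = fifo_addr;	/* memory -> device: write to FIFO */
	sconf.direction = qc->dma_dir;

	if (dmaengine_slave_config(chan, &sconf))
		return NULL;

	desc = dmaengine_prep_slave_sg(chan, qc->sg, qc->n_elem, qc->dma_dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	return desc;
}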
458 struct ata_queued_cmd *qc; in sata_dwc_error_intr() local
487 qc = ata_qc_from_tag(ap, tag); in sata_dwc_error_intr()
488 if (qc) in sata_dwc_error_intr()
489 qc->err_mask |= err_mask; in sata_dwc_error_intr()
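The sata_dwc_error_intr() references show the usual libata error-attribution step: look up the active command by tag and mark it with the accumulated error mask. A minimal sketch of that step, with the fallback to the link's EH info and the hand-off to error handling shown for completeness; the recovery action and the port-abort call are illustrative assumptions, not lifted from the listing:

/* Sketch: attribute an error to the in-flight command if there is one,
 * otherwise to the link's EH info, then kick libata error handling. */
static void sketch_flag_error(struct ata_port *ap, unsigned int err_mask)
{
	struct ata_eh_info *ehi = &ap->link.eh_info;
	struct ata_queued_cmd *qc = ata_qc_from_tag(ap, ap->link.active_tag);

	ehi->action |= ATA_EH_RESET;		/* illustrative recovery action */
	if (qc)
		qc->err_mask |= err_mask;	/* the active command failed */
	else
		ehi->err_mask |= err_mask;	/* no command: blame the link */

	ata_port_abort(ap);			/* hand off to libata EH */
}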
508 struct ata_queued_cmd *qc; in sata_dwc_isr() local
545 qc = ata_qc_from_tag(ap, tag); in sata_dwc_isr()
551 qc->ap->link.active_tag = tag; in sata_dwc_isr()
552 sata_dwc_bmdma_start_by_tag(qc, tag); in sata_dwc_isr()
566 qc = ata_qc_from_tag(ap, tag); in sata_dwc_isr()
569 if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING))) { in sata_dwc_isr()
572 __func__, qc); in sata_dwc_isr()
579 qc->ap->link.active_tag = tag; in sata_dwc_isr()
584 sata_dwc_qc_complete(ap, qc, 1); in sata_dwc_isr()
590 __func__, get_prot_descript(qc->tf.protocol)); in sata_dwc_isr()
592 if (ata_is_dma(qc->tf.protocol)) { in sata_dwc_isr()
610 } else if (ata_is_pio(qc->tf.protocol)) { in sata_dwc_isr()
611 ata_sff_hsm_move(ap, qc, status, 0); in sata_dwc_isr()
615 if (unlikely(sata_dwc_qc_complete(ap, qc, 1))) in sata_dwc_isr()
660 qc = ata_qc_from_tag(ap, tag); in sata_dwc_isr()
663 qc->ap->link.active_tag = tag; in sata_dwc_isr()
670 sata_dwc_qc_complete(ap, qc, 1); in sata_dwc_isr()
677 get_prot_descript(qc->tf.protocol)); in sata_dwc_isr()
678 if (ata_is_dma(qc->tf.protocol)) { in sata_dwc_isr()
687 if (unlikely(sata_dwc_qc_complete(ap, qc, 1))) in sata_dwc_isr()
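Both interrupt paths above repeatedly map a hardware tag back to its queued command with ata_qc_from_tag() before finishing it. A minimal sketch of that lookup-and-complete pattern; it substitutes the generic ata_qc_complete() for the driver's sata_dwc_qc_complete() wrapper and omits the driver's status bookkeeping:

/* Sketch: translate the tag reported by the controller into its qc,
 * skip commands the core is polling, then either advance the PIO state
 * machine or complete the command directly. */
static void sketch_handle_completed_tag(struct ata_port *ap, u8 tag, u8 status)
{
	struct ata_queued_cmd *qc = ata_qc_from_tag(ap, tag);

	if (unlikely(!qc || (qc->tf.flags & ATA_TFLAG_POLLING)))
		return;			/* polled commands are finished by the core */

	ap->link.active_tag = tag;

	if (ata_is_pio(qc->tf.protocol))
		ata_sff_hsm_move(ap, qc, status, 0);	/* PIO: drive the SFF HSM */
	else
		ata_qc_complete(qc);			/* DMA/NCQ: command is done */
}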
744 struct ata_queued_cmd *qc; in sata_dwc_dma_xfer_complete() local
750 qc = ata_qc_from_tag(ap, tag); in sata_dwc_dma_xfer_complete()
751 if (!qc) { in sata_dwc_dma_xfer_complete()
760 __func__, qc->hw_tag, qc->tf.command, in sata_dwc_dma_xfer_complete()
761 get_dma_dir_descript(qc->dma_dir), in sata_dwc_dma_xfer_complete()
762 get_prot_descript(qc->tf.protocol), in sata_dwc_dma_xfer_complete()
767 if (ata_is_dma(qc->tf.protocol)) { in sata_dwc_dma_xfer_complete()
776 sata_dwc_qc_complete(ap, qc, check_status); in sata_dwc_dma_xfer_complete()
779 sata_dwc_qc_complete(ap, qc, check_status); in sata_dwc_dma_xfer_complete()
783 static int sata_dwc_qc_complete(struct ata_port *ap, struct ata_queued_cmd *qc, in sata_dwc_qc_complete() argument
788 u8 tag = qc->hw_tag; in sata_dwc_qc_complete()
800 qc->tf.command, status, ap->print_id, qc->tf.protocol); in sata_dwc_qc_complete()
806 ata_qc_complete(qc); in sata_dwc_qc_complete()
988 static void sata_dwc_bmdma_setup_by_tag(struct ata_queued_cmd *qc, u8 tag) in sata_dwc_bmdma_setup_by_tag() argument
990 sata_dwc_exec_command_by_tag(qc->ap, &qc->tf, tag, in sata_dwc_bmdma_setup_by_tag()
994 static void sata_dwc_bmdma_setup(struct ata_queued_cmd *qc) in sata_dwc_bmdma_setup() argument
996 u8 tag = qc->hw_tag; in sata_dwc_bmdma_setup()
998 if (ata_is_ncq(qc->tf.protocol)) { in sata_dwc_bmdma_setup()
999 dev_dbg(qc->ap->dev, "%s: ap->link.sactive=0x%08x tag=%d\n", in sata_dwc_bmdma_setup()
1000 __func__, qc->ap->link.sactive, tag); in sata_dwc_bmdma_setup()
1004 sata_dwc_bmdma_setup_by_tag(qc, tag); in sata_dwc_bmdma_setup()
1007 static void sata_dwc_bmdma_start_by_tag(struct ata_queued_cmd *qc, u8 tag) in sata_dwc_bmdma_start_by_tag() argument
1011 struct sata_dwc_device *hsdev = HSDEV_FROM_QC(qc); in sata_dwc_bmdma_start_by_tag()
1012 struct ata_port *ap = qc->ap; in sata_dwc_bmdma_start_by_tag()
1015 int dir = qc->dma_dir; in sata_dwc_bmdma_start_by_tag()
1032 __func__, qc, tag, qc->tf.command, in sata_dwc_bmdma_start_by_tag()
1033 get_dma_dir_descript(qc->dma_dir), start_dma); in sata_dwc_bmdma_start_by_tag()
1034 sata_dwc_tf_dump(ap, &qc->tf); in sata_dwc_bmdma_start_by_tag()
1056 static void sata_dwc_bmdma_start(struct ata_queued_cmd *qc) in sata_dwc_bmdma_start() argument
1058 u8 tag = qc->hw_tag; in sata_dwc_bmdma_start()
1060 if (ata_is_ncq(qc->tf.protocol)) { in sata_dwc_bmdma_start()
1061 dev_dbg(qc->ap->dev, "%s: ap->link.sactive=0x%08x tag=%d\n", in sata_dwc_bmdma_start()
1062 __func__, qc->ap->link.sactive, tag); in sata_dwc_bmdma_start()
1066 dev_dbg(qc->ap->dev, "%s\n", __func__); in sata_dwc_bmdma_start()
1067 sata_dwc_bmdma_start_by_tag(qc, tag); in sata_dwc_bmdma_start()
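The bmdma_start references mark the point where the descriptor prepared in dma_dwc_xfer_setup() is actually handed to the engine. A minimal sketch of that submit-and-start step, assuming the channel and descriptor from the earlier sketch:

/* Sketch: queue the prepared slave-sg descriptor and start the channel. */
static void sketch_start_dma(struct dma_chan *chan,
			     struct dma_async_tx_descriptor *desc)
{
	dmaengine_submit(desc);		/* place the descriptor on the channel */
	dma_async_issue_pending(chan);	/* kick the DMA engine */
}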
1070 static unsigned int sata_dwc_qc_issue(struct ata_queued_cmd *qc) in sata_dwc_qc_issue() argument
1073 u8 tag = qc->hw_tag; in sata_dwc_qc_issue()
1074 struct ata_port *ap = qc->ap; in sata_dwc_qc_issue()
1078 if (qc->hw_tag > 0 || ap->link.sactive > 1) in sata_dwc_qc_issue()
1081 __func__, ap->print_id, qc->tf.command, in sata_dwc_qc_issue()
1082 ata_get_cmd_descript(qc->tf.command), in sata_dwc_qc_issue()
1083 qc->hw_tag, get_prot_descript(qc->tf.protocol), in sata_dwc_qc_issue()
1087 if (!ata_is_ncq(qc->tf.protocol)) in sata_dwc_qc_issue()
1090 if (ata_is_dma(qc->tf.protocol)) { in sata_dwc_qc_issue()
1091 hsdevp->desc[tag] = dma_dwc_xfer_setup(qc); in sata_dwc_qc_issue()
1098 if (ata_is_ncq(qc->tf.protocol)) { in sata_dwc_qc_issue()
1103 dev_dbg(qc->ap->dev, in sata_dwc_qc_issue()
1105 __func__, tag, qc->ap->link.sactive, sactive); in sata_dwc_qc_issue()
1107 ap->ops->sff_tf_load(ap, &qc->tf); in sata_dwc_qc_issue()
1108 sata_dwc_exec_command_by_tag(ap, &qc->tf, tag, in sata_dwc_qc_issue()
1111 return ata_bmdma_qc_issue(qc); in sata_dwc_qc_issue()
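The qc_issue references close the listing: DMA protocols get a dmaengine descriptor prepared per tag, and NCQ commands additionally advertise their tag in SActive before the taskfile is loaded, while everything else falls through to ata_bmdma_qc_issue() (line 1111). A minimal sketch of the NCQ branch; ata_sff_exec_command() stands in for the driver's sata_dwc_exec_command_by_tag() wrapper, and the bookkeeping around issued tags is omitted:

/* Sketch of the NCQ issue path suggested above: set the tag's bit in
 * SActive, program the taskfile through the SFF helpers, then issue. */
static unsigned int sketch_qc_issue(struct ata_queued_cmd *qc)
{
	struct ata_port *ap = qc->ap;
	u8 tag = qc->hw_tag;
	u32 sactive;

	if (!ata_is_ncq(qc->tf.protocol))
		return ata_bmdma_qc_issue(qc);	/* non-NCQ: generic BMDMA path */

	sata_scr_read(&ap->link, SCR_ACTIVE, &sactive);
	sactive |= BIT(tag);			/* advertise the tag as active */
	sata_scr_write(&ap->link, SCR_ACTIVE, sactive);

	ap->ops->sff_tf_load(ap, &qc->tf);	/* program the taskfile */
	ata_sff_exec_command(ap, &qc->tf);	/* stand-in for the by-tag wrapper */
	return 0;
}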