Lines Matching refs:pp  (uses of the local port-private pointer pp in drivers/ata/sata_nv.c)
591 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_register_mode() local
592 void __iomem *mmio = pp->ctl_block; in nv_adma_register_mode()
596 if (pp->flags & NV_ADMA_PORT_REGISTER_MODE) in nv_adma_register_mode()
624 pp->flags |= NV_ADMA_PORT_REGISTER_MODE; in nv_adma_register_mode()
629 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_mode() local
630 void __iomem *mmio = pp->ctl_block; in nv_adma_mode()
634 if (!(pp->flags & NV_ADMA_PORT_REGISTER_MODE)) in nv_adma_mode()
637 WARN_ON(pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE); in nv_adma_mode()
654 pp->flags &= ~NV_ADMA_PORT_REGISTER_MODE; in nv_adma_mode()
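Taken together, the nv_adma_register_mode()/nv_adma_mode() references above show a simple state toggle kept in pp->flags: each function bails out early if the port is already in the requested mode, then flips the register-mode bit. A minimal sketch of that pattern, with the struct layout and flag bit values assumed for illustration (the real driver keeps this state in struct nv_adma_port_priv):

#include <linux/bug.h>
#include <linux/io.h>

#define DEMO_PORT_REGISTER_MODE		(1 << 0)	/* assumed bit value */
#define DEMO_ATAPI_SETUP_COMPLETE	(1 << 1)	/* assumed bit value */

struct demo_adma_port {
        void __iomem    *ctl_block;     /* per-port ADMA control registers */
        unsigned int    flags;
};

/* Drop back to legacy register mode (cf. nv_adma_register_mode()). */
static void demo_register_mode(struct demo_adma_port *pp)
{
        if (pp->flags & DEMO_PORT_REGISTER_MODE)
                return;                 /* already in register mode */

        /* ... idle the ADMA engine through pp->ctl_block ... */

        pp->flags |= DEMO_PORT_REGISTER_MODE;
}

/* Switch into ADMA mode (cf. nv_adma_mode()). */
static void demo_adma_mode(struct demo_adma_port *pp)
{
        if (!(pp->flags & DEMO_PORT_REGISTER_MODE))
                return;                 /* already in ADMA mode */

        /* ADMA mode is not entered once ATAPI setup has completed */
        WARN_ON(pp->flags & DEMO_ATAPI_SETUP_COMPLETE);

        /* ... enable the ADMA engine through pp->ctl_block ... */

        pp->flags &= ~DEMO_PORT_REGISTER_MODE;
}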
660 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_slave_config() local
711 pp->flags &= ~NV_ADMA_ATAPI_SETUP_COMPLETE; in nv_adma_slave_config()
714 pp->flags |= NV_ADMA_ATAPI_SETUP_COMPLETE; in nv_adma_slave_config()
733 rc = dma_set_mask(&pdev->dev, pp->adma_dma_mask); in nv_adma_slave_config()
750 struct nv_adma_port_priv *pp = qc->ap->private_data; in nv_adma_check_atapi_dma() local
751 return !(pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE); in nv_adma_check_atapi_dma()
802 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_check_cpb() local
803 u8 flags = pp->cpb[cpb_num].resp_flags; in nv_adma_check_cpb()
879 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_interrupt() local
880 void __iomem *mmio = pp->ctl_block; in nv_adma_interrupt()
888 if (pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE) { in nv_adma_interrupt()
896 if (pp->flags & NV_ADMA_PORT_REGISTER_MODE) { in nv_adma_interrupt()
912 gen_ctl = readl(pp->gen_block + NV_ADMA_GEN_CTL); in nv_adma_interrupt()
994 struct nv_adma_port_priv *pp = host->ports[0]->private_data; in nv_adma_interrupt() local
995 writel(notifier_clears[0], pp->notifier_clear_block); in nv_adma_interrupt()
996 pp = host->ports[1]->private_data; in nv_adma_interrupt()
997 writel(notifier_clears[1], pp->notifier_clear_block); in nv_adma_interrupt()
1007 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_freeze() local
1008 void __iomem *mmio = pp->ctl_block; in nv_adma_freeze()
1013 if (pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE) in nv_adma_freeze()
1029 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_thaw() local
1030 void __iomem *mmio = pp->ctl_block; in nv_adma_thaw()
1035 if (pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE) in nv_adma_thaw()
1047 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_irq_clear() local
1048 void __iomem *mmio = pp->ctl_block; in nv_adma_irq_clear()
1051 if (pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE) { in nv_adma_irq_clear()
1072 pp = ap->host->ports[0]->private_data; in nv_adma_irq_clear()
1073 writel(notifier_clears[0], pp->notifier_clear_block); in nv_adma_irq_clear()
1074 pp = ap->host->ports[1]->private_data; in nv_adma_irq_clear()
1075 writel(notifier_clears[1], pp->notifier_clear_block); in nv_adma_irq_clear()
1080 struct nv_adma_port_priv *pp = qc->ap->private_data; in nv_adma_post_internal_cmd() local
1082 if (pp->flags & NV_ADMA_PORT_REGISTER_MODE) in nv_adma_post_internal_cmd()
1089 struct nv_adma_port_priv *pp; in nv_adma_port_start() local
1112 pp = devm_kzalloc(dev, sizeof(*pp), GFP_KERNEL); in nv_adma_port_start()
1113 if (!pp) in nv_adma_port_start()
1118 pp->ctl_block = mmio; in nv_adma_port_start()
1119 pp->gen_block = ap->host->iomap[NV_MMIO_BAR] + NV_ADMA_GEN; in nv_adma_port_start()
1120 pp->notifier_clear_block = pp->gen_block + in nv_adma_port_start()
1129 pp->adma_dma_mask = *dev->dma_mask; in nv_adma_port_start()
1141 pp->cpb = mem; in nv_adma_port_start()
1142 pp->cpb_dma = mem_dma; in nv_adma_port_start()
1153 pp->aprd = mem; in nv_adma_port_start()
1154 pp->aprd_dma = mem_dma; in nv_adma_port_start()
1156 ap->private_data = pp; in nv_adma_port_start()
1162 pp->flags = NV_ADMA_PORT_REGISTER_MODE; in nv_adma_port_start()
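The nv_adma_port_start() references (lines 1089-1162 above) follow the usual libata pattern: the per-port private struct is allocated with a managed allocator, one DMA-coherent buffer is split between the CPB ring and the APRD scatter tables, and the result is published through ap->private_data. A rough sketch under assumed sizes and field names:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/libata.h>

#define DEMO_CPB_AREA_SZ	4096		/* assumed size of the CPB ring */
#define DEMO_APRD_AREA_SZ	4096		/* assumed size of the APRD tables */
#define DEMO_PORT_REGISTER_MODE	(1 << 0)	/* assumed flag bit */

struct demo_adma_port_priv {
        void            *cpb;           /* command parameter blocks (CPU view) */
        dma_addr_t      cpb_dma;        /* ... and their bus address */
        void            *aprd;          /* scatter/gather tables (CPU view) */
        dma_addr_t      aprd_dma;
        unsigned int    flags;
};

static int demo_adma_port_start(struct ata_port *ap)
{
        struct device *dev = ap->host->dev;
        struct demo_adma_port_priv *pp;
        dma_addr_t mem_dma;
        void *mem;

        pp = devm_kzalloc(dev, sizeof(*pp), GFP_KERNEL);
        if (!pp)
                return -ENOMEM;

        mem = dmam_alloc_coherent(dev, DEMO_CPB_AREA_SZ + DEMO_APRD_AREA_SZ,
                                  &mem_dma, GFP_KERNEL);
        if (!mem)
                return -ENOMEM;

        pp->cpb = mem;                          /* CPB ring at the start ... */
        pp->cpb_dma = mem_dma;
        pp->aprd = mem + DEMO_CPB_AREA_SZ;      /* ... APRD tables right after */
        pp->aprd_dma = mem_dma + DEMO_CPB_AREA_SZ;

        ap->private_data = pp;
        /* ports come up in register mode, mirroring line 1162 above */
        pp->flags = DEMO_PORT_REGISTER_MODE;
        return 0;
}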
1184 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_port_stop() local
1185 void __iomem *mmio = pp->ctl_block; in nv_adma_port_stop()
1194 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_port_suspend() local
1195 void __iomem *mmio = pp->ctl_block; in nv_adma_port_suspend()
1211 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_port_resume() local
1212 void __iomem *mmio = pp->ctl_block; in nv_adma_port_resume()
1216 writel(pp->cpb_dma & 0xFFFFFFFF, mmio + NV_ADMA_CPB_BASE_LOW); in nv_adma_port_resume()
1217 writel((pp->cpb_dma >> 16) >> 16, mmio + NV_ADMA_CPB_BASE_HIGH); in nv_adma_port_resume()
1223 pp->flags |= NV_ADMA_PORT_REGISTER_MODE; in nv_adma_port_resume()
1312 struct nv_adma_port_priv *pp = qc->ap->private_data; in nv_adma_fill_sg() local
1321 &pp->aprd[NV_ADMA_SGTBL_LEN * qc->hw_tag + (si-5)]; in nv_adma_fill_sg()
1325 cpb->next_aprd = cpu_to_le64(((u64)(pp->aprd_dma + NV_ADMA_SGTBL_SZ * qc->hw_tag))); in nv_adma_fill_sg()
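The indexing in nv_adma_fill_sg() (lines 1321 and 1325) implies that each command tag owns a fixed-size slice of the shared APRD area, and the "(si-5)" offset suggests the first five scatter entries are held elsewhere (inside the CPB itself), so the shared slice starts at element 5. A sketch of that addressing, with the entry layout and slice length assumed:

#include <linux/types.h>

#define DEMO_SGTBL_LEN	56			/* assumed entries per tag */

struct demo_aprd {				/* assumed 16-byte table entry */
        __le64  addr;
        __le32  len;
        __le32  flags;
};

#define DEMO_SGTBL_SZ	(DEMO_SGTBL_LEN * sizeof(struct demo_aprd))

/*
 * CPU-side entry for scatter element 'si' (si >= 5) of command 'hw_tag';
 * elements 0..4 are assumed to live inside the CPB itself.
 */
static struct demo_aprd *demo_aprd_entry(struct demo_aprd *aprd_base,
                                         unsigned int hw_tag, unsigned int si)
{
        return &aprd_base[DEMO_SGTBL_LEN * hw_tag + (si - 5)];
}

/* Bus address of tag 'hw_tag's slice, as written into cpb->next_aprd. */
static u64 demo_aprd_slice_dma(dma_addr_t aprd_dma, unsigned int hw_tag)
{
        return (u64)aprd_dma + DEMO_SGTBL_SZ * hw_tag;
}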
1332 struct nv_adma_port_priv *pp = qc->ap->private_data; in nv_adma_use_reg_mode() local
1336 if ((pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE) || in nv_adma_use_reg_mode()
1349 struct nv_adma_port_priv *pp = qc->ap->private_data; in nv_adma_qc_prep() local
1350 struct nv_adma_cpb *cpb = &pp->cpb[qc->hw_tag]; in nv_adma_qc_prep()
1355 BUG_ON(!(pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE) && in nv_adma_qc_prep()
1397 struct nv_adma_port_priv *pp = qc->ap->private_data; in nv_adma_qc_issue() local
1398 void __iomem *mmio = pp->ctl_block; in nv_adma_qc_issue()
1415 BUG_ON(!(pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE) && in nv_adma_qc_issue()
1426 if (curr_ncq != pp->last_issue_ncq) { in nv_adma_qc_issue()
1430 pp->last_issue_ncq = curr_ncq; in nv_adma_qc_issue()
1636 struct nv_adma_port_priv *pp = ap->private_data; in nv_adma_error_handler() local
1637 if (!(pp->flags & NV_ADMA_PORT_REGISTER_MODE)) { in nv_adma_error_handler()
1638 void __iomem *mmio = pp->ctl_block; in nv_adma_error_handler()
1645 u32 gen_ctl = readl(pp->gen_block + NV_ADMA_GEN_CTL); in nv_adma_error_handler()
1658 struct nv_adma_cpb *cpb = &pp->cpb[i]; in nv_adma_error_handler()
1673 pp->cpb[i].ctl_flags &= ~NV_CPB_CTL_CPB_VALID; in nv_adma_error_handler()
1692 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_qc_to_dq() local
1693 struct defer_queue *dq = &pp->defer_queue; in nv_swncq_qc_to_dq()
1703 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_qc_from_dq() local
1704 struct defer_queue *dq = &pp->defer_queue; in nv_swncq_qc_from_dq()
1720 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_fis_reinit() local
1722 pp->dhfis_bits = 0; in nv_swncq_fis_reinit()
1723 pp->dmafis_bits = 0; in nv_swncq_fis_reinit()
1724 pp->sdbfis_bits = 0; in nv_swncq_fis_reinit()
1725 pp->ncq_flags = 0; in nv_swncq_fis_reinit()
1730 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_pp_reinit() local
1731 struct defer_queue *dq = &pp->defer_queue; in nv_swncq_pp_reinit()
1736 pp->qc_active = 0; in nv_swncq_pp_reinit()
1737 pp->last_issue_tag = ATA_TAG_POISON; in nv_swncq_pp_reinit()
1743 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_irq_clear() local
1745 writew(fis, pp->irq_block); in nv_swncq_irq_clear()
1758 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_ncq_stop() local
1768 pp->qc_active, pp->defer_queue.defer_bits, pp->last_issue_tag, in nv_swncq_ncq_stop()
1769 pp->dhfis_bits, pp->dmafis_bits, pp->sdbfis_bits); in nv_swncq_ncq_stop()
1775 sactive = readl(pp->sactive_block); in nv_swncq_ncq_stop()
1776 done_mask = pp->qc_active ^ sactive; in nv_swncq_ncq_stop()
1781 if (pp->qc_active & (1 << i)) in nv_swncq_ncq_stop()
1790 (pp->dhfis_bits >> i) & 0x1, in nv_swncq_ncq_stop()
1791 (pp->dmafis_bits >> i) & 0x1, in nv_swncq_ncq_stop()
1792 (pp->sdbfis_bits >> i) & 0x1, in nv_swncq_ncq_stop()
1930 struct nv_swncq_port_priv *pp; in nv_swncq_port_start() local
1938 pp = devm_kzalloc(dev, sizeof(*pp), GFP_KERNEL); in nv_swncq_port_start()
1939 if (!pp) in nv_swncq_port_start()
1942 pp->prd = dmam_alloc_coherent(dev, ATA_PRD_TBL_SZ * ATA_MAX_QUEUE, in nv_swncq_port_start()
1943 &pp->prd_dma, GFP_KERNEL); in nv_swncq_port_start()
1944 if (!pp->prd) in nv_swncq_port_start()
1947 ap->private_data = pp; in nv_swncq_port_start()
1948 pp->sactive_block = ap->ioaddr.scr_addr + 4 * SCR_ACTIVE; in nv_swncq_port_start()
1949 pp->irq_block = mmio + NV_INT_STATUS_MCP55 + ap->port_no * 2; in nv_swncq_port_start()
1950 pp->tag_block = mmio + NV_NCQ_REG_MCP55 + ap->port_no * 2; in nv_swncq_port_start()
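On the software-NCQ side, nv_swncq_port_start() repeats the managed-allocation pattern and additionally records three per-port register windows derived from the port number. A sketch with the register offsets treated as placeholder assumptions (the real driver uses NV_INT_STATUS_MCP55 and NV_NCQ_REG_MCP55):

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/libata.h>

#define DEMO_INT_STATUS_OFF	0x440	/* assumed stand-in for NV_INT_STATUS_MCP55 */
#define DEMO_NCQ_REG_OFF	0x448	/* assumed stand-in for NV_NCQ_REG_MCP55 */

struct demo_swncq_port_priv {
        void            *prd;           /* one PRD table per possible tag */
        dma_addr_t      prd_dma;
        void __iomem    *sactive_block; /* SActive register */
        void __iomem    *irq_block;     /* per-port interrupt status */
        void __iomem    *tag_block;     /* per-port tag register */
};

static int demo_swncq_port_start(struct ata_port *ap, void __iomem *mmio)
{
        struct device *dev = ap->host->dev;
        struct demo_swncq_port_priv *pp;

        pp = devm_kzalloc(dev, sizeof(*pp), GFP_KERNEL);
        if (!pp)
                return -ENOMEM;

        /* one legacy PRD table per queued-command tag */
        pp->prd = dmam_alloc_coherent(dev, ATA_PRD_TBL_SZ * ATA_MAX_QUEUE,
                                      &pp->prd_dma, GFP_KERNEL);
        if (!pp->prd)
                return -ENOMEM;

        ap->private_data = pp;

        /* SActive lives in the SCR block; the other two windows are
         * per-port registers spaced two bytes apart in the MMIO BAR */
        pp->sactive_block = ap->ioaddr.scr_addr + 4 * SCR_ACTIVE;
        pp->irq_block = mmio + DEMO_INT_STATUS_OFF + ap->port_no * 2;
        pp->tag_block = mmio + DEMO_NCQ_REG_OFF + ap->port_no * 2;
        return 0;
}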
1974 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_fill_sg() local
1978 prd = pp->prd + ATA_MAX_PRD * qc->hw_tag; in nv_swncq_fill_sg()
2009 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_issue_atacmd() local
2016 writel((1 << qc->hw_tag), pp->sactive_block); in nv_swncq_issue_atacmd()
2017 pp->last_issue_tag = qc->hw_tag; in nv_swncq_issue_atacmd()
2018 pp->dhfis_bits &= ~(1 << qc->hw_tag); in nv_swncq_issue_atacmd()
2019 pp->dmafis_bits &= ~(1 << qc->hw_tag); in nv_swncq_issue_atacmd()
2020 pp->qc_active |= (0x1 << qc->hw_tag); in nv_swncq_issue_atacmd()
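nv_swncq_issue_atacmd() does the per-tag bookkeeping at issue time: the tag is armed in SActive, remembered as last_issue_tag, its D2H/DMA-setup FIS bits are cleared, and the tag is added to the driver's active bitmap. A condensed sketch (the bitmap names mirror the listing; everything else is assumed):

#include <linux/io.h>
#include <linux/libata.h>

struct demo_swncq_ncq_state {
        void __iomem    *sactive_block; /* SActive register */
        u32             qc_active;      /* tags with a command in flight */
        u32             dhfis_bits;     /* tags that have seen a D2H FIS */
        u32             dmafis_bits;    /* tags that have seen a DMA-setup FIS */
        u32             sdbfis_bits;    /* tags completed via SDB FIS */
        u8              last_issue_tag;
};

static void demo_swncq_issue(struct demo_swncq_ncq_state *pp,
                             struct ata_queued_cmd *qc)
{
        /* ... write the taskfile for qc ... */

        writel(1 << qc->hw_tag, pp->sactive_block);     /* arm the tag */

        pp->last_issue_tag = qc->hw_tag;
        pp->dhfis_bits &= ~(1 << qc->hw_tag);           /* no FISes seen yet */
        pp->dmafis_bits &= ~(1 << qc->hw_tag);
        pp->qc_active |= 1 << qc->hw_tag;               /* now in flight */
}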
2033 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_qc_issue() local
2040 if (!pp->qc_active) in nv_swncq_qc_issue()
2076 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_sdbfis() local
2096 sactive = readl(pp->sactive_block); in nv_swncq_sdbfis()
2097 done_mask = pp->qc_active ^ sactive; in nv_swncq_sdbfis()
2099 pp->qc_active &= ~done_mask; in nv_swncq_sdbfis()
2100 pp->dhfis_bits &= ~done_mask; in nv_swncq_sdbfis()
2101 pp->dmafis_bits &= ~done_mask; in nv_swncq_sdbfis()
2102 pp->sdbfis_bits |= done_mask; in nv_swncq_sdbfis()
2111 if (pp->qc_active & pp->dhfis_bits) in nv_swncq_sdbfis()
2114 if ((pp->ncq_flags & ncq_saw_backout) || in nv_swncq_sdbfis()
2115 (pp->qc_active ^ pp->dhfis_bits)) in nv_swncq_sdbfis()
2124 ap->print_id, ap->qc_active, pp->qc_active, in nv_swncq_sdbfis()
2125 pp->defer_queue.defer_bits, pp->dhfis_bits, in nv_swncq_sdbfis()
2126 pp->dmafis_bits, pp->last_issue_tag); in nv_swncq_sdbfis()
2131 qc = ata_qc_from_tag(ap, pp->last_issue_tag); in nv_swncq_sdbfis()
2136 if (pp->defer_queue.defer_bits) { in nv_swncq_sdbfis()
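nv_swncq_sdbfis() derives the completed set by XOR-ing the driver's qc_active bitmap with the hardware SActive register: tags the driver issued but the device has since cleared are done, and their bookkeeping bits are retired. A minimal sketch, reusing the state struct from the issue-side sketch above:

/* Retire the tags completed since the last SDB FIS; returns their mask. */
static u32 demo_swncq_done_mask(struct demo_swncq_ncq_state *pp)
{
        u32 sactive = readl(pp->sactive_block);         /* still busy in hardware */
        u32 done_mask = pp->qc_active ^ sactive;        /* issued, no longer busy */

        /* a bit set in sactive but not in qc_active would indicate a
         * protocol error; this sketch ignores that case */
        pp->qc_active &= ~done_mask;
        pp->dhfis_bits &= ~done_mask;
        pp->dmafis_bits &= ~done_mask;
        pp->sdbfis_bits |= done_mask;

        return done_mask;       /* caller completes the matching qcs */
}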
2148 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_tag() local
2151 tag = readb(pp->tag_block) >> 2; in nv_swncq_tag()
2161 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_dmafis() local
2175 iowrite32(pp->prd_dma + ATA_PRD_TBL_SZ * qc->hw_tag, in nv_swncq_dmafis()
2189 struct nv_swncq_port_priv *pp = ap->private_data; in nv_swncq_host_interrupt() local
2208 if (!pp->qc_active) in nv_swncq_host_interrupt()
2229 pp->ncq_flags |= ncq_saw_backout; in nv_swncq_host_interrupt()
2233 pp->ncq_flags |= ncq_saw_sdb; in nv_swncq_host_interrupt()
2236 ap->print_id, pp->qc_active, pp->dhfis_bits, in nv_swncq_host_interrupt()
2237 pp->dmafis_bits, readl(pp->sactive_block)); in nv_swncq_host_interrupt()
2246 pp->dhfis_bits |= (0x1 << pp->last_issue_tag); in nv_swncq_host_interrupt()
2247 pp->ncq_flags |= ncq_saw_d2h; in nv_swncq_host_interrupt()
2248 if (pp->ncq_flags & (ncq_saw_sdb | ncq_saw_backout)) { in nv_swncq_host_interrupt()
2256 !(pp->ncq_flags & ncq_saw_dmas)) { in nv_swncq_host_interrupt()
2261 if (pp->defer_queue.defer_bits) { in nv_swncq_host_interrupt()
2273 pp->dmafis_bits |= (0x1 << nv_swncq_tag(ap)); in nv_swncq_host_interrupt()
2274 pp->ncq_flags |= ncq_saw_dmas; in nv_swncq_host_interrupt()
2420 struct nv_adma_port_priv *pp; in nv_pci_device_resume() local
2424 pp = host->ports[0]->private_data; in nv_pci_device_resume()
2425 if (pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE) in nv_pci_device_resume()
2431 pp = host->ports[1]->private_data; in nv_pci_device_resume()
2432 if (pp->flags & NV_ADMA_ATAPI_SETUP_COMPLETE) in nv_pci_device_resume()