Lines Matching refs:ioat_chan

130 		  struct ioatdma_chan *ioat_chan, int idx);
414 struct ioatdma_chan *ioat_chan; in ioat_dma_setup_interrupts() local
443 ioat_chan = ioat_chan_by_index(ioat_dma, i); in ioat_dma_setup_interrupts()
446 "ioat-msix", ioat_chan); in ioat_dma_setup_interrupts()
450 ioat_chan = ioat_chan_by_index(ioat_dma, j); in ioat_dma_setup_interrupts()
451 devm_free_irq(dev, msix->vector, ioat_chan); in ioat_dma_setup_interrupts()
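The ioat_dma_setup_interrupts() hits above show the per-channel MSI-X request loop and its unwind path: if devm_request_irq() fails for channel i, the vectors already requested for channels 0..i-1 are released with devm_free_irq(). A minimal sketch of that pattern follows; all sketches in this listing assume the driver's own headers and struct layouts, and here the handler name, the msix_entries array, the msixcnt bound, and the fall-back label are assumptions not visible in the listing.

	for (i = 0; i < msixcnt; i++) {
		msix = &ioat_dma->msix_entries[i];		/* field name assumed */
		ioat_chan = ioat_chan_by_index(ioat_dma, i);
		err = devm_request_irq(dev, msix->vector,
				       ioat_dma_do_interrupt_msix,	/* handler name assumed */
				       0, "ioat-msix", ioat_chan);
		if (err) {
			/* unwind: free the vectors requested so far */
			for (j = 0; j < i; j++) {
				msix = &ioat_dma->msix_entries[j];
				ioat_chan = ioat_chan_by_index(ioat_dma, j);
				devm_free_irq(dev, msix->vector, ioat_chan);
			}
			goto msi;	/* fall back to plain MSI; label assumed */
		}
	}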
580 struct ioatdma_chan *ioat_chan; in ioat_enumerate_channels() local
601 ioat_chan = devm_kzalloc(dev, sizeof(*ioat_chan), GFP_KERNEL); in ioat_enumerate_channels()
602 if (!ioat_chan) in ioat_enumerate_channels()
605 ioat_init_channel(ioat_dma, ioat_chan, i); in ioat_enumerate_channels()
606 ioat_chan->xfercap_log = xfercap_log; in ioat_enumerate_channels()
607 spin_lock_init(&ioat_chan->prep_lock); in ioat_enumerate_channels()
608 if (ioat_reset_hw(ioat_chan)) { in ioat_enumerate_channels()
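The ioat_enumerate_channels() fragments show one ioatdma_chan being allocated per hardware channel, initialized, and hardware-reset. A sketch of that loop, assuming the channel count lives in the dmaengine device (the chancnt bound and the give-up-on-failure behaviour are assumptions):

	for (i = 0; i < dma->chancnt; i++) {		/* bound assumed */
		ioat_chan = devm_kzalloc(dev, sizeof(*ioat_chan), GFP_KERNEL);
		if (!ioat_chan)
			break;

		ioat_init_channel(ioat_dma, ioat_chan, i);
		ioat_chan->xfercap_log = xfercap_log;
		spin_lock_init(&ioat_chan->prep_lock);
		if (ioat_reset_hw(ioat_chan)) {
			/* treat a failed reset as "no usable channels";
			 * exact recovery is not visible in the listing */
			i = 0;
			break;
		}
	}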
623 struct ioatdma_chan *ioat_chan = to_ioat_chan(c); in ioat_free_chan_resources() local
624 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma; in ioat_free_chan_resources()
626 const int total_descs = 1 << ioat_chan->alloc_order; in ioat_free_chan_resources()
633 if (!ioat_chan->ring) in ioat_free_chan_resources()
636 ioat_stop(ioat_chan); in ioat_free_chan_resources()
637 ioat_reset_hw(ioat_chan); in ioat_free_chan_resources()
639 spin_lock_bh(&ioat_chan->cleanup_lock); in ioat_free_chan_resources()
640 spin_lock_bh(&ioat_chan->prep_lock); in ioat_free_chan_resources()
641 descs = ioat_ring_space(ioat_chan); in ioat_free_chan_resources()
642 dev_dbg(to_dev(ioat_chan), "freeing %d idle descriptors\n", descs); in ioat_free_chan_resources()
644 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->head + i); in ioat_free_chan_resources()
649 dev_err(to_dev(ioat_chan), "Freeing %d in use descriptors!\n", in ioat_free_chan_resources()
653 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail + i); in ioat_free_chan_resources()
654 dump_desc_dbg(ioat_chan, desc); in ioat_free_chan_resources()
658 for (i = 0; i < ioat_chan->desc_chunks; i++) { in ioat_free_chan_resources()
659 dma_free_coherent(to_dev(ioat_chan), SZ_2M, in ioat_free_chan_resources()
660 ioat_chan->descs[i].virt, in ioat_free_chan_resources()
661 ioat_chan->descs[i].hw); in ioat_free_chan_resources()
662 ioat_chan->descs[i].virt = NULL; in ioat_free_chan_resources()
663 ioat_chan->descs[i].hw = 0; in ioat_free_chan_resources()
665 ioat_chan->desc_chunks = 0; in ioat_free_chan_resources()
667 kfree(ioat_chan->ring); in ioat_free_chan_resources()
668 ioat_chan->ring = NULL; in ioat_free_chan_resources()
669 ioat_chan->alloc_order = 0; in ioat_free_chan_resources()
670 dma_pool_free(ioat_dma->completion_pool, ioat_chan->completion, in ioat_free_chan_resources()
671 ioat_chan->completion_dma); in ioat_free_chan_resources()
672 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_free_chan_resources()
673 spin_unlock_bh(&ioat_chan->cleanup_lock); in ioat_free_chan_resources()
675 ioat_chan->last_completion = 0; in ioat_free_chan_resources()
676 ioat_chan->completion_dma = 0; in ioat_free_chan_resources()
677 ioat_chan->dmacount = 0; in ioat_free_chan_resources()
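Taken together, the ioat_free_chan_resources() hits show the teardown order: bail out if the ring was never allocated, quiesce and reset the channel, take cleanup_lock before prep_lock (the same order used in ioat_alloc_chan_resources() below), release the descriptor ring, the 2 MB descriptor chunks, and the completion write-back area, then zero the bookkeeping fields. A compressed sketch; the descriptor walk and the in-use warning are abbreviated to a comment:

	if (!ioat_chan->ring)
		return;

	ioat_stop(ioat_chan);
	ioat_reset_hw(ioat_chan);

	spin_lock_bh(&ioat_chan->cleanup_lock);
	spin_lock_bh(&ioat_chan->prep_lock);
	/* ... free the idle ring entries from head, warn about and dump any
	 * still-in-use entries from tail (see the fragments above) ... */
	for (i = 0; i < ioat_chan->desc_chunks; i++) {
		dma_free_coherent(to_dev(ioat_chan), SZ_2M,
				  ioat_chan->descs[i].virt,
				  ioat_chan->descs[i].hw);
		ioat_chan->descs[i].virt = NULL;
		ioat_chan->descs[i].hw = 0;
	}
	ioat_chan->desc_chunks = 0;

	kfree(ioat_chan->ring);
	ioat_chan->ring = NULL;
	ioat_chan->alloc_order = 0;
	dma_pool_free(ioat_dma->completion_pool, ioat_chan->completion,
		      ioat_chan->completion_dma);
	spin_unlock_bh(&ioat_chan->prep_lock);
	spin_unlock_bh(&ioat_chan->cleanup_lock);

	ioat_chan->last_completion = 0;
	ioat_chan->completion_dma = 0;
	ioat_chan->dmacount = 0;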
685 struct ioatdma_chan *ioat_chan = to_ioat_chan(c); in ioat_alloc_chan_resources() local
693 if (ioat_chan->ring) in ioat_alloc_chan_resources()
694 return 1 << ioat_chan->alloc_order; in ioat_alloc_chan_resources()
697 writew(IOAT_CHANCTRL_RUN, ioat_chan->reg_base + IOAT_CHANCTRL_OFFSET); in ioat_alloc_chan_resources()
701 ioat_chan->completion = in ioat_alloc_chan_resources()
702 dma_pool_zalloc(ioat_chan->ioat_dma->completion_pool, in ioat_alloc_chan_resources()
703 GFP_NOWAIT, &ioat_chan->completion_dma); in ioat_alloc_chan_resources()
704 if (!ioat_chan->completion) in ioat_alloc_chan_resources()
707 writel(((u64)ioat_chan->completion_dma) & 0x00000000FFFFFFFF, in ioat_alloc_chan_resources()
708 ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_LOW); in ioat_alloc_chan_resources()
709 writel(((u64)ioat_chan->completion_dma) >> 32, in ioat_alloc_chan_resources()
710 ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_HIGH); in ioat_alloc_chan_resources()
717 spin_lock_bh(&ioat_chan->cleanup_lock); in ioat_alloc_chan_resources()
718 spin_lock_bh(&ioat_chan->prep_lock); in ioat_alloc_chan_resources()
719 ioat_chan->ring = ring; in ioat_alloc_chan_resources()
720 ioat_chan->head = 0; in ioat_alloc_chan_resources()
721 ioat_chan->issued = 0; in ioat_alloc_chan_resources()
722 ioat_chan->tail = 0; in ioat_alloc_chan_resources()
723 ioat_chan->alloc_order = order; in ioat_alloc_chan_resources()
724 set_bit(IOAT_RUN, &ioat_chan->state); in ioat_alloc_chan_resources()
725 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_alloc_chan_resources()
726 spin_unlock_bh(&ioat_chan->cleanup_lock); in ioat_alloc_chan_resources()
728 ioat_start_null_desc(ioat_chan); in ioat_alloc_chan_resources()
733 status = ioat_chansts(ioat_chan); in ioat_alloc_chan_resources()
737 return 1 << ioat_chan->alloc_order; in ioat_alloc_chan_resources()
739 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_alloc_chan_resources()
741 dev_WARN(to_dev(ioat_chan), in ioat_alloc_chan_resources()
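In ioat_alloc_chan_resources() the fragments cover three steps: start the channel (IOAT_CHANCTRL_RUN), give the hardware a 64-bit completion write-back address split across the low/high CHANCMP registers, and publish the new ring under both locks before kicking the channel with a NULL descriptor and polling channel status. A sketch of the first two steps, assuming the function returns -ENOMEM on allocation failure (error handling beyond that is elided):

	/* reuse an existing ring if one is already allocated */
	if (ioat_chan->ring)
		return 1 << ioat_chan->alloc_order;

	writew(IOAT_CHANCTRL_RUN, ioat_chan->reg_base + IOAT_CHANCTRL_OFFSET);

	/* completion status is written back by the device into this buffer */
	ioat_chan->completion =
		dma_pool_zalloc(ioat_chan->ioat_dma->completion_pool,
				GFP_NOWAIT, &ioat_chan->completion_dma);
	if (!ioat_chan->completion)
		return -ENOMEM;	/* return value assumed */

	/* program the 64-bit DMA address as two 32-bit halves */
	writel(((u64)ioat_chan->completion_dma) & 0x00000000FFFFFFFF,
	       ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_LOW);
	writel(((u64)ioat_chan->completion_dma) >> 32,
	       ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_HIGH);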
750 struct ioatdma_chan *ioat_chan, int idx) in ioat_init_channel() argument
753 struct dma_chan *c = &ioat_chan->dma_chan; in ioat_init_channel()
756 ioat_chan->ioat_dma = ioat_dma; in ioat_init_channel()
757 ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1)); in ioat_init_channel()
758 spin_lock_init(&ioat_chan->cleanup_lock); in ioat_init_channel()
759 ioat_chan->dma_chan.device = dma; in ioat_init_channel()
760 dma_cookie_init(&ioat_chan->dma_chan); in ioat_init_channel()
761 list_add_tail(&ioat_chan->dma_chan.device_node, &dma->channels); in ioat_init_channel()
762 ioat_dma->idx[idx] = ioat_chan; in ioat_init_channel()
763 timer_setup(&ioat_chan->timer, ioat_timer_event, 0); in ioat_init_channel()
764 tasklet_init(&ioat_chan->cleanup_task, ioat_cleanup_event, data); in ioat_init_channel()
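ioat_init_channel() wires one channel into the dmaengine core: each channel gets an MMIO window 0x80 bytes wide starting at offset 0x80 from the device's register base, a dmaengine cookie counter, a slot in ioat_dma->idx[], a watchdog timer, and a cleanup tasklet. A sketch of the whole function, with the two declarations the listing omits filled in as assumptions (the dma_dev field name and the tasklet cookie):

	static void ioat_init_channel(struct ioatdma_device *ioat_dma,
				      struct ioatdma_chan *ioat_chan, int idx)
	{
		struct dma_device *dma = &ioat_dma->dma_dev;	/* field name assumed */
		struct dma_chan *c = &ioat_chan->dma_chan;
		unsigned long data = (unsigned long)c;		/* tasklet cookie, assumed */

		ioat_chan->ioat_dma = ioat_dma;
		ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1));
		spin_lock_init(&ioat_chan->cleanup_lock);
		ioat_chan->dma_chan.device = dma;
		dma_cookie_init(&ioat_chan->dma_chan);
		list_add_tail(&ioat_chan->dma_chan.device_node, &dma->channels);
		ioat_dma->idx[idx] = ioat_chan;
		timer_setup(&ioat_chan->timer, ioat_timer_event, 0);
		tasklet_init(&ioat_chan->cleanup_task, ioat_cleanup_event, data);
	}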
1050 struct ioatdma_chan *ioat_chan; in ioat_intr_quirk() local
1061 ioat_chan = to_ioat_chan(c); in ioat_intr_quirk()
1062 errmask = readl(ioat_chan->reg_base + in ioat_intr_quirk()
1066 writel(errmask, ioat_chan->reg_base + in ioat_intr_quirk()
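ioat_intr_quirk() does a read-modify-write of each channel's error-mask register. The readl()/writel() pair is in the listing; the register offset and the error bits being masked are assumptions:

	list_for_each_entry(c, &dma->channels, device_node) {
		ioat_chan = to_ioat_chan(c);
		errmask = readl(ioat_chan->reg_base +
				IOAT_CHANERR_MASK_OFFSET);	/* offset assumed */
		errmask |= IOAT_CHANERR_XOR_P_OR_Q_ERR |
			   IOAT_CHANERR_XOR_Q_ERR;		/* bits assumed */
		writel(errmask, ioat_chan->reg_base +
		       IOAT_CHANERR_MASK_OFFSET);
	}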
1078 struct ioatdma_chan *ioat_chan; in ioat3_dma_probe() local
1164 ioat_chan = to_ioat_chan(c); in ioat3_dma_probe()
1166 ioat_chan->reg_base + IOAT_DCACTRL_OFFSET); in ioat3_dma_probe()
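The two ioat3_dma_probe() hits show probe walking every registered channel and writing its DCA control register. A thin sketch; the value written and the list being walked are assumptions:

	list_for_each_entry(c, &dma->channels, device_node) {
		ioat_chan = to_ioat_chan(c);
		writel(IOAT_DMA_DCA_ANY_CPU,		/* value assumed */
		       ioat_chan->reg_base + IOAT_DCACTRL_OFFSET);
	}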
1195 struct ioatdma_chan *ioat_chan; in ioat_shutdown() local
1202 ioat_chan = ioat_dma->idx[i]; in ioat_shutdown()
1203 if (!ioat_chan) in ioat_shutdown()
1206 spin_lock_bh(&ioat_chan->prep_lock); in ioat_shutdown()
1207 set_bit(IOAT_CHAN_DOWN, &ioat_chan->state); in ioat_shutdown()
1208 del_timer_sync(&ioat_chan->timer); in ioat_shutdown()
1209 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_shutdown()
1211 ioat_reset_hw(ioat_chan); in ioat_shutdown()
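ioat_shutdown() quiesces each channel: IOAT_CHAN_DOWN is set under prep_lock so no new descriptors can be prepared, the per-channel timer is stopped, and the engine is reset. Sketch, with the loop bound as an assumption:

	for (i = 0; i < IOAT_MAX_CHANS; i++) {		/* bound name assumed */
		ioat_chan = ioat_dma->idx[i];
		if (!ioat_chan)
			continue;

		spin_lock_bh(&ioat_chan->prep_lock);
		set_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
		del_timer_sync(&ioat_chan->timer);
		spin_unlock_bh(&ioat_chan->prep_lock);

		ioat_reset_hw(ioat_chan);
	}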
1219 struct ioatdma_chan *ioat_chan; in ioat_resume() local
1224 ioat_chan = ioat_dma->idx[i]; in ioat_resume()
1225 if (!ioat_chan) in ioat_resume()
1228 spin_lock_bh(&ioat_chan->prep_lock); in ioat_resume()
1229 clear_bit(IOAT_CHAN_DOWN, &ioat_chan->state); in ioat_resume()
1230 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_resume()
1232 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_resume()
1233 writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_resume()
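ioat_resume() is the mirror image: clear IOAT_CHAN_DOWN under prep_lock, then read CHANERR and write the same value back; the read-then-write-back pattern suggests the error bits are write-one-to-clear. Sketch with the same assumed loop bound:

	for (i = 0; i < IOAT_MAX_CHANS; i++) {		/* bound name assumed */
		ioat_chan = ioat_dma->idx[i];
		if (!ioat_chan)
			continue;

		spin_lock_bh(&ioat_chan->prep_lock);
		clear_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
		spin_unlock_bh(&ioat_chan->prep_lock);

		/* clear any latched channel errors */
		chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
		writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
	}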