Lines Matching refs:cio2 (illustrative sketches of the recurring patterns follow the listing)
111 static void cio2_fbpt_exit_dummy(struct cio2_device *cio2) in cio2_fbpt_exit_dummy() argument
113 struct device *dev = &cio2->pci_dev->dev; in cio2_fbpt_exit_dummy()
115 if (cio2->dummy_lop) { in cio2_fbpt_exit_dummy()
116 dma_free_coherent(dev, PAGE_SIZE, cio2->dummy_lop, in cio2_fbpt_exit_dummy()
117 cio2->dummy_lop_bus_addr); in cio2_fbpt_exit_dummy()
118 cio2->dummy_lop = NULL; in cio2_fbpt_exit_dummy()
120 if (cio2->dummy_page) { in cio2_fbpt_exit_dummy()
121 dma_free_coherent(dev, PAGE_SIZE, cio2->dummy_page, in cio2_fbpt_exit_dummy()
122 cio2->dummy_page_bus_addr); in cio2_fbpt_exit_dummy()
123 cio2->dummy_page = NULL; in cio2_fbpt_exit_dummy()
127 static int cio2_fbpt_init_dummy(struct cio2_device *cio2) in cio2_fbpt_init_dummy() argument
129 struct device *dev = &cio2->pci_dev->dev; in cio2_fbpt_init_dummy()
132 cio2->dummy_page = dma_alloc_coherent(dev, PAGE_SIZE, in cio2_fbpt_init_dummy()
133 &cio2->dummy_page_bus_addr, in cio2_fbpt_init_dummy()
135 cio2->dummy_lop = dma_alloc_coherent(dev, PAGE_SIZE, in cio2_fbpt_init_dummy()
136 &cio2->dummy_lop_bus_addr, in cio2_fbpt_init_dummy()
138 if (!cio2->dummy_page || !cio2->dummy_lop) { in cio2_fbpt_init_dummy()
139 cio2_fbpt_exit_dummy(cio2); in cio2_fbpt_init_dummy()
147 cio2->dummy_lop[i] = PFN_DOWN(cio2->dummy_page_bus_addr); in cio2_fbpt_init_dummy()
152 static void cio2_fbpt_entry_enable(struct cio2_device *cio2, in cio2_fbpt_entry_enable() argument
172 static void cio2_fbpt_entry_init_dummy(struct cio2_device *cio2, in cio2_fbpt_entry_init_dummy() argument
183 entry[i].lop_page_addr = PFN_DOWN(cio2->dummy_lop_bus_addr); in cio2_fbpt_entry_init_dummy()
185 cio2_fbpt_entry_enable(cio2, entry); in cio2_fbpt_entry_init_dummy()
189 static void cio2_fbpt_entry_init_buf(struct cio2_device *cio2, in cio2_fbpt_entry_init_buf() argument
224 entry->lop_page_addr = PFN_DOWN(cio2->dummy_lop_bus_addr); in cio2_fbpt_entry_init_buf()
226 cio2_fbpt_entry_enable(cio2, entry); in cio2_fbpt_entry_init_buf()
229 static int cio2_fbpt_init(struct cio2_device *cio2, struct cio2_queue *q) in cio2_fbpt_init() argument
231 struct device *dev = &cio2->pci_dev->dev; in cio2_fbpt_init()
306 static int cio2_csi2_calc_timing(struct cio2_device *cio2, struct cio2_queue *q, in cio2_csi2_calc_timing() argument
310 struct device *dev = &cio2->pci_dev->dev; in cio2_csi2_calc_timing()
347 static int cio2_hw_init(struct cio2_device *cio2, struct cio2_queue *q) in cio2_hw_init() argument
356 void __iomem *const base = cio2->base; in cio2_hw_init()
368 r = cio2_csi2_calc_timing(cio2, q, &timing, fmt->bpp, lanes); in cio2_hw_init()
509 static void cio2_hw_exit(struct cio2_device *cio2, struct cio2_queue *q) in cio2_hw_exit() argument
511 struct device *dev = &cio2->pci_dev->dev; in cio2_hw_exit()
512 void __iomem *const base = cio2->base; in cio2_hw_exit()
539 static void cio2_buffer_done(struct cio2_device *cio2, unsigned int dma_chan) in cio2_buffer_done() argument
541 struct device *dev = &cio2->pci_dev->dev; in cio2_buffer_done()
542 struct cio2_queue *q = cio2->cur_queue; in cio2_buffer_done()
582 cio2_fbpt_entry_init_dummy(cio2, entry); in cio2_buffer_done()
588 static void cio2_queue_event_sof(struct cio2_device *cio2, struct cio2_queue *q) in cio2_queue_event_sof() argument
657 static void cio2_irq_handle_once(struct cio2_device *cio2, u32 int_status) in cio2_irq_handle_once() argument
659 struct device *dev = &cio2->pci_dev->dev; in cio2_irq_handle_once()
660 void __iomem *const base = cio2->base; in cio2_irq_handle_once()
700 cio2_buffer_done(cio2, d); in cio2_irq_handle_once()
714 cio2_queue_event_sof(cio2, in cio2_irq_handle_once()
715 cio2->cur_queue); in cio2_irq_handle_once()
758 struct cio2_device *cio2 = cio2_ptr; in cio2_irq() local
759 void __iomem *const base = cio2->base; in cio2_irq()
760 struct device *dev = &cio2->pci_dev->dev; in cio2_irq()
770 cio2_irq_handle_once(cio2, int_status); in cio2_irq()
802 struct cio2_device *cio2 = vb2_get_drv_priv(vq); in cio2_vb2_queue_setup() local
803 struct device *dev = &cio2->pci_dev->dev; in cio2_vb2_queue_setup()
823 cio2_fbpt_entry_init_dummy(cio2, &q->fbpt[i * CIO2_MAX_LOPS]); in cio2_vb2_queue_setup()
835 struct cio2_device *cio2 = vb2_get_drv_priv(vb->vb2_queue); in cio2_vb2_buf_init() local
836 struct device *dev = &cio2->pci_dev->dev; in cio2_vb2_buf_init()
879 b->lop[i][j] = PFN_DOWN(cio2->dummy_page_bus_addr); in cio2_vb2_buf_init()
890 struct cio2_device *cio2 = vb2_get_drv_priv(vb->vb2_queue); in cio2_vb2_buf_queue() local
891 struct device *dev = &cio2->pci_dev->dev; in cio2_vb2_buf_queue()
918 fbpt_rp = (readl(cio2->base + CIO2_REG_CDMARI(CIO2_DMA_CHAN)) in cio2_vb2_buf_queue()
943 cio2_fbpt_entry_init_buf(cio2, b, entry); in cio2_vb2_buf_queue()
965 struct cio2_device *cio2 = vb2_get_drv_priv(vb->vb2_queue); in cio2_vb2_buf_cleanup() local
966 struct device *dev = &cio2->pci_dev->dev; in cio2_vb2_buf_cleanup()
981 struct cio2_device *cio2 = vb2_get_drv_priv(vq); in cio2_vb2_start_streaming() local
982 struct device *dev = &cio2->pci_dev->dev; in cio2_vb2_start_streaming()
985 cio2->cur_queue = q; in cio2_vb2_start_streaming()
998 r = cio2_hw_init(cio2, q); in cio2_vb2_start_streaming()
1007 cio2->streaming = true; in cio2_vb2_start_streaming()
1012 cio2_hw_exit(cio2, q); in cio2_vb2_start_streaming()
1026 struct cio2_device *cio2 = vb2_get_drv_priv(vq); in cio2_vb2_stop_streaming() local
1027 struct device *dev = &cio2->pci_dev->dev; in cio2_vb2_stop_streaming()
1032 cio2_hw_exit(cio2, q); in cio2_vb2_stop_streaming()
1033 synchronize_irq(cio2->pci_dev->irq); in cio2_vb2_stop_streaming()
1037 cio2->streaming = false; in cio2_vb2_stop_streaming()
1324 struct cio2_device *cio2 = video_get_drvdata(vd); in cio2_video_link_validate() local
1325 struct device *dev = &cio2->pci_dev->dev; in cio2_video_link_validate()
1388 struct cio2_device *cio2 = to_cio2_device(notifier); in cio2_notifier_bound() local
1393 if (cio2->queue[s_asd->csi2.port].sensor) in cio2_notifier_bound()
1400 q = &cio2->queue[s_asd->csi2.port]; in cio2_notifier_bound()
1404 q->csi_rx_base = cio2->base + CIO2_REG_PIPE_BASE(q->csi2.port); in cio2_notifier_bound()
1414 struct cio2_device *cio2 = to_cio2_device(notifier); in cio2_notifier_unbind() local
1417 cio2->queue[s_asd->csi2.port].sensor = NULL; in cio2_notifier_unbind()
1423 struct cio2_device *cio2 = to_cio2_device(notifier); in cio2_notifier_complete() local
1424 struct device *dev = &cio2->pci_dev->dev; in cio2_notifier_complete()
1430 list_for_each_entry(asd, &cio2->notifier.done_list, asc_entry) { in cio2_notifier_complete()
1432 q = &cio2->queue[s_asd->csi2.port]; in cio2_notifier_complete()
1453 return v4l2_device_register_subdev_nodes(&cio2->v4l2_dev); in cio2_notifier_complete()
1462 static int cio2_parse_firmware(struct cio2_device *cio2) in cio2_parse_firmware() argument
1464 struct device *dev = &cio2->pci_dev->dev; in cio2_parse_firmware()
1484 s_asd = v4l2_async_nf_add_fwnode_remote(&cio2->notifier, ep, in cio2_parse_firmware()
1508 cio2->notifier.ops = &cio2_async_ops; in cio2_parse_firmware()
1509 ret = v4l2_async_nf_register(&cio2->notifier); in cio2_parse_firmware()
1525 static int cio2_queue_init(struct cio2_device *cio2, struct cio2_queue *q) in cio2_queue_init() argument
1530 struct device *dev = &cio2->pci_dev->dev; in cio2_queue_init()
1560 r = cio2_fbpt_init(cio2, q); in cio2_queue_init()
1590 CIO2_ENTITY_NAME " %td", q - cio2->queue); in cio2_queue_init()
1592 v4l2_set_subdevdata(subdev, cio2); in cio2_queue_init()
1593 r = v4l2_device_register_subdev(&cio2->v4l2_dev, subdev); in cio2_queue_init()
1607 vbq->drv_priv = cio2; in cio2_queue_init()
1617 "%s %td", CIO2_NAME, q - cio2->queue); in cio2_queue_init()
1621 vdev->lock = &cio2->lock; in cio2_queue_init()
1622 vdev->v4l2_dev = &cio2->v4l2_dev; in cio2_queue_init()
1625 video_set_drvdata(vdev, cio2); in cio2_queue_init()
1658 static void cio2_queue_exit(struct cio2_device *cio2, struct cio2_queue *q) in cio2_queue_exit() argument
1664 cio2_fbpt_exit(q, &cio2->pci_dev->dev); in cio2_queue_exit()
1669 static int cio2_queues_init(struct cio2_device *cio2) in cio2_queues_init() argument
1674 r = cio2_queue_init(cio2, &cio2->queue[i]); in cio2_queues_init()
1683 cio2_queue_exit(cio2, &cio2->queue[i]); in cio2_queues_init()
1688 static void cio2_queues_exit(struct cio2_device *cio2) in cio2_queues_exit() argument
1693 cio2_queue_exit(cio2, &cio2->queue[i]); in cio2_queues_exit()
1719 struct cio2_device *cio2; in cio2_pci_probe() local
1739 cio2 = devm_kzalloc(dev, sizeof(*cio2), GFP_KERNEL); in cio2_pci_probe()
1740 if (!cio2) in cio2_pci_probe()
1742 cio2->pci_dev = pci_dev; in cio2_pci_probe()
1759 cio2->base = pcim_iomap_table(pci_dev)[CIO2_PCI_BAR]; in cio2_pci_probe()
1761 pci_set_drvdata(pci_dev, cio2); in cio2_pci_probe()
1777 r = cio2_fbpt_init_dummy(cio2); in cio2_pci_probe()
1781 mutex_init(&cio2->lock); in cio2_pci_probe()
1783 cio2->media_dev.dev = dev; in cio2_pci_probe()
1784 strscpy(cio2->media_dev.model, CIO2_DEVICE_NAME, in cio2_pci_probe()
1785 sizeof(cio2->media_dev.model)); in cio2_pci_probe()
1786 cio2->media_dev.hw_revision = 0; in cio2_pci_probe()
1788 media_device_init(&cio2->media_dev); in cio2_pci_probe()
1789 r = media_device_register(&cio2->media_dev); in cio2_pci_probe()
1793 cio2->v4l2_dev.mdev = &cio2->media_dev; in cio2_pci_probe()
1794 r = v4l2_device_register(dev, &cio2->v4l2_dev); in cio2_pci_probe()
1800 r = cio2_queues_init(cio2); in cio2_pci_probe()
1804 v4l2_async_nf_init(&cio2->notifier, &cio2->v4l2_dev); in cio2_pci_probe()
1807 r = cio2_parse_firmware(cio2); in cio2_pci_probe()
1812 CIO2_NAME, cio2); in cio2_pci_probe()
1824 v4l2_async_nf_unregister(&cio2->notifier); in cio2_pci_probe()
1825 v4l2_async_nf_cleanup(&cio2->notifier); in cio2_pci_probe()
1826 cio2_queues_exit(cio2); in cio2_pci_probe()
1828 v4l2_device_unregister(&cio2->v4l2_dev); in cio2_pci_probe()
1830 media_device_unregister(&cio2->media_dev); in cio2_pci_probe()
1831 media_device_cleanup(&cio2->media_dev); in cio2_pci_probe()
1833 mutex_destroy(&cio2->lock); in cio2_pci_probe()
1834 cio2_fbpt_exit_dummy(cio2); in cio2_pci_probe()
1841 struct cio2_device *cio2 = pci_get_drvdata(pci_dev); in cio2_pci_remove() local
1843 media_device_unregister(&cio2->media_dev); in cio2_pci_remove()
1844 v4l2_async_nf_unregister(&cio2->notifier); in cio2_pci_remove()
1845 v4l2_async_nf_cleanup(&cio2->notifier); in cio2_pci_remove()
1846 cio2_queues_exit(cio2); in cio2_pci_remove()
1847 cio2_fbpt_exit_dummy(cio2); in cio2_pci_remove()
1848 v4l2_device_unregister(&cio2->v4l2_dev); in cio2_pci_remove()
1849 media_device_cleanup(&cio2->media_dev); in cio2_pci_remove()
1850 mutex_destroy(&cio2->lock); in cio2_pci_remove()
1859 struct cio2_device *cio2 = pci_get_drvdata(pci_dev); in cio2_runtime_suspend() local
1860 void __iomem *const base = cio2->base; in cio2_runtime_suspend()
1877 struct cio2_device *cio2 = pci_get_drvdata(pci_dev); in cio2_runtime_resume() local
1878 void __iomem *const base = cio2->base; in cio2_runtime_resume()
1941 static void cio2_fbpt_rearrange(struct cio2_device *cio2, struct cio2_queue *q) in cio2_fbpt_rearrange() argument
1968 cio2_fbpt_entry_enable(cio2, q->fbpt + i * CIO2_MAX_LOPS); in cio2_fbpt_rearrange()
1974 struct cio2_device *cio2 = pci_get_drvdata(pci_dev); in cio2_suspend() local
1975 struct cio2_queue *q = cio2->cur_queue; in cio2_suspend()
1979 if (!cio2->streaming) in cio2_suspend()
1989 cio2_hw_exit(cio2, q); in cio2_suspend()
1998 cio2_fbpt_rearrange(cio2, q); in cio2_suspend()
2007 struct cio2_device *cio2 = dev_get_drvdata(dev); in cio2_resume() local
2008 struct cio2_queue *q = cio2->cur_queue; in cio2_resume()
2012 if (!cio2->streaming) in cio2_resume()
2021 r = cio2_hw_init(cio2, q); in cio2_resume()
2030 cio2_hw_exit(cio2, q); in cio2_resume()
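
The fragments at lines 111-147 (cio2_fbpt_exit_dummy() and cio2_fbpt_init_dummy()) describe a paired allocation and release of two DMA-coherent pages: a dummy data page and a dummy "list of pointers" (LOP) page whose entries all point at the dummy page. The sketch below shows how those fragments plausibly fit together; cio2_device_sketch and the sketch_* names are hypothetical stand-ins for the driver's real definitions, which live in the surrounding source.

/*
 * Minimal sketch, not the driver itself: cio2_device_sketch is a
 * hypothetical cut-down structure carrying only the dummy-buffer
 * fields referenced above.
 */
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/pci.h>
#include <linux/pfn.h>

struct cio2_device_sketch {
	struct pci_dev *pci_dev;
	void *dummy_page;		/* 4 KiB page backing unused LOP slots */
	dma_addr_t dummy_page_bus_addr;
	u32 *dummy_lop;			/* list of pointers: one PFN per entry */
	dma_addr_t dummy_lop_bus_addr;
};

static void sketch_fbpt_exit_dummy(struct cio2_device_sketch *cio2)
{
	struct device *dev = &cio2->pci_dev->dev;

	/* Tolerate partial initialization so init can reuse this on error. */
	if (cio2->dummy_lop) {
		dma_free_coherent(dev, PAGE_SIZE, cio2->dummy_lop,
				  cio2->dummy_lop_bus_addr);
		cio2->dummy_lop = NULL;
	}
	if (cio2->dummy_page) {
		dma_free_coherent(dev, PAGE_SIZE, cio2->dummy_page,
				  cio2->dummy_page_bus_addr);
		cio2->dummy_page = NULL;
	}
}

static int sketch_fbpt_init_dummy(struct cio2_device_sketch *cio2)
{
	struct device *dev = &cio2->pci_dev->dev;
	unsigned int i;

	cio2->dummy_page = dma_alloc_coherent(dev, PAGE_SIZE,
					      &cio2->dummy_page_bus_addr,
					      GFP_KERNEL);
	cio2->dummy_lop = dma_alloc_coherent(dev, PAGE_SIZE,
					     &cio2->dummy_lop_bus_addr,
					     GFP_KERNEL);
	if (!cio2->dummy_page || !cio2->dummy_lop) {
		sketch_fbpt_exit_dummy(cio2);
		return -ENOMEM;
	}

	/*
	 * Point every LOP entry at the dummy page so DMA always has a
	 * valid 4 KiB target (cf. line 147 of the listing).
	 */
	for (i = 0; i < PAGE_SIZE / sizeof(*cio2->dummy_lop); i++)
		cio2->dummy_lop[i] = PFN_DOWN(cio2->dummy_page_bus_addr);

	return 0;
}

Freeing both buffers in the exit helper even when only one allocation succeeded is what lets the init path simply call that helper on error (line 139).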
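The vb2 callbacks in the listing (cio2_vb2_queue_setup() through cio2_vb2_stop_streaming(), lines 802-1037) all recover the device with vb2_get_drv_priv(vq) or vb2_get_drv_priv(vb->vb2_queue); that works because cio2_queue_init() stores the pointer in vbq->drv_priv at line 1607. Below is a minimal sketch of that round trip, using hypothetical sketch_* names and omitting everything else a real vb2 queue needs.

/* Minimal sketch: only the drv_priv round trip, not the driver's ops. */
#include <media/videobuf2-core.h>

struct cio2_device_sketch;	/* stand-in for struct cio2_device */

static int sketch_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	/* Same pattern as line 981: recover the device from the queue. */
	struct cio2_device_sketch *cio2 = vb2_get_drv_priv(vq);

	(void)cio2;		/* ...program the hardware here... */
	return 0;
}

static void sketch_stop_streaming(struct vb2_queue *vq)
{
	struct cio2_device_sketch *cio2 = vb2_get_drv_priv(vq);

	(void)cio2;		/* ...quiesce the hardware here... */
}

static const struct vb2_ops sketch_vb2_ops = {
	.start_streaming = sketch_start_streaming,
	.stop_streaming	 = sketch_stop_streaming,
};

static void sketch_queue_init(struct vb2_queue *vbq,
			      struct cio2_device_sketch *cio2)
{
	vbq->drv_priv = cio2;	/* what vb2_get_drv_priv() later returns */
	vbq->ops = &sketch_vb2_ops;
	/* type, mem_ops, buf_struct_size, etc. would be set before
	 * vb2_queue_init(vbq). */
}

Because each queue carries its own drv_priv, one set of callbacks can serve every CIO2 queue without global state.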
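Finally, the probe references at lines 1719-1839 trace the usual goto-unwind ladder: the cleanup calls at lines 1824-1834 reverse the setup calls at lines 1777-1812 and run only when a later step fails, while cio2_pci_remove() (lines 1841-1850) performs the same teardown for the success path. The generic sketch below shows the pattern only; the step_*() helpers are hypothetical placeholders, not the driver's functions.

static int step_a_init(void)  { return 0; }
static void step_a_exit(void) { }
static int step_b_init(void)  { return 0; }
static void step_b_exit(void) { }
static int step_c_init(void)  { return 0; }
static void step_c_exit(void) { }

static int sketch_probe(void)
{
	int r;

	r = step_a_init();
	if (r)
		return r;

	r = step_b_init();
	if (r)
		goto fail_a;		/* undo everything done so far */

	r = step_c_init();
	if (r)
		goto fail_b;

	return 0;

fail_b:
	step_b_exit();
fail_a:
	step_a_exit();
	return r;
}

static void sketch_remove(void)
{
	/* Mirror of the probe ladder, torn down in reverse order. */
	step_c_exit();
	step_b_exit();
	step_a_exit();
}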