Lines Matching refs: sdmac (uses of the per-channel struct sdma_channel pointer in the i.MX SDMA dmaengine driver, drivers/dma/imx-sdma.c)
326 struct sdma_channel *sdmac; member
624 static int sdma_config_ownership(struct sdma_channel *sdmac, in sdma_config_ownership() argument
627 struct sdma_engine *sdma = sdmac->sdma; in sdma_config_ownership()
628 int channel = sdmac->channel; in sdma_config_ownership()
723 static void sdma_event_enable(struct sdma_channel *sdmac, unsigned int event) in sdma_event_enable() argument
725 struct sdma_engine *sdma = sdmac->sdma; in sdma_event_enable()
726 int channel = sdmac->channel; in sdma_event_enable()
735 static void sdma_event_disable(struct sdma_channel *sdmac, unsigned int event) in sdma_event_disable() argument
737 struct sdma_engine *sdma = sdmac->sdma; in sdma_event_disable()
738 int channel = sdmac->channel; in sdma_event_disable()
752 static void sdma_start_desc(struct sdma_channel *sdmac) in sdma_start_desc() argument
754 struct virt_dma_desc *vd = vchan_next_desc(&sdmac->vc); in sdma_start_desc()
756 struct sdma_engine *sdma = sdmac->sdma; in sdma_start_desc()
757 int channel = sdmac->channel; in sdma_start_desc()
760 sdmac->desc = NULL; in sdma_start_desc()
763 sdmac->desc = desc = to_sdma_desc(&vd->tx); in sdma_start_desc()
769 sdma_enable_channel(sdma, sdmac->channel); in sdma_start_desc()
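
The matched lines above come from sdma_start_desc(), which dequeues the next virt-dma descriptor, records it in sdmac->desc and kicks the hardware channel. A minimal sketch reconstructed from those lines; the channel_control base/current BD pointer setup and the cyclic-mode list handling are not in the listing and are assumptions from the surrounding driver:

static void sdma_start_desc(struct sdma_channel *sdmac)
{
	struct virt_dma_desc *vd = vchan_next_desc(&sdmac->vc);
	struct sdma_desc *desc;
	struct sdma_engine *sdma = sdmac->sdma;
	int channel = sdmac->channel;

	if (!vd) {
		/* Nothing issued: leave the channel idle. */
		sdmac->desc = NULL;
		return;
	}
	sdmac->desc = desc = to_sdma_desc(&vd->tx);

	/*
	 * Cyclic descriptors are assumed to stay on the issued list so they
	 * can still be freed by vchan_dma_desc_free_list() on terminate.
	 */
	if (!(sdmac->flags & IMX_DMA_SG_LOOP))
		list_del(&vd->node);

	/* Point the channel control block at this descriptor's BDs (assumed). */
	sdma->channel_control[channel].base_bd_ptr = desc->bd_phys;
	sdma->channel_control[channel].current_bd_ptr = desc->bd_phys;

	sdma_enable_channel(sdma, sdmac->channel);
}
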
772 static void sdma_update_channel_loop(struct sdma_channel *sdmac) in sdma_update_channel_loop() argument
776 enum dma_status old_status = sdmac->status; in sdma_update_channel_loop()
782 while (sdmac->desc) { in sdma_update_channel_loop()
783 struct sdma_desc *desc = sdmac->desc; in sdma_update_channel_loop()
792 sdmac->status = DMA_ERROR; in sdma_update_channel_loop()
813 spin_unlock(&sdmac->vc.lock); in sdma_update_channel_loop()
815 spin_lock(&sdmac->vc.lock); in sdma_update_channel_loop()
818 sdmac->status = old_status; in sdma_update_channel_loop()
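
In cyclic (IMX_DMA_SG_LOOP) mode the matched lines show the buffer-descriptor ring being walked under sdmac->vc.lock, with the lock dropped around the period callback and the status restored from old_status. A sketch under those assumptions; the BD_DONE/BD_RROR status handling and the buf_tail bookkeeping are inferred from the surrounding driver, not from the listing:

static void sdma_update_channel_loop(struct sdma_channel *sdmac)
{
	struct sdma_buffer_descriptor *bd;
	int error = 0;
	enum dma_status old_status = sdmac->status;

	while (sdmac->desc) {
		struct sdma_desc *desc = sdmac->desc;

		bd = &desc->bd[desc->buf_tail];

		if (bd->mode.status & BD_DONE)
			break;			/* hardware has not consumed further */

		if (bd->mode.status & BD_RROR) {
			bd->mode.status &= ~BD_RROR;
			sdmac->status = DMA_ERROR;
			error = -EIO;
		}

		/* Record the real count, re-arm this period, advance the tail. */
		desc->chn_real_count = bd->mode.count;
		bd->mode.status |= BD_DONE;
		bd->mode.count = desc->period_len;
		desc->buf_ptail = desc->buf_tail;
		desc->buf_tail = (desc->buf_tail + 1) % desc->num_bd;

		/* Invoke the client callback without holding the channel lock. */
		spin_unlock(&sdmac->vc.lock);
		dmaengine_desc_get_callback_invoke(&desc->vd.tx, NULL);
		spin_lock(&sdmac->vc.lock);

		if (error)
			sdmac->status = old_status;
	}
}
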
824 struct sdma_channel *sdmac = (struct sdma_channel *) data; in mxc_sdma_handle_channel_normal() local
828 sdmac->desc->chn_real_count = 0; in mxc_sdma_handle_channel_normal()
833 for (i = 0; i < sdmac->desc->num_bd; i++) { in mxc_sdma_handle_channel_normal()
834 bd = &sdmac->desc->bd[i]; in mxc_sdma_handle_channel_normal()
838 sdmac->desc->chn_real_count += bd->mode.count; in mxc_sdma_handle_channel_normal()
842 sdmac->status = DMA_ERROR; in mxc_sdma_handle_channel_normal()
844 sdmac->status = DMA_COMPLETE; in mxc_sdma_handle_channel_normal()
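
For non-cyclic transfers the lines above sum bd->mode.count over all buffer descriptors into chn_real_count (used later for residue reporting) and set a terminal channel status. A sketch assuming the BD status checks that lead to the error/complete split:

static void mxc_sdma_handle_channel_normal(struct sdma_channel *data)
{
	struct sdma_channel *sdmac = (struct sdma_channel *) data;
	struct sdma_buffer_descriptor *bd;
	int i, error = 0;

	sdmac->desc->chn_real_count = 0;

	/* Collect errors and the real transfer count from every BD. */
	for (i = 0; i < sdmac->desc->num_bd; i++) {
		bd = &sdmac->desc->bd[i];

		if (bd->mode.status & (BD_DONE | BD_RROR))
			error = -EIO;
		sdmac->desc->chn_real_count += bd->mode.count;
	}

	if (error)
		sdmac->status = DMA_ERROR;
	else
		sdmac->status = DMA_COMPLETE;
}
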
859 struct sdma_channel *sdmac = &sdma->channel[channel]; in sdma_int_handler() local
862 spin_lock(&sdmac->vc.lock); in sdma_int_handler()
863 desc = sdmac->desc; in sdma_int_handler()
865 if (sdmac->flags & IMX_DMA_SG_LOOP) { in sdma_int_handler()
866 sdma_update_channel_loop(sdmac); in sdma_int_handler()
868 mxc_sdma_handle_channel_normal(sdmac); in sdma_int_handler()
870 sdma_start_desc(sdmac); in sdma_int_handler()
874 spin_unlock(&sdmac->vc.lock); in sdma_int_handler()
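
The interrupt-handler lines show the per-channel pattern: take the channel's vc.lock, dispatch to the cyclic or normal completion path, then start the next queued descriptor. A sketch of that loop, assuming the handler iterates over the pending bits of an SDMA_H_INTR status register (the register access and the vchan_cookie_complete() call are not in the listing):

static irqreturn_t sdma_int_handler(int irq, void *dev_id)
{
	struct sdma_engine *sdma = dev_id;
	unsigned long stat;

	/* Read and acknowledge all pending channel interrupts (assumed). */
	stat = readl_relaxed(sdma->regs + SDMA_H_INTR);
	writel_relaxed(stat, sdma->regs + SDMA_H_INTR);
	stat &= ~1;		/* channel 0 is the command channel */

	while (stat) {
		int channel = fls(stat) - 1;
		struct sdma_channel *sdmac = &sdma->channel[channel];
		struct sdma_desc *desc;

		spin_lock(&sdmac->vc.lock);
		desc = sdmac->desc;
		if (desc) {
			if (sdmac->flags & IMX_DMA_SG_LOOP) {
				sdma_update_channel_loop(sdmac);
			} else {
				mxc_sdma_handle_channel_normal(sdmac);
				vchan_cookie_complete(&desc->vd);
				sdma_start_desc(sdmac);
			}
		}
		spin_unlock(&sdmac->vc.lock);
		__clear_bit(channel, &stat);
	}

	return IRQ_HANDLED;
}
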
884 static void sdma_get_pc(struct sdma_channel *sdmac, in sdma_get_pc() argument
887 struct sdma_engine *sdma = sdmac->sdma; in sdma_get_pc()
895 sdmac->pc_from_device = 0; in sdma_get_pc()
896 sdmac->pc_to_device = 0; in sdma_get_pc()
897 sdmac->device_to_device = 0; in sdma_get_pc()
898 sdmac->pc_to_pc = 0; in sdma_get_pc()
972 sdmac->pc_from_device = per_2_emi; in sdma_get_pc()
973 sdmac->pc_to_device = emi_2_per; in sdma_get_pc()
974 sdmac->device_to_device = per_2_per; in sdma_get_pc()
975 sdmac->pc_to_pc = emi_2_emi; in sdma_get_pc()
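
sdma_get_pc() clears the four script-address fields and then refills them from the firmware script table according to the peripheral type; only the reset and the final assignments appear in the listing. A heavily abridged sketch; the switch cases and the script_addrs mapping shown are illustrative assumptions:

static void sdma_get_pc(struct sdma_channel *sdmac,
			enum sdma_peripheral_type peripheral_type)
{
	struct sdma_engine *sdma = sdmac->sdma;
	int per_2_emi = 0, emi_2_per = 0, emi_2_emi = 0, per_2_per = 0;

	sdmac->pc_from_device = 0;
	sdmac->pc_to_device = 0;
	sdmac->device_to_device = 0;
	sdmac->pc_to_pc = 0;

	switch (peripheral_type) {
	case IMX_DMATYPE_MEMORY:
		/* memory-to-memory uses the AP-to-AP script (assumed mapping) */
		emi_2_emi = sdma->script_addrs->ap_2_ap_addr;
		break;
	/*
	 * ... one case per supported peripheral type, each picking the
	 *     matching per_2_emi / emi_2_per / per_2_per script address ...
	 */
	default:
		break;
	}

	sdmac->pc_from_device = per_2_emi;
	sdmac->pc_to_device = emi_2_per;
	sdmac->device_to_device = per_2_per;
	sdmac->pc_to_pc = emi_2_emi;
}
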
978 static int sdma_load_context(struct sdma_channel *sdmac) in sdma_load_context() argument
980 struct sdma_engine *sdma = sdmac->sdma; in sdma_load_context()
981 int channel = sdmac->channel; in sdma_load_context()
988 if (sdmac->context_loaded) in sdma_load_context()
991 if (sdmac->direction == DMA_DEV_TO_MEM) in sdma_load_context()
992 load_address = sdmac->pc_from_device; in sdma_load_context()
993 else if (sdmac->direction == DMA_DEV_TO_DEV) in sdma_load_context()
994 load_address = sdmac->device_to_device; in sdma_load_context()
995 else if (sdmac->direction == DMA_MEM_TO_MEM) in sdma_load_context()
996 load_address = sdmac->pc_to_pc; in sdma_load_context()
998 load_address = sdmac->pc_to_device; in sdma_load_context()
1004 dev_dbg(sdma->dev, "wml = 0x%08x\n", (u32)sdmac->watermark_level); in sdma_load_context()
1005 dev_dbg(sdma->dev, "shp_addr = 0x%08x\n", sdmac->shp_addr); in sdma_load_context()
1006 dev_dbg(sdma->dev, "per_addr = 0x%08x\n", sdmac->per_addr); in sdma_load_context()
1007 dev_dbg(sdma->dev, "event_mask0 = 0x%08x\n", (u32)sdmac->event_mask[0]); in sdma_load_context()
1008 dev_dbg(sdma->dev, "event_mask1 = 0x%08x\n", (u32)sdmac->event_mask[1]); in sdma_load_context()
1018 context->gReg[0] = sdmac->event_mask[1]; in sdma_load_context()
1019 context->gReg[1] = sdmac->event_mask[0]; in sdma_load_context()
1020 context->gReg[2] = sdmac->per_addr; in sdma_load_context()
1021 context->gReg[6] = sdmac->shp_addr; in sdma_load_context()
1022 context->gReg[7] = sdmac->watermark_level; in sdma_load_context()
1033 sdmac->context_loaded = true; in sdma_load_context()
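
sdma_load_context() selects the script entry point from the direction-specific fields filled in by sdma_get_pc(), packs the event masks, peripheral addresses and watermark level into the channel context, and loads it once per channel (context_loaded guards reloads). A trimmed sketch; the channel-0 buffer descriptor used to push the context (bd0, C0_SETDM, sdma_run_channel0) is assumed from the surrounding driver:

static int sdma_load_context(struct sdma_channel *sdmac)
{
	struct sdma_engine *sdma = sdmac->sdma;
	int channel = sdmac->channel;
	struct sdma_context_data *context = sdma->context;
	struct sdma_buffer_descriptor *bd0 = sdma->bd0;
	int load_address, ret;
	unsigned long flags;

	if (sdmac->context_loaded)
		return 0;

	if (sdmac->direction == DMA_DEV_TO_MEM)
		load_address = sdmac->pc_from_device;
	else if (sdmac->direction == DMA_DEV_TO_DEV)
		load_address = sdmac->device_to_device;
	else if (sdmac->direction == DMA_MEM_TO_MEM)
		load_address = sdmac->pc_to_pc;
	else
		load_address = sdmac->pc_to_device;

	if (load_address < 0)
		return load_address;

	spin_lock_irqsave(&sdma->channel_0_lock, flags);

	memset(context, 0, sizeof(*context));
	context->channel_state.pc = load_address;

	/* Pass event masks, peripheral addresses and watermark via r0..r7. */
	context->gReg[0] = sdmac->event_mask[1];
	context->gReg[1] = sdmac->event_mask[0];
	context->gReg[2] = sdmac->per_addr;
	context->gReg[6] = sdmac->shp_addr;
	context->gReg[7] = sdmac->watermark_level;

	/* Push the context to the engine through channel 0 (assumed mechanism). */
	bd0->mode.command = C0_SETDM;
	bd0->mode.status = BD_DONE | BD_WRAP | BD_EXTD;
	bd0->mode.count = sizeof(*context) / 4;
	bd0->buffer_addr = sdma->context_phys;
	bd0->ext_buffer_addr = 2048 + (sizeof(*context) / 4) * channel;
	ret = sdma_run_channel0(sdma);

	spin_unlock_irqrestore(&sdma->channel_0_lock, flags);

	sdmac->context_loaded = true;

	return ret;
}
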
1045 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_disable_channel() local
1046 struct sdma_engine *sdma = sdmac->sdma; in sdma_disable_channel()
1047 int channel = sdmac->channel; in sdma_disable_channel()
1050 sdmac->status = DMA_ERROR; in sdma_disable_channel()
1056 struct sdma_channel *sdmac = container_of(work, struct sdma_channel, in sdma_channel_terminate_work() local
1069 spin_lock_irqsave(&sdmac->vc.lock, flags); in sdma_channel_terminate_work()
1070 vchan_get_all_descriptors(&sdmac->vc, &head); in sdma_channel_terminate_work()
1071 spin_unlock_irqrestore(&sdmac->vc.lock, flags); in sdma_channel_terminate_work()
1072 vchan_dma_desc_free_list(&sdmac->vc, &head); in sdma_channel_terminate_work()
1073 sdmac->context_loaded = false; in sdma_channel_terminate_work()
1078 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_terminate_all() local
1081 spin_lock_irqsave(&sdmac->vc.lock, flags); in sdma_terminate_all()
1085 if (sdmac->desc) { in sdma_terminate_all()
1086 vchan_terminate_vdesc(&sdmac->desc->vd); in sdma_terminate_all()
1087 sdmac->desc = NULL; in sdma_terminate_all()
1088 schedule_work(&sdmac->terminate_worker); in sdma_terminate_all()
1091 spin_unlock_irqrestore(&sdmac->vc.lock, flags); in sdma_terminate_all()
1098 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_channel_synchronize() local
1100 vchan_synchronize(&sdmac->vc); in sdma_channel_synchronize()
1102 flush_work(&sdmac->terminate_worker); in sdma_channel_synchronize()
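
Teardown is split across the three functions above: sdma_terminate_all() detaches the running descriptor under vc.lock and defers the actual freeing to terminate_worker, and sdma_channel_synchronize() waits for both the virt-dma tasklet and that worker. A sketch under those assumptions (the disable call and the settle delay inside the worker are not shown in the listing):

static void sdma_channel_terminate_work(struct work_struct *work)
{
	struct sdma_channel *sdmac = container_of(work, struct sdma_channel,
						  terminate_worker);
	unsigned long flags;
	LIST_HEAD(head);

	/* Give the SDMA core time to really stop (assumed delay). */
	usleep_range(1000, 2000);

	spin_lock_irqsave(&sdmac->vc.lock, flags);
	vchan_get_all_descriptors(&sdmac->vc, &head);
	spin_unlock_irqrestore(&sdmac->vc.lock, flags);
	vchan_dma_desc_free_list(&sdmac->vc, &head);
	sdmac->context_loaded = false;
}

static int sdma_terminate_all(struct dma_chan *chan)
{
	struct sdma_channel *sdmac = to_sdma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&sdmac->vc.lock, flags);

	sdma_disable_channel(chan);

	if (sdmac->desc) {
		/* Keep the vdesc alive until the deferred worker frees it. */
		vchan_terminate_vdesc(&sdmac->desc->vd);
		sdmac->desc = NULL;
		schedule_work(&sdmac->terminate_worker);
	}

	spin_unlock_irqrestore(&sdmac->vc.lock, flags);

	return 0;
}

static void sdma_channel_synchronize(struct dma_chan *chan)
{
	struct sdma_channel *sdmac = to_sdma_chan(chan);

	vchan_synchronize(&sdmac->vc);

	flush_work(&sdmac->terminate_worker);
}
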
1105 static void sdma_set_watermarklevel_for_p2p(struct sdma_channel *sdmac) in sdma_set_watermarklevel_for_p2p() argument
1107 struct sdma_engine *sdma = sdmac->sdma; in sdma_set_watermarklevel_for_p2p()
1109 int lwml = sdmac->watermark_level & SDMA_WATERMARK_LEVEL_LWML; in sdma_set_watermarklevel_for_p2p()
1110 int hwml = (sdmac->watermark_level & SDMA_WATERMARK_LEVEL_HWML) >> 16; in sdma_set_watermarklevel_for_p2p()
1112 set_bit(sdmac->event_id0 % 32, &sdmac->event_mask[1]); in sdma_set_watermarklevel_for_p2p()
1113 set_bit(sdmac->event_id1 % 32, &sdmac->event_mask[0]); in sdma_set_watermarklevel_for_p2p()
1115 if (sdmac->event_id0 > 31) in sdma_set_watermarklevel_for_p2p()
1116 sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_LWE; in sdma_set_watermarklevel_for_p2p()
1118 if (sdmac->event_id1 > 31) in sdma_set_watermarklevel_for_p2p()
1119 sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_HWE; in sdma_set_watermarklevel_for_p2p()
1127 sdmac->watermark_level &= ~(SDMA_WATERMARK_LEVEL_LWML | in sdma_set_watermarklevel_for_p2p()
1129 sdmac->watermark_level |= hwml; in sdma_set_watermarklevel_for_p2p()
1130 sdmac->watermark_level |= lwml << 16; in sdma_set_watermarklevel_for_p2p()
1131 swap(sdmac->event_mask[0], sdmac->event_mask[1]); in sdma_set_watermarklevel_for_p2p()
1134 if (sdmac->per_address2 >= sdma->spba_start_addr && in sdma_set_watermarklevel_for_p2p()
1135 sdmac->per_address2 <= sdma->spba_end_addr) in sdma_set_watermarklevel_for_p2p()
1136 sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_SP; in sdma_set_watermarklevel_for_p2p()
1138 if (sdmac->per_address >= sdma->spba_start_addr && in sdma_set_watermarklevel_for_p2p()
1139 sdmac->per_address <= sdma->spba_end_addr) in sdma_set_watermarklevel_for_p2p()
1140 sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_DP; in sdma_set_watermarklevel_for_p2p()
1142 sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_CONT; in sdma_set_watermarklevel_for_p2p()
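
The peripheral-to-peripheral watermark setup packs the low watermark (LWML, source burst) into bits 0-15 and the high watermark (HWML, destination burst) into bits 16-31, tags events above 31 with LWE/HWE, and marks SPBA-resident peripherals with SP/DP. A sketch assembled from the matched lines; the lwml > hwml swap condition is an assumption not visible in the listing:

static void sdma_set_watermarklevel_for_p2p(struct sdma_channel *sdmac)
{
	struct sdma_engine *sdma = sdmac->sdma;
	int lwml = sdmac->watermark_level & SDMA_WATERMARK_LEVEL_LWML;
	int hwml = (sdmac->watermark_level & SDMA_WATERMARK_LEVEL_HWML) >> 16;

	set_bit(sdmac->event_id0 % 32, &sdmac->event_mask[1]);
	set_bit(sdmac->event_id1 % 32, &sdmac->event_mask[0]);

	if (sdmac->event_id0 > 31)
		sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_LWE;

	if (sdmac->event_id1 > 31)
		sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_HWE;

	/* If the low watermark exceeds the high one, swap halves and masks
	 * (condition assumed). */
	if (lwml > hwml) {
		sdmac->watermark_level &= ~(SDMA_WATERMARK_LEVEL_LWML |
					    SDMA_WATERMARK_LEVEL_HWML);
		sdmac->watermark_level |= hwml;
		sdmac->watermark_level |= lwml << 16;
		swap(sdmac->event_mask[0], sdmac->event_mask[1]);
	}

	if (sdmac->per_address2 >= sdma->spba_start_addr &&
	    sdmac->per_address2 <= sdma->spba_end_addr)
		sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_SP;

	if (sdmac->per_address >= sdma->spba_start_addr &&
	    sdmac->per_address <= sdma->spba_end_addr)
		sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_DP;

	sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_CONT;
}
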
1147 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_config_channel() local
1152 sdmac->event_mask[0] = 0; in sdma_config_channel()
1153 sdmac->event_mask[1] = 0; in sdma_config_channel()
1154 sdmac->shp_addr = 0; in sdma_config_channel()
1155 sdmac->per_addr = 0; in sdma_config_channel()
1157 switch (sdmac->peripheral_type) { in sdma_config_channel()
1159 sdma_config_ownership(sdmac, false, true, true); in sdma_config_channel()
1162 sdma_config_ownership(sdmac, false, true, false); in sdma_config_channel()
1165 sdma_config_ownership(sdmac, true, true, false); in sdma_config_channel()
1169 sdma_get_pc(sdmac, sdmac->peripheral_type); in sdma_config_channel()
1171 if ((sdmac->peripheral_type != IMX_DMATYPE_MEMORY) && in sdma_config_channel()
1172 (sdmac->peripheral_type != IMX_DMATYPE_DSP)) { in sdma_config_channel()
1174 if (sdmac->event_id1) { in sdma_config_channel()
1175 if (sdmac->peripheral_type == IMX_DMATYPE_ASRC_SP || in sdma_config_channel()
1176 sdmac->peripheral_type == IMX_DMATYPE_ASRC) in sdma_config_channel()
1177 sdma_set_watermarklevel_for_p2p(sdmac); in sdma_config_channel()
1179 __set_bit(sdmac->event_id0, sdmac->event_mask); in sdma_config_channel()
1182 sdmac->shp_addr = sdmac->per_address; in sdma_config_channel()
1183 sdmac->per_addr = sdmac->per_address2; in sdma_config_channel()
1185 sdmac->watermark_level = 0; /* FIXME: M3_BASE_ADDRESS */ in sdma_config_channel()
1188 ret = sdma_load_context(sdmac); in sdma_config_channel()
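
sdma_config_channel() resets the per-channel event masks and addresses, sets script ownership by peripheral type, resolves the script addresses via sdma_get_pc(), and finally loads the channel context. A sketch; the mapping of peripheral types to the three sdma_config_ownership() argument combinations is an assumption, since the listing does not show the case labels:

static int sdma_config_channel(struct dma_chan *chan)
{
	struct sdma_channel *sdmac = to_sdma_chan(chan);
	int ret;

	sdmac->event_mask[0] = 0;
	sdmac->event_mask[1] = 0;
	sdmac->shp_addr = 0;
	sdmac->per_addr = 0;

	switch (sdmac->peripheral_type) {
	case IMX_DMATYPE_DSP:
		sdma_config_ownership(sdmac, false, true, true);
		break;
	case IMX_DMATYPE_MEMORY:
		sdma_config_ownership(sdmac, false, true, false);
		break;
	default:
		sdma_config_ownership(sdmac, true, true, false);
		break;
	}

	sdma_get_pc(sdmac, sdmac->peripheral_type);

	if ((sdmac->peripheral_type != IMX_DMATYPE_MEMORY) &&
	    (sdmac->peripheral_type != IMX_DMATYPE_DSP)) {
		/* Dual-event (p2p) channels get the special watermark layout. */
		if (sdmac->event_id1) {
			if (sdmac->peripheral_type == IMX_DMATYPE_ASRC_SP ||
			    sdmac->peripheral_type == IMX_DMATYPE_ASRC)
				sdma_set_watermarklevel_for_p2p(sdmac);
		} else
			__set_bit(sdmac->event_id0, sdmac->event_mask);

		sdmac->shp_addr = sdmac->per_address;
		sdmac->per_addr = sdmac->per_address2;
	} else {
		sdmac->watermark_level = 0; /* FIXME: M3_BASE_ADDRESS */
	}

	ret = sdma_load_context(sdmac);

	return ret;
}
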
1193 static int sdma_set_channel_priority(struct sdma_channel *sdmac, in sdma_set_channel_priority() argument
1196 struct sdma_engine *sdma = sdmac->sdma; in sdma_set_channel_priority()
1197 int channel = sdmac->channel; in sdma_set_channel_priority()
1236 desc->bd = dma_alloc_coherent(desc->sdmac->sdma->dev, bd_size, in sdma_alloc_bd()
1250 dma_free_coherent(desc->sdmac->sdma->dev, bd_size, desc->bd, in sdma_free_bd()
1264 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_alloc_chan_resources() local
1279 dev_dbg(sdmac->sdma->dev, "MEMCPY in case?\n"); in sdma_alloc_chan_resources()
1286 sdma_get_pc(sdmac, IMX_DMATYPE_MEMORY); in sdma_alloc_chan_resources()
1302 sdmac->peripheral_type = data->peripheral_type; in sdma_alloc_chan_resources()
1303 sdmac->event_id0 = data->dma_request; in sdma_alloc_chan_resources()
1304 sdmac->event_id1 = data->dma_request2; in sdma_alloc_chan_resources()
1306 ret = clk_enable(sdmac->sdma->clk_ipg); in sdma_alloc_chan_resources()
1309 ret = clk_enable(sdmac->sdma->clk_ahb); in sdma_alloc_chan_resources()
1313 ret = sdma_set_channel_priority(sdmac, prio); in sdma_alloc_chan_resources()
1320 clk_disable(sdmac->sdma->clk_ahb); in sdma_alloc_chan_resources()
1322 clk_disable(sdmac->sdma->clk_ipg); in sdma_alloc_chan_resources()
1328 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_free_chan_resources() local
1329 struct sdma_engine *sdma = sdmac->sdma; in sdma_free_chan_resources()
1335 sdma_event_disable(sdmac, sdmac->event_id0); in sdma_free_chan_resources()
1336 if (sdmac->event_id1) in sdma_free_chan_resources()
1337 sdma_event_disable(sdmac, sdmac->event_id1); in sdma_free_chan_resources()
1339 sdmac->event_id0 = 0; in sdma_free_chan_resources()
1340 sdmac->event_id1 = 0; in sdma_free_chan_resources()
1341 sdmac->context_loaded = false; in sdma_free_chan_resources()
1343 sdma_set_channel_priority(sdmac, 0); in sdma_free_chan_resources()
1349 static struct sdma_desc *sdma_transfer_init(struct sdma_channel *sdmac, in sdma_transfer_init() argument
1358 sdmac->status = DMA_IN_PROGRESS; in sdma_transfer_init()
1359 sdmac->direction = direction; in sdma_transfer_init()
1360 sdmac->flags = 0; in sdma_transfer_init()
1366 desc->sdmac = sdmac; in sdma_transfer_init()
1374 sdma_config_ownership(sdmac, false, true, false); in sdma_transfer_init()
1376 if (sdma_load_context(sdmac)) in sdma_transfer_init()
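
sdma_transfer_init() is the common prep-path helper: it allocates the sdma_desc, marks the channel in progress, allocates the buffer descriptors and, for memcpy, claims ownership and loads the context up front. A sketch; the kzalloc/sdma_alloc_bd error handling and the descriptor field initialisation beyond what the listing shows are assumptions:

static struct sdma_desc *sdma_transfer_init(struct sdma_channel *sdmac,
				enum dma_transfer_direction direction, u32 bds)
{
	struct sdma_desc *desc;

	desc = kzalloc(sizeof(*desc), GFP_NOWAIT);
	if (!desc)
		goto err_out;

	sdmac->status = DMA_IN_PROGRESS;
	sdmac->direction = direction;
	sdmac->flags = 0;

	desc->chn_count = 0;
	desc->chn_real_count = 0;
	desc->buf_tail = 0;
	desc->buf_ptail = 0;
	desc->sdmac = sdmac;
	desc->num_bd = bds;

	if (sdma_alloc_bd(desc))
		goto err_desc_out;

	/* No dma_slave_config in the memcpy case, so claim ownership here. */
	if (direction == DMA_MEM_TO_MEM)
		sdma_config_ownership(sdmac, false, true, false);

	if (sdma_load_context(sdmac))
		goto err_desc_out;

	return desc;

err_desc_out:
	kfree(desc);
err_out:
	return NULL;
}
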
1391 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_prep_memcpy() local
1392 struct sdma_engine *sdma = sdmac->sdma; in sdma_prep_memcpy()
1393 int channel = sdmac->channel; in sdma_prep_memcpy()
1405 desc = sdma_transfer_init(sdmac, DMA_MEM_TO_MEM, in sdma_prep_memcpy()
1440 return vchan_tx_prep(&sdmac->vc, &desc->vd, flags); in sdma_prep_memcpy()
1448 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_prep_slave_sg() local
1449 struct sdma_engine *sdma = sdmac->sdma; in sdma_prep_slave_sg()
1451 int channel = sdmac->channel; in sdma_prep_slave_sg()
1455 sdma_config_write(chan, &sdmac->slave_config, direction); in sdma_prep_slave_sg()
1457 desc = sdma_transfer_init(sdmac, direction, sg_len); in sdma_prep_slave_sg()
1481 if (sdmac->word_size > DMA_SLAVE_BUSWIDTH_4_BYTES) in sdma_prep_slave_sg()
1484 switch (sdmac->word_size) { in sdma_prep_slave_sg()
1518 return vchan_tx_prep(&sdmac->vc, &desc->vd, flags); in sdma_prep_slave_sg()
1523 sdmac->status = DMA_ERROR; in sdma_prep_slave_sg()
1532 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_prep_dma_cyclic() local
1533 struct sdma_engine *sdma = sdmac->sdma; in sdma_prep_dma_cyclic()
1535 int channel = sdmac->channel; in sdma_prep_dma_cyclic()
1541 sdma_config_write(chan, &sdmac->slave_config, direction); in sdma_prep_dma_cyclic()
1543 desc = sdma_transfer_init(sdmac, direction, num_periods); in sdma_prep_dma_cyclic()
1549 sdmac->flags |= IMX_DMA_SG_LOOP; in sdma_prep_dma_cyclic()
1565 if (sdmac->word_size > DMA_SLAVE_BUSWIDTH_4_BYTES) in sdma_prep_dma_cyclic()
1567 if (sdmac->word_size == DMA_SLAVE_BUSWIDTH_4_BYTES) in sdma_prep_dma_cyclic()
1570 bd->mode.command = sdmac->word_size; in sdma_prep_dma_cyclic()
1589 return vchan_tx_prep(&sdmac->vc, &desc->vd, flags); in sdma_prep_dma_cyclic()
1594 sdmac->status = DMA_ERROR; in sdma_prep_dma_cyclic()
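
The cyclic prep lines show the period-per-BD layout: each period gets one buffer descriptor, the transfer width is encoded in bd->mode.command, and the channel is flagged IMX_DMA_SG_LOOP so completion is handled by sdma_update_channel_loop(). A sketch of the whole function; the BD_DONE/BD_EXTD/BD_CONT/BD_INTR/BD_WRAP status flags are assumptions not visible in the listing:

static struct dma_async_tx_descriptor *sdma_prep_dma_cyclic(
		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
		size_t period_len, enum dma_transfer_direction direction,
		unsigned long flags)
{
	struct sdma_channel *sdmac = to_sdma_chan(chan);
	int num_periods = buf_len / period_len;
	int i = 0, buf = 0;
	struct sdma_desc *desc;

	sdma_config_write(chan, &sdmac->slave_config, direction);

	desc = sdma_transfer_init(sdmac, direction, num_periods);
	if (!desc)
		goto err_out;

	desc->period_len = period_len;
	sdmac->flags |= IMX_DMA_SG_LOOP;

	/* One buffer descriptor per period; the last one wraps back (assumed). */
	while (buf < buf_len) {
		struct sdma_buffer_descriptor *bd = &desc->bd[i];
		int param;

		bd->buffer_addr = dma_addr;
		bd->mode.count = period_len;

		if (sdmac->word_size > DMA_SLAVE_BUSWIDTH_4_BYTES)
			goto err_bd_out;
		if (sdmac->word_size == DMA_SLAVE_BUSWIDTH_4_BYTES)
			bd->mode.command = 0;	/* 32-bit accesses use command 0 */
		else
			bd->mode.command = sdmac->word_size;

		param = BD_DONE | BD_EXTD | BD_CONT | BD_INTR;
		if (i + 1 == num_periods)
			param |= BD_WRAP;
		bd->mode.status = param;

		dma_addr += period_len;
		buf += period_len;
		i++;
	}

	return vchan_tx_prep(&sdmac->vc, &desc->vd, flags);
err_bd_out:
	sdma_free_bd(desc);
	kfree(desc);
err_out:
	sdmac->status = DMA_ERROR;
	return NULL;
}
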
1602 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_config_write() local
1605 sdmac->per_address = dmaengine_cfg->src_addr; in sdma_config_write()
1606 sdmac->watermark_level = dmaengine_cfg->src_maxburst * in sdma_config_write()
1608 sdmac->word_size = dmaengine_cfg->src_addr_width; in sdma_config_write()
1610 sdmac->per_address2 = dmaengine_cfg->src_addr; in sdma_config_write()
1611 sdmac->per_address = dmaengine_cfg->dst_addr; in sdma_config_write()
1612 sdmac->watermark_level = dmaengine_cfg->src_maxburst & in sdma_config_write()
1614 sdmac->watermark_level |= (dmaengine_cfg->dst_maxburst << 16) & in sdma_config_write()
1616 sdmac->word_size = dmaengine_cfg->dst_addr_width; in sdma_config_write()
1618 sdmac->per_address = dmaengine_cfg->dst_addr; in sdma_config_write()
1619 sdmac->watermark_level = dmaengine_cfg->dst_maxburst * in sdma_config_write()
1621 sdmac->word_size = dmaengine_cfg->dst_addr_width; in sdma_config_write()
1623 sdmac->direction = direction; in sdma_config_write()
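
sdma_config_write() translates the cached dma_slave_config into the channel's per_address, watermark_level and word_size, with the DEV_TO_DEV case packing the source/destination bursts into the low/high watermark halves. A sketch assembled from the matched lines; the final call into sdma_config_channel() is an assumption:

static int sdma_config_write(struct dma_chan *chan,
			     struct dma_slave_config *dmaengine_cfg,
			     enum dma_transfer_direction direction)
{
	struct sdma_channel *sdmac = to_sdma_chan(chan);

	if (direction == DMA_DEV_TO_MEM) {
		sdmac->per_address = dmaengine_cfg->src_addr;
		sdmac->watermark_level = dmaengine_cfg->src_maxburst *
			dmaengine_cfg->src_addr_width;
		sdmac->word_size = dmaengine_cfg->src_addr_width;
	} else if (direction == DMA_DEV_TO_DEV) {
		sdmac->per_address2 = dmaengine_cfg->src_addr;
		sdmac->per_address = dmaengine_cfg->dst_addr;
		sdmac->watermark_level = dmaengine_cfg->src_maxburst &
			SDMA_WATERMARK_LEVEL_LWML;
		sdmac->watermark_level |= (dmaengine_cfg->dst_maxburst << 16) &
			SDMA_WATERMARK_LEVEL_HWML;
		sdmac->word_size = dmaengine_cfg->dst_addr_width;
	} else {
		sdmac->per_address = dmaengine_cfg->dst_addr;
		sdmac->watermark_level = dmaengine_cfg->dst_maxburst *
			dmaengine_cfg->dst_addr_width;
		sdmac->word_size = dmaengine_cfg->dst_addr_width;
	}
	sdmac->direction = direction;
	return sdma_config_channel(chan);
}
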
1630 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_config() local
1632 memcpy(&sdmac->slave_config, dmaengine_cfg, sizeof(*dmaengine_cfg)); in sdma_config()
1635 if (sdmac->event_id0 >= sdmac->sdma->drvdata->num_events) in sdma_config()
1637 sdma_event_enable(sdmac, sdmac->event_id0); in sdma_config()
1639 if (sdmac->event_id1) { in sdma_config()
1640 if (sdmac->event_id1 >= sdmac->sdma->drvdata->num_events) in sdma_config()
1642 sdma_event_enable(sdmac, sdmac->event_id1); in sdma_config()
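
sdma_config() caches the dma_slave_config for later prep calls and enables the DMA request events up front, rejecting event ids that exceed the engine's num_events. A sketch reconstructed from the matched lines:

static int sdma_config(struct dma_chan *chan,
		       struct dma_slave_config *dmaengine_cfg)
{
	struct sdma_channel *sdmac = to_sdma_chan(chan);

	memcpy(&sdmac->slave_config, dmaengine_cfg, sizeof(*dmaengine_cfg));

	/* Enable the request events early so a trigger after this is seen. */
	if (sdmac->event_id0 >= sdmac->sdma->drvdata->num_events)
		return -EINVAL;
	sdma_event_enable(sdmac, sdmac->event_id0);

	if (sdmac->event_id1) {
		if (sdmac->event_id1 >= sdmac->sdma->drvdata->num_events)
			return -EINVAL;
		sdma_event_enable(sdmac, sdmac->event_id1);
	}

	return 0;
}
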
1652 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_tx_status() local
1663 spin_lock_irqsave(&sdmac->vc.lock, flags); in sdma_tx_status()
1665 vd = vchan_find_desc(&sdmac->vc, cookie); in sdma_tx_status()
1668 else if (sdmac->desc && sdmac->desc->vd.tx.cookie == cookie) in sdma_tx_status()
1669 desc = sdmac->desc; in sdma_tx_status()
1672 if (sdmac->flags & IMX_DMA_SG_LOOP) in sdma_tx_status()
1681 spin_unlock_irqrestore(&sdmac->vc.lock, flags); in sdma_tx_status()
1686 return sdmac->status; in sdma_tx_status()
1691 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_issue_pending() local
1694 spin_lock_irqsave(&sdmac->vc.lock, flags); in sdma_issue_pending()
1695 if (vchan_issue_pending(&sdmac->vc) && !sdmac->desc) in sdma_issue_pending()
1696 sdma_start_desc(sdmac); in sdma_issue_pending()
1697 spin_unlock_irqrestore(&sdmac->vc.lock, flags); in sdma_issue_pending()
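
issue_pending follows the standard virt-dma pattern: under vc.lock, move submitted descriptors to the issued list and start one only if the channel is currently idle (sdmac->desc is NULL). Reconstructed directly from the matched lines:

static void sdma_issue_pending(struct dma_chan *chan)
{
	struct sdma_channel *sdmac = to_sdma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&sdmac->vc.lock, flags);
	if (vchan_issue_pending(&sdmac->vc) && !sdmac->desc)
		sdma_start_desc(sdmac);
	spin_unlock_irqrestore(&sdmac->vc.lock, flags);
}
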
1946 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_filter_fn() local
1952 sdmac->data = *data; in sdma_filter_fn()
1953 chan->private = &sdmac->data; in sdma_filter_fn()
2074 struct sdma_channel *sdmac = &sdma->channel[i]; in sdma_probe() local
2076 sdmac->sdma = sdma; in sdma_probe()
2078 sdmac->channel = i; in sdma_probe()
2079 sdmac->vc.desc_free = sdma_desc_free; in sdma_probe()
2080 INIT_WORK(&sdmac->terminate_worker, in sdma_probe()
2088 vchan_init(&sdmac->vc, &sdma->dma_device); in sdma_probe()
2199 struct sdma_channel *sdmac = &sdma->channel[i]; in sdma_remove() local
2201 tasklet_kill(&sdmac->vc.task); in sdma_remove()
2202 sdma_free_chan_resources(&sdmac->vc.chan); in sdma_remove()