Lines matching refs:sdmac in drivers/dma/imx-sdma.c (the i.MX SDMA dmaengine driver); each entry gives the source line number, the matching code, and the enclosing definition. A brief client-side usage sketch follows the listing.

384 	struct sdma_channel	*sdmac;  member
668 static int sdma_config_ownership(struct sdma_channel *sdmac, in sdma_config_ownership() argument
671 struct sdma_engine *sdma = sdmac->sdma; in sdma_config_ownership()
672 int channel = sdmac->channel; in sdma_config_ownership()
767 static void sdma_event_enable(struct sdma_channel *sdmac, unsigned int event) in sdma_event_enable() argument
769 struct sdma_engine *sdma = sdmac->sdma; in sdma_event_enable()
770 int channel = sdmac->channel; in sdma_event_enable()
779 static void sdma_event_disable(struct sdma_channel *sdmac, unsigned int event) in sdma_event_disable() argument
781 struct sdma_engine *sdma = sdmac->sdma; in sdma_event_disable()
782 int channel = sdmac->channel; in sdma_event_disable()
796 static void sdma_start_desc(struct sdma_channel *sdmac) in sdma_start_desc() argument
798 struct virt_dma_desc *vd = vchan_next_desc(&sdmac->vc); in sdma_start_desc()
800 struct sdma_engine *sdma = sdmac->sdma; in sdma_start_desc()
801 int channel = sdmac->channel; in sdma_start_desc()
804 sdmac->desc = NULL; in sdma_start_desc()
807 sdmac->desc = desc = to_sdma_desc(&vd->tx); in sdma_start_desc()
813 sdma_enable_channel(sdma, sdmac->channel); in sdma_start_desc()
816 static void sdma_update_channel_loop(struct sdma_channel *sdmac) in sdma_update_channel_loop() argument
820 enum dma_status old_status = sdmac->status; in sdma_update_channel_loop()
826 while (sdmac->desc) { in sdma_update_channel_loop()
827 struct sdma_desc *desc = sdmac->desc; in sdma_update_channel_loop()
836 sdmac->status = DMA_ERROR; in sdma_update_channel_loop()
857 spin_unlock(&sdmac->vc.lock); in sdma_update_channel_loop()
859 spin_lock(&sdmac->vc.lock); in sdma_update_channel_loop()
862 sdmac->status = old_status; in sdma_update_channel_loop()
868 struct sdma_channel *sdmac = (struct sdma_channel *) data; in mxc_sdma_handle_channel_normal() local
872 sdmac->desc->chn_real_count = 0; in mxc_sdma_handle_channel_normal()
877 for (i = 0; i < sdmac->desc->num_bd; i++) { in mxc_sdma_handle_channel_normal()
878 bd = &sdmac->desc->bd[i]; in mxc_sdma_handle_channel_normal()
882 sdmac->desc->chn_real_count += bd->mode.count; in mxc_sdma_handle_channel_normal()
886 sdmac->status = DMA_ERROR; in mxc_sdma_handle_channel_normal()
888 sdmac->status = DMA_COMPLETE; in mxc_sdma_handle_channel_normal()
903 struct sdma_channel *sdmac = &sdma->channel[channel]; in sdma_int_handler() local
906 spin_lock(&sdmac->vc.lock); in sdma_int_handler()
907 desc = sdmac->desc; in sdma_int_handler()
909 if (sdmac->flags & IMX_DMA_SG_LOOP) { in sdma_int_handler()
910 sdma_update_channel_loop(sdmac); in sdma_int_handler()
912 mxc_sdma_handle_channel_normal(sdmac); in sdma_int_handler()
914 sdma_start_desc(sdmac); in sdma_int_handler()
918 spin_unlock(&sdmac->vc.lock); in sdma_int_handler()
928 static void sdma_get_pc(struct sdma_channel *sdmac, in sdma_get_pc() argument
931 struct sdma_engine *sdma = sdmac->sdma; in sdma_get_pc()
939 sdmac->pc_from_device = 0; in sdma_get_pc()
940 sdmac->pc_to_device = 0; in sdma_get_pc()
941 sdmac->device_to_device = 0; in sdma_get_pc()
942 sdmac->pc_to_pc = 0; in sdma_get_pc()
943 sdmac->is_ram_script = false; in sdma_get_pc()
973 if (sdmac->sdma->drvdata->ecspi_fixed) { in sdma_get_pc()
977 sdmac->is_ram_script = true; in sdma_get_pc()
990 sdmac->is_ram_script = true; in sdma_get_pc()
1005 sdmac->is_ram_script = true; in sdma_get_pc()
1030 sdmac->pc_from_device = per_2_emi; in sdma_get_pc()
1031 sdmac->pc_to_device = emi_2_per; in sdma_get_pc()
1032 sdmac->device_to_device = per_2_per; in sdma_get_pc()
1033 sdmac->pc_to_pc = emi_2_emi; in sdma_get_pc()
1036 static int sdma_load_context(struct sdma_channel *sdmac) in sdma_load_context() argument
1038 struct sdma_engine *sdma = sdmac->sdma; in sdma_load_context()
1039 int channel = sdmac->channel; in sdma_load_context()
1046 if (sdmac->direction == DMA_DEV_TO_MEM) in sdma_load_context()
1047 load_address = sdmac->pc_from_device; in sdma_load_context()
1048 else if (sdmac->direction == DMA_DEV_TO_DEV) in sdma_load_context()
1049 load_address = sdmac->device_to_device; in sdma_load_context()
1050 else if (sdmac->direction == DMA_MEM_TO_MEM) in sdma_load_context()
1051 load_address = sdmac->pc_to_pc; in sdma_load_context()
1053 load_address = sdmac->pc_to_device; in sdma_load_context()
1059 dev_dbg(sdma->dev, "wml = 0x%08x\n", (u32)sdmac->watermark_level); in sdma_load_context()
1060 dev_dbg(sdma->dev, "shp_addr = 0x%08x\n", sdmac->shp_addr); in sdma_load_context()
1061 dev_dbg(sdma->dev, "per_addr = 0x%08x\n", sdmac->per_addr); in sdma_load_context()
1062 dev_dbg(sdma->dev, "event_mask0 = 0x%08x\n", (u32)sdmac->event_mask[0]); in sdma_load_context()
1063 dev_dbg(sdma->dev, "event_mask1 = 0x%08x\n", (u32)sdmac->event_mask[1]); in sdma_load_context()
1073 context->gReg[0] = sdmac->event_mask[1]; in sdma_load_context()
1074 context->gReg[1] = sdmac->event_mask[0]; in sdma_load_context()
1075 context->gReg[2] = sdmac->per_addr; in sdma_load_context()
1076 context->gReg[6] = sdmac->shp_addr; in sdma_load_context()
1077 context->gReg[7] = sdmac->watermark_level; in sdma_load_context()
1098 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_disable_channel() local
1099 struct sdma_engine *sdma = sdmac->sdma; in sdma_disable_channel()
1100 int channel = sdmac->channel; in sdma_disable_channel()
1103 sdmac->status = DMA_ERROR; in sdma_disable_channel()
1109 struct sdma_channel *sdmac = container_of(work, struct sdma_channel, in sdma_channel_terminate_work() local
1119 vchan_dma_desc_free_list(&sdmac->vc, &sdmac->terminated); in sdma_channel_terminate_work()
1124 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_terminate_all() local
1127 spin_lock_irqsave(&sdmac->vc.lock, flags); in sdma_terminate_all()
1131 if (sdmac->desc) { in sdma_terminate_all()
1132 vchan_terminate_vdesc(&sdmac->desc->vd); in sdma_terminate_all()
1139 vchan_get_all_descriptors(&sdmac->vc, &sdmac->terminated); in sdma_terminate_all()
1140 sdmac->desc = NULL; in sdma_terminate_all()
1141 schedule_work(&sdmac->terminate_worker); in sdma_terminate_all()
1144 spin_unlock_irqrestore(&sdmac->vc.lock, flags); in sdma_terminate_all()
1151 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_channel_synchronize() local
1153 vchan_synchronize(&sdmac->vc); in sdma_channel_synchronize()
1155 flush_work(&sdmac->terminate_worker); in sdma_channel_synchronize()
1158 static void sdma_set_watermarklevel_for_p2p(struct sdma_channel *sdmac) in sdma_set_watermarklevel_for_p2p() argument
1160 struct sdma_engine *sdma = sdmac->sdma; in sdma_set_watermarklevel_for_p2p()
1162 int lwml = sdmac->watermark_level & SDMA_WATERMARK_LEVEL_LWML; in sdma_set_watermarklevel_for_p2p()
1163 int hwml = (sdmac->watermark_level & SDMA_WATERMARK_LEVEL_HWML) >> 16; in sdma_set_watermarklevel_for_p2p()
1165 set_bit(sdmac->event_id0 % 32, &sdmac->event_mask[1]); in sdma_set_watermarklevel_for_p2p()
1166 set_bit(sdmac->event_id1 % 32, &sdmac->event_mask[0]); in sdma_set_watermarklevel_for_p2p()
1168 if (sdmac->event_id0 > 31) in sdma_set_watermarklevel_for_p2p()
1169 sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_LWE; in sdma_set_watermarklevel_for_p2p()
1171 if (sdmac->event_id1 > 31) in sdma_set_watermarklevel_for_p2p()
1172 sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_HWE; in sdma_set_watermarklevel_for_p2p()
1180 sdmac->watermark_level &= ~(SDMA_WATERMARK_LEVEL_LWML | in sdma_set_watermarklevel_for_p2p()
1182 sdmac->watermark_level |= hwml; in sdma_set_watermarklevel_for_p2p()
1183 sdmac->watermark_level |= lwml << 16; in sdma_set_watermarklevel_for_p2p()
1184 swap(sdmac->event_mask[0], sdmac->event_mask[1]); in sdma_set_watermarklevel_for_p2p()
1187 if (sdmac->per_address2 >= sdma->spba_start_addr && in sdma_set_watermarklevel_for_p2p()
1188 sdmac->per_address2 <= sdma->spba_end_addr) in sdma_set_watermarklevel_for_p2p()
1189 sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_SP; in sdma_set_watermarklevel_for_p2p()
1191 if (sdmac->per_address >= sdma->spba_start_addr && in sdma_set_watermarklevel_for_p2p()
1192 sdmac->per_address <= sdma->spba_end_addr) in sdma_set_watermarklevel_for_p2p()
1193 sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_DP; in sdma_set_watermarklevel_for_p2p()
1195 sdmac->watermark_level |= SDMA_WATERMARK_LEVEL_CONT; in sdma_set_watermarklevel_for_p2p()
1200 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_config_channel() local
1204 sdmac->event_mask[0] = 0; in sdma_config_channel()
1205 sdmac->event_mask[1] = 0; in sdma_config_channel()
1206 sdmac->shp_addr = 0; in sdma_config_channel()
1207 sdmac->per_addr = 0; in sdma_config_channel()
1209 switch (sdmac->peripheral_type) { in sdma_config_channel()
1211 sdma_config_ownership(sdmac, false, true, true); in sdma_config_channel()
1214 sdma_config_ownership(sdmac, false, true, false); in sdma_config_channel()
1217 sdma_config_ownership(sdmac, true, true, false); in sdma_config_channel()
1221 sdma_get_pc(sdmac, sdmac->peripheral_type); in sdma_config_channel()
1223 if ((sdmac->peripheral_type != IMX_DMATYPE_MEMORY) && in sdma_config_channel()
1224 (sdmac->peripheral_type != IMX_DMATYPE_DSP)) { in sdma_config_channel()
1226 if (sdmac->event_id1) { in sdma_config_channel()
1227 if (sdmac->peripheral_type == IMX_DMATYPE_ASRC_SP || in sdma_config_channel()
1228 sdmac->peripheral_type == IMX_DMATYPE_ASRC) in sdma_config_channel()
1229 sdma_set_watermarklevel_for_p2p(sdmac); in sdma_config_channel()
1231 __set_bit(sdmac->event_id0, sdmac->event_mask); in sdma_config_channel()
1234 sdmac->shp_addr = sdmac->per_address; in sdma_config_channel()
1235 sdmac->per_addr = sdmac->per_address2; in sdma_config_channel()
1237 sdmac->watermark_level = 0; /* FIXME: M3_BASE_ADDRESS */ in sdma_config_channel()
1243 static int sdma_set_channel_priority(struct sdma_channel *sdmac, in sdma_set_channel_priority() argument
1246 struct sdma_engine *sdma = sdmac->sdma; in sdma_set_channel_priority()
1247 int channel = sdmac->channel; in sdma_set_channel_priority()
1286 desc->bd = dma_alloc_coherent(desc->sdmac->sdma->dev, bd_size, in sdma_alloc_bd()
1300 dma_free_coherent(desc->sdmac->sdma->dev, bd_size, desc->bd, in sdma_free_bd()
1314 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_alloc_chan_resources() local
1329 dev_dbg(sdmac->sdma->dev, "MEMCPY in case?\n"); in sdma_alloc_chan_resources()
1336 sdma_get_pc(sdmac, IMX_DMATYPE_MEMORY); in sdma_alloc_chan_resources()
1352 sdmac->peripheral_type = data->peripheral_type; in sdma_alloc_chan_resources()
1353 sdmac->event_id0 = data->dma_request; in sdma_alloc_chan_resources()
1354 sdmac->event_id1 = data->dma_request2; in sdma_alloc_chan_resources()
1356 ret = clk_enable(sdmac->sdma->clk_ipg); in sdma_alloc_chan_resources()
1359 ret = clk_enable(sdmac->sdma->clk_ahb); in sdma_alloc_chan_resources()
1363 ret = sdma_set_channel_priority(sdmac, prio); in sdma_alloc_chan_resources()
1370 clk_disable(sdmac->sdma->clk_ahb); in sdma_alloc_chan_resources()
1372 clk_disable(sdmac->sdma->clk_ipg); in sdma_alloc_chan_resources()
1378 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_free_chan_resources() local
1379 struct sdma_engine *sdma = sdmac->sdma; in sdma_free_chan_resources()
1385 sdma_event_disable(sdmac, sdmac->event_id0); in sdma_free_chan_resources()
1386 if (sdmac->event_id1) in sdma_free_chan_resources()
1387 sdma_event_disable(sdmac, sdmac->event_id1); in sdma_free_chan_resources()
1389 sdmac->event_id0 = 0; in sdma_free_chan_resources()
1390 sdmac->event_id1 = 0; in sdma_free_chan_resources()
1392 sdma_set_channel_priority(sdmac, 0); in sdma_free_chan_resources()
1398 static struct sdma_desc *sdma_transfer_init(struct sdma_channel *sdmac, in sdma_transfer_init() argument
1403 if (!sdmac->sdma->fw_loaded && sdmac->is_ram_script) { in sdma_transfer_init()
1404 dev_warn_once(sdmac->sdma->dev, "sdma firmware not ready!\n"); in sdma_transfer_init()
1412 sdmac->status = DMA_IN_PROGRESS; in sdma_transfer_init()
1413 sdmac->direction = direction; in sdma_transfer_init()
1414 sdmac->flags = 0; in sdma_transfer_init()
1420 desc->sdmac = sdmac; in sdma_transfer_init()
1428 sdma_config_ownership(sdmac, false, true, false); in sdma_transfer_init()
1430 if (sdma_load_context(sdmac)) in sdma_transfer_init()
1445 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_prep_memcpy() local
1446 struct sdma_engine *sdma = sdmac->sdma; in sdma_prep_memcpy()
1447 int channel = sdmac->channel; in sdma_prep_memcpy()
1459 desc = sdma_transfer_init(sdmac, DMA_MEM_TO_MEM, in sdma_prep_memcpy()
1494 return vchan_tx_prep(&sdmac->vc, &desc->vd, flags); in sdma_prep_memcpy()
1502 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_prep_slave_sg() local
1503 struct sdma_engine *sdma = sdmac->sdma; in sdma_prep_slave_sg()
1505 int channel = sdmac->channel; in sdma_prep_slave_sg()
1509 sdma_config_write(chan, &sdmac->slave_config, direction); in sdma_prep_slave_sg()
1511 desc = sdma_transfer_init(sdmac, direction, sg_len); in sdma_prep_slave_sg()
1535 if (sdmac->word_size > DMA_SLAVE_BUSWIDTH_4_BYTES) in sdma_prep_slave_sg()
1538 switch (sdmac->word_size) { in sdma_prep_slave_sg()
1572 return vchan_tx_prep(&sdmac->vc, &desc->vd, flags); in sdma_prep_slave_sg()
1577 sdmac->status = DMA_ERROR; in sdma_prep_slave_sg()
1586 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_prep_dma_cyclic() local
1587 struct sdma_engine *sdma = sdmac->sdma; in sdma_prep_dma_cyclic()
1589 int channel = sdmac->channel; in sdma_prep_dma_cyclic()
1595 sdma_config_write(chan, &sdmac->slave_config, direction); in sdma_prep_dma_cyclic()
1597 desc = sdma_transfer_init(sdmac, direction, num_periods); in sdma_prep_dma_cyclic()
1603 sdmac->flags |= IMX_DMA_SG_LOOP; in sdma_prep_dma_cyclic()
1619 if (sdmac->word_size > DMA_SLAVE_BUSWIDTH_4_BYTES) in sdma_prep_dma_cyclic()
1621 if (sdmac->word_size == DMA_SLAVE_BUSWIDTH_4_BYTES) in sdma_prep_dma_cyclic()
1624 bd->mode.command = sdmac->word_size; in sdma_prep_dma_cyclic()
1643 return vchan_tx_prep(&sdmac->vc, &desc->vd, flags); in sdma_prep_dma_cyclic()
1648 sdmac->status = DMA_ERROR; in sdma_prep_dma_cyclic()
1656 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_config_write() local
1659 sdmac->per_address = dmaengine_cfg->src_addr; in sdma_config_write()
1660 sdmac->watermark_level = dmaengine_cfg->src_maxburst * in sdma_config_write()
1662 sdmac->word_size = dmaengine_cfg->src_addr_width; in sdma_config_write()
1664 sdmac->per_address2 = dmaengine_cfg->src_addr; in sdma_config_write()
1665 sdmac->per_address = dmaengine_cfg->dst_addr; in sdma_config_write()
1666 sdmac->watermark_level = dmaengine_cfg->src_maxburst & in sdma_config_write()
1668 sdmac->watermark_level |= (dmaengine_cfg->dst_maxburst << 16) & in sdma_config_write()
1670 sdmac->word_size = dmaengine_cfg->dst_addr_width; in sdma_config_write()
1672 sdmac->per_address = dmaengine_cfg->dst_addr; in sdma_config_write()
1673 sdmac->watermark_level = dmaengine_cfg->dst_maxburst * in sdma_config_write()
1675 sdmac->word_size = dmaengine_cfg->dst_addr_width; in sdma_config_write()
1677 sdmac->direction = direction; in sdma_config_write()
1684 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_config() local
1686 memcpy(&sdmac->slave_config, dmaengine_cfg, sizeof(*dmaengine_cfg)); in sdma_config()
1689 if (sdmac->event_id0 >= sdmac->sdma->drvdata->num_events) in sdma_config()
1691 sdma_event_enable(sdmac, sdmac->event_id0); in sdma_config()
1693 if (sdmac->event_id1) { in sdma_config()
1694 if (sdmac->event_id1 >= sdmac->sdma->drvdata->num_events) in sdma_config()
1696 sdma_event_enable(sdmac, sdmac->event_id1); in sdma_config()
1706 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_tx_status() local
1717 spin_lock_irqsave(&sdmac->vc.lock, flags); in sdma_tx_status()
1719 vd = vchan_find_desc(&sdmac->vc, cookie); in sdma_tx_status()
1722 else if (sdmac->desc && sdmac->desc->vd.tx.cookie == cookie) in sdma_tx_status()
1723 desc = sdmac->desc; in sdma_tx_status()
1726 if (sdmac->flags & IMX_DMA_SG_LOOP) in sdma_tx_status()
1735 spin_unlock_irqrestore(&sdmac->vc.lock, flags); in sdma_tx_status()
1740 return sdmac->status; in sdma_tx_status()
1745 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_issue_pending() local
1748 spin_lock_irqsave(&sdmac->vc.lock, flags); in sdma_issue_pending()
1749 if (vchan_issue_pending(&sdmac->vc) && !sdmac->desc) in sdma_issue_pending()
1750 sdma_start_desc(sdmac); in sdma_issue_pending()
1751 spin_unlock_irqrestore(&sdmac->vc.lock, flags); in sdma_issue_pending()
2015 struct sdma_channel *sdmac = to_sdma_chan(chan); in sdma_filter_fn() local
2021 sdmac->data = *data; in sdma_filter_fn()
2022 chan->private = &sdmac->data; in sdma_filter_fn()
2129 struct sdma_channel *sdmac = &sdma->channel[i]; in sdma_probe() local
2131 sdmac->sdma = sdma; in sdma_probe()
2133 sdmac->channel = i; in sdma_probe()
2134 sdmac->vc.desc_free = sdma_desc_free; in sdma_probe()
2135 INIT_LIST_HEAD(&sdmac->terminated); in sdma_probe()
2136 INIT_WORK(&sdmac->terminate_worker, in sdma_probe()
2144 vchan_init(&sdmac->vc, &sdma->dma_device); in sdma_probe()
2241 struct sdma_channel *sdmac = &sdma->channel[i]; in sdma_remove() local
2243 tasklet_kill(&sdmac->vc.task); in sdma_remove()
2244 sdma_free_chan_resources(&sdmac->vc.chan); in sdma_remove()
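
The listing above indexes the driver's dmaengine callbacks (sdma_config(), sdma_prep_slave_sg(), sdma_prep_dma_cyclic(), sdma_issue_pending(), sdma_tx_status(), ...). As a rough orientation, the minimal sketch below shows how a generic dmaengine client reaches those callbacks; it is not code from imx-sdma.c, and the device, the "rx" channel name, the FIFO address and the burst/width settings are illustrative assumptions.

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/device.h>

/* Hypothetical helper: start one DEV_TO_MEM transfer on an SDMA channel. */
static int example_start_rx(struct device *dev, dma_addr_t buf,
			    size_t len, dma_addr_t fifo_addr)
{
	struct dma_chan *chan;
	struct dma_slave_config cfg = { };
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	chan = dma_request_chan(dev, "rx");	/* "rx" label is illustrative */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	cfg.direction = DMA_DEV_TO_MEM;
	cfg.src_addr = fifo_addr;		/* peripheral FIFO address (assumed) */
	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cfg.src_maxburst = 4;
	dmaengine_slave_config(chan, &cfg);	/* lands in sdma_config() */

	desc = dmaengine_prep_slave_single(chan, buf, len, DMA_DEV_TO_MEM,
					   DMA_PREP_INTERRUPT);
	if (!desc) {				/* prep path: sdma_prep_slave_sg() */
		dma_release_channel(chan);
		return -EINVAL;
	}

	cookie = dmaengine_submit(desc);	/* queues the vd onto sdmac->vc */
	dma_async_issue_pending(chan);		/* triggers sdma_issue_pending() */

	/* Later, dmaengine_tx_status(chan, cookie, &state) calls sdma_tx_status(). */
	return dma_submit_error(cookie);
}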