Lines matching refs: d40c
603 static struct device *chan2dev(struct d40_chan *d40c) in chan2dev() argument
605 return &d40c->chan.dev->device; in chan2dev()
627 #define chan_err(d40c, format, arg...) \ argument
628 d40_err(chan2dev(d40c), format, ## arg)
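
The two lines above show the usual pattern of resolving a channel to its struct device for logging. A minimal sketch of such a wrapper, assuming d40_err() forwards to dev_err(); the d40_err() body is not part of this listing, so its expansion here is a guess and the my_* names are made up:

#include <linux/device.h>

/* Hypothetical expansion, for illustration only. */
#define my_d40_err(dev, format, arg...) \
	dev_err(dev, "[%s] " format, __func__, ## arg)

#define my_chan_err(d40c, format, arg...) \
	my_d40_err(chan2dev(d40c), format, ## arg)
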
634 static int d40_pool_lli_alloc(struct d40_chan *d40c, struct d40_desc *d40d, in d40_pool_lli_alloc() argument
637 bool is_log = chan_is_logical(d40c); in d40_pool_lli_alloc()
669 d40d->lli_pool.dma_addr = dma_map_single(d40c->base->dev, in d40_pool_lli_alloc()
674 if (dma_mapping_error(d40c->base->dev, in d40_pool_lli_alloc()
686 static void d40_pool_lli_free(struct d40_chan *d40c, struct d40_desc *d40d) in d40_pool_lli_free() argument
689 dma_unmap_single(d40c->base->dev, d40d->lli_pool.dma_addr, in d40_pool_lli_free()
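
d40_pool_lli_alloc()/d40_pool_lli_free() above use the streaming DMA-mapping API for the LLI pool. A self-contained sketch of that map/check/unmap sequence; the function names, buffer size and DMA direction are assumptions for illustration:

#include <linux/dma-mapping.h>

static dma_addr_t my_map_lli_pool(struct device *dev, void *vaddr, size_t size)
{
	dma_addr_t addr = dma_map_single(dev, vaddr, size, DMA_TO_DEVICE);

	/* dma_map_single() can fail; always check before using the address */
	if (dma_mapping_error(dev, addr))
		return DMA_MAPPING_ERROR;

	return addr;
}

static void my_unmap_lli_pool(struct device *dev, dma_addr_t addr, size_t size)
{
	dma_unmap_single(dev, addr, size, DMA_TO_DEVICE);
}
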
701 static int d40_lcla_alloc_one(struct d40_chan *d40c, in d40_lcla_alloc_one() argument
708 spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags); in d40_lcla_alloc_one()
715 int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i; in d40_lcla_alloc_one()
717 if (!d40c->base->lcla_pool.alloc_map[idx]) { in d40_lcla_alloc_one()
718 d40c->base->lcla_pool.alloc_map[idx] = d40d; in d40_lcla_alloc_one()
725 spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags); in d40_lcla_alloc_one()
730 static int d40_lcla_free_all(struct d40_chan *d40c, in d40_lcla_free_all() argument
737 if (chan_is_physical(d40c)) in d40_lcla_free_all()
740 spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags); in d40_lcla_free_all()
743 int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i; in d40_lcla_free_all()
745 if (d40c->base->lcla_pool.alloc_map[idx] == d40d) { in d40_lcla_free_all()
746 d40c->base->lcla_pool.alloc_map[idx] = NULL; in d40_lcla_free_all()
755 spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags); in d40_lcla_free_all()
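
The LCLA helpers above hand out per-channel link slots from a flat alloc_map array under the pool spinlock, indexed as phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i. A simplified sketch of that first-free-slot allocator; the structure, array size and link count are placeholders, not the driver's real values:

#include <linux/spinlock.h>
#include <linux/errno.h>

#define MY_LINKS_PER_GRP 2	/* placeholder for D40_LCLA_LINK_PER_EVENT_GRP */

struct my_lcla_pool {
	spinlock_t lock;
	void *alloc_map[8 * MY_LINKS_PER_GRP];	/* sized for an assumed 8 physical channels */
};

static int my_lcla_alloc_one(struct my_lcla_pool *pool, int phy_num, void *owner)
{
	unsigned long flags;
	int i, ret = -EINVAL;

	spin_lock_irqsave(&pool->lock, flags);
	for (i = 0; i < MY_LINKS_PER_GRP; i++) {
		int idx = phy_num * MY_LINKS_PER_GRP + i;

		if (!pool->alloc_map[idx]) {
			pool->alloc_map[idx] = owner;	/* claim this link slot */
			ret = i;
			break;
		}
	}
	spin_unlock_irqrestore(&pool->lock, flags);

	return ret;
}
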
766 static struct d40_desc *d40_desc_get(struct d40_chan *d40c) in d40_desc_get() argument
770 if (!list_empty(&d40c->client)) { in d40_desc_get()
774 list_for_each_entry_safe(d, _d, &d40c->client, node) { in d40_desc_get()
785 desc = kmem_cache_zalloc(d40c->base->desc_slab, GFP_NOWAIT); in d40_desc_get()
793 static void d40_desc_free(struct d40_chan *d40c, struct d40_desc *d40d) in d40_desc_free() argument
796 d40_pool_lli_free(d40c, d40d); in d40_desc_free()
797 d40_lcla_free_all(d40c, d40d); in d40_desc_free()
798 kmem_cache_free(d40c->base->desc_slab, d40d); in d40_desc_free()
801 static void d40_desc_submit(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_submit() argument
803 list_add_tail(&desc->node, &d40c->active); in d40_desc_submit()
823 static void d40_desc_done(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_done() argument
825 list_add_tail(&desc->node, &d40c->done); in d40_desc_done()
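
d40_desc_get()/d40_desc_free() above implement a recycle-or-allocate scheme: a completed descriptor parked on the client list is reused when possible, otherwise a fresh one comes from the descriptor slab. A reduced sketch of that pattern; in the real driver the reuse check is based on the async_tx ack state, here a plain flag stands in for it:

#include <linux/slab.h>
#include <linux/list.h>
#include <linux/string.h>

struct my_desc {
	struct list_head node;
	bool reusable;		/* stand-in for the driver's real reuse test */
};

static struct my_desc *my_desc_get(struct kmem_cache *slab,
				   struct list_head *client)
{
	struct my_desc *d, *_d;

	/* first try to recycle a descriptor the client has finished with */
	list_for_each_entry_safe(d, _d, client, node) {
		if (d->reusable) {
			list_del(&d->node);
			memset(d, 0, sizeof(*d));
			INIT_LIST_HEAD(&d->node);
			return d;
		}
	}

	/* nothing to recycle: allocate a zeroed descriptor, atomically */
	return kmem_cache_zalloc(slab, GFP_NOWAIT);
}
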
940 static void d40_desc_load(struct d40_chan *d40c, struct d40_desc *d40d) in d40_desc_load() argument
942 if (chan_is_physical(d40c)) { in d40_desc_load()
943 d40_phy_lli_load(d40c, d40d); in d40_desc_load()
946 d40_log_lli_to_lcxa(d40c, d40d); in d40_desc_load()
949 static struct d40_desc *d40_first_active_get(struct d40_chan *d40c) in d40_first_active_get() argument
951 return list_first_entry_or_null(&d40c->active, struct d40_desc, node); in d40_first_active_get()
955 static void d40_desc_queue(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_queue() argument
959 list_add_tail(&desc->node, &d40c->pending_queue); in d40_desc_queue()
962 static struct d40_desc *d40_first_pending(struct d40_chan *d40c) in d40_first_pending() argument
964 return list_first_entry_or_null(&d40c->pending_queue, struct d40_desc, in d40_first_pending()
968 static struct d40_desc *d40_first_queued(struct d40_chan *d40c) in d40_first_queued() argument
970 return list_first_entry_or_null(&d40c->queue, struct d40_desc, node); in d40_first_queued()
973 static struct d40_desc *d40_first_done(struct d40_chan *d40c) in d40_first_done() argument
975 return list_first_entry_or_null(&d40c->done, struct d40_desc, node); in d40_first_done()
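
The d40_first_*() accessors above are all the same one-liner over different lists: list_first_entry_or_null() returns the head descriptor, or NULL if the list is empty. Generic sketch with made-up names:

#include <linux/list.h>

struct my_xfer_desc {
	struct list_head node;
	/* ... payload ... */
};

static struct my_xfer_desc *my_first_queued(struct list_head *queue)
{
	return list_first_entry_or_null(queue, struct my_xfer_desc, node);
}
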
1038 static int __d40_execute_command_phy(struct d40_chan *d40c, in __d40_execute_command_phy() argument
1049 ret = __d40_execute_command_phy(d40c, D40_DMA_SUSPEND_REQ); in __d40_execute_command_phy()
1054 spin_lock_irqsave(&d40c->base->execmd_lock, flags); in __d40_execute_command_phy()
1056 if (d40c->phy_chan->num % 2 == 0) in __d40_execute_command_phy()
1057 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in __d40_execute_command_phy()
1059 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in __d40_execute_command_phy()
1063 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_phy()
1064 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_phy()
1070 wmask = 0xffffffff & ~(D40_CHAN_POS_MASK(d40c->phy_chan->num)); in __d40_execute_command_phy()
1071 writel(wmask | (command << D40_CHAN_POS(d40c->phy_chan->num)), in __d40_execute_command_phy()
1078 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_phy()
1079 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_phy()
1094 chan_err(d40c, in __d40_execute_command_phy()
1096 d40c->phy_chan->num, d40c->log_num, in __d40_execute_command_phy()
1104 spin_unlock_irqrestore(&d40c->base->execmd_lock, flags); in __d40_execute_command_phy()
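
__d40_execute_command_phy() above reads and writes a per-channel command/status field packed into the shared ACTIVE/ACTIVO registers via D40_CHAN_POS()/D40_CHAN_POS_MASK(). The sketch below shows the same packed-field access with made-up position macros; the 2-bit field width and the exact bit layout are assumptions for illustration, not the driver's real register map:

#include <linux/io.h>

#define MY_CHAN_POS(n)		(2 * ((n) % 16))	/* assumed 2 bits per channel */
#define MY_CHAN_POS_MASK(n)	(0x3u << MY_CHAN_POS(n))

static u32 my_read_chan_status(void __iomem *active_reg, int n)
{
	return (readl(active_reg) & MY_CHAN_POS_MASK(n)) >> MY_CHAN_POS(n);
}

static void my_issue_chan_command(void __iomem *active_reg, int n, u32 command)
{
	/* as in the listing: the other channels' fields are written as ones,
	 * which the hardware apparently treats as "no command" */
	u32 wmask = 0xffffffff & ~MY_CHAN_POS_MASK(n);

	writel(wmask | (command << MY_CHAN_POS(n)), active_reg);
}
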
1108 static void d40_term_all(struct d40_chan *d40c) in d40_term_all() argument
1114 while ((d40d = d40_first_done(d40c))) { in d40_term_all()
1116 d40_desc_free(d40c, d40d); in d40_term_all()
1120 while ((d40d = d40_first_active_get(d40c))) { in d40_term_all()
1122 d40_desc_free(d40c, d40d); in d40_term_all()
1126 while ((d40d = d40_first_queued(d40c))) { in d40_term_all()
1128 d40_desc_free(d40c, d40d); in d40_term_all()
1132 while ((d40d = d40_first_pending(d40c))) { in d40_term_all()
1134 d40_desc_free(d40c, d40d); in d40_term_all()
1138 if (!list_empty(&d40c->client)) in d40_term_all()
1139 list_for_each_entry_safe(d40d, _d, &d40c->client, node) { in d40_term_all()
1141 d40_desc_free(d40c, d40d); in d40_term_all()
1145 if (!list_empty(&d40c->prepare_queue)) in d40_term_all()
1147 &d40c->prepare_queue, node) { in d40_term_all()
1149 d40_desc_free(d40c, d40d); in d40_term_all()
1152 d40c->pending_tx = 0; in d40_term_all()
1155 static void __d40_config_set_event(struct d40_chan *d40c, in __d40_config_set_event() argument
1159 void __iomem *addr = chan_base(d40c) + reg; in __d40_config_set_event()
1199 chan_err(d40c, in __d40_config_set_event()
1201 "status %x\n", d40c->phy_chan->num, in __d40_config_set_event()
1202 d40c->log_num, status); in __d40_config_set_event()
1223 dev_dbg(chan2dev(d40c), in __d40_config_set_event()
1238 static void d40_config_set_event(struct d40_chan *d40c, in d40_config_set_event() argument
1241 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_config_set_event()
1244 if ((d40c->dma_cfg.dir == DMA_DEV_TO_MEM) || in d40_config_set_event()
1245 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_config_set_event()
1246 __d40_config_set_event(d40c, event_type, event, in d40_config_set_event()
1249 if (d40c->dma_cfg.dir != DMA_DEV_TO_MEM) in d40_config_set_event()
1250 __d40_config_set_event(d40c, event_type, event, in d40_config_set_event()
1254 static u32 d40_chan_has_events(struct d40_chan *d40c) in d40_chan_has_events() argument
1256 void __iomem *chanbase = chan_base(d40c); in d40_chan_has_events()
1266 __d40_execute_command_log(struct d40_chan *d40c, enum d40_command command) in __d40_execute_command_log() argument
1273 if (d40c->phy_chan->num % 2 == 0) in __d40_execute_command_log()
1274 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in __d40_execute_command_log()
1276 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in __d40_execute_command_log()
1279 spin_lock_irqsave(&d40c->phy_chan->lock, flags); in __d40_execute_command_log()
1286 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_log()
1287 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_log()
1290 d40_config_set_event(d40c, D40_SUSPEND_REQ_EVENTLINE); in __d40_execute_command_log()
1292 d40_config_set_event(d40c, D40_DEACTIVATE_EVENTLINE); in __d40_execute_command_log()
1294 if (!d40_chan_has_events(d40c) && (command == D40_DMA_STOP)) in __d40_execute_command_log()
1295 ret = __d40_execute_command_phy(d40c, command); in __d40_execute_command_log()
1301 d40_config_set_event(d40c, D40_ACTIVATE_EVENTLINE); in __d40_execute_command_log()
1302 ret = __d40_execute_command_phy(d40c, command); in __d40_execute_command_log()
1310 spin_unlock_irqrestore(&d40c->phy_chan->lock, flags); in __d40_execute_command_log()
1314 static int d40_channel_execute_command(struct d40_chan *d40c, in d40_channel_execute_command() argument
1317 if (chan_is_logical(d40c)) in d40_channel_execute_command()
1318 return __d40_execute_command_log(d40c, command); in d40_channel_execute_command()
1320 return __d40_execute_command_phy(d40c, command); in d40_channel_execute_command()
1323 static u32 d40_get_prmo(struct d40_chan *d40c) in d40_get_prmo() argument
1342 if (chan_is_physical(d40c)) in d40_get_prmo()
1343 return phy_map[d40c->dma_cfg.mode_opt]; in d40_get_prmo()
1345 return log_map[d40c->dma_cfg.mode_opt]; in d40_get_prmo()
1348 static void d40_config_write(struct d40_chan *d40c) in d40_config_write() argument
1354 addr_base = (d40c->phy_chan->num % 2) * 4; in d40_config_write()
1356 var = ((u32)(chan_is_logical(d40c)) + 1) << in d40_config_write()
1357 D40_CHAN_POS(d40c->phy_chan->num); in d40_config_write()
1358 writel(var, d40c->base->virtbase + D40_DREG_PRMSE + addr_base); in d40_config_write()
1361 var = d40_get_prmo(d40c) << D40_CHAN_POS(d40c->phy_chan->num); in d40_config_write()
1363 writel(var, d40c->base->virtbase + D40_DREG_PRMOE + addr_base); in d40_config_write()
1365 if (chan_is_logical(d40c)) { in d40_config_write()
1366 int lidx = (d40c->phy_chan->num << D40_SREG_ELEM_LOG_LIDX_POS) in d40_config_write()
1368 void __iomem *chanbase = chan_base(d40c); in d40_config_write()
1371 writel(d40c->src_def_cfg, chanbase + D40_CHAN_REG_SSCFG); in d40_config_write()
1372 writel(d40c->dst_def_cfg, chanbase + D40_CHAN_REG_SDCFG); in d40_config_write()
1384 static u32 d40_residue(struct d40_chan *d40c) in d40_residue() argument
1388 if (chan_is_logical(d40c)) in d40_residue()
1389 num_elt = (readl(&d40c->lcpa->lcsp2) & D40_MEM_LCSP2_ECNT_MASK) in d40_residue()
1392 u32 val = readl(chan_base(d40c) + D40_CHAN_REG_SDELT); in d40_residue()
1397 return num_elt * d40c->dma_cfg.dst_info.data_width; in d40_residue()
1400 static bool d40_tx_is_linked(struct d40_chan *d40c) in d40_tx_is_linked() argument
1404 if (chan_is_logical(d40c)) in d40_tx_is_linked()
1405 is_link = readl(&d40c->lcpa->lcsp3) & D40_MEM_LCSP3_DLOS_MASK; in d40_tx_is_linked()
1407 is_link = readl(chan_base(d40c) + D40_CHAN_REG_SDLNK) in d40_tx_is_linked()
1415 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_pause() local
1419 if (d40c->phy_chan == NULL) { in d40_pause()
1420 chan_err(d40c, "Channel is not allocated!\n"); in d40_pause()
1424 if (!d40c->busy) in d40_pause()
1427 spin_lock_irqsave(&d40c->lock, flags); in d40_pause()
1428 pm_runtime_get_sync(d40c->base->dev); in d40_pause()
1430 res = d40_channel_execute_command(d40c, D40_DMA_SUSPEND_REQ); in d40_pause()
1432 pm_runtime_mark_last_busy(d40c->base->dev); in d40_pause()
1433 pm_runtime_put_autosuspend(d40c->base->dev); in d40_pause()
1434 spin_unlock_irqrestore(&d40c->lock, flags); in d40_pause()
1440 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_resume() local
1444 if (d40c->phy_chan == NULL) { in d40_resume()
1445 chan_err(d40c, "Channel is not allocated!\n"); in d40_resume()
1449 if (!d40c->busy) in d40_resume()
1452 spin_lock_irqsave(&d40c->lock, flags); in d40_resume()
1453 pm_runtime_get_sync(d40c->base->dev); in d40_resume()
1456 if (d40_residue(d40c) || d40_tx_is_linked(d40c)) in d40_resume()
1457 res = d40_channel_execute_command(d40c, D40_DMA_RUN); in d40_resume()
1459 pm_runtime_mark_last_busy(d40c->base->dev); in d40_resume()
1460 pm_runtime_put_autosuspend(d40c->base->dev); in d40_resume()
1461 spin_unlock_irqrestore(&d40c->lock, flags); in d40_resume()
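
d40_pause()/d40_resume() above wrap the channel command in the channel spinlock and a runtime-PM reference, then drop the reference with the autosuspend helpers. A stripped-down sketch of that bracket; the command itself is stubbed out, and calling pm_runtime_get_sync() under a spinlock assumes the device has been marked IRQ-safe for runtime PM:

#include <linux/pm_runtime.h>
#include <linux/spinlock.h>

static int my_pause_channel(struct device *dev, spinlock_t *lock)
{
	unsigned long flags;
	int res;

	spin_lock_irqsave(lock, flags);
	pm_runtime_get_sync(dev);		/* ensure the controller is powered */

	res = 0;				/* issue the suspend request here */

	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);	/* allow autosuspend once idle again */
	spin_unlock_irqrestore(lock, flags);

	return res;
}
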
1467 struct d40_chan *d40c = container_of(tx->chan, in d40_tx_submit() local
1474 spin_lock_irqsave(&d40c->lock, flags); in d40_tx_submit()
1476 d40_desc_queue(d40c, d40d); in d40_tx_submit()
1477 spin_unlock_irqrestore(&d40c->lock, flags); in d40_tx_submit()
1482 static int d40_start(struct d40_chan *d40c) in d40_start() argument
1484 return d40_channel_execute_command(d40c, D40_DMA_RUN); in d40_start()
1487 static struct d40_desc *d40_queue_start(struct d40_chan *d40c) in d40_queue_start() argument
1493 d40d = d40_first_queued(d40c); in d40_queue_start()
1496 if (!d40c->busy) { in d40_queue_start()
1497 d40c->busy = true; in d40_queue_start()
1498 pm_runtime_get_sync(d40c->base->dev); in d40_queue_start()
1505 d40_desc_submit(d40c, d40d); in d40_queue_start()
1508 d40_desc_load(d40c, d40d); in d40_queue_start()
1511 err = d40_start(d40c); in d40_queue_start()
1521 static void dma_tc_handle(struct d40_chan *d40c) in dma_tc_handle() argument
1526 d40d = d40_first_active_get(d40c); in dma_tc_handle()
1539 && !d40_tx_is_linked(d40c) in dma_tc_handle()
1540 && !d40_residue(d40c)) { in dma_tc_handle()
1541 d40_lcla_free_all(d40c, d40d); in dma_tc_handle()
1542 d40_desc_load(d40c, d40d); in dma_tc_handle()
1543 (void) d40_start(d40c); in dma_tc_handle()
1549 d40_lcla_free_all(d40c, d40d); in dma_tc_handle()
1552 d40_desc_load(d40c, d40d); in dma_tc_handle()
1554 (void) d40_start(d40c); in dma_tc_handle()
1558 if (d40_queue_start(d40c) == NULL) { in dma_tc_handle()
1559 d40c->busy = false; in dma_tc_handle()
1561 pm_runtime_mark_last_busy(d40c->base->dev); in dma_tc_handle()
1562 pm_runtime_put_autosuspend(d40c->base->dev); in dma_tc_handle()
1566 d40_desc_done(d40c, d40d); in dma_tc_handle()
1569 d40c->pending_tx++; in dma_tc_handle()
1570 tasklet_schedule(&d40c->tasklet); in dma_tc_handle()
1576 struct d40_chan *d40c = from_tasklet(d40c, t, tasklet); in dma_tasklet() local
1582 spin_lock_irqsave(&d40c->lock, flags); in dma_tasklet()
1585 d40d = d40_first_done(d40c); in dma_tasklet()
1588 d40d = d40_first_active_get(d40c); in dma_tasklet()
1600 if (d40c->pending_tx == 0) { in dma_tasklet()
1601 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
1612 d40_desc_free(d40c, d40d); in dma_tasklet()
1615 d40_lcla_free_all(d40c, d40d); in dma_tasklet()
1616 list_add_tail(&d40d->node, &d40c->client); in dma_tasklet()
1621 d40c->pending_tx--; in dma_tasklet()
1623 if (d40c->pending_tx) in dma_tasklet()
1624 tasklet_schedule(&d40c->tasklet); in dma_tasklet()
1626 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
1634 if (d40c->pending_tx > 0) in dma_tasklet()
1635 d40c->pending_tx--; in dma_tasklet()
1636 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
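
dma_tasklet() above recovers its channel with from_tasklet() and reschedules itself while pending_tx work remains. A minimal sketch of the tasklet_setup()/from_tasklet() pairing with made-up names; the real handler does its bookkeeping under the channel lock, omitted here:

#include <linux/interrupt.h>

struct my_chan {
	struct tasklet_struct tasklet;
	int pending_tx;
};

static void my_tasklet_fn(struct tasklet_struct *t)
{
	struct my_chan *c = from_tasklet(c, t, tasklet);

	/* ... complete one descriptor, run its callback ... */

	if (c->pending_tx)
		c->pending_tx--;
	if (c->pending_tx)		/* more completions queued up */
		tasklet_schedule(&c->tasklet);
}

static void my_chan_setup(struct my_chan *c)
{
	c->pending_tx = 0;
	tasklet_setup(&c->tasklet, my_tasklet_fn);
}
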
1645 struct d40_chan *d40c; in d40_handle_interrupt() local
1671 d40c = base->lookup_phy_chans[idx]; in d40_handle_interrupt()
1673 d40c = base->lookup_log_chans[il[row].offset + idx]; in d40_handle_interrupt()
1675 if (!d40c) { in d40_handle_interrupt()
1686 spin_lock(&d40c->lock); in d40_handle_interrupt()
1689 dma_tc_handle(d40c); in d40_handle_interrupt()
1694 spin_unlock(&d40c->lock); in d40_handle_interrupt()
1702 static int d40_validate_conf(struct d40_chan *d40c, in d40_validate_conf() argument
1709 chan_err(d40c, "Invalid direction.\n"); in d40_validate_conf()
1713 if ((is_log && conf->dev_type > d40c->base->num_log_chans) || in d40_validate_conf()
1714 (!is_log && conf->dev_type > d40c->base->num_phy_chans) || in d40_validate_conf()
1716 chan_err(d40c, "Invalid device type (%d)\n", conf->dev_type); in d40_validate_conf()
1725 chan_err(d40c, "periph to periph not supported\n"); in d40_validate_conf()
1738 chan_err(d40c, "src (burst x width) != dst (burst x width)\n"); in d40_validate_conf()
1832 static int d40_allocate_channel(struct d40_chan *d40c, bool *first_phy_user) in d40_allocate_channel() argument
1834 int dev_type = d40c->dma_cfg.dev_type; in d40_allocate_channel()
1843 bool is_log = d40c->dma_cfg.mode == STEDMA40_MODE_LOGICAL; in d40_allocate_channel()
1845 phys = d40c->base->phy_res; in d40_allocate_channel()
1846 num_phy_chans = d40c->base->num_phy_chans; in d40_allocate_channel()
1848 if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) { in d40_allocate_channel()
1851 } else if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_allocate_channel()
1852 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_allocate_channel()
1863 if (d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_allocate_channel()
1865 if (d40c->dma_cfg.use_fixed_channel) { in d40_allocate_channel()
1866 i = d40c->dma_cfg.phy_channel; in d40_allocate_channel()
1880 for (j = 0; j < d40c->base->num_phy_chans; j += 8) { in d40_allocate_channel()
1893 d40c->phy_chan = &phys[i]; in d40_allocate_channel()
1894 d40c->log_num = D40_PHY_CHAN; in d40_allocate_channel()
1901 for (j = 0; j < d40c->base->num_phy_chans; j += 8) { in d40_allocate_channel()
1904 if (d40c->dma_cfg.use_fixed_channel) { in d40_allocate_channel()
1905 i = d40c->dma_cfg.phy_channel; in d40_allocate_channel()
1908 dev_err(chan2dev(d40c), in d40_allocate_channel()
1917 dev_err(chan2dev(d40c), in d40_allocate_channel()
1946 d40c->phy_chan = &phys[i]; in d40_allocate_channel()
1947 d40c->log_num = log_num; in d40_allocate_channel()
1951 d40c->base->lookup_log_chans[d40c->log_num] = d40c; in d40_allocate_channel()
1953 d40c->base->lookup_phy_chans[d40c->phy_chan->num] = d40c; in d40_allocate_channel()
1959 static int d40_config_memcpy(struct d40_chan *d40c) in d40_config_memcpy() argument
1961 dma_cap_mask_t cap = d40c->chan.device->cap_mask; in d40_config_memcpy()
1964 d40c->dma_cfg = dma40_memcpy_conf_log; in d40_config_memcpy()
1965 d40c->dma_cfg.dev_type = dma40_memcpy_channels[d40c->chan.chan_id]; in d40_config_memcpy()
1967 d40_log_cfg(&d40c->dma_cfg, in d40_config_memcpy()
1968 &d40c->log_def.lcsp1, &d40c->log_def.lcsp3); in d40_config_memcpy()
1972 d40c->dma_cfg = dma40_memcpy_conf_phy; in d40_config_memcpy()
1975 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_TIM_POS); in d40_config_memcpy()
1978 d40c->src_def_cfg |= BIT(D40_SREG_CFG_EIM_POS); in d40_config_memcpy()
1979 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_EIM_POS); in d40_config_memcpy()
1982 chan_err(d40c, "No memcpy\n"); in d40_config_memcpy()
1989 static int d40_free_dma(struct d40_chan *d40c) in d40_free_dma() argument
1993 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_free_dma()
1994 struct d40_phy_res *phy = d40c->phy_chan; in d40_free_dma()
1998 d40_term_all(d40c); in d40_free_dma()
2001 chan_err(d40c, "phy == null\n"); in d40_free_dma()
2007 chan_err(d40c, "channel already free\n"); in d40_free_dma()
2011 if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_free_dma()
2012 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) in d40_free_dma()
2014 else if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) in d40_free_dma()
2017 chan_err(d40c, "Unknown direction\n"); in d40_free_dma()
2021 pm_runtime_get_sync(d40c->base->dev); in d40_free_dma()
2022 res = d40_channel_execute_command(d40c, D40_DMA_STOP); in d40_free_dma()
2024 chan_err(d40c, "stop failed\n"); in d40_free_dma()
2028 d40_alloc_mask_free(phy, is_src, chan_is_logical(d40c) ? event : 0); in d40_free_dma()
2030 if (chan_is_logical(d40c)) in d40_free_dma()
2031 d40c->base->lookup_log_chans[d40c->log_num] = NULL; in d40_free_dma()
2033 d40c->base->lookup_phy_chans[phy->num] = NULL; in d40_free_dma()
2035 if (d40c->busy) { in d40_free_dma()
2036 pm_runtime_mark_last_busy(d40c->base->dev); in d40_free_dma()
2037 pm_runtime_put_autosuspend(d40c->base->dev); in d40_free_dma()
2040 d40c->busy = false; in d40_free_dma()
2041 d40c->phy_chan = NULL; in d40_free_dma()
2042 d40c->configured = false; in d40_free_dma()
2044 pm_runtime_mark_last_busy(d40c->base->dev); in d40_free_dma()
2045 pm_runtime_put_autosuspend(d40c->base->dev); in d40_free_dma()
2049 static bool d40_is_paused(struct d40_chan *d40c) in d40_is_paused() argument
2051 void __iomem *chanbase = chan_base(d40c); in d40_is_paused()
2056 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_is_paused()
2058 spin_lock_irqsave(&d40c->lock, flags); in d40_is_paused()
2060 if (chan_is_physical(d40c)) { in d40_is_paused()
2061 if (d40c->phy_chan->num % 2 == 0) in d40_is_paused()
2062 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in d40_is_paused()
2064 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in d40_is_paused()
2067 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in d40_is_paused()
2068 D40_CHAN_POS(d40c->phy_chan->num); in d40_is_paused()
2074 if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_is_paused()
2075 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_is_paused()
2077 } else if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) { in d40_is_paused()
2080 chan_err(d40c, "Unknown direction\n"); in d40_is_paused()
2090 spin_unlock_irqrestore(&d40c->lock, flags); in d40_is_paused()
2097 struct d40_chan *d40c = in stedma40_residue() local
2102 spin_lock_irqsave(&d40c->lock, flags); in stedma40_residue()
2103 bytes_left = d40_residue(d40c); in stedma40_residue()
2104 spin_unlock_irqrestore(&d40c->lock, flags); in stedma40_residue()
2275 struct d40_chan *d40c = in stedma40_filter() local
2280 err = d40_validate_conf(d40c, info); in stedma40_filter()
2282 d40c->dma_cfg = *info; in stedma40_filter()
2284 err = d40_config_memcpy(d40c); in stedma40_filter()
2287 d40c->configured = true; in stedma40_filter()
2293 static void __d40_set_prio_rt(struct d40_chan *d40c, int dev_type, bool src) in __d40_set_prio_rt() argument
2295 bool realtime = d40c->dma_cfg.realtime; in __d40_set_prio_rt()
2296 bool highprio = d40c->dma_cfg.high_priority; in __d40_set_prio_rt()
2302 struct d40_gen_dmac *dmac = &d40c->base->gen_dmac; in __d40_set_prio_rt()
2313 if (!src && chan_is_logical(d40c)) in __d40_set_prio_rt()
2322 writel(bit, d40c->base->virtbase + prioreg + group * 4); in __d40_set_prio_rt()
2323 writel(bit, d40c->base->virtbase + rtreg + group * 4); in __d40_set_prio_rt()
2326 static void d40_set_prio_realtime(struct d40_chan *d40c) in d40_set_prio_realtime() argument
2328 if (d40c->base->rev < 3) in d40_set_prio_realtime()
2331 if ((d40c->dma_cfg.dir == DMA_DEV_TO_MEM) || in d40_set_prio_realtime()
2332 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_set_prio_realtime()
2333 __d40_set_prio_rt(d40c, d40c->dma_cfg.dev_type, true); in d40_set_prio_realtime()
2335 if ((d40c->dma_cfg.dir == DMA_MEM_TO_DEV) || in d40_set_prio_realtime()
2336 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_set_prio_realtime()
2337 __d40_set_prio_rt(d40c, d40c->dma_cfg.dev_type, false); in d40_set_prio_realtime()
2393 struct d40_chan *d40c = in d40_alloc_chan_resources() local
2396 spin_lock_irqsave(&d40c->lock, flags); in d40_alloc_chan_resources()
2401 if (!d40c->configured) { in d40_alloc_chan_resources()
2402 err = d40_config_memcpy(d40c); in d40_alloc_chan_resources()
2404 chan_err(d40c, "Failed to configure memcpy channel\n"); in d40_alloc_chan_resources()
2409 err = d40_allocate_channel(d40c, &is_free_phy); in d40_alloc_chan_resources()
2411 chan_err(d40c, "Failed to allocate channel\n"); in d40_alloc_chan_resources()
2412 d40c->configured = false; in d40_alloc_chan_resources()
2416 pm_runtime_get_sync(d40c->base->dev); in d40_alloc_chan_resources()
2418 d40_set_prio_realtime(d40c); in d40_alloc_chan_resources()
2420 if (chan_is_logical(d40c)) { in d40_alloc_chan_resources()
2421 if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) in d40_alloc_chan_resources()
2422 d40c->lcpa = d40c->base->lcpa_base + in d40_alloc_chan_resources()
2423 d40c->dma_cfg.dev_type * D40_LCPA_CHAN_SIZE; in d40_alloc_chan_resources()
2425 d40c->lcpa = d40c->base->lcpa_base + in d40_alloc_chan_resources()
2426 d40c->dma_cfg.dev_type * in d40_alloc_chan_resources()
2430 d40c->src_def_cfg |= BIT(D40_SREG_CFG_LOG_GIM_POS); in d40_alloc_chan_resources()
2431 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_LOG_GIM_POS); in d40_alloc_chan_resources()
2434 dev_dbg(chan2dev(d40c), "allocated %s channel (phy %d%s)\n", in d40_alloc_chan_resources()
2435 chan_is_logical(d40c) ? "logical" : "physical", in d40_alloc_chan_resources()
2436 d40c->phy_chan->num, in d40_alloc_chan_resources()
2437 d40c->dma_cfg.use_fixed_channel ? ", fixed" : ""); in d40_alloc_chan_resources()
2446 d40_config_write(d40c); in d40_alloc_chan_resources()
2448 pm_runtime_mark_last_busy(d40c->base->dev); in d40_alloc_chan_resources()
2449 pm_runtime_put_autosuspend(d40c->base->dev); in d40_alloc_chan_resources()
2450 spin_unlock_irqrestore(&d40c->lock, flags); in d40_alloc_chan_resources()
2456 struct d40_chan *d40c = in d40_free_chan_resources() local
2461 if (d40c->phy_chan == NULL) { in d40_free_chan_resources()
2462 chan_err(d40c, "Cannot free unallocated channel\n"); in d40_free_chan_resources()
2466 spin_lock_irqsave(&d40c->lock, flags); in d40_free_chan_resources()
2468 err = d40_free_dma(d40c); in d40_free_chan_resources()
2471 chan_err(d40c, "Failed to free channel\n"); in d40_free_chan_resources()
2472 spin_unlock_irqrestore(&d40c->lock, flags); in d40_free_chan_resources()
2542 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_tx_status() local
2545 if (d40c->phy_chan == NULL) { in d40_tx_status()
2546 chan_err(d40c, "Cannot read status of unallocated channel\n"); in d40_tx_status()
2554 if (d40_is_paused(d40c)) in d40_tx_status()
2562 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_issue_pending() local
2565 if (d40c->phy_chan == NULL) { in d40_issue_pending()
2566 chan_err(d40c, "Channel is not allocated!\n"); in d40_issue_pending()
2570 spin_lock_irqsave(&d40c->lock, flags); in d40_issue_pending()
2572 list_splice_tail_init(&d40c->pending_queue, &d40c->queue); in d40_issue_pending()
2575 if (!d40c->busy) in d40_issue_pending()
2576 (void) d40_queue_start(d40c); in d40_issue_pending()
2578 spin_unlock_irqrestore(&d40c->lock, flags); in d40_issue_pending()
2584 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_terminate_all() local
2587 if (d40c->phy_chan == NULL) { in d40_terminate_all()
2588 chan_err(d40c, "Channel is not allocated!\n"); in d40_terminate_all()
2592 spin_lock_irqsave(&d40c->lock, flags); in d40_terminate_all()
2594 pm_runtime_get_sync(d40c->base->dev); in d40_terminate_all()
2595 ret = d40_channel_execute_command(d40c, D40_DMA_STOP); in d40_terminate_all()
2597 chan_err(d40c, "Failed to stop channel\n"); in d40_terminate_all()
2599 d40_term_all(d40c); in d40_terminate_all()
2600 pm_runtime_mark_last_busy(d40c->base->dev); in d40_terminate_all()
2601 pm_runtime_put_autosuspend(d40c->base->dev); in d40_terminate_all()
2602 if (d40c->busy) { in d40_terminate_all()
2603 pm_runtime_mark_last_busy(d40c->base->dev); in d40_terminate_all()
2604 pm_runtime_put_autosuspend(d40c->base->dev); in d40_terminate_all()
2606 d40c->busy = false; in d40_terminate_all()
2608 spin_unlock_irqrestore(&d40c->lock, flags); in d40_terminate_all()
2613 dma40_config_to_halfchannel(struct d40_chan *d40c, in dma40_config_to_halfchannel() argument
2619 if (chan_is_logical(d40c)) { in dma40_config_to_halfchannel()
2648 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_set_runtime_config() local
2650 memcpy(&d40c->slave_config, config, sizeof(*config)); in d40_set_runtime_config()
2660 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_set_runtime_config_write() local
2661 struct stedma40_chan_cfg *cfg = &d40c->dma_cfg; in d40_set_runtime_config_write()
2667 if (d40c->phy_chan == NULL) { in d40_set_runtime_config_write()
2668 chan_err(d40c, "Channel is not allocated!\n"); in d40_set_runtime_config_write()
2681 dev_dbg(d40c->base->dev, in d40_set_runtime_config_write()
2697 dev_dbg(d40c->base->dev, in d40_set_runtime_config_write()
2709 dev_err(d40c->base->dev, in d40_set_runtime_config_write()
2716 dev_err(d40c->base->dev, "no address supplied\n"); in d40_set_runtime_config_write()
2721 dev_err(d40c->base->dev, in d40_set_runtime_config_write()
2750 ret = dma40_config_to_halfchannel(d40c, &cfg->src_info, in d40_set_runtime_config_write()
2755 ret = dma40_config_to_halfchannel(d40c, &cfg->dst_info, in d40_set_runtime_config_write()
2761 if (chan_is_logical(d40c)) in d40_set_runtime_config_write()
2762 d40_log_cfg(cfg, &d40c->log_def.lcsp1, &d40c->log_def.lcsp3); in d40_set_runtime_config_write()
2764 d40_phy_cfg(cfg, &d40c->src_def_cfg, &d40c->dst_def_cfg); in d40_set_runtime_config_write()
2767 d40c->runtime_addr = config_addr; in d40_set_runtime_config_write()
2768 d40c->runtime_direction = direction; in d40_set_runtime_config_write()
2769 dev_dbg(d40c->base->dev, in d40_set_runtime_config_write()
2787 struct d40_chan *d40c; in d40_chan_init() local
2792 d40c = &chans[i]; in d40_chan_init()
2793 d40c->base = base; in d40_chan_init()
2794 d40c->chan.device = dma; in d40_chan_init()
2796 spin_lock_init(&d40c->lock); in d40_chan_init()
2798 d40c->log_num = D40_PHY_CHAN; in d40_chan_init()
2800 INIT_LIST_HEAD(&d40c->done); in d40_chan_init()
2801 INIT_LIST_HEAD(&d40c->active); in d40_chan_init()
2802 INIT_LIST_HEAD(&d40c->queue); in d40_chan_init()
2803 INIT_LIST_HEAD(&d40c->pending_queue); in d40_chan_init()
2804 INIT_LIST_HEAD(&d40c->client); in d40_chan_init()
2805 INIT_LIST_HEAD(&d40c->prepare_queue); in d40_chan_init()
2807 tasklet_setup(&d40c->tasklet, dma_tasklet); in d40_chan_init()
2809 list_add_tail(&d40c->chan.device_node, in d40_chan_init()
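
d40_chan_init() above follows the standard dmaengine setup: initialize the per-channel lock, lists and tasklet, then hang the channel on the dma_device's channel list via chan.device_node. A sketch with the driver-private lists trimmed down to one and illustrative names:

#include <linux/dmaengine.h>
#include <linux/spinlock.h>
#include <linux/interrupt.h>

struct my_dma_chan {
	struct dma_chan chan;
	spinlock_t lock;
	struct list_head pending_queue;
	struct tasklet_struct tasklet;
};

static void my_noop_tasklet(struct tasklet_struct *t) { }

static void my_dma_chan_init(struct dma_device *dma, struct my_dma_chan *c)
{
	spin_lock_init(&c->lock);
	INIT_LIST_HEAD(&c->pending_queue);
	tasklet_setup(&c->tasklet, my_noop_tasklet);

	c->chan.device = dma;
	list_add_tail(&c->chan.device_node, &dma->channels);
}
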