Lines matching refs: tx_chn

115 		bool tx_chn)  in of_k3_udma_glue_parse_chn()  argument
155 if (tx_chn && !(thread_id & K3_PSIL_DST_THREAD_ID_OFFSET)) { in of_k3_udma_glue_parse_chn()
160 if (!tx_chn && (thread_id & K3_PSIL_DST_THREAD_ID_OFFSET)) { in of_k3_udma_glue_parse_chn()
178 if (tx_chn) in of_k3_udma_glue_parse_chn()
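
A note on the two direction checks above: a TX channel moves data from memory
to a peripheral, so its remote PSI-L endpoint must be a destination thread
(thread ID carries the offset bit), while an RX channel must point at a source
thread. A minimal sketch of that predicate, assuming the
K3_PSIL_DST_THREAD_ID_OFFSET definition from include/linux/dma/k3-psil.h; the
helper name is hypothetical:

	#include <linux/dma/k3-psil.h>	/* K3_PSIL_DST_THREAD_ID_OFFSET */

	/* Hypothetical helper mirroring the checks at lines 155/160. */
	static bool my_thread_id_matches_dir(u32 thread_id, bool tx_chn)
	{
		bool is_dst = thread_id & K3_PSIL_DST_THREAD_ID_OFFSET;

		return tx_chn ? is_dst : !is_dst;
	}
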
188 static void k3_udma_glue_dump_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_dump_tx_chn() argument
190 struct device *dev = tx_chn->common.dev; in k3_udma_glue_dump_tx_chn()
196 tx_chn->udma_tchan_id, in k3_udma_glue_dump_tx_chn()
197 tx_chn->common.src_thread, in k3_udma_glue_dump_tx_chn()
198 tx_chn->common.dst_thread); in k3_udma_glue_dump_tx_chn()
220 static int k3_udma_glue_cfg_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_cfg_tx_chn() argument
222 const struct udma_tisci_rm *tisci_rm = tx_chn->common.tisci_rm; in k3_udma_glue_cfg_tx_chn()
236 req.index = tx_chn->udma_tchan_id; in k3_udma_glue_cfg_tx_chn()
237 if (tx_chn->tx_pause_on_err) in k3_udma_glue_cfg_tx_chn()
239 if (tx_chn->tx_filt_einfo) in k3_udma_glue_cfg_tx_chn()
241 if (tx_chn->tx_filt_pswords) in k3_udma_glue_cfg_tx_chn()
244 if (tx_chn->tx_supr_tdpkt) in k3_udma_glue_cfg_tx_chn()
246 req.tx_fetch_size = tx_chn->common.hdesc_size >> 2; in k3_udma_glue_cfg_tx_chn()
247 req.txcq_qnum = k3_ringacc_get_ring_id(tx_chn->ringtxcq); in k3_udma_glue_cfg_tx_chn()
248 req.tx_atype = tx_chn->common.atype_asel; in k3_udma_glue_cfg_tx_chn()
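
The pause/filter/suppress flags forwarded to TISCI above come verbatim from the
caller's struct k3_udma_glue_tx_channel_cfg, and the fetch size is the
host-descriptor size expressed in 32-bit words (hence the >> 2). A minimal
sketch of the boolean half of that configuration; the values shown are
illustrative defaults, not recommendations:

	#include <linux/dma/k3-udma-glue.h>

	static const struct k3_udma_glue_tx_channel_cfg my_tx_cfg_flags = {
		.tx_pause_on_err = false,	/* keep running on packet errors */
		.tx_filt_einfo	 = false,	/* pass extended packet info through */
		.tx_filt_pswords = false,	/* pass protocol-specific words through */
		.tx_supr_tdpkt	 = false,	/* do deliver the teardown completion */
		.swdata_size	 = sizeof(void *),	/* e.g. room for one token pointer */
	};
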
256 struct k3_udma_glue_tx_channel *tx_chn; in k3_udma_glue_request_tx_chn() local
259 tx_chn = devm_kzalloc(dev, sizeof(*tx_chn), GFP_KERNEL); in k3_udma_glue_request_tx_chn()
260 if (!tx_chn) in k3_udma_glue_request_tx_chn()
263 tx_chn->common.dev = dev; in k3_udma_glue_request_tx_chn()
264 tx_chn->common.swdata_size = cfg->swdata_size; in k3_udma_glue_request_tx_chn()
265 tx_chn->tx_pause_on_err = cfg->tx_pause_on_err; in k3_udma_glue_request_tx_chn()
266 tx_chn->tx_filt_einfo = cfg->tx_filt_einfo; in k3_udma_glue_request_tx_chn()
267 tx_chn->tx_filt_pswords = cfg->tx_filt_pswords; in k3_udma_glue_request_tx_chn()
268 tx_chn->tx_supr_tdpkt = cfg->tx_supr_tdpkt; in k3_udma_glue_request_tx_chn()
272 &tx_chn->common, true); in k3_udma_glue_request_tx_chn()
276 tx_chn->common.hdesc_size = cppi5_hdesc_calc_size(tx_chn->common.epib, in k3_udma_glue_request_tx_chn()
277 tx_chn->common.psdata_size, in k3_udma_glue_request_tx_chn()
278 tx_chn->common.swdata_size); in k3_udma_glue_request_tx_chn()
280 if (xudma_is_pktdma(tx_chn->common.udmax)) in k3_udma_glue_request_tx_chn()
281 tx_chn->udma_tchan_id = tx_chn->common.ep_config->mapped_channel_id; in k3_udma_glue_request_tx_chn()
283 tx_chn->udma_tchan_id = -1; in k3_udma_glue_request_tx_chn()
286 tx_chn->udma_tchanx = xudma_tchan_get(tx_chn->common.udmax, in k3_udma_glue_request_tx_chn()
287 tx_chn->udma_tchan_id); in k3_udma_glue_request_tx_chn()
288 if (IS_ERR(tx_chn->udma_tchanx)) { in k3_udma_glue_request_tx_chn()
289 ret = PTR_ERR(tx_chn->udma_tchanx); in k3_udma_glue_request_tx_chn()
293 tx_chn->udma_tchan_id = xudma_tchan_get_id(tx_chn->udma_tchanx); in k3_udma_glue_request_tx_chn()
295 tx_chn->common.chan_dev.class = &k3_udma_glue_devclass; in k3_udma_glue_request_tx_chn()
296 tx_chn->common.chan_dev.parent = xudma_get_device(tx_chn->common.udmax); in k3_udma_glue_request_tx_chn()
297 dev_set_name(&tx_chn->common.chan_dev, "tchan%d-0x%04x", in k3_udma_glue_request_tx_chn()
298 tx_chn->udma_tchan_id, tx_chn->common.dst_thread); in k3_udma_glue_request_tx_chn()
299 ret = device_register(&tx_chn->common.chan_dev); in k3_udma_glue_request_tx_chn()
302 put_device(&tx_chn->common.chan_dev); in k3_udma_glue_request_tx_chn()
303 tx_chn->common.chan_dev.parent = NULL; in k3_udma_glue_request_tx_chn()
307 if (xudma_is_pktdma(tx_chn->common.udmax)) { in k3_udma_glue_request_tx_chn()
309 tx_chn->common.chan_dev.dma_coherent = true; in k3_udma_glue_request_tx_chn()
310 dma_coerce_mask_and_coherent(&tx_chn->common.chan_dev, in k3_udma_glue_request_tx_chn()
314 atomic_set(&tx_chn->free_pkts, cfg->txcq_cfg.size); in k3_udma_glue_request_tx_chn()
316 if (xudma_is_pktdma(tx_chn->common.udmax)) in k3_udma_glue_request_tx_chn()
317 tx_chn->udma_tflow_id = tx_chn->common.ep_config->default_flow_id; in k3_udma_glue_request_tx_chn()
319 tx_chn->udma_tflow_id = tx_chn->udma_tchan_id; in k3_udma_glue_request_tx_chn()
322 ret = k3_ringacc_request_rings_pair(tx_chn->common.ringacc, in k3_udma_glue_request_tx_chn()
323 tx_chn->udma_tflow_id, -1, in k3_udma_glue_request_tx_chn()
324 &tx_chn->ringtx, in k3_udma_glue_request_tx_chn()
325 &tx_chn->ringtxcq); in k3_udma_glue_request_tx_chn()
332 cfg->tx_cfg.dma_dev = k3_udma_glue_tx_get_dma_device(tx_chn); in k3_udma_glue_request_tx_chn()
336 if (xudma_is_pktdma(tx_chn->common.udmax)) { in k3_udma_glue_request_tx_chn()
337 cfg->tx_cfg.asel = tx_chn->common.atype_asel; in k3_udma_glue_request_tx_chn()
338 cfg->txcq_cfg.asel = tx_chn->common.atype_asel; in k3_udma_glue_request_tx_chn()
341 ret = k3_ringacc_ring_cfg(tx_chn->ringtx, &cfg->tx_cfg); in k3_udma_glue_request_tx_chn()
347 ret = k3_ringacc_ring_cfg(tx_chn->ringtxcq, &cfg->txcq_cfg); in k3_udma_glue_request_tx_chn()
354 tx_chn->common.src_thread = in k3_udma_glue_request_tx_chn()
355 xudma_dev_get_psil_base(tx_chn->common.udmax) + in k3_udma_glue_request_tx_chn()
356 tx_chn->udma_tchan_id; in k3_udma_glue_request_tx_chn()
358 ret = k3_udma_glue_cfg_tx_chn(tx_chn); in k3_udma_glue_request_tx_chn()
364 k3_udma_glue_dump_tx_chn(tx_chn); in k3_udma_glue_request_tx_chn()
366 return tx_chn; in k3_udma_glue_request_tx_chn()
369 k3_udma_glue_release_tx_chn(tx_chn); in k3_udma_glue_request_tx_chn()
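
Taken together, the request path above allocates the channel structure, parses
the PSI-L endpoint, computes the host-descriptor size, acquires a tchan (or the
mapped PKTDMA channel and its default tflow), registers the per-channel
chan_dev, requests and configures the TX/TXCQ ring pair, and finally applies
the TISCI channel configuration. A consumer-side sketch, loosely following
drivers such as am65-cpsw; the channel name and ring depth are hypothetical:

	#include <linux/dma/k3-udma-glue.h>
	#include <linux/soc/ti/k3-ringacc.h>

	static struct k3_udma_glue_tx_channel *my_request_tx(struct device *dev)
	{
		struct k3_udma_glue_tx_channel_cfg cfg = { };
		struct k3_ring_cfg ring_cfg = {
			.size	  = 128,	/* hypothetical ring depth */
			.elm_size = K3_RINGACC_RING_ELSIZE_8,
			.mode	  = K3_RINGACC_RING_MODE_RING,
		};

		cfg.swdata_size = sizeof(void *);
		cfg.tx_cfg	= ring_cfg;
		cfg.txcq_cfg	= ring_cfg;

		/* "tx0" must match a dma-names entry in the device tree. */
		return k3_udma_glue_request_tx_chn(dev, "tx0", &cfg);
	}

The return value is IS_ERR()-encoded on failure; the error path releases
everything it acquired via k3_udma_glue_release_tx_chn() (line 369).
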
374 void k3_udma_glue_release_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_release_tx_chn() argument
376 if (tx_chn->psil_paired) { in k3_udma_glue_release_tx_chn()
377 xudma_navss_psil_unpair(tx_chn->common.udmax, in k3_udma_glue_release_tx_chn()
378 tx_chn->common.src_thread, in k3_udma_glue_release_tx_chn()
379 tx_chn->common.dst_thread); in k3_udma_glue_release_tx_chn()
380 tx_chn->psil_paired = false; in k3_udma_glue_release_tx_chn()
383 if (!IS_ERR_OR_NULL(tx_chn->udma_tchanx)) in k3_udma_glue_release_tx_chn()
384 xudma_tchan_put(tx_chn->common.udmax, in k3_udma_glue_release_tx_chn()
385 tx_chn->udma_tchanx); in k3_udma_glue_release_tx_chn()
387 if (tx_chn->ringtxcq) in k3_udma_glue_release_tx_chn()
388 k3_ringacc_ring_free(tx_chn->ringtxcq); in k3_udma_glue_release_tx_chn()
390 if (tx_chn->ringtx) in k3_udma_glue_release_tx_chn()
391 k3_ringacc_ring_free(tx_chn->ringtx); in k3_udma_glue_release_tx_chn()
393 if (tx_chn->common.chan_dev.parent) { in k3_udma_glue_release_tx_chn()
394 device_unregister(&tx_chn->common.chan_dev); in k3_udma_glue_release_tx_chn()
395 tx_chn->common.chan_dev.parent = NULL; in k3_udma_glue_release_tx_chn()
400 int k3_udma_glue_push_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_push_tx_chn() argument
406 if (!atomic_add_unless(&tx_chn->free_pkts, -1, 0)) in k3_udma_glue_push_tx_chn()
409 ringtxcq_id = k3_ringacc_get_ring_id(tx_chn->ringtxcq); in k3_udma_glue_push_tx_chn()
412 return k3_ringacc_ring_push(tx_chn->ringtx, &desc_dma); in k3_udma_glue_push_tx_chn()
416 int k3_udma_glue_pop_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_pop_tx_chn() argument
421 ret = k3_ringacc_ring_pop(tx_chn->ringtxcq, desc_dma); in k3_udma_glue_pop_tx_chn()
423 atomic_inc(&tx_chn->free_pkts); in k3_udma_glue_pop_tx_chn()
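
The free_pkts counter initialized at request time (line 314) is a credit
scheme: each push consumes one credit and fails with -ENOMEM once a completion
queue's worth of packets is in flight; each successful pop returns one. A
minimal submit/complete sketch (my_tx_desc_complete() is a hypothetical driver
helper that unmaps the popped descriptor and frees its packet):

	#include <linux/dma/k3-udma-glue.h>
	#include <linux/dma/ti-cppi5.h>

	static void my_tx_desc_complete(dma_addr_t desc_dma);	/* hypothetical */

	static int my_tx_submit(struct k3_udma_glue_tx_channel *tx_chn,
				struct cppi5_host_desc_t *desc,
				dma_addr_t desc_dma)
	{
		/* Also stamps the descriptor's return queue to the TXCQ ring. */
		return k3_udma_glue_push_tx_chn(tx_chn, desc, desc_dma);
	}

	static void my_tx_complete(struct k3_udma_glue_tx_channel *tx_chn)
	{
		dma_addr_t desc_dma;

		/* Drain the TXCQ ring until it reports -ENODATA. */
		while (!k3_udma_glue_pop_tx_chn(tx_chn, &desc_dma))
			my_tx_desc_complete(desc_dma);
	}
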
429 int k3_udma_glue_enable_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_enable_tx_chn() argument
433 ret = xudma_navss_psil_pair(tx_chn->common.udmax, in k3_udma_glue_enable_tx_chn()
434 tx_chn->common.src_thread, in k3_udma_glue_enable_tx_chn()
435 tx_chn->common.dst_thread); in k3_udma_glue_enable_tx_chn()
437 dev_err(tx_chn->common.dev, "PSI-L request err %d\n", ret); in k3_udma_glue_enable_tx_chn()
441 tx_chn->psil_paired = true; in k3_udma_glue_enable_tx_chn()
443 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_PEER_RT_EN_REG, in k3_udma_glue_enable_tx_chn()
446 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG, in k3_udma_glue_enable_tx_chn()
449 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn en"); in k3_udma_glue_enable_tx_chn()
454 void k3_udma_glue_disable_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_disable_tx_chn() argument
456 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn dis1"); in k3_udma_glue_disable_tx_chn()
458 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG, 0); in k3_udma_glue_disable_tx_chn()
460 xudma_tchanrt_write(tx_chn->udma_tchanx, in k3_udma_glue_disable_tx_chn()
462 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn dis2"); in k3_udma_glue_disable_tx_chn()
464 if (tx_chn->psil_paired) { in k3_udma_glue_disable_tx_chn()
465 xudma_navss_psil_unpair(tx_chn->common.udmax, in k3_udma_glue_disable_tx_chn()
466 tx_chn->common.src_thread, in k3_udma_glue_disable_tx_chn()
467 tx_chn->common.dst_thread); in k3_udma_glue_disable_tx_chn()
468 tx_chn->psil_paired = false; in k3_udma_glue_disable_tx_chn()
473 void k3_udma_glue_tdown_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_tdown_tx_chn() argument
479 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn tdown1"); in k3_udma_glue_tdown_tx_chn()
481 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG, in k3_udma_glue_tdown_tx_chn()
484 val = xudma_tchanrt_read(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG); in k3_udma_glue_tdown_tx_chn()
487 val = xudma_tchanrt_read(tx_chn->udma_tchanx, in k3_udma_glue_tdown_tx_chn()
491 dev_err(tx_chn->common.dev, "TX tdown timeout\n"); in k3_udma_glue_tdown_tx_chn()
497 val = xudma_tchanrt_read(tx_chn->udma_tchanx, in k3_udma_glue_tdown_tx_chn()
500 dev_err(tx_chn->common.dev, "TX tdown peer not stopped\n"); in k3_udma_glue_tdown_tx_chn()
501 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn tdown2"); in k3_udma_glue_tdown_tx_chn()
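
Ordering matters across the three runtime entry points above: enable pairs the
PSI-L threads before setting the RT enable bits; tdown requests a teardown and,
when sync is true, polls until the channel and its peer have stopped; disable
clears RT enable and unpairs. A minimal up/down sketch (my_tx_drain() is a
hypothetical step that reclaims in-flight descriptors, see the reset sketch
below):

	static void my_tx_drain(struct k3_udma_glue_tx_channel *tx_chn);	/* hypothetical */

	static int my_tx_start(struct k3_udma_glue_tx_channel *tx_chn)
	{
		/* Pairs src/dst PSI-L threads, then enables the channel. */
		return k3_udma_glue_enable_tx_chn(tx_chn);
	}

	static void my_tx_stop(struct k3_udma_glue_tx_channel *tx_chn)
	{
		/* Synchronous teardown: wait for the channel to drain. */
		k3_udma_glue_tdown_tx_chn(tx_chn, true);

		my_tx_drain(tx_chn);

		/* Clear RT enable and unpair the PSI-L threads. */
		k3_udma_glue_disable_tx_chn(tx_chn);
	}
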
505 void k3_udma_glue_reset_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_reset_tx_chn() argument
509 struct device *dev = tx_chn->common.dev; in k3_udma_glue_reset_tx_chn()
520 occ_tx = k3_ringacc_ring_get_occ(tx_chn->ringtx); in k3_udma_glue_reset_tx_chn()
524 ret = k3_ringacc_ring_pop(tx_chn->ringtx, &desc_dma); in k3_udma_glue_reset_tx_chn()
534 k3_ringacc_ring_reset(tx_chn->ringtxcq); in k3_udma_glue_reset_tx_chn()
535 k3_ringacc_ring_reset_dma(tx_chn->ringtx, occ_tx); in k3_udma_glue_reset_tx_chn()
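
The reset path above pops whatever is still sitting on the TX ring, hands each
stale descriptor to the caller's cleanup callback, and then resets both rings.
A minimal sketch of the callback contract; the context struct and helpers are
hypothetical:

	struct my_tx_ctx;					/* hypothetical */
	static void my_free_desc(struct my_tx_ctx *ctx, dma_addr_t desc_dma);

	/* Called once per descriptor still queued at reset time; data is
	 * the opaque pointer given to k3_udma_glue_reset_tx_chn(). */
	static void my_tx_cleanup(void *data, dma_addr_t desc_dma)
	{
		struct my_tx_ctx *ctx = data;

		my_free_desc(ctx, desc_dma);	/* unmap + free the packet */
	}

	static void my_tx_reset(struct k3_udma_glue_tx_channel *tx_chn,
				struct my_tx_ctx *ctx)
	{
		k3_udma_glue_reset_tx_chn(tx_chn, ctx, my_tx_cleanup);
	}
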
539 u32 k3_udma_glue_tx_get_hdesc_size(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_hdesc_size() argument
541 return tx_chn->common.hdesc_size; in k3_udma_glue_tx_get_hdesc_size()
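
The host-descriptor size computed at request time (EPIB + psdata + swdata,
lines 276-278) is what consumers use to size their descriptor memory. A
minimal sketch with a dma_pool; the pool name is hypothetical and the 16-byte
alignment is an assumption (TI drivers typically use the CPPI5 descriptor-pool
helpers instead):

	#include <linux/dmapool.h>
	#include <linux/dma/k3-udma-glue.h>

	static struct dma_pool *
	my_tx_pool_create(struct k3_udma_glue_tx_channel *tx_chn)
	{
		struct device *dma_dev = k3_udma_glue_tx_get_dma_device(tx_chn);
		u32 hdesc_size = k3_udma_glue_tx_get_hdesc_size(tx_chn);

		return dma_pool_create("my-tx-hdesc", dma_dev, hdesc_size, 16, 0);
	}
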
545 u32 k3_udma_glue_tx_get_txcq_id(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_txcq_id() argument
547 return k3_ringacc_get_ring_id(tx_chn->ringtxcq); in k3_udma_glue_tx_get_txcq_id()
551 int k3_udma_glue_tx_get_irq(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_irq() argument
553 if (xudma_is_pktdma(tx_chn->common.udmax)) { in k3_udma_glue_tx_get_irq()
554 tx_chn->virq = xudma_pktdma_tflow_get_irq(tx_chn->common.udmax, in k3_udma_glue_tx_get_irq()
555 tx_chn->udma_tflow_id); in k3_udma_glue_tx_get_irq()
557 tx_chn->virq = k3_ringacc_get_ring_irq_num(tx_chn->ringtxcq); in k3_udma_glue_tx_get_irq()
560 return tx_chn->virq; in k3_udma_glue_tx_get_irq()
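
For PKTDMA the completion interrupt belongs to the mapped tflow; otherwise it
is the TXCQ ring's interrupt. Either way the consumer simply requests the
returned virq. A minimal sketch; the handler and name are hypothetical, and
the IRQF_TRIGGER_HIGH flag follows what am65-cpsw uses:

	#include <linux/interrupt.h>
	#include <linux/dma/k3-udma-glue.h>

	static int my_tx_irq_setup(struct device *dev,
				   struct k3_udma_glue_tx_channel *tx_chn,
				   irq_handler_t handler, void *ctx)
	{
		int irq = k3_udma_glue_tx_get_irq(tx_chn);

		if (irq < 0)
			return irq;

		return devm_request_irq(dev, irq, handler, IRQF_TRIGGER_HIGH,
					"my-tx-compl", ctx);
	}
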
565 k3_udma_glue_tx_get_dma_device(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_dma_device() argument
567 if (xudma_is_pktdma(tx_chn->common.udmax) && in k3_udma_glue_tx_get_dma_device()
568 (tx_chn->common.atype_asel == 14 || tx_chn->common.atype_asel == 15)) in k3_udma_glue_tx_get_dma_device()
569 return &tx_chn->common.chan_dev; in k3_udma_glue_tx_get_dma_device()
571 return xudma_get_device(tx_chn->common.udmax); in k3_udma_glue_tx_get_dma_device()
575 void k3_udma_glue_tx_dma_to_cppi5_addr(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_tx_dma_to_cppi5_addr() argument
578 if (!xudma_is_pktdma(tx_chn->common.udmax) || in k3_udma_glue_tx_dma_to_cppi5_addr()
579 !tx_chn->common.atype_asel) in k3_udma_glue_tx_dma_to_cppi5_addr()
582 *addr |= (u64)tx_chn->common.atype_asel << K3_ADDRESS_ASEL_SHIFT; in k3_udma_glue_tx_dma_to_cppi5_addr()
586 void k3_udma_glue_tx_cppi5_to_dma_addr(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_tx_cppi5_to_dma_addr() argument
589 if (!xudma_is_pktdma(tx_chn->common.udmax) || in k3_udma_glue_tx_cppi5_to_dma_addr()
590 !tx_chn->common.atype_asel) in k3_udma_glue_tx_cppi5_to_dma_addr()
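
When PKTDMA is used with ASEL 14 or 15 (the values treated as coherent here,
matching the dma_coherent marking at line 309), the per-channel chan_dev
registered at request time is the right device for DMA mapping, and descriptor
pointers must carry the ASEL bits in their upper address bits before being
written into CPPI5 fields. A minimal mapping sketch; it returns 0 on mapping
failure:

	#include <linux/dma-mapping.h>
	#include <linux/dma/k3-udma-glue.h>

	static dma_addr_t my_map_for_cppi5(struct k3_udma_glue_tx_channel *tx_chn,
					   void *buf, size_t len)
	{
		struct device *dma_dev = k3_udma_glue_tx_get_dma_device(tx_chn);
		dma_addr_t dma = dma_map_single(dma_dev, buf, len, DMA_TO_DEVICE);

		if (dma_mapping_error(dma_dev, dma))
			return 0;

		/* No-op unless this is PKTDMA with a non-zero ASEL. */
		k3_udma_glue_tx_dma_to_cppi5_addr(tx_chn, &dma);
		return dma;
	}

The inverse helper, k3_udma_glue_tx_cppi5_to_dma_addr(), strips the ASEL bits
from addresses read back out of descriptors before they are used for unmapping.
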