Lines matching refs: tx_chn

115 		bool tx_chn)  in of_k3_udma_glue_parse_chn()  argument
155 if (tx_chn && !(thread_id & K3_PSIL_DST_THREAD_ID_OFFSET)) { in of_k3_udma_glue_parse_chn()
160 if (!tx_chn && (thread_id & K3_PSIL_DST_THREAD_ID_OFFSET)) { in of_k3_udma_glue_parse_chn()
178 if (tx_chn) in of_k3_udma_glue_parse_chn()
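The checks at file lines 155 and 160 validate PSI-L thread direction: a TX channel must pair with a destination thread, and destination threads are marked by the K3_PSIL_DST_THREAD_ID_OFFSET bit (0x8000 in include/linux/dma/k3-psil.h). A minimal sketch of that check, shown only for orientation:

	/* TX endpoints pair with PSI-L destination threads; RX with sources. */
	if (tx_chn && !(thread_id & K3_PSIL_DST_THREAD_ID_OFFSET))
		return -EINVAL;	/* TX given a source thread */
	if (!tx_chn && (thread_id & K3_PSIL_DST_THREAD_ID_OFFSET))
		return -EINVAL;	/* RX given a destination thread */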
188 static void k3_udma_glue_dump_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_dump_tx_chn() argument
190 struct device *dev = tx_chn->common.dev; in k3_udma_glue_dump_tx_chn()
196 tx_chn->udma_tchan_id, in k3_udma_glue_dump_tx_chn()
197 tx_chn->common.src_thread, in k3_udma_glue_dump_tx_chn()
198 tx_chn->common.dst_thread); in k3_udma_glue_dump_tx_chn()
220 static int k3_udma_glue_cfg_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_cfg_tx_chn() argument
222 const struct udma_tisci_rm *tisci_rm = tx_chn->common.tisci_rm; in k3_udma_glue_cfg_tx_chn()
236 req.index = tx_chn->udma_tchan_id; in k3_udma_glue_cfg_tx_chn()
237 if (tx_chn->tx_pause_on_err) in k3_udma_glue_cfg_tx_chn()
239 if (tx_chn->tx_filt_einfo) in k3_udma_glue_cfg_tx_chn()
241 if (tx_chn->tx_filt_pswords) in k3_udma_glue_cfg_tx_chn()
244 if (tx_chn->tx_supr_tdpkt) in k3_udma_glue_cfg_tx_chn()
246 req.tx_fetch_size = tx_chn->common.hdesc_size >> 2; in k3_udma_glue_cfg_tx_chn()
247 req.txcq_qnum = k3_ringacc_get_ring_id(tx_chn->ringtxcq); in k3_udma_glue_cfg_tx_chn()
248 req.tx_atype = tx_chn->common.atype_asel; in k3_udma_glue_cfg_tx_chn()
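Lines 220-248 build the TISCI resource-management request that asks System Firmware to program the TX channel. A hedged sketch of the tail of that function, assuming the ti_sci_msg_rm_udmap_tx_ch_cfg message from include/linux/soc/ti/ti_sci_protocol.h (the valid_params flag mask is omitted for brevity):

	struct ti_sci_msg_rm_udmap_tx_ch_cfg req = { 0 };

	req.nav_id = tisci_rm->tisci_dev_id;
	req.index = tx_chn->udma_tchan_id;
	req.tx_fetch_size = tx_chn->common.hdesc_size >> 2;	/* 32-bit words */
	req.txcq_qnum = k3_ringacc_get_ring_id(tx_chn->ringtxcq);
	req.tx_atype = tx_chn->common.atype_asel;

	return tisci_rm->tisci_udmap_ops->tx_ch_cfg(tisci_rm->tisci, &req);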
256 struct k3_udma_glue_tx_channel *tx_chn; in k3_udma_glue_request_tx_chn() local
259 tx_chn = devm_kzalloc(dev, sizeof(*tx_chn), GFP_KERNEL); in k3_udma_glue_request_tx_chn()
260 if (!tx_chn) in k3_udma_glue_request_tx_chn()
263 tx_chn->common.dev = dev; in k3_udma_glue_request_tx_chn()
264 tx_chn->common.swdata_size = cfg->swdata_size; in k3_udma_glue_request_tx_chn()
265 tx_chn->tx_pause_on_err = cfg->tx_pause_on_err; in k3_udma_glue_request_tx_chn()
266 tx_chn->tx_filt_einfo = cfg->tx_filt_einfo; in k3_udma_glue_request_tx_chn()
267 tx_chn->tx_filt_pswords = cfg->tx_filt_pswords; in k3_udma_glue_request_tx_chn()
268 tx_chn->tx_supr_tdpkt = cfg->tx_supr_tdpkt; in k3_udma_glue_request_tx_chn()
272 &tx_chn->common, true); in k3_udma_glue_request_tx_chn()
276 tx_chn->common.hdesc_size = cppi5_hdesc_calc_size(tx_chn->common.epib, in k3_udma_glue_request_tx_chn()
277 tx_chn->common.psdata_size, in k3_udma_glue_request_tx_chn()
278 tx_chn->common.swdata_size); in k3_udma_glue_request_tx_chn()
280 if (xudma_is_pktdma(tx_chn->common.udmax)) in k3_udma_glue_request_tx_chn()
281 tx_chn->udma_tchan_id = tx_chn->common.ep_config->mapped_channel_id; in k3_udma_glue_request_tx_chn()
283 tx_chn->udma_tchan_id = -1; in k3_udma_glue_request_tx_chn()
286 tx_chn->udma_tchanx = xudma_tchan_get(tx_chn->common.udmax, in k3_udma_glue_request_tx_chn()
287 tx_chn->udma_tchan_id); in k3_udma_glue_request_tx_chn()
288 if (IS_ERR(tx_chn->udma_tchanx)) { in k3_udma_glue_request_tx_chn()
289 ret = PTR_ERR(tx_chn->udma_tchanx); in k3_udma_glue_request_tx_chn()
293 tx_chn->udma_tchan_id = xudma_tchan_get_id(tx_chn->udma_tchanx); in k3_udma_glue_request_tx_chn()
295 tx_chn->common.chan_dev.class = &k3_udma_glue_devclass; in k3_udma_glue_request_tx_chn()
296 tx_chn->common.chan_dev.parent = xudma_get_device(tx_chn->common.udmax); in k3_udma_glue_request_tx_chn()
297 dev_set_name(&tx_chn->common.chan_dev, "tchan%d-0x%04x", in k3_udma_glue_request_tx_chn()
298 tx_chn->udma_tchan_id, tx_chn->common.dst_thread); in k3_udma_glue_request_tx_chn()
299 ret = device_register(&tx_chn->common.chan_dev); in k3_udma_glue_request_tx_chn()
302 tx_chn->common.chan_dev.parent = NULL; in k3_udma_glue_request_tx_chn()
306 if (xudma_is_pktdma(tx_chn->common.udmax)) { in k3_udma_glue_request_tx_chn()
308 tx_chn->common.chan_dev.dma_coherent = true; in k3_udma_glue_request_tx_chn()
309 dma_coerce_mask_and_coherent(&tx_chn->common.chan_dev, in k3_udma_glue_request_tx_chn()
313 atomic_set(&tx_chn->free_pkts, cfg->txcq_cfg.size); in k3_udma_glue_request_tx_chn()
315 if (xudma_is_pktdma(tx_chn->common.udmax)) in k3_udma_glue_request_tx_chn()
316 tx_chn->udma_tflow_id = tx_chn->common.ep_config->default_flow_id; in k3_udma_glue_request_tx_chn()
318 tx_chn->udma_tflow_id = tx_chn->udma_tchan_id; in k3_udma_glue_request_tx_chn()
321 ret = k3_ringacc_request_rings_pair(tx_chn->common.ringacc, in k3_udma_glue_request_tx_chn()
322 tx_chn->udma_tflow_id, -1, in k3_udma_glue_request_tx_chn()
323 &tx_chn->ringtx, in k3_udma_glue_request_tx_chn()
324 &tx_chn->ringtxcq); in k3_udma_glue_request_tx_chn()
331 cfg->tx_cfg.dma_dev = k3_udma_glue_tx_get_dma_device(tx_chn); in k3_udma_glue_request_tx_chn()
335 if (xudma_is_pktdma(tx_chn->common.udmax)) { in k3_udma_glue_request_tx_chn()
336 cfg->tx_cfg.asel = tx_chn->common.atype_asel; in k3_udma_glue_request_tx_chn()
337 cfg->txcq_cfg.asel = tx_chn->common.atype_asel; in k3_udma_glue_request_tx_chn()
340 ret = k3_ringacc_ring_cfg(tx_chn->ringtx, &cfg->tx_cfg); in k3_udma_glue_request_tx_chn()
346 ret = k3_ringacc_ring_cfg(tx_chn->ringtxcq, &cfg->txcq_cfg); in k3_udma_glue_request_tx_chn()
353 tx_chn->common.src_thread = in k3_udma_glue_request_tx_chn()
354 xudma_dev_get_psil_base(tx_chn->common.udmax) + in k3_udma_glue_request_tx_chn()
355 tx_chn->udma_tchan_id; in k3_udma_glue_request_tx_chn()
357 ret = k3_udma_glue_cfg_tx_chn(tx_chn); in k3_udma_glue_request_tx_chn()
363 k3_udma_glue_dump_tx_chn(tx_chn); in k3_udma_glue_request_tx_chn()
365 return tx_chn; in k3_udma_glue_request_tx_chn()
368 k3_udma_glue_release_tx_chn(tx_chn); in k3_udma_glue_request_tx_chn()
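Lines 256-368 are the request path a client driver goes through once. A hedged usage sketch, assuming the public API from include/linux/dma/k3-udma-glue.h; the channel name "tx0", the ring sizes and the swdata size are placeholders a real driver derives from its own requirements:

	struct k3_udma_glue_tx_channel_cfg cfg = { 0 };
	struct k3_udma_glue_tx_channel *tx_chn;

	cfg.swdata_size = 16;				/* per-descriptor sw data */
	cfg.tx_cfg.size = 128;				/* TX ring elements */
	cfg.tx_cfg.elm_size = K3_RINGACC_RING_ELSIZE_8;
	cfg.tx_cfg.mode = K3_RINGACC_RING_MODE_RING;
	cfg.txcq_cfg = cfg.tx_cfg;			/* completion ring alike */

	tx_chn = k3_udma_glue_request_tx_chn(dev, "tx0", &cfg);
	if (IS_ERR(tx_chn))
		return PTR_ERR(tx_chn);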
373 void k3_udma_glue_release_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_release_tx_chn() argument
375 if (tx_chn->psil_paired) { in k3_udma_glue_release_tx_chn()
376 xudma_navss_psil_unpair(tx_chn->common.udmax, in k3_udma_glue_release_tx_chn()
377 tx_chn->common.src_thread, in k3_udma_glue_release_tx_chn()
378 tx_chn->common.dst_thread); in k3_udma_glue_release_tx_chn()
379 tx_chn->psil_paired = false; in k3_udma_glue_release_tx_chn()
382 if (!IS_ERR_OR_NULL(tx_chn->udma_tchanx)) in k3_udma_glue_release_tx_chn()
383 xudma_tchan_put(tx_chn->common.udmax, in k3_udma_glue_release_tx_chn()
384 tx_chn->udma_tchanx); in k3_udma_glue_release_tx_chn()
386 if (tx_chn->ringtxcq) in k3_udma_glue_release_tx_chn()
387 k3_ringacc_ring_free(tx_chn->ringtxcq); in k3_udma_glue_release_tx_chn()
389 if (tx_chn->ringtx) in k3_udma_glue_release_tx_chn()
390 k3_ringacc_ring_free(tx_chn->ringtx); in k3_udma_glue_release_tx_chn()
392 if (tx_chn->common.chan_dev.parent) { in k3_udma_glue_release_tx_chn()
393 device_unregister(&tx_chn->common.chan_dev); in k3_udma_glue_release_tx_chn()
394 tx_chn->common.chan_dev.parent = NULL; in k3_udma_glue_release_tx_chn()
399 int k3_udma_glue_push_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_push_tx_chn() argument
405 if (!atomic_add_unless(&tx_chn->free_pkts, -1, 0)) in k3_udma_glue_push_tx_chn()
408 ringtxcq_id = k3_ringacc_get_ring_id(tx_chn->ringtxcq); in k3_udma_glue_push_tx_chn()
411 return k3_ringacc_ring_push(tx_chn->ringtx, &desc_dma); in k3_udma_glue_push_tx_chn()
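k3_udma_glue_push_tx_chn() (lines 399-411) first takes one unit from the free-packet budget, then pushes the descriptor's DMA address onto the TX ring; line 408 fetches the completion ring ID so it can be stamped into the descriptor before submission. A hedged caller-side sketch using the CPPI5 helpers from include/linux/dma/ti-cppi5.h; desc, desc_dma, buf_dma, pkt_len and the err_free_desc label are placeholders from a driver-managed descriptor pool:

	cppi5_hdesc_init(desc, CPPI5_INFO0_HDESC_EPIB_PRESENT, 0);
	cppi5_hdesc_attach_buf(desc, buf_dma, pkt_len, buf_dma, pkt_len);
	cppi5_hdesc_set_pktlen(desc, pkt_len);

	ret = k3_udma_glue_push_tx_chn(tx_chn, desc, desc_dma);
	if (ret)	/* ring full, or completion budget exhausted */
		goto err_free_desc;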
415 int k3_udma_glue_pop_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_pop_tx_chn() argument
420 ret = k3_ringacc_ring_pop(tx_chn->ringtxcq, desc_dma); in k3_udma_glue_pop_tx_chn()
422 atomic_inc(&tx_chn->free_pkts); in k3_udma_glue_pop_tx_chn()
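The pop side (lines 415-422) returns completed descriptors from the completion ring and gives the unit back to the free-packet budget. A typical completion handler drains until the ring is empty; a minimal sketch:

	dma_addr_t desc_dma;

	/* Drain everything the hardware has completed so far. */
	while (k3_udma_glue_pop_tx_chn(tx_chn, &desc_dma) == 0) {
		/* map desc_dma back to the driver descriptor and free it
		 * (pool lookup is driver-specific, omitted here)
		 */
	}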
428 int k3_udma_glue_enable_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_enable_tx_chn() argument
432 ret = xudma_navss_psil_pair(tx_chn->common.udmax, in k3_udma_glue_enable_tx_chn()
433 tx_chn->common.src_thread, in k3_udma_glue_enable_tx_chn()
434 tx_chn->common.dst_thread); in k3_udma_glue_enable_tx_chn()
436 dev_err(tx_chn->common.dev, "PSI-L request err %d\n", ret); in k3_udma_glue_enable_tx_chn()
440 tx_chn->psil_paired = true; in k3_udma_glue_enable_tx_chn()
442 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_PEER_RT_EN_REG, in k3_udma_glue_enable_tx_chn()
445 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG, in k3_udma_glue_enable_tx_chn()
448 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn en"); in k3_udma_glue_enable_tx_chn()
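Note that PSI-L pairing happens at enable time (lines 432-440), not at request time, so a channel can be disabled and re-enabled without re-requesting it. Enabling is a single call; err_release below is a placeholder label:

	ret = k3_udma_glue_enable_tx_chn(tx_chn);
	if (ret)
		goto err_release;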
453 void k3_udma_glue_disable_tx_chn(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_disable_tx_chn() argument
455 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn dis1"); in k3_udma_glue_disable_tx_chn()
457 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG, 0); in k3_udma_glue_disable_tx_chn()
459 xudma_tchanrt_write(tx_chn->udma_tchanx, in k3_udma_glue_disable_tx_chn()
461 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn dis2"); in k3_udma_glue_disable_tx_chn()
463 if (tx_chn->psil_paired) { in k3_udma_glue_disable_tx_chn()
464 xudma_navss_psil_unpair(tx_chn->common.udmax, in k3_udma_glue_disable_tx_chn()
465 tx_chn->common.src_thread, in k3_udma_glue_disable_tx_chn()
466 tx_chn->common.dst_thread); in k3_udma_glue_disable_tx_chn()
467 tx_chn->psil_paired = false; in k3_udma_glue_disable_tx_chn()
472 void k3_udma_glue_tdown_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_tdown_tx_chn() argument
478 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn tdown1"); in k3_udma_glue_tdown_tx_chn()
480 xudma_tchanrt_write(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG, in k3_udma_glue_tdown_tx_chn()
483 val = xudma_tchanrt_read(tx_chn->udma_tchanx, UDMA_CHAN_RT_CTL_REG); in k3_udma_glue_tdown_tx_chn()
486 val = xudma_tchanrt_read(tx_chn->udma_tchanx, in k3_udma_glue_tdown_tx_chn()
490 dev_err(tx_chn->common.dev, "TX tdown timeout\n"); in k3_udma_glue_tdown_tx_chn()
496 val = xudma_tchanrt_read(tx_chn->udma_tchanx, in k3_udma_glue_tdown_tx_chn()
499 dev_err(tx_chn->common.dev, "TX tdown peer not stopped\n"); in k3_udma_glue_tdown_tx_chn()
500 k3_udma_glue_dump_tx_rt_chn(tx_chn, "txchn tdown2"); in k3_udma_glue_tdown_tx_chn()
504 void k3_udma_glue_reset_tx_chn(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_reset_tx_chn() argument
508 struct device *dev = tx_chn->common.dev; in k3_udma_glue_reset_tx_chn()
519 occ_tx = k3_ringacc_ring_get_occ(tx_chn->ringtx); in k3_udma_glue_reset_tx_chn()
523 ret = k3_ringacc_ring_pop(tx_chn->ringtx, &desc_dma); in k3_udma_glue_reset_tx_chn()
533 k3_ringacc_ring_reset(tx_chn->ringtxcq); in k3_udma_glue_reset_tx_chn()
534 k3_ringacc_ring_reset_dma(tx_chn->ringtx, occ_tx); in k3_udma_glue_reset_tx_chn()
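Lines 453-534 form the teardown path: tdown requests channel teardown and polls the RT registers until it completes, reset drains descriptors still sitting on the TX ring through a driver-supplied cleanup callback, and disable unpairs PSI-L. A hedged sketch of the usual shutdown order as practiced by client drivers; my_tx_cleanup, my_tx_shutdown and priv are placeholders:

	static void my_tx_cleanup(void *data, dma_addr_t desc_dma)
	{
		/* unmap and free the driver descriptor behind desc_dma */
	}

	static void my_tx_shutdown(struct k3_udma_glue_tx_channel *tx_chn,
				   void *priv)
	{
		k3_udma_glue_tdown_tx_chn(tx_chn, false);	/* async tdown */
		k3_udma_glue_reset_tx_chn(tx_chn, priv, my_tx_cleanup);
		k3_udma_glue_disable_tx_chn(tx_chn);
		k3_udma_glue_release_tx_chn(tx_chn);
	}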
538 u32 k3_udma_glue_tx_get_hdesc_size(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_hdesc_size() argument
540 return tx_chn->common.hdesc_size; in k3_udma_glue_tx_get_hdesc_size()
544 u32 k3_udma_glue_tx_get_txcq_id(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_txcq_id() argument
546 return k3_ringacc_get_ring_id(tx_chn->ringtxcq); in k3_udma_glue_tx_get_txcq_id()
550 int k3_udma_glue_tx_get_irq(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_irq() argument
552 if (xudma_is_pktdma(tx_chn->common.udmax)) { in k3_udma_glue_tx_get_irq()
553 tx_chn->virq = xudma_pktdma_tflow_get_irq(tx_chn->common.udmax, in k3_udma_glue_tx_get_irq()
554 tx_chn->udma_tflow_id); in k3_udma_glue_tx_get_irq()
556 tx_chn->virq = k3_ringacc_get_ring_irq_num(tx_chn->ringtxcq); in k3_udma_glue_tx_get_irq()
559 return tx_chn->virq; in k3_udma_glue_tx_get_irq()
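Per lines 550-559, on PKTDMA the completion interrupt comes from the mapped TX flow, on plain UDMA from the completion ring; either way the caller just asks for the virq and hooks a handler. A hedged sketch; my_tx_irq_handler, "my-tx" and priv are placeholders:

	int irq = k3_udma_glue_tx_get_irq(tx_chn);

	if (irq <= 0)
		return irq ? irq : -ENXIO;

	ret = devm_request_irq(dev, irq, my_tx_irq_handler,
			       IRQF_TRIGGER_HIGH, "my-tx", priv);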
564 k3_udma_glue_tx_get_dma_device(struct k3_udma_glue_tx_channel *tx_chn) in k3_udma_glue_tx_get_dma_device() argument
566 if (xudma_is_pktdma(tx_chn->common.udmax) && in k3_udma_glue_tx_get_dma_device()
567 (tx_chn->common.atype_asel == 14 || tx_chn->common.atype_asel == 15)) in k3_udma_glue_tx_get_dma_device()
568 return &tx_chn->common.chan_dev; in k3_udma_glue_tx_get_dma_device()
570 return xudma_get_device(tx_chn->common.udmax); in k3_udma_glue_tx_get_dma_device()
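Lines 564-570 pick the struct device that DMA memory must be mapped against: on PKTDMA with a coherent ASEL value (14 or 15) it is the glue-created channel device, otherwise the underlying UDMA device. Client drivers should allocate descriptor memory against whatever this helper returns; a minimal sketch where pool_cpu, pool_dma and pool_size are placeholders:

	struct device *dma_dev = k3_udma_glue_tx_get_dma_device(tx_chn);

	pool_cpu = dma_alloc_coherent(dma_dev, pool_size, &pool_dma,
				      GFP_KERNEL);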
574 void k3_udma_glue_tx_dma_to_cppi5_addr(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_tx_dma_to_cppi5_addr() argument
577 if (!xudma_is_pktdma(tx_chn->common.udmax) || in k3_udma_glue_tx_dma_to_cppi5_addr()
578 !tx_chn->common.atype_asel) in k3_udma_glue_tx_dma_to_cppi5_addr()
581 *addr |= (u64)tx_chn->common.atype_asel << K3_ADDRESS_ASEL_SHIFT; in k3_udma_glue_tx_dma_to_cppi5_addr()
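When PKTDMA uses a non-zero ASEL, DMA addresses stored inside CPPI5 descriptors must carry the ASEL value in their upper bits (K3_ADDRESS_ASEL_SHIFT is defined as 48 in the TI UDMA driver); lines 574-581 add it, and the cppi5_to_dma counterpart below strips it again. A hedged sketch of a caller converting in both directions; buf_dma, pkt_len and desc_dma are placeholders:

	/* before writing an address into the descriptor */
	k3_udma_glue_tx_dma_to_cppi5_addr(tx_chn, &buf_dma);
	cppi5_hdesc_attach_buf(desc, buf_dma, pkt_len, buf_dma, pkt_len);

	/* on completion, before dma_unmap/pool lookup */
	k3_udma_glue_tx_cppi5_to_dma_addr(tx_chn, &desc_dma);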
585 void k3_udma_glue_tx_cppi5_to_dma_addr(struct k3_udma_glue_tx_channel *tx_chn, in k3_udma_glue_tx_cppi5_to_dma_addr() argument
588 if (!xudma_is_pktdma(tx_chn->common.udmax) || in k3_udma_glue_tx_cppi5_to_dma_addr()
589 !tx_chn->common.atype_asel) in k3_udma_glue_tx_cppi5_to_dma_addr()