Lines matching refs:rx_chn in drivers/dma/ti/k3-udma-glue.c (identifier cross-reference output: each hit shows the original source line number, the matching code, and the enclosing function)

597 static int k3_udma_glue_cfg_rx_chn(struct k3_udma_glue_rx_channel *rx_chn)  in k3_udma_glue_cfg_rx_chn()  argument
599 const struct udma_tisci_rm *tisci_rm = rx_chn->common.tisci_rm; in k3_udma_glue_cfg_rx_chn()
611 req.index = rx_chn->udma_rchan_id; in k3_udma_glue_cfg_rx_chn()
612 req.rx_fetch_size = rx_chn->common.hdesc_size >> 2; in k3_udma_glue_cfg_rx_chn()
619 if (!xudma_is_pktdma(rx_chn->common.udmax) && rx_chn->flow_num && in k3_udma_glue_cfg_rx_chn()
620 rx_chn->flow_id_base != rx_chn->udma_rchan_id) { in k3_udma_glue_cfg_rx_chn()
624 req.flowid_start = rx_chn->flow_id_base; in k3_udma_glue_cfg_rx_chn()
625 req.flowid_cnt = rx_chn->flow_num; in k3_udma_glue_cfg_rx_chn()
628 req.rx_atype = rx_chn->common.atype_asel; in k3_udma_glue_cfg_rx_chn()
632 dev_err(rx_chn->common.dev, "rchan%d cfg failed %d\n", in k3_udma_glue_cfg_rx_chn()
633 rx_chn->udma_rchan_id, ret); in k3_udma_glue_cfg_rx_chn()
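The hits above are k3_udma_glue_cfg_rx_chn() building the TISCI resource-management request for the RX channel: the CPPI5 fetch size is the descriptor size in 32-bit words (hence hdesc_size >> 2), and a flow-id range is only advertised when the channel is not PKTDMA and the flow base differs from the channel id. A minimal sketch of how that request is assembled and submitted, assuming the ti_sci_msg_rm_udmap_rx_ch_cfg layout and udmap ops from the TI SCI protocol headers (not the verbatim driver code, which also fills valid_params and the channel type):

	struct ti_sci_msg_rm_udmap_rx_ch_cfg req = { 0 };
	int ret;

	req.nav_id = tisci_rm->tisci_dev_id;
	req.index = rx_chn->udma_rchan_id;
	/* fetch size is expressed in 32-bit words, hence the >> 2 */
	req.rx_fetch_size = rx_chn->common.hdesc_size >> 2;
	req.rx_atype = rx_chn->common.atype_asel;

	if (!xudma_is_pktdma(rx_chn->common.udmax) && rx_chn->flow_num &&
	    rx_chn->flow_id_base != rx_chn->udma_rchan_id) {
		/* non-default flow range: advertise it to TISCI */
		req.flowid_start = rx_chn->flow_id_base;
		req.flowid_cnt = rx_chn->flow_num;
	}

	ret = tisci_rm->tisci_udmap_ops->rx_ch_cfg(tisci_rm->tisci, &req);
	if (ret)
		dev_err(rx_chn->common.dev, "rchan%d cfg failed %d\n",
			rx_chn->udma_rchan_id, ret);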
638 static void k3_udma_glue_release_rx_flow(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_release_rx_flow() argument
641 struct k3_udma_glue_rx_flow *flow = &rx_chn->flows[flow_num]; in k3_udma_glue_release_rx_flow()
652 xudma_rflow_put(rx_chn->common.udmax, flow->udma_rflow); in k3_udma_glue_release_rx_flow()
654 rx_chn->flows_ready--; in k3_udma_glue_release_rx_flow()
657 static int k3_udma_glue_cfg_rx_flow(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_cfg_rx_flow() argument
661 struct k3_udma_glue_rx_flow *flow = &rx_chn->flows[flow_idx]; in k3_udma_glue_cfg_rx_flow()
662 const struct udma_tisci_rm *tisci_rm = rx_chn->common.tisci_rm; in k3_udma_glue_cfg_rx_flow()
663 struct device *dev = rx_chn->common.dev; in k3_udma_glue_cfg_rx_flow()
669 flow->udma_rflow = xudma_rflow_get(rx_chn->common.udmax, in k3_udma_glue_cfg_rx_flow()
682 if (xudma_is_pktdma(rx_chn->common.udmax)) { in k3_udma_glue_cfg_rx_flow()
684 xudma_get_rflow_ring_offset(rx_chn->common.udmax); in k3_udma_glue_cfg_rx_flow()
692 ret = k3_ringacc_request_rings_pair(rx_chn->common.ringacc, in k3_udma_glue_cfg_rx_flow()
702 flow_cfg->rx_cfg.dma_dev = k3_udma_glue_rx_get_dma_device(rx_chn); in k3_udma_glue_cfg_rx_flow()
706 if (xudma_is_pktdma(rx_chn->common.udmax)) { in k3_udma_glue_cfg_rx_flow()
707 flow_cfg->rx_cfg.asel = rx_chn->common.atype_asel; in k3_udma_glue_cfg_rx_flow()
708 flow_cfg->rxfdq_cfg.asel = rx_chn->common.atype_asel; in k3_udma_glue_cfg_rx_flow()
723 if (rx_chn->remote) { in k3_udma_glue_cfg_rx_flow()
749 if (rx_chn->common.epib) in k3_udma_glue_cfg_rx_flow()
751 if (rx_chn->common.psdata_size) in k3_udma_glue_cfg_rx_flow()
773 rx_chn->flows_ready++; in k3_udma_glue_cfg_rx_flow()
775 flow->udma_rflow_id, rx_chn->flows_ready); in k3_udma_glue_cfg_rx_flow()
784 xudma_rflow_put(rx_chn->common.udmax, flow->udma_rflow); in k3_udma_glue_cfg_rx_flow()
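k3_udma_glue_cfg_rx_flow() claims an rflow, requests a receive ring and a free-descriptor (fdq) ring as a pair, and on PKTDMA pins the ring id to the rflow ring offset and propagates the channel ASEL into both ring configs; remote channels skip the TISCI flow programming. A sketch of the flow configuration a client driver would pass in, assuming the k3_udma_glue_rx_flow_cfg and k3_ring_cfg layouts from the glue and ringacc headers (RX_DESC_NUM is a hypothetical ring depth):

	struct k3_udma_glue_rx_flow_cfg flow_cfg = {
		.rx_cfg = {
			.size = RX_DESC_NUM,
			.elm_size = K3_RINGACC_RING_ELSIZE_8,
			.mode = K3_RINGACC_RING_MODE_RING,
		},
		.rxfdq_cfg = {
			.size = RX_DESC_NUM,
			.elm_size = K3_RINGACC_RING_ELSIZE_8,
			.mode = K3_RINGACC_RING_MODE_RING,
		},
		.ring_rxq_id = -1,		/* let the glue layer pick the rings */
		.ring_rxfdq0_id = -1,
		.rx_error_handling = false,	/* drop rather than retry on errors */
		.src_tag_lo_sel = 0,
	};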
836 k3_udma_glue_allocate_rx_flows(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_allocate_rx_flows() argument
846 if (rx_chn->flow_id_base != -1 && in k3_udma_glue_allocate_rx_flows()
847 !xudma_rflow_is_gp(rx_chn->common.udmax, rx_chn->flow_id_base)) in k3_udma_glue_allocate_rx_flows()
851 ret = xudma_alloc_gp_rflow_range(rx_chn->common.udmax, in k3_udma_glue_allocate_rx_flows()
852 rx_chn->flow_id_base, in k3_udma_glue_allocate_rx_flows()
853 rx_chn->flow_num); in k3_udma_glue_allocate_rx_flows()
855 dev_err(rx_chn->common.dev, "UDMAX reserve_rflow %d cnt:%d err: %d\n", in k3_udma_glue_allocate_rx_flows()
856 rx_chn->flow_id_base, rx_chn->flow_num, ret); in k3_udma_glue_allocate_rx_flows()
859 rx_chn->flow_id_base = ret; in k3_udma_glue_allocate_rx_flows()
868 struct k3_udma_glue_rx_channel *rx_chn; in k3_udma_glue_request_rx_chn_priv() local
879 rx_chn = devm_kzalloc(dev, sizeof(*rx_chn), GFP_KERNEL); in k3_udma_glue_request_rx_chn_priv()
880 if (!rx_chn) in k3_udma_glue_request_rx_chn_priv()
883 rx_chn->common.dev = dev; in k3_udma_glue_request_rx_chn_priv()
884 rx_chn->common.swdata_size = cfg->swdata_size; in k3_udma_glue_request_rx_chn_priv()
885 rx_chn->remote = false; in k3_udma_glue_request_rx_chn_priv()
889 &rx_chn->common, false); in k3_udma_glue_request_rx_chn_priv()
893 rx_chn->common.hdesc_size = cppi5_hdesc_calc_size(rx_chn->common.epib, in k3_udma_glue_request_rx_chn_priv()
894 rx_chn->common.psdata_size, in k3_udma_glue_request_rx_chn_priv()
895 rx_chn->common.swdata_size); in k3_udma_glue_request_rx_chn_priv()
897 ep_cfg = rx_chn->common.ep_config; in k3_udma_glue_request_rx_chn_priv()
899 if (xudma_is_pktdma(rx_chn->common.udmax)) in k3_udma_glue_request_rx_chn_priv()
900 rx_chn->udma_rchan_id = ep_cfg->mapped_channel_id; in k3_udma_glue_request_rx_chn_priv()
902 rx_chn->udma_rchan_id = -1; in k3_udma_glue_request_rx_chn_priv()
905 rx_chn->udma_rchanx = xudma_rchan_get(rx_chn->common.udmax, in k3_udma_glue_request_rx_chn_priv()
906 rx_chn->udma_rchan_id); in k3_udma_glue_request_rx_chn_priv()
907 if (IS_ERR(rx_chn->udma_rchanx)) { in k3_udma_glue_request_rx_chn_priv()
908 ret = PTR_ERR(rx_chn->udma_rchanx); in k3_udma_glue_request_rx_chn_priv()
912 rx_chn->udma_rchan_id = xudma_rchan_get_id(rx_chn->udma_rchanx); in k3_udma_glue_request_rx_chn_priv()
914 rx_chn->common.chan_dev.class = &k3_udma_glue_devclass; in k3_udma_glue_request_rx_chn_priv()
915 rx_chn->common.chan_dev.parent = xudma_get_device(rx_chn->common.udmax); in k3_udma_glue_request_rx_chn_priv()
916 dev_set_name(&rx_chn->common.chan_dev, "rchan%d-0x%04x", in k3_udma_glue_request_rx_chn_priv()
917 rx_chn->udma_rchan_id, rx_chn->common.src_thread); in k3_udma_glue_request_rx_chn_priv()
918 ret = device_register(&rx_chn->common.chan_dev); in k3_udma_glue_request_rx_chn_priv()
921 put_device(&rx_chn->common.chan_dev); in k3_udma_glue_request_rx_chn_priv()
922 rx_chn->common.chan_dev.parent = NULL; in k3_udma_glue_request_rx_chn_priv()
926 if (xudma_is_pktdma(rx_chn->common.udmax)) { in k3_udma_glue_request_rx_chn_priv()
928 rx_chn->common.chan_dev.dma_coherent = true; in k3_udma_glue_request_rx_chn_priv()
929 dma_coerce_mask_and_coherent(&rx_chn->common.chan_dev, in k3_udma_glue_request_rx_chn_priv()
933 if (xudma_is_pktdma(rx_chn->common.udmax)) { in k3_udma_glue_request_rx_chn_priv()
947 rx_chn->flow_id_base = flow_start; in k3_udma_glue_request_rx_chn_priv()
949 rx_chn->flow_id_base = cfg->flow_id_base; in k3_udma_glue_request_rx_chn_priv()
953 rx_chn->flow_id_base = rx_chn->udma_rchan_id; in k3_udma_glue_request_rx_chn_priv()
956 rx_chn->flow_num = cfg->flow_id_num; in k3_udma_glue_request_rx_chn_priv()
958 rx_chn->flows = devm_kcalloc(dev, rx_chn->flow_num, in k3_udma_glue_request_rx_chn_priv()
959 sizeof(*rx_chn->flows), GFP_KERNEL); in k3_udma_glue_request_rx_chn_priv()
960 if (!rx_chn->flows) { in k3_udma_glue_request_rx_chn_priv()
965 ret = k3_udma_glue_allocate_rx_flows(rx_chn, cfg); in k3_udma_glue_request_rx_chn_priv()
969 for (i = 0; i < rx_chn->flow_num; i++) in k3_udma_glue_request_rx_chn_priv()
970 rx_chn->flows[i].udma_rflow_id = rx_chn->flow_id_base + i; in k3_udma_glue_request_rx_chn_priv()
973 rx_chn->common.dst_thread = in k3_udma_glue_request_rx_chn_priv()
974 xudma_dev_get_psil_base(rx_chn->common.udmax) + in k3_udma_glue_request_rx_chn_priv()
975 rx_chn->udma_rchan_id; in k3_udma_glue_request_rx_chn_priv()
977 ret = k3_udma_glue_cfg_rx_chn(rx_chn); in k3_udma_glue_request_rx_chn_priv()
985 ret = k3_udma_glue_cfg_rx_flow(rx_chn, 0, cfg->def_flow_cfg); in k3_udma_glue_request_rx_chn_priv()
990 k3_udma_glue_dump_rx_chn(rx_chn); in k3_udma_glue_request_rx_chn_priv()
992 return rx_chn; in k3_udma_glue_request_rx_chn_priv()
995 k3_udma_glue_release_rx_chn(rx_chn); in k3_udma_glue_request_rx_chn_priv()
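The hits above trace the local request path: allocate the channel struct, parse the PSI-L endpoint config, acquire the rchan (PKTDMA uses the mapped channel id, plain UDMA lets xudma pick), register the chan_dev, resolve the flow-id base (mapped flows on PKTDMA, a GP range, or the rchan id itself), allocate the flow array, push the TISCI channel config, and optionally configure default flow 0; the error path falls through to k3_udma_glue_release_rx_chn(). From a client driver this is reached through the exported request wrapper; a minimal sketch, assuming "rx" as the channel name and the flow_cfg from the earlier sketch:

	struct k3_udma_glue_rx_channel_cfg cfg = { 0 };
	struct k3_udma_glue_rx_channel *rx_chn;

	cfg.swdata_size = sizeof(void *);	/* per-descriptor driver data */
	cfg.flow_id_base = -1;			/* let the glue layer allocate */
	cfg.flow_id_num = 1;
	cfg.def_flow_cfg = &flow_cfg;		/* flow 0 configured at request time */

	rx_chn = k3_udma_glue_request_rx_chn(dev, "rx", &cfg);
	if (IS_ERR(rx_chn))
		return PTR_ERR(rx_chn);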
1003 struct k3_udma_glue_rx_channel *rx_chn; in k3_udma_glue_request_remote_rx_chn() local
1017 rx_chn = devm_kzalloc(dev, sizeof(*rx_chn), GFP_KERNEL); in k3_udma_glue_request_remote_rx_chn()
1018 if (!rx_chn) in k3_udma_glue_request_remote_rx_chn()
1021 rx_chn->common.dev = dev; in k3_udma_glue_request_remote_rx_chn()
1022 rx_chn->common.swdata_size = cfg->swdata_size; in k3_udma_glue_request_remote_rx_chn()
1023 rx_chn->remote = true; in k3_udma_glue_request_remote_rx_chn()
1024 rx_chn->udma_rchan_id = -1; in k3_udma_glue_request_remote_rx_chn()
1025 rx_chn->flow_num = cfg->flow_id_num; in k3_udma_glue_request_remote_rx_chn()
1026 rx_chn->flow_id_base = cfg->flow_id_base; in k3_udma_glue_request_remote_rx_chn()
1027 rx_chn->psil_paired = false; in k3_udma_glue_request_remote_rx_chn()
1031 &rx_chn->common, false); in k3_udma_glue_request_remote_rx_chn()
1035 rx_chn->common.hdesc_size = cppi5_hdesc_calc_size(rx_chn->common.epib, in k3_udma_glue_request_remote_rx_chn()
1036 rx_chn->common.psdata_size, in k3_udma_glue_request_remote_rx_chn()
1037 rx_chn->common.swdata_size); in k3_udma_glue_request_remote_rx_chn()
1039 rx_chn->flows = devm_kcalloc(dev, rx_chn->flow_num, in k3_udma_glue_request_remote_rx_chn()
1040 sizeof(*rx_chn->flows), GFP_KERNEL); in k3_udma_glue_request_remote_rx_chn()
1041 if (!rx_chn->flows) { in k3_udma_glue_request_remote_rx_chn()
1046 rx_chn->common.chan_dev.class = &k3_udma_glue_devclass; in k3_udma_glue_request_remote_rx_chn()
1047 rx_chn->common.chan_dev.parent = xudma_get_device(rx_chn->common.udmax); in k3_udma_glue_request_remote_rx_chn()
1048 dev_set_name(&rx_chn->common.chan_dev, "rchan_remote-0x%04x", in k3_udma_glue_request_remote_rx_chn()
1049 rx_chn->common.src_thread); in k3_udma_glue_request_remote_rx_chn()
1050 ret = device_register(&rx_chn->common.chan_dev); in k3_udma_glue_request_remote_rx_chn()
1053 put_device(&rx_chn->common.chan_dev); in k3_udma_glue_request_remote_rx_chn()
1054 rx_chn->common.chan_dev.parent = NULL; in k3_udma_glue_request_remote_rx_chn()
1058 if (xudma_is_pktdma(rx_chn->common.udmax)) { in k3_udma_glue_request_remote_rx_chn()
1060 rx_chn->common.chan_dev.dma_coherent = true; in k3_udma_glue_request_remote_rx_chn()
1061 dma_coerce_mask_and_coherent(&rx_chn->common.chan_dev, in k3_udma_glue_request_remote_rx_chn()
1065 ret = k3_udma_glue_allocate_rx_flows(rx_chn, cfg); in k3_udma_glue_request_remote_rx_chn()
1069 for (i = 0; i < rx_chn->flow_num; i++) in k3_udma_glue_request_remote_rx_chn()
1070 rx_chn->flows[i].udma_rflow_id = rx_chn->flow_id_base + i; in k3_udma_glue_request_remote_rx_chn()
1072 k3_udma_glue_dump_rx_chn(rx_chn); in k3_udma_glue_request_remote_rx_chn()
1074 return rx_chn; in k3_udma_glue_request_remote_rx_chn()
1077 k3_udma_glue_release_rx_chn(rx_chn); in k3_udma_glue_request_remote_rx_chn()
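The remote variant mirrors the local path but sets remote = true, never acquires an rchan (udma_rchan_id stays -1), and skips both the TISCI channel config and the default-flow setup, so flows must be initialized explicitly afterwards. A sketch under those assumptions (REMOTE_FLOW_BASE and REMOTE_FLOW_NUM are hypothetical values dictated by the remote peer):

	cfg.remote = true;
	cfg.flow_id_base = REMOTE_FLOW_BASE;
	cfg.flow_id_num = REMOTE_FLOW_NUM;
	cfg.def_flow_cfg = NULL;	/* set up via k3_udma_glue_rx_flow_init() */

	rx_chn = k3_udma_glue_request_remote_rx_chn(dev, "rx", &cfg);
	if (IS_ERR(rx_chn))
		return PTR_ERR(rx_chn);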
1092 void k3_udma_glue_release_rx_chn(struct k3_udma_glue_rx_channel *rx_chn) in k3_udma_glue_release_rx_chn() argument
1096 if (IS_ERR_OR_NULL(rx_chn->common.udmax)) in k3_udma_glue_release_rx_chn()
1099 if (rx_chn->psil_paired) { in k3_udma_glue_release_rx_chn()
1100 xudma_navss_psil_unpair(rx_chn->common.udmax, in k3_udma_glue_release_rx_chn()
1101 rx_chn->common.src_thread, in k3_udma_glue_release_rx_chn()
1102 rx_chn->common.dst_thread); in k3_udma_glue_release_rx_chn()
1103 rx_chn->psil_paired = false; in k3_udma_glue_release_rx_chn()
1106 for (i = 0; i < rx_chn->flow_num; i++) in k3_udma_glue_release_rx_chn()
1107 k3_udma_glue_release_rx_flow(rx_chn, i); in k3_udma_glue_release_rx_chn()
1109 if (xudma_rflow_is_gp(rx_chn->common.udmax, rx_chn->flow_id_base)) in k3_udma_glue_release_rx_chn()
1110 xudma_free_gp_rflow_range(rx_chn->common.udmax, in k3_udma_glue_release_rx_chn()
1111 rx_chn->flow_id_base, in k3_udma_glue_release_rx_chn()
1112 rx_chn->flow_num); in k3_udma_glue_release_rx_chn()
1114 if (!IS_ERR_OR_NULL(rx_chn->udma_rchanx)) in k3_udma_glue_release_rx_chn()
1115 xudma_rchan_put(rx_chn->common.udmax, in k3_udma_glue_release_rx_chn()
1116 rx_chn->udma_rchanx); in k3_udma_glue_release_rx_chn()
1118 if (rx_chn->common.chan_dev.parent) { in k3_udma_glue_release_rx_chn()
1119 device_unregister(&rx_chn->common.chan_dev); in k3_udma_glue_release_rx_chn()
1120 rx_chn->common.chan_dev.parent = NULL; in k3_udma_glue_release_rx_chn()
1125 int k3_udma_glue_rx_flow_init(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_rx_flow_init() argument
1129 if (flow_idx >= rx_chn->flow_num) in k3_udma_glue_rx_flow_init()
1132 return k3_udma_glue_cfg_rx_flow(rx_chn, flow_idx, flow_cfg); in k3_udma_glue_rx_flow_init()
1136 u32 k3_udma_glue_rx_flow_get_fdq_id(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_rx_flow_get_fdq_id() argument
1141 if (flow_idx >= rx_chn->flow_num) in k3_udma_glue_rx_flow_get_fdq_id()
1144 flow = &rx_chn->flows[flow_idx]; in k3_udma_glue_rx_flow_get_fdq_id()
1150 u32 k3_udma_glue_rx_get_flow_id_base(struct k3_udma_glue_rx_channel *rx_chn) in k3_udma_glue_rx_get_flow_id_base() argument
1152 return rx_chn->flow_id_base; in k3_udma_glue_rx_get_flow_id_base()
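k3_udma_glue_rx_flow_init() bounds-checks the index and defers to the same cfg helper shown earlier, while the two getters expose the fdq ring id (where free buffers are returned) and the flow-id base (what a classifier or remote peer must target). A short usage sketch (FLOW_IDX is a hypothetical flow index):

	int ret;
	u32 fdq_id, flow_base;

	ret = k3_udma_glue_rx_flow_init(rx_chn, FLOW_IDX, &flow_cfg);
	if (ret)
		return ret;

	fdq_id = k3_udma_glue_rx_flow_get_fdq_id(rx_chn, FLOW_IDX);
	flow_base = k3_udma_glue_rx_get_flow_id_base(rx_chn);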
1156 int k3_udma_glue_rx_flow_enable(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_rx_flow_enable() argument
1159 struct k3_udma_glue_rx_flow *flow = &rx_chn->flows[flow_idx]; in k3_udma_glue_rx_flow_enable()
1160 const struct udma_tisci_rm *tisci_rm = rx_chn->common.tisci_rm; in k3_udma_glue_rx_flow_enable()
1161 struct device *dev = rx_chn->common.dev; in k3_udma_glue_rx_flow_enable()
1167 if (!rx_chn->remote) in k3_udma_glue_rx_flow_enable()
1199 int k3_udma_glue_rx_flow_disable(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_rx_flow_disable() argument
1202 struct k3_udma_glue_rx_flow *flow = &rx_chn->flows[flow_idx]; in k3_udma_glue_rx_flow_disable()
1203 const struct udma_tisci_rm *tisci_rm = rx_chn->common.tisci_rm; in k3_udma_glue_rx_flow_disable()
1204 struct device *dev = rx_chn->common.dev; in k3_udma_glue_rx_flow_disable()
1208 if (!rx_chn->remote) in k3_udma_glue_rx_flow_disable()
1236 int k3_udma_glue_enable_rx_chn(struct k3_udma_glue_rx_channel *rx_chn) in k3_udma_glue_enable_rx_chn() argument
1240 if (rx_chn->remote) in k3_udma_glue_enable_rx_chn()
1243 if (rx_chn->flows_ready < rx_chn->flow_num) in k3_udma_glue_enable_rx_chn()
1246 ret = xudma_navss_psil_pair(rx_chn->common.udmax, in k3_udma_glue_enable_rx_chn()
1247 rx_chn->common.src_thread, in k3_udma_glue_enable_rx_chn()
1248 rx_chn->common.dst_thread); in k3_udma_glue_enable_rx_chn()
1250 dev_err(rx_chn->common.dev, "PSI-L request err %d\n", ret); in k3_udma_glue_enable_rx_chn()
1254 rx_chn->psil_paired = true; in k3_udma_glue_enable_rx_chn()
1256 xudma_rchanrt_write(rx_chn->udma_rchanx, UDMA_CHAN_RT_CTL_REG, in k3_udma_glue_enable_rx_chn()
1259 xudma_rchanrt_write(rx_chn->udma_rchanx, UDMA_CHAN_RT_PEER_RT_EN_REG, in k3_udma_glue_enable_rx_chn()
1262 k3_udma_glue_dump_rx_rt_chn(rx_chn, "rxrt en"); in k3_udma_glue_enable_rx_chn()
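Enable refuses remote channels, requires every flow to be ready (flows_ready must reach flow_num), pairs the PSI-L source and destination threads, and only then sets the local and peer RT-enable bits. The bring-up order that implies, as a sketch:

	/* all flows first, then the channel itself */
	for (i = 0; i < cfg.flow_id_num; i++) {
		ret = k3_udma_glue_rx_flow_init(rx_chn, i, &flow_cfg);
		if (ret)
			goto err_release;
	}

	ret = k3_udma_glue_enable_rx_chn(rx_chn);
	if (ret)
		goto err_release;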
1267 void k3_udma_glue_disable_rx_chn(struct k3_udma_glue_rx_channel *rx_chn) in k3_udma_glue_disable_rx_chn() argument
1269 k3_udma_glue_dump_rx_rt_chn(rx_chn, "rxrt dis1"); in k3_udma_glue_disable_rx_chn()
1271 xudma_rchanrt_write(rx_chn->udma_rchanx, in k3_udma_glue_disable_rx_chn()
1273 xudma_rchanrt_write(rx_chn->udma_rchanx, UDMA_CHAN_RT_CTL_REG, 0); in k3_udma_glue_disable_rx_chn()
1275 k3_udma_glue_dump_rx_rt_chn(rx_chn, "rxrt dis2"); in k3_udma_glue_disable_rx_chn()
1277 if (rx_chn->psil_paired) { in k3_udma_glue_disable_rx_chn()
1278 xudma_navss_psil_unpair(rx_chn->common.udmax, in k3_udma_glue_disable_rx_chn()
1279 rx_chn->common.src_thread, in k3_udma_glue_disable_rx_chn()
1280 rx_chn->common.dst_thread); in k3_udma_glue_disable_rx_chn()
1281 rx_chn->psil_paired = false; in k3_udma_glue_disable_rx_chn()
1286 void k3_udma_glue_tdown_rx_chn(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_tdown_rx_chn() argument
1292 if (rx_chn->remote) in k3_udma_glue_tdown_rx_chn()
1295 k3_udma_glue_dump_rx_rt_chn(rx_chn, "rxrt tdown1"); in k3_udma_glue_tdown_rx_chn()
1297 xudma_rchanrt_write(rx_chn->udma_rchanx, UDMA_CHAN_RT_PEER_RT_EN_REG, in k3_udma_glue_tdown_rx_chn()
1300 val = xudma_rchanrt_read(rx_chn->udma_rchanx, UDMA_CHAN_RT_CTL_REG); in k3_udma_glue_tdown_rx_chn()
1303 val = xudma_rchanrt_read(rx_chn->udma_rchanx, in k3_udma_glue_tdown_rx_chn()
1307 dev_err(rx_chn->common.dev, "RX tdown timeout\n"); in k3_udma_glue_tdown_rx_chn()
1313 val = xudma_rchanrt_read(rx_chn->udma_rchanx, in k3_udma_glue_tdown_rx_chn()
1316 dev_err(rx_chn->common.dev, "TX tdown peer not stopped\n"); in k3_udma_glue_tdown_rx_chn()
1317 k3_udma_glue_dump_rx_rt_chn(rx_chn, "rxrt tdown2"); in k3_udma_glue_tdown_rx_chn()
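Teardown writes a teardown request into the peer RT-enable register and, when sync is requested, polls the channel RT control until the enable bit clears, warning on timeout; note that the "TX tdown peer not stopped" message is the driver's own wording even on the RX path. A sketch of the orderly shutdown a client performs, assuming a hypothetical rx_cleanup(void *data, dma_addr_t desc_dma) callback that unmaps and frees each outstanding descriptor:

	k3_udma_glue_tdown_rx_chn(rx_chn, true);	/* synchronous teardown */

	for (i = 0; i < cfg.flow_id_num; i++)
		k3_udma_glue_reset_rx_chn(rx_chn, i, priv, rx_cleanup, false);

	k3_udma_glue_disable_rx_chn(rx_chn);
	k3_udma_glue_release_rx_chn(rx_chn);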
1321 void k3_udma_glue_reset_rx_chn(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_reset_rx_chn() argument
1325 struct k3_udma_glue_rx_flow *flow = &rx_chn->flows[flow_num]; in k3_udma_glue_reset_rx_chn()
1326 struct device *dev = rx_chn->common.dev; in k3_udma_glue_reset_rx_chn()
1365 int k3_udma_glue_push_rx_chn(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_push_rx_chn() argument
1369 struct k3_udma_glue_rx_flow *flow = &rx_chn->flows[flow_num]; in k3_udma_glue_push_rx_chn()
1375 int k3_udma_glue_pop_rx_chn(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_pop_rx_chn() argument
1378 struct k3_udma_glue_rx_flow *flow = &rx_chn->flows[flow_num]; in k3_udma_glue_pop_rx_chn()
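Push and pop are thin wrappers over the per-flow rings: push queues a prepared CPPI5 host descriptor on the free-descriptor queue, pop pulls a completed descriptor's DMA address off the receive ring. A sketch of the data path on flow 0 (desc_rx and desc_dma come from a driver-owned descriptor pool, an assumption here):

	ret = k3_udma_glue_push_rx_chn(rx_chn, 0, desc_rx, desc_dma);
	if (ret)
		return ret;

	/* completion side, e.g. in a NAPI poll loop */
	while (!k3_udma_glue_pop_rx_chn(rx_chn, 0, &desc_dma)) {
		/* look up desc_rx from desc_dma, unmap and process it */
	}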
1384 int k3_udma_glue_rx_get_irq(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_rx_get_irq() argument
1389 flow = &rx_chn->flows[flow_num]; in k3_udma_glue_rx_get_irq()
1391 if (xudma_is_pktdma(rx_chn->common.udmax)) { in k3_udma_glue_rx_get_irq()
1392 flow->virq = xudma_pktdma_rflow_get_irq(rx_chn->common.udmax, in k3_udma_glue_rx_get_irq()
1403 k3_udma_glue_rx_get_dma_device(struct k3_udma_glue_rx_channel *rx_chn) in k3_udma_glue_rx_get_dma_device() argument
1405 if (xudma_is_pktdma(rx_chn->common.udmax) && in k3_udma_glue_rx_get_dma_device()
1406 (rx_chn->common.atype_asel == 14 || rx_chn->common.atype_asel == 15)) in k3_udma_glue_rx_get_dma_device()
1407 return &rx_chn->common.chan_dev; in k3_udma_glue_rx_get_dma_device()
1409 return xudma_get_device(rx_chn->common.udmax); in k3_udma_glue_rx_get_dma_device()
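On PKTDMA each flow carries its own ring-completion interrupt, and when ASEL 14 or 15 (the coherent paths) is configured, the glue layer's chan_dev rather than the core UDMA device must be used for DMA mapping so the coherence attributes match. A sketch (buf and buf_len are hypothetical):

	struct device *dma_dev;
	dma_addr_t buf_dma;
	int irq;

	irq = k3_udma_glue_rx_get_irq(rx_chn, 0);
	if (irq <= 0)
		return irq ? irq : -ENXIO;

	dma_dev = k3_udma_glue_rx_get_dma_device(rx_chn);
	buf_dma = dma_map_single(dma_dev, buf, buf_len, DMA_FROM_DEVICE);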
1413 void k3_udma_glue_rx_dma_to_cppi5_addr(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_rx_dma_to_cppi5_addr() argument
1416 if (!xudma_is_pktdma(rx_chn->common.udmax) || in k3_udma_glue_rx_dma_to_cppi5_addr()
1417 !rx_chn->common.atype_asel) in k3_udma_glue_rx_dma_to_cppi5_addr()
1420 *addr |= (u64)rx_chn->common.atype_asel << K3_ADDRESS_ASEL_SHIFT; in k3_udma_glue_rx_dma_to_cppi5_addr()
1424 void k3_udma_glue_rx_cppi5_to_dma_addr(struct k3_udma_glue_rx_channel *rx_chn, in k3_udma_glue_rx_cppi5_to_dma_addr() argument
1427 if (!xudma_is_pktdma(rx_chn->common.udmax) || in k3_udma_glue_rx_cppi5_to_dma_addr()
1428 !rx_chn->common.atype_asel) in k3_udma_glue_rx_cppi5_to_dma_addr()
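The two address helpers are no-ops unless the channel is PKTDMA with a non-zero ASEL; otherwise the ASEL value rides in the top address bits (K3_ADDRESS_ASEL_SHIFT) of every pointer stored in a CPPI5 descriptor and must be stripped again before the DMA API sees the address. A sketch of the round trip, reusing the hypothetical buffer from the previous sketch:

	/* before attaching the buffer to the descriptor */
	k3_udma_glue_rx_dma_to_cppi5_addr(rx_chn, &buf_dma);
	cppi5_hdesc_attach_buf(desc_rx, buf_dma, buf_len, buf_dma, buf_len);

	/* on completion, before the unmap */
	k3_udma_glue_rx_cppi5_to_dma_addr(rx_chn, &buf_dma);
	dma_unmap_single(dma_dev, buf_dma, buf_len, DMA_FROM_DEVICE);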