Lines matching refs: trb (Tegra XUDC device-controller driver, drivers/usb/gadget/udc/tegra-xudc.c)
337 static inline u32 trb_read_##name(struct tegra_xudc_trb *trb) \
339 return (le32_to_cpu(trb->member) >> (shift)) & (mask); \
342 trb_write_##name(struct tegra_xudc_trb *trb, u32 val) \
346 tmp = le32_to_cpu(trb->member) & ~((mask) << (shift)); \
348 trb->member = cpu_to_le32(tmp); \
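
The fragments at 337-348 are the body of the macro that generates a read/write accessor pair for each TRB field: the read shifts and masks a little-endian 32-bit word, the write does a read-modify-write that clears the field before inserting the new value. A minimal userspace sketch of the same shift/mask pattern (the field position, names and demo main() are made up here; the driver additionally byte-swaps through le32_to_cpu()/cpu_to_le32()):

    #include <stdint.h>
    #include <stdio.h>

    #define FIELD_SHIFT 10          /* illustrative field placement */
    #define FIELD_MASK  0x3f

    static uint32_t field_read(uint32_t word)
    {
        return (word >> FIELD_SHIFT) & FIELD_MASK;
    }

    static uint32_t field_write(uint32_t word, uint32_t val)
    {
        word &= ~((uint32_t)FIELD_MASK << FIELD_SHIFT);  /* clear the old field */
        word |= (val & FIELD_MASK) << FIELD_SHIFT;       /* insert the new value */
        return word;
    }

    int main(void)
    {
        uint32_t control = 0;

        control = field_write(control, 0x2a);
        printf("field = 0x%x\n", field_read(control));   /* prints 0x2a */
        return 0;
    }
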
370 static inline u64 trb_read_data_ptr(struct tegra_xudc_trb *trb) in trb_read_data_ptr() argument
372 return ((u64)trb_read_data_hi(trb) << 32) | in trb_read_data_ptr()
373 trb_read_data_lo(trb); in trb_read_data_ptr()
376 static inline void trb_write_data_ptr(struct tegra_xudc_trb *trb, u64 addr) in trb_write_data_ptr() argument
378 trb_write_data_lo(trb, lower_32_bits(addr)); in trb_write_data_ptr()
379 trb_write_data_hi(trb, upper_32_bits(addr)); in trb_write_data_ptr()
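
trb_read_data_ptr()/trb_write_data_ptr() (370-379) store a 64-bit DMA address as two 32-bit TRB words, using the kernel's lower_32_bits()/upper_32_bits() helpers for the split. The split/recombine arithmetic, sketched standalone:

    #include <stdint.h>
    #include <assert.h>

    int main(void)
    {
        uint64_t addr = 0x123456789abcdef0ULL;

        /* split, as trb_write_data_ptr() does via lower/upper_32_bits() */
        uint32_t lo = (uint32_t)addr;
        uint32_t hi = (uint32_t)(addr >> 32);

        /* recombine, as trb_read_data_ptr() does */
        uint64_t back = ((uint64_t)hi << 32) | lo;

        assert(back == addr);
        return 0;
    }
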
619 struct tegra_xudc_trb *trb) in dump_trb() argument
623 type, trb, trb->data_lo, trb->data_hi, trb->status, in dump_trb()
624 trb->control); in dump_trb()
890 struct tegra_xudc_trb *trb) in trb_virt_to_phys() argument
894 index = trb - ep->transfer_ring; in trb_virt_to_phys()
899 return (ep->transfer_ring_phys + index * sizeof(*trb)); in trb_virt_to_phys()
905 struct tegra_xudc_trb *trb; in trb_phys_to_virt() local
908 index = (addr - ep->transfer_ring_phys) / sizeof(*trb); in trb_phys_to_virt()
913 trb = &ep->transfer_ring[index]; in trb_phys_to_virt()
915 return trb; in trb_phys_to_virt()
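
Because the transfer ring is one contiguous DMA allocation, trb_virt_to_phys() and trb_phys_to_virt() (890-915) reduce to index arithmetic between the ring's CPU pointer and its DMA base. A standalone sketch of that arithmetic (ring size, base address and names are illustrative):

    #include <stdint.h>
    #include <stddef.h>
    #include <assert.h>

    struct trb { uint32_t w[4]; };                       /* 16-byte TRB slot */

    static struct trb ring[64];                          /* stand-in for ep->transfer_ring */
    static const uint64_t ring_phys = 0x80001000ULL;     /* stand-in DMA base */

    static uint64_t ring_virt_to_phys(struct trb *trb)
    {
        ptrdiff_t index = trb - ring;                    /* element index, not byte offset */

        return ring_phys + index * sizeof(*trb);
    }

    static struct trb *ring_phys_to_virt(uint64_t addr)
    {
        size_t index = (addr - ring_phys) / sizeof(struct trb);

        return &ring[index];
    }

    int main(void)
    {
        struct trb *trb = &ring[5];

        assert(ring_phys_to_virt(ring_virt_to_phys(trb)) == trb);
        return 0;
    }
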
1075 struct tegra_xudc_trb *trb, in tegra_xudc_queue_one_trb() argument
1089 trb_write_data_ptr(trb, buf_addr); in tegra_xudc_queue_one_trb()
1091 trb_write_transfer_len(trb, len); in tegra_xudc_queue_one_trb()
1092 trb_write_td_size(trb, req->trbs_needed - req->trbs_queued - 1); in tegra_xudc_queue_one_trb()
1096 trb_write_chain(trb, 0); in tegra_xudc_queue_one_trb()
1098 trb_write_chain(trb, 1); in tegra_xudc_queue_one_trb()
1100 trb_write_ioc(trb, ioc); in tegra_xudc_queue_one_trb()
1105 trb_write_isp(trb, 1); in tegra_xudc_queue_one_trb()
1107 trb_write_isp(trb, 0); in tegra_xudc_queue_one_trb()
1112 trb_write_type(trb, TRB_TYPE_DATA_STAGE); in tegra_xudc_queue_one_trb()
1114 trb_write_type(trb, TRB_TYPE_STATUS_STAGE); in tegra_xudc_queue_one_trb()
1118 trb_write_data_stage_dir(trb, 1); in tegra_xudc_queue_one_trb()
1120 trb_write_data_stage_dir(trb, 0); in tegra_xudc_queue_one_trb()
1122 trb_write_type(trb, TRB_TYPE_ISOCH); in tegra_xudc_queue_one_trb()
1123 trb_write_sia(trb, 1); in tegra_xudc_queue_one_trb()
1124 trb_write_frame_id(trb, 0); in tegra_xudc_queue_one_trb()
1125 trb_write_tlbpc(trb, 0); in tegra_xudc_queue_one_trb()
1127 trb_write_type(trb, TRB_TYPE_STREAM); in tegra_xudc_queue_one_trb()
1128 trb_write_stream_id(trb, req->usb_req.stream_id); in tegra_xudc_queue_one_trb()
1130 trb_write_type(trb, TRB_TYPE_NORMAL); in tegra_xudc_queue_one_trb()
1131 trb_write_stream_id(trb, 0); in tegra_xudc_queue_one_trb()
1134 trb_write_cycle(trb, ep->pcs); in tegra_xudc_queue_one_trb()
1139 dump_trb(xudc, "TRANSFER", trb); in tegra_xudc_queue_one_trb()
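
tegra_xudc_queue_one_trb() (1075-1139) fills in the buffer pointer, transfer length, TD size and the per-type control fields, and writes the cycle bit only at the end (1134), so the controller never observes a half-built TRB. A reduced sketch of that ordering; the bit positions and helpers below are illustrative, not the driver's actual layout:

    #include <stdint.h>
    #include <stdbool.h>

    struct trb { uint32_t data_lo, data_hi, status, control; };

    #define TRB_CYCLE       (1u << 0)       /* illustrative bit positions */
    #define TRB_IOC         (1u << 5)
    #define TRB_TYPE_NORMAL (1u << 10)

    static void queue_one_trb(struct trb *trb, uint64_t buf, uint32_t len,
                              bool last, bool pcs)
    {
        uint32_t control = TRB_TYPE_NORMAL;

        trb->data_lo = (uint32_t)buf;
        trb->data_hi = (uint32_t)(buf >> 32);
        trb->status  = len;

        if (last)
            control |= TRB_IOC;             /* interrupt only on the final TRB */

        /* the cycle bit goes in last, after every other field is valid */
        if (pcs)
            control |= TRB_CYCLE;
        trb->control = control;
    }

    int main(void)
    {
        struct trb trb = { 0 };

        queue_one_trb(&trb, 0x80002000ULL, 512, true, true);
        return 0;
    }
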
1179 struct tegra_xudc_trb *trb = &ep->transfer_ring[ep->enq_ptr]; in tegra_xudc_queue_trbs() local
1185 tegra_xudc_queue_one_trb(ep, req, trb, ioc); in tegra_xudc_queue_trbs()
1186 req->last_trb = trb; in tegra_xudc_queue_trbs()
1190 trb = &ep->transfer_ring[ep->enq_ptr]; in tegra_xudc_queue_trbs()
1191 trb_write_cycle(trb, ep->pcs); in tegra_xudc_queue_trbs()
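
In tegra_xudc_queue_trbs() (1179-1191), once the enqueue pointer reaches the last slot (which holds the link TRB installed by setup_link_trb() further down), the driver hands that link TRB to hardware by writing the current producer cycle state into it, flips pcs, and wraps enq_ptr back to slot 0. A sketch of the wrap step, with the ring size and names assumed:

    #include <stdbool.h>
    #include <stdint.h>

    #define RING_SIZE 64                    /* assumed ring size; last slot = link TRB */

    struct trb { uint32_t data_lo, data_hi, status, control; };

    struct ring {
        struct trb trbs[RING_SIZE];
        unsigned int enq_ptr;               /* next slot to fill */
        bool pcs;                           /* producer cycle state */
    };

    static void advance_enqueue(struct ring *r)
    {
        r->enq_ptr++;
        if (r->enq_ptr == RING_SIZE - 1) {
            /* hand the link TRB to hardware with the current cycle bit ... */
            struct trb *link = &r->trbs[r->enq_ptr];
            link->control = (link->control & ~1u) | (r->pcs ? 1u : 0u);
            /* ... then flip the producer cycle state and wrap to the start */
            r->pcs = !r->pcs;
            r->enq_ptr = 0;
        }
    }

    int main(void)
    {
        struct ring r = { .enq_ptr = RING_SIZE - 2, .pcs = true };

        advance_enqueue(&r);                /* wraps: enq_ptr back to 0, pcs flipped */
        return (r.enq_ptr == 0 && !r.pcs) ? 0 : 1;
    }
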
1332 struct tegra_xudc_trb *trb = req->first_trb; in squeeze_transfer_ring() local
1333 bool pcs_enq = trb_read_cycle(trb); in squeeze_transfer_ring()
1340 while (trb != &ep->transfer_ring[ep->enq_ptr]) { in squeeze_transfer_ring()
1341 pcs = trb_read_cycle(trb); in squeeze_transfer_ring()
1342 memset(trb, 0, sizeof(*trb)); in squeeze_transfer_ring()
1343 trb_write_cycle(trb, !pcs); in squeeze_transfer_ring()
1344 trb++; in squeeze_transfer_ring()
1346 if (trb_read_type(trb) == TRB_TYPE_LINK) in squeeze_transfer_ring()
1347 trb = ep->transfer_ring; in squeeze_transfer_ring()
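
squeeze_transfer_ring() (1332-1347) reclaims the TRBs of a cancelled request: each one is zeroed and rewritten with the opposite cycle bit so the slot looks unqueued to the controller, and the walk wraps back to the ring start when it hits the link TRB. An equivalent standalone sketch, with the cycle/type bit positions assumed:

    #include <string.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define RING_SIZE     64
    #define TRB_TYPE_LINK 6                 /* assumed type encoding */

    struct trb { uint32_t data_lo, data_hi, status, control; };

    static struct trb ring[RING_SIZE];

    static bool trb_cycle(const struct trb *t)    { return t->control & 1; }
    static uint32_t trb_type(const struct trb *t) { return (t->control >> 10) & 0x3f; }

    /* wipe TRBs from 'first' up to (not including) the enqueue pointer */
    static void squeeze(struct trb *first, struct trb *enq)
    {
        struct trb *trb = first;

        while (trb != enq) {
            bool pcs = trb_cycle(trb);

            memset(trb, 0, sizeof(*trb));
            trb->control = pcs ? 0u : 1u;   /* inverted cycle: slot looks empty */
            trb++;

            if (trb_type(trb) == TRB_TYPE_LINK)
                trb = ring;                 /* wrap past the link TRB */
        }
    }

    int main(void)
    {
        squeeze(&ring[3], &ring[10]);
        return 0;
    }
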
1375 struct tegra_xudc_trb *trb) in trb_in_request() argument
1378 req->first_trb, req->last_trb, trb); in trb_in_request()
1380 if (trb >= req->first_trb && (trb <= req->last_trb || in trb_in_request()
1384 if (trb < req->first_trb && trb <= req->last_trb && in trb_in_request()
1397 struct tegra_xudc_trb *trb) in trb_before_request() argument
1402 __func__, req->first_trb, req->last_trb, enq_trb, trb); in trb_before_request()
1404 if (trb < req->first_trb && (enq_trb <= trb || in trb_before_request()
1408 if (trb > req->first_trb && req->first_trb < enq_trb && enq_trb <= trb) in trb_before_request()
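
trb_in_request() and trb_before_request() (1375-1408) compare ring positions while allowing a request's TRBs to wrap around the end of the ring, i.e. first_trb may sit above last_trb. An equivalent formulation of the membership test with a few worked cases (names illustrative):

    #include <stdbool.h>
    #include <assert.h>

    struct trb { unsigned int w; };

    static bool trb_in_range(struct trb *first, struct trb *last, struct trb *trb)
    {
        if (first <= last)                  /* request does not wrap */
            return trb >= first && trb <= last;

        /* request wraps around the end of the ring */
        return trb >= first || trb <= last;
    }

    int main(void)
    {
        struct trb ring[8];

        /* non-wrapping request covering slots 2..4 */
        assert(trb_in_range(&ring[2], &ring[4], &ring[3]));
        assert(!trb_in_range(&ring[2], &ring[4], &ring[6]));

        /* wrapping request covering slots 6, 7, 0, 1 */
        assert(trb_in_range(&ring[6], &ring[1], &ring[7]));
        assert(trb_in_range(&ring[6], &ring[1], &ring[0]));
        assert(!trb_in_range(&ring[6], &ring[1], &ring[3]));
        return 0;
    }
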
1695 struct tegra_xudc_trb *trb) in setup_link_trb() argument
1697 trb_write_data_ptr(trb, ep->transfer_ring_phys); in setup_link_trb()
1698 trb_write_type(trb, TRB_TYPE_LINK); in setup_link_trb()
1699 trb_write_toggle_cycle(trb, 1); in setup_link_trb()
2630 trb_to_request(struct tegra_xudc_ep *ep, struct tegra_xudc_trb *trb) in trb_to_request() argument
2638 if (trb_in_request(ep, req, trb)) in trb_to_request()
2650 struct tegra_xudc_trb *trb; in tegra_xudc_handle_transfer_completion() local
2656 trb = trb_phys_to_virt(ep, trb_read_data_ptr(event)); in tegra_xudc_handle_transfer_completion()
2657 req = trb_to_request(ep, trb); in tegra_xudc_handle_transfer_completion()
2663 if (req && (short_packet || (!trb_read_chain(trb) && in tegra_xudc_handle_transfer_completion()
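
On a transfer completion (2650-2663), the event TRB's data-pointer field carries the DMA address of the transfer TRB that finished; the driver maps it back to a ring slot with trb_phys_to_virt() and finds the owning request with trb_in_request(). A sketch of that lookup chain, reusing the illustrative helpers from the sketches above and flattening the request list to an array:

    #include <stdint.h>
    #include <stddef.h>

    #define RING_SIZE 64

    struct trb { uint32_t w[4]; };

    struct request {
        struct trb *first_trb;
        struct trb *last_trb;
    };

    static struct trb ring[RING_SIZE];
    static const uint64_t ring_phys = 0x80001000ULL;    /* assumed DMA base */

    static struct trb *ring_phys_to_virt(uint64_t addr)
    {
        return &ring[(addr - ring_phys) / sizeof(struct trb)];
    }

    static int trb_in_request(const struct request *req, const struct trb *trb)
    {
        if (req->first_trb <= req->last_trb)
            return trb >= req->first_trb && trb <= req->last_trb;
        return trb >= req->first_trb || trb <= req->last_trb;   /* wrapped */
    }

    /* walk the pending requests and return the one owning the completed TRB */
    static struct request *trb_to_request(struct request *reqs, size_t n,
                                          uint64_t event_data_ptr)
    {
        struct trb *trb = ring_phys_to_virt(event_data_ptr);

        for (size_t i = 0; i < n; i++)
            if (trb_in_request(&reqs[i], trb))
                return &reqs[i];
        return NULL;
    }

    int main(void)
    {
        struct request reqs[1] = { { &ring[2], &ring[4] } };

        /* completion event reporting the TRB at slot 3 */
        return trb_to_request(reqs, 1, ring_phys + 3 * sizeof(struct trb)) ? 0 : 1;
    }
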
2701 struct tegra_xudc_trb *trb; in tegra_xudc_handle_transfer_event() local
2711 trb = trb_phys_to_virt(ep, trb_read_data_ptr(event)); in tegra_xudc_handle_transfer_event()
2714 ep->deq_ptr = (trb - ep->transfer_ring) + 1; in tegra_xudc_handle_transfer_event()
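
tegra_xudc_handle_transfer_event() (2701-2714) then moves the software dequeue pointer one slot past the completed TRB, using the same pointer-difference index as trb_virt_to_phys(). A small illustration of that update; the wrap past the link TRB slot is assumed from the ring layout rather than shown in the listing:

    #include <stdint.h>
    #include <assert.h>

    #define RING_SIZE 64                    /* assumed; last slot holds the link TRB */

    struct trb { uint32_t w[4]; };

    static struct trb ring[RING_SIZE];

    static unsigned int advance_deq_past(const struct trb *completed)
    {
        unsigned int deq_ptr = (unsigned int)(completed - ring) + 1;

        if (deq_ptr == RING_SIZE - 1)       /* skip the link TRB slot */
            deq_ptr = 0;
        return deq_ptr;
    }

    int main(void)
    {
        assert(advance_deq_past(&ring[10]) == 11);
        assert(advance_deq_past(&ring[RING_SIZE - 2]) == 0);
        return 0;
    }
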