Lines Matching refs:prl_tx
123 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_subsys_init() local
127 prl_tx->dev = dev; in prl_subsys_init()
132 smf_set_initial(SMF_CTX(prl_tx), &prl_tx_states[PRL_TX_SUSPEND]); in prl_subsys_init()
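
The three lines above are the standard Zephyr SMF bring-up: the state-machine context sits at the start of the object so SMF_CTX() can cast back to it, and smf_set_initial() selects the starting state. A minimal sketch of the same pattern, assuming CONFIG_SMF_ANCESTOR_SUPPORT is disabled so SMF_CREATE_STATE() takes the three entry/run/exit callbacks; all demo_* names are illustrative. smf_run_state(), seen later in prl_run(), then drives the machine one step at a time.

    #include <zephyr/device.h>
    #include <zephyr/smf.h>

    enum demo_tx_state { DEMO_TX_SUSPEND };

    struct demo_tx {
        struct smf_ctx ctx;       /* must be first so SMF_CTX() can cast */
        const struct device *dev; /* back-pointer, like prl_tx->dev above */
    };

    static void demo_tx_suspend_run(void *obj)
    {
        (void)obj; /* parked until smf_set_state() moves the machine on */
    }

    static const struct smf_state demo_tx_states[] = {
        [DEMO_TX_SUSPEND] = SMF_CREATE_STATE(NULL, demo_tx_suspend_run, NULL),
    };

    static void demo_tx_init(struct demo_tx *tx, const struct device *dev)
    {
        tx->dev = dev;
        smf_set_initial(SMF_CTX(tx), &demo_tx_states[DEMO_TX_SUSPEND]);
    }
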
186 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_send_ctrl_msg() local
189 prl_tx->emsg.type = type; in prl_send_ctrl_msg()
191 prl_tx->msg_type = msg; in prl_send_ctrl_msg()
193 prl_tx->emsg.len = 0; in prl_send_ctrl_msg()
195 atomic_set_bit(&prl_tx->flags, PRL_FLAGS_MSG_XMIT); in prl_send_ctrl_msg()
209 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_send_data_msg() local
212 prl_tx->emsg.type = type; in prl_send_data_msg()
214 prl_tx->msg_type = msg; in prl_send_data_msg()
216 atomic_set_bit(&prl_tx->flags, PRL_FLAGS_MSG_XMIT); in prl_send_data_msg()
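
Both senders follow the same handshake: fill in the message, then publish it by setting PRL_FLAGS_MSG_XMIT. The TX state machine later consumes the bit with atomic_test_and_clear_bit(), so each request is handled exactly once without a lock. A sketch of that pattern with a standalone flag word; the FLAG_/tx_flags names are illustrative stand-ins:

    #include <zephyr/sys/atomic.h>
    #include <stdbool.h>

    #define FLAG_MSG_XMIT 0 /* illustrative stand-in for PRL_FLAGS_MSG_XMIT */

    static atomic_t tx_flags = ATOMIC_INIT(0);

    /* Policy-engine side: publish a transmit request, as the
     * prl_send_*_msg() functions above do. */
    static void request_tx(void)
    {
        atomic_set_bit(&tx_flags, FLAG_MSG_XMIT);
    }

    /* State-machine side: consume the request exactly once, as the
     * wait_for_message_request run state does with test-and-clear. */
    static bool take_tx_request(void)
    {
        return atomic_test_and_clear_bit(&tx_flags, FLAG_MSG_XMIT);
    }
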
284 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_first_msg_notificaiton() local
286 atomic_set_bit(&prl_tx->flags, PRL_FLAGS_FIRST_MSG_PENDING); in prl_first_msg_notificaiton()
295 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_run() local
331 smf_run_state(SMF_CTX(prl_tx)); in prl_run()
369 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in alert_handler() local
377 atomic_set_bit(&prl_tx->flags, PRL_FLAGS_TX_ERROR); in alert_handler()
380 atomic_set_bit(&prl_tx->flags, PRL_FLAGS_TX_DISCARDED); in alert_handler()
383 atomic_set_bit(&prl_tx->flags, PRL_FLAGS_TX_COMPLETE); in alert_handler()
400 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_tx_set_state() local
403 smf_set_state(SMF_CTX(prl_tx), &prl_tx_states[state]); in prl_tx_set_state()
435 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in increment_msgid_counter() local
438 if (prl_tx->last_xmit_type >= NUM_SOP_STAR_TYPES) { in increment_msgid_counter()
442 prl_tx->msg_id_counter[prl_tx->last_xmit_type] = in increment_msgid_counter()
443 (prl_tx->msg_id_counter[prl_tx->last_xmit_type] + 1) & PD_MESSAGE_ID_COUNT; in increment_msgid_counter()
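
The masked increment works because the USB PD MessageID is a 3-bit rolling counter, so assuming PD_MESSAGE_ID_COUNT is the 3-bit mask 0x07, the counter walks 0..7 and wraps back to 0. A self-contained check:

    #include <assert.h>
    #include <stdint.h>

    #define PD_MESSAGE_ID_COUNT 0x07 /* assumption: 3-bit MessageID mask */

    int main(void)
    {
        uint8_t id = 0;

        /* Eight increments walk 1,2,...,7 and then wrap to 0. */
        for (int i = 0; i < 8; i++) {
            id = (id + 1) & PD_MESSAGE_ID_COUNT;
        }
        assert(id == 0);
        return 0;
    }
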
452 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in get_sop_star_header() local
453 const bool is_sop_packet = prl_tx->emsg.type == PD_PACKET_SOP; in get_sop_star_header()
457 header.message_type = prl_tx->msg_type; in get_sop_star_header()
459 header.specification_revision = data->rev[prl_tx->emsg.type]; in get_sop_star_header()
461 header.message_id = prl_tx->msg_id_counter[prl_tx->emsg.type]; in get_sop_star_header()
462 header.number_of_data_objects = PD_CONVERT_BYTES_TO_PD_HEADER_COUNT(prl_tx->emsg.len); in get_sop_star_header()
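
get_sop_star_header() fills the 16-bit USB PD message header field by field; number_of_data_objects is derived from the byte length, which PD_CONVERT_BYTES_TO_PD_HEADER_COUNT presumably divides by four, since each data object is 32 bits. A sketch of the equivalent packing with explicit shifts; the field positions follow the USB PD specification, the function itself is illustrative:

    #include <stdint.h>

    static uint16_t pd_pack_header(uint8_t msg_type, uint8_t spec_rev,
                                   uint8_t msg_id, uint16_t payload_bytes)
    {
        uint8_t num_objects = payload_bytes / 4; /* data objects are 32-bit */

        return (uint16_t)((msg_type & 0x1FU)               /* Message Type [4:0]  */
                          | ((spec_rev & 0x3U) << 6)       /* Spec Revision [7:6] */
                          | ((msg_id & 0x7U) << 9)         /* MessageID [11:9]    */
                          | ((num_objects & 0x7U) << 12)); /* Num objects [14:12] */
    }
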
474 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_tx_construct_message() local
478 prl_tx->emsg.header.raw_value = in prl_tx_construct_message()
479 prl_tx->emsg.type < NUM_SOP_STAR_TYPES ? get_sop_star_header(dev) : 0; in prl_tx_construct_message()
482 prl_tx->last_xmit_type = prl_tx->emsg.type; in prl_tx_construct_message()
489 atomic_clear_bit(&prl_tx->flags, PRL_FLAGS_TX_COMPLETE); in prl_tx_construct_message()
492 atomic_clear_bit(&prl_tx->flags, PRL_FLAGS_MSG_XMIT); in prl_tx_construct_message()
498 tcpc_transmit_data(tcpc, &prl_tx->emsg); in prl_tx_construct_message()
507 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_hr_send_msg_to_phy() local
511 prl_tx->emsg.header.raw_value = 0; in prl_hr_send_msg_to_phy()
512 prl_tx->emsg.type = PD_PACKET_TX_HARD_RESET; in prl_hr_send_msg_to_phy()
519 data->prl_tx->flags = ATOMIC_INIT(0); in prl_hr_send_msg_to_phy()
522 tcpc_transmit_data(tcpc, &prl_tx->emsg); in prl_hr_send_msg_to_phy()
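
For a hard reset the code zeroes the header, selects the hard-reset packet type, and clears the whole flag word before handing the message to the TCPC, so no stale completion or error bits survive into the reset. A sketch with stub types; all stub_* names are assumptions:

    #include <zephyr/sys/atomic.h>
    #include <stdint.h>

    enum stub_packet { STUB_PACKET_TX_HARD_RESET };

    struct stub_msg {
        uint16_t header;
        enum stub_packet type;
    };

    static void send_hard_reset(atomic_t *flags, struct stub_msg *msg)
    {
        msg->header = 0;                     /* hard reset carries no header */
        msg->type = STUB_PACKET_TX_HARD_RESET;
        *flags = ATOMIC_INIT(0);             /* drop any stale TX flags      */
        /* ...then hand msg to the TCPC, as tcpc_transmit_data() does above */
    }
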
532 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_init() local
553 prl_tx->flags = ATOMIC_INIT(0); in prl_init()
554 prl_tx->last_xmit_type = PD_PACKET_SOP; in prl_init()
556 prl_tx->msg_id_counter[i] = 0; in prl_init()
558 usbc_timer_init(&prl_tx->pd_t_tx_timeout, PD_T_TX_TIMEOUT_MS); in prl_init()
559 usbc_timer_init(&prl_tx->pd_t_sink_tx, PD_T_SINK_TX_MAX_MS); in prl_init()
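
The two timers initialized here follow an init-once, start-on-entry, poll-in-run, stop-on-exit lifecycle, visible in the wait_for_phy_response and src_pending states below. Since the usbc_timer_* helpers are internal to the subsystem, here is a minimal equivalent built on k_uptime_get(); all demo_* names are assumptions:

    #include <zephyr/kernel.h>
    #include <stdbool.h>
    #include <stdint.h>

    struct demo_timer {
        int64_t deadline;
        uint32_t timeout_ms;
        bool running;
    };

    static void demo_timer_init(struct demo_timer *t, uint32_t ms)
    {
        t->timeout_ms = ms;
        t->running = false;
    }

    static void demo_timer_start(struct demo_timer *t)
    {
        t->deadline = k_uptime_get() + t->timeout_ms;
        t->running = true;
    }

    static bool demo_timer_expired(const struct demo_timer *t)
    {
        return t->running && k_uptime_get() >= t->deadline;
    }

    static void demo_timer_stop(struct demo_timer *t)
    {
        t->running = false;
    }
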
574 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_phy_layer_reset_entry() local
575 const struct device *dev = prl_tx->dev; in prl_tx_phy_layer_reset_entry()
593 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_wait_for_message_request_entry() local
598 prl_tx->flags = ATOMIC_INIT(0); in prl_tx_wait_for_message_request_entry()
606 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_wait_for_message_request_run() local
607 const struct device *dev = prl_tx->dev; in prl_tx_wait_for_message_request_run()
614 if (atomic_test_and_clear_bit(&prl_tx->flags, PRL_FLAGS_SINK_NG)) { in prl_tx_wait_for_message_request_run()
618 atomic_clear_bit(&prl_tx->flags, PRL_FLAGS_WAIT_SINK_OK); in prl_tx_wait_for_message_request_run()
626 if (atomic_test_bit(&prl_tx->flags, PRL_FLAGS_WAIT_SINK_OK) || in prl_tx_wait_for_message_request_run()
627 atomic_test_bit(&prl_tx->flags, PRL_FLAGS_SINK_NG)) { in prl_tx_wait_for_message_request_run()
641 atomic_set_bit(&prl_tx->flags, PRL_FLAGS_SINK_NG); in prl_tx_wait_for_message_request_run()
644 atomic_set_bit(&prl_tx->flags, PRL_FLAGS_WAIT_SINK_OK); in prl_tx_wait_for_message_request_run()
652 if (atomic_test_and_clear_bit(&prl_tx->flags, PRL_FLAGS_MSG_XMIT)) { in prl_tx_wait_for_message_request_run()
656 if ((prl_tx->msg_type == PD_CTRL_SOFT_RESET) && (prl_tx->emsg.len == 0)) { in prl_tx_wait_for_message_request_run()
674 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_layer_reset_for_transmit_entry() local
675 const struct device *dev = prl_tx->dev; in prl_tx_layer_reset_for_transmit_entry()
681 if (prl_tx->emsg.type < NUM_SOP_STAR_TYPES) { in prl_tx_layer_reset_for_transmit_entry()
690 prl_tx->msg_id_counter[prl_tx->emsg.type] = 0; in prl_tx_layer_reset_for_transmit_entry()
697 prl_rx->msg_id[prl_tx->emsg.type] = -1; in prl_tx_layer_reset_for_transmit_entry()
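
A Soft Reset clears MessageID state in both directions for the affected SOP* type: the TX counter restarts at zero and the RX side forgets the last accepted ID, with -1 meaning "nothing seen yet" so the next received message is always accepted. A sketch with assumed storage; NUM_SOP_STAR matching the five SOP* packet types is an assumption here:

    #include <stdint.h>

    #define NUM_SOP_STAR 5 /* assumption: SOP, SOP', SOP'', Debug', Debug'' */

    static uint8_t tx_msg_id_counter[NUM_SOP_STAR];
    static int8_t rx_last_msg_id[NUM_SOP_STAR];

    static void soft_reset_msg_ids(int sop_type)
    {
        tx_msg_id_counter[sop_type] = 0;
        rx_last_msg_id[sop_type] = -1; /* next RX message always accepted */
    }
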
710 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_wait_for_phy_response_entry() local
713 usbc_timer_start(&prl_tx->pd_t_tx_timeout); in prl_tx_wait_for_phy_response_entry()
721 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_wait_for_phy_response_run() local
722 const struct device *dev = prl_tx->dev; in prl_tx_wait_for_phy_response_run()
725 if (atomic_test_and_clear_bit(&prl_tx->flags, PRL_FLAGS_TX_DISCARDED)) { in prl_tx_wait_for_phy_response_run()
732 if (atomic_test_bit(&prl_tx->flags, PRL_FLAGS_TX_COMPLETE)) { in prl_tx_wait_for_phy_response_run()
742 } else if (usbc_timer_expired(&prl_tx->pd_t_tx_timeout) || in prl_tx_wait_for_phy_response_run()
743 atomic_test_bit(&prl_tx->flags, PRL_FLAGS_TX_ERROR)) { in prl_tx_wait_for_phy_response_run()
749 pe_report_error(dev, ERR_XMIT, prl_tx->last_xmit_type); in prl_tx_wait_for_phy_response_run()
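
The run handler resolves the PHY outcome in a fixed priority: a discard beats a completion, which beats a timeout or error. A sketch of that ladder with assumed flag names and the actions stubbed out as comments:

    #include <zephyr/sys/atomic.h>
    #include <stdbool.h>

    #define FLAG_TX_DISCARDED 0
    #define FLAG_TX_COMPLETE  1
    #define FLAG_TX_ERROR     2

    static void resolve_phy_outcome(atomic_t *flags, bool timed_out)
    {
        if (atomic_test_and_clear_bit(flags, FLAG_TX_DISCARDED)) {
            /* preempted by an incoming message: report the discard */
        } else if (atomic_test_bit(flags, FLAG_TX_COMPLETE)) {
            /* GoodCRC seen: bump the MessageID counter, report success */
        } else if (timed_out || atomic_test_bit(flags, FLAG_TX_ERROR)) {
            /* no GoodCRC within the TX timeout: report ERR_XMIT upward */
        }
    }
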
760 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_wait_for_phy_response_exit() local
761 const struct device *dev = prl_tx->dev; in prl_tx_wait_for_phy_response_exit()
763 usbc_timer_stop(&prl_tx->pd_t_tx_timeout); in prl_tx_wait_for_phy_response_exit()
775 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_src_source_tx_entry() local
776 const struct device *dev = prl_tx->dev; in prl_tx_src_source_tx_entry()
786 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_src_source_tx_run() local
787 const struct device *dev = prl_tx->dev; in prl_tx_src_source_tx_run()
789 if (atomic_test_bit(&prl_tx->flags, PRL_FLAGS_MSG_XMIT)) { in prl_tx_src_source_tx_run()
812 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_snk_start_ams_run() local
813 const struct device *dev = prl_tx->dev; in prl_tx_snk_start_ams_run()
815 if (atomic_test_bit(&prl_tx->flags, PRL_FLAGS_MSG_XMIT)) { in prl_tx_snk_start_ams_run()
830 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_src_pending_entry() local
835 usbc_timer_start(&prl_tx->pd_t_sink_tx); in prl_tx_src_pending_entry()
843 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_src_pending_run() local
844 const struct device *dev = prl_tx->dev; in prl_tx_src_pending_run()
846 if (usbc_timer_expired(&prl_tx->pd_t_sink_tx)) { in prl_tx_src_pending_run()
851 atomic_clear_bit(&prl_tx->flags, PRL_FLAGS_MSG_XMIT); in prl_tx_src_pending_run()
854 if ((prl_tx->msg_type == PD_CTRL_SOFT_RESET) && (prl_tx->emsg.len == 0)) { in prl_tx_src_pending_run()
860 if (atomic_test_bit(&prl_tx->flags, PRL_FLAGS_FIRST_MSG_PENDING)) { in prl_tx_src_pending_run()
861 atomic_clear_bit(&prl_tx->flags, PRL_FLAGS_FIRST_MSG_PENDING); in prl_tx_src_pending_run()
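
This state implements PD 3.0 source-side collision avoidance: after signalling SinkTxNG, the source holds its message until tSinkTx (pd_t_sink_tx) expires, with the first-message flag set by prl_first_msg_notificaiton() apparently letting the opening message of an AMS go out without the wait. A sketch of that gate, under those assumptions:

    #include <stdbool.h>

    static bool may_transmit(bool t_sink_tx_expired, bool first_msg_pending)
    {
        /* Transmit when the tSinkTx wait has elapsed, or when this is
         * the first pending message of the AMS. */
        return t_sink_tx_expired || first_msg_pending;
    }
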
876 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_src_pending_exit() local
879 usbc_timer_stop(&prl_tx->pd_t_sink_tx); in prl_tx_src_pending_exit()
897 struct protocol_layer_tx_t *prl_tx = (struct protocol_layer_tx_t *)obj; in prl_tx_snk_pending_run() local
898 const struct device *dev = prl_tx->dev; in prl_tx_snk_pending_run()
913 atomic_clear_bit(&prl_tx->flags, PRL_FLAGS_MSG_XMIT); in prl_tx_snk_pending_run()
920 if ((prl_tx->msg_type == PD_CTRL_SOFT_RESET) && (prl_tx->emsg.len == 0)) { in prl_tx_snk_pending_run()
924 if (atomic_test_bit(&prl_tx->flags, PRL_FLAGS_FIRST_MSG_PENDING)) { in prl_tx_snk_pending_run()
925 atomic_clear_bit(&prl_tx->flags, PRL_FLAGS_FIRST_MSG_PENDING); in prl_tx_snk_pending_run()
1008 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_hr_reset_layer_entry() local
1017 prl_tx->flags = ATOMIC_INIT(0); in prl_hr_reset_layer_entry()
1022 prl_tx->msg_id_counter[i] = 0; in prl_hr_reset_layer_entry()
1101 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_hr_wait_for_phy_hard_reset_complete_run() local
1106 if (atomic_test_bit(&prl_tx->flags, PRL_FLAGS_TX_COMPLETE) || in prl_hr_wait_for_phy_hard_reset_complete_run()
1169 struct protocol_layer_tx_t *prl_tx = data->prl_tx; in prl_rx_wait_for_phy_message() local
1212 prl_tx->msg_id_counter[pkt_type] = 0; in prl_rx_wait_for_phy_message()
1243 if (atomic_test_bit(&prl_tx->flags, PRL_FLAGS_MSG_XMIT)) { in prl_rx_wait_for_phy_message()
1248 atomic_set_bit(&prl_tx->flags, PRL_FLAGS_TX_DISCARDED); in prl_rx_wait_for_phy_message()
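
The RX path closes the loop on discarding: if a packet arrives while PRL_FLAGS_MSG_XMIT is still pending, the transmit is marked PRL_FLAGS_TX_DISCARDED and the incoming message takes precedence, which is the discard outcome the wait_for_phy_response state then acts on. A sketch with the same assumed flag layout as earlier:

    #include <zephyr/sys/atomic.h>

    #define FLAG_MSG_XMIT     0
    #define FLAG_TX_DISCARDED 1

    static void on_message_received(atomic_t *tx_flags)
    {
        if (atomic_test_bit(tx_flags, FLAG_MSG_XMIT)) {
            atomic_set_bit(tx_flags, FLAG_TX_DISCARDED);
        }
        /* ...then forward the received message to the policy engine */
    }
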