Lines Matching refs:ctx_data

375 unsigned int state = s->ctx_data.rx.data_block_state; in pool_ideal_nonblocking_data_blocks()
409 s->ctx_data.rx.data_block_state = state; in pool_ideal_nonblocking_data_blocks()
456 unsigned int last = s->ctx_data.rx.last_syt_offset; in pool_ideal_syt_offsets()
457 unsigned int state = s->ctx_data.rx.syt_offset_state; in pool_ideal_syt_offsets()
468 s->ctx_data.rx.last_syt_offset = last; in pool_ideal_syt_offsets()
469 s->ctx_data.rx.syt_offset_state = state; in pool_ideal_syt_offsets()
499 const unsigned int cache_size = s->ctx_data.tx.cache.size; in calculate_cached_cycle_count()
500 unsigned int cycles = s->ctx_data.tx.cache.tail; in calculate_cached_cycle_count()
512 const unsigned int cache_size = s->ctx_data.tx.cache.size; in cache_seq()
513 struct seq_desc *cache = s->ctx_data.tx.cache.descs; in cache_seq()
514 unsigned int cache_tail = s->ctx_data.tx.cache.tail; in cache_seq()
531 s->ctx_data.tx.cache.tail = cache_tail; in cache_seq()
536 struct seq_desc *descs = s->ctx_data.rx.seq.descs; in pool_ideal_seq_descs()
537 unsigned int seq_tail = s->ctx_data.rx.seq.tail; in pool_ideal_seq_descs()
538 const unsigned int seq_size = s->ctx_data.rx.seq.size; in pool_ideal_seq_descs()
547 s->ctx_data.rx.seq.tail = (seq_tail + count) % seq_size; in pool_ideal_seq_descs()
552 struct amdtp_stream *target = s->ctx_data.rx.replay_target; in pool_replayed_seq()
553 const struct seq_desc *cache = target->ctx_data.tx.cache.descs; in pool_replayed_seq()
554 const unsigned int cache_size = target->ctx_data.tx.cache.size; in pool_replayed_seq()
555 unsigned int cache_head = s->ctx_data.rx.cache_head; in pool_replayed_seq()
556 struct seq_desc *descs = s->ctx_data.rx.seq.descs; in pool_replayed_seq()
557 const unsigned int seq_size = s->ctx_data.rx.seq.size; in pool_replayed_seq()
558 unsigned int seq_tail = s->ctx_data.rx.seq.tail; in pool_replayed_seq()
567 s->ctx_data.rx.seq.tail = seq_tail; in pool_replayed_seq()
568 s->ctx_data.rx.cache_head = cache_head; in pool_replayed_seq()
575 if (!d->replay.enable || !s->ctx_data.rx.replay_target) { in pool_seq_descs()
581 struct amdtp_stream *tx = s->ctx_data.rx.replay_target; in pool_seq_descs()
582 const unsigned int cache_size = tx->ctx_data.tx.cache.size; in pool_seq_descs()
583 const unsigned int cache_head = s->ctx_data.rx.cache_head; in pool_seq_descs()
659 params->header_length = s->ctx_data.tx.ctx_header_size; in queue_in_packet()
660 params->payload_length = s->ctx_data.tx.max_ctx_payload_length; in queue_in_packet()
674 ((s->ctx_data.rx.fdf << CIP_FDF_SHIFT) & CIP_FDF_MASK) | in generate_cip_header()
774 if (*data_blocks > 0 && s->ctx_data.tx.dbc_interval > 0) in check_cip_header()
775 dbc_interval = s->ctx_data.tx.dbc_interval; in check_cip_header()
814 if (payload_length > cip_header_size + s->ctx_data.tx.max_ctx_payload_length) { in parse_ir_ctx_header()
817 payload_length, cip_header_size + s->ctx_data.tx.max_ctx_payload_length); in parse_ir_ctx_header()
962 ctx_header += s->ctx_data.tx.ctx_header_size / sizeof(*ctx_header); in generate_device_pkt_descs()
986 const struct seq_desc *seq_descs = s->ctx_data.rx.seq.descs; in generate_pkt_descs()
987 const unsigned int seq_size = s->ctx_data.rx.seq.size; in generate_pkt_descs()
989 unsigned int seq_head = s->ctx_data.rx.seq.head; in generate_pkt_descs()
1023 s->ctx_data.rx.seq.head = seq_head; in generate_pkt_descs()
1054 unsigned int event_count = s->ctx_data.rx.event_count; in process_rx_packets()
1113 s->ctx_data.rx.event_count = event_count; in process_rx_packets()
1216 packets = header_length / s->ctx_data.tx.ctx_header_size; in process_tx_packets()
1256 packets = header_length / s->ctx_data.tx.ctx_header_size; in drop_tx_packets()
1258 ctx_header += (packets - 1) * s->ctx_data.tx.ctx_header_size / sizeof(*ctx_header); in drop_tx_packets()
1284 packets = header_length / s->ctx_data.tx.ctx_header_size; in process_tx_packets_intermediately()
1294 ctx_header += s->ctx_data.tx.ctx_header_size / sizeof(__be32); in process_tx_packets_intermediately()
1301 size_t length = s->ctx_data.tx.ctx_header_size * offset; in process_tx_packets_intermediately()
1336 count = header_length / s->ctx_data.tx.ctx_header_size; in drop_tx_packets_initially()
1373 ctx_header += s->ctx_data.tx.ctx_header_size / sizeof(__be32); in drop_tx_packets_initially()
1379 s->ctx_data.tx.event_starts = true; in drop_tx_packets_initially()
1390 if (s->ctx_data.tx.event_starts) in drop_tx_packets_initially()
1482 tx = rx->ctx_data.rx.replay_target; in irq_target_callback_skip()
1484 if (cached_cycles > tx->ctx_data.tx.cache.size / 2) in irq_target_callback_skip()
1610 s->ctx_data.tx.max_ctx_payload_length = max_ctx_payload_size; in amdtp_stream_start()
1611 s->ctx_data.tx.ctx_header_size = ctx_header_size; in amdtp_stream_start()
1612 s->ctx_data.tx.event_starts = false; in amdtp_stream_start()
1617 s->ctx_data.tx.cache.size = max_t(unsigned int, s->syt_interval * 2, in amdtp_stream_start()
1619 s->ctx_data.tx.cache.tail = 0; in amdtp_stream_start()
1620 s->ctx_data.tx.cache.descs = kcalloc(s->ctx_data.tx.cache.size, in amdtp_stream_start()
1621 sizeof(*s->ctx_data.tx.cache.descs), GFP_KERNEL); in amdtp_stream_start()
1622 if (!s->ctx_data.tx.cache.descs) { in amdtp_stream_start()
1641 s->ctx_data.rx.seq.descs = kcalloc(queue_size, sizeof(*s->ctx_data.rx.seq.descs), GFP_KERNEL); in amdtp_stream_start()
1642 if (!s->ctx_data.rx.seq.descs) { in amdtp_stream_start()
1646 s->ctx_data.rx.seq.size = queue_size; in amdtp_stream_start()
1647 s->ctx_data.rx.seq.tail = 0; in amdtp_stream_start()
1648 s->ctx_data.rx.seq.head = 0; in amdtp_stream_start()
1651 s->ctx_data.rx.data_block_state = entry->data_block; in amdtp_stream_start()
1652 s->ctx_data.rx.syt_offset_state = entry->syt_offset; in amdtp_stream_start()
1653 s->ctx_data.rx.last_syt_offset = TICKS_PER_CYCLE; in amdtp_stream_start()
1655 s->ctx_data.rx.event_count = 0; in amdtp_stream_start()
1710 kfree(s->ctx_data.rx.seq.descs); in amdtp_stream_start()
1713 kfree(s->ctx_data.tx.cache.descs); in amdtp_stream_start()
1804 kfree(s->ctx_data.rx.seq.descs); in amdtp_stream_stop()
1807 kfree(s->ctx_data.tx.cache.descs); in amdtp_stream_stop()
1919 rx->ctx_data.rx.replay_target = tx; in make_association()
1920 rx->ctx_data.rx.cache_head = 0; in make_association()
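
The member paths above imply a tx/rx split inside the ctx_data member of struct amdtp_stream (declared in sound/firewire/amdtp-stream.h): the tx branch carries IR-context bookkeeping (ctx_header_size, max_ctx_payload_length, dbc_interval, event_starts and the seq_desc cache), while the rx branch carries IT-context state (the seq ring, data-block/SYT sequence state, event_count, fdf and the replay linkage). Below is a minimal sketch of that layout reconstructed only from the references listed here; exact types, field order and any members not referenced above are assumptions.

struct seq_desc;	/* per-cycle sequence descriptor, defined elsewhere */

struct amdtp_stream {
	/* ... other members elided ... */
	union {
		struct {
			/* IR context: packets received from the device. */
			unsigned int ctx_header_size;
			unsigned int max_ctx_payload_length;
			unsigned int dbc_interval;
			bool event_starts;
			struct {
				struct seq_desc *descs;	/* kcalloc()'d in amdtp_stream_start() */
				unsigned int size;
				unsigned int tail;
			} cache;
		} tx;
		struct {
			/* IT context: packets transmitted to the device. */
			struct {
				struct seq_desc *descs;	/* kcalloc()'d in amdtp_stream_start() */
				unsigned int size;
				unsigned int head;
				unsigned int tail;
			} seq;
			unsigned int data_block_state;
			unsigned int syt_offset_state;
			unsigned int last_syt_offset;
			unsigned int event_count;
			unsigned int fdf;
			struct amdtp_stream *replay_target;	/* set in make_association() */
			unsigned int cache_head;
		} rx;
	} ctx_data;
	/* ... */
};

The union reflects that a given stream runs either an IR or an IT context, never both: amdtp_stream_start() initializes only the branch for its direction, and the kfree() calls in its error path and in amdtp_stream_stop() release whichever per-direction descriptor array was allocated.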