Lines Matching refs:tx_queue
281 static inline void efx_farch_notify_tx_desc(struct efx_tx_queue *tx_queue) in efx_farch_notify_tx_desc() argument
286 write_ptr = tx_queue->write_count & tx_queue->ptr_mask; in efx_farch_notify_tx_desc()
288 efx_writed_page(tx_queue->efx, &reg, in efx_farch_notify_tx_desc()
289 FR_AZ_TX_DESC_UPD_DWORD_P0, tx_queue->queue); in efx_farch_notify_tx_desc()
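The write pointer here is just the free-running write_count masked with ptr_mask, which is only valid when the descriptor ring size is a power of two. A minimal standalone sketch of that indexing pattern (the names and sizes are illustrative, not taken from the driver):

    #include <assert.h>
    #include <stdio.h>

    #define RING_SIZE 512u                   /* must be a power of two */
    #define PTR_MASK  (RING_SIZE - 1u)       /* analogue of tx_queue->ptr_mask */

    int main(void)
    {
        /* the counter never wraps explicitly; the mask does the wrapping */
        unsigned int write_count = 1030;     /* analogue of tx_queue->write_count */
        unsigned int write_ptr = write_count & PTR_MASK;

        assert((RING_SIZE & PTR_MASK) == 0); /* power-of-two sanity check */
        printf("write_count %u -> ring slot %u\n", write_count, write_ptr);
        return 0;
    }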
293 static inline void efx_farch_push_tx_desc(struct efx_tx_queue *tx_queue, in efx_farch_push_tx_desc() argument
302 write_ptr = tx_queue->write_count & tx_queue->ptr_mask; in efx_farch_push_tx_desc()
306 efx_writeo_page(tx_queue->efx, &reg, in efx_farch_push_tx_desc()
307 FR_BZ_TX_DESC_UPD_P0, tx_queue->queue); in efx_farch_push_tx_desc()
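The push variant differs from efx_farch_notify_tx_desc above in that the doorbell write carries the first descriptor as well as the new pointer (an oword write to FR_BZ_TX_DESC_UPD_P0 instead of a dword write to FR_AZ_TX_DESC_UPD_DWORD_P0), so the NIC can skip fetching that descriptor from host memory. A tiny standalone model of the two update styles; the structures below are made up for illustration and are not the real register layout:

    #include <stdint.h>
    #include <stdio.h>

    struct txd { uint64_t raw; };        /* stand-in for one TX descriptor */

    /* Made-up doorbell models: "notify" carries only the new write pointer,
     * "push" carries the descriptor too, saving the NIC one DMA fetch. */
    struct notify_update { uint32_t write_ptr; };
    struct push_update   { struct txd desc; uint32_t write_ptr; };

    int main(void)
    {
        struct txd first = { .raw = 0xfeed };
        struct notify_update n = { .write_ptr = 1 };
        struct push_update   p = { .desc = first, .write_ptr = 1 };

        printf("notify carries %zu bytes, push carries %zu bytes\n",
               sizeof(n), sizeof(p));
        return 0;
    }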
315 void efx_farch_tx_write(struct efx_tx_queue *tx_queue) in efx_farch_tx_write() argument
320 unsigned old_write_count = tx_queue->write_count; in efx_farch_tx_write()
322 tx_queue->xmit_more_available = false; in efx_farch_tx_write()
323 if (unlikely(tx_queue->write_count == tx_queue->insert_count)) in efx_farch_tx_write()
327 write_ptr = tx_queue->write_count & tx_queue->ptr_mask; in efx_farch_tx_write()
328 buffer = &tx_queue->buffer[write_ptr]; in efx_farch_tx_write()
329 txd = efx_tx_desc(tx_queue, write_ptr); in efx_farch_tx_write()
330 ++tx_queue->write_count; in efx_farch_tx_write()
342 } while (tx_queue->write_count != tx_queue->insert_count); in efx_farch_tx_write()
346 if (efx_nic_may_push_tx_desc(tx_queue, old_write_count)) { in efx_farch_tx_write()
347 txd = efx_tx_desc(tx_queue, in efx_farch_tx_write()
348 old_write_count & tx_queue->ptr_mask); in efx_farch_tx_write()
349 efx_farch_push_tx_desc(tx_queue, txd); in efx_farch_tx_write()
350 ++tx_queue->pushes; in efx_farch_tx_write()
352 efx_farch_notify_tx_desc(tx_queue); in efx_farch_tx_write()
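efx_farch_tx_write drains everything queued between write_count and insert_count into hardware descriptors, then either pushes the first descriptor inline (when efx_nic_may_push_tx_desc allows it) or simply notifies the new write pointer. A standalone sketch of the same wrap-free counter loop, assuming a simplified push condition and illustrative names:

    #include <stdint.h>
    #include <stdio.h>

    #define RING_SIZE 512u
    #define PTR_MASK  (RING_SIZE - 1u)

    struct txd { uint64_t addr; uint32_t len; };

    static struct txd ring[RING_SIZE];
    static unsigned int insert_count;   /* advanced by the xmit path */
    static unsigned int write_count;    /* advanced here as descriptors reach the ring */

    /* Illustrative stand-ins for the two doorbell writes shown above. */
    static void doorbell_notify(unsigned int write_ptr) { (void)write_ptr; }
    static void doorbell_push(const struct txd *txd)    { (void)txd; }

    static void tx_write(void)
    {
        unsigned int old_write_count = write_count;

        if (write_count == insert_count)
            return;                             /* nothing new for the NIC */

        do {
            unsigned int write_ptr = write_count & PTR_MASK;
            struct txd *txd = &ring[write_ptr];

            txd->addr = 0x1000 + write_ptr;     /* fill from the queued buffer */
            txd->len  = 64;
            ++write_count;
        } while (write_count != insert_count);

        /* Push only when exactly one descriptor was added (and, in the
         * driver, only if the queue was empty); simplified stand-in for
         * efx_nic_may_push_tx_desc(). */
        if (write_count - old_write_count == 1 /* && queue was empty */)
            doorbell_push(&ring[old_write_count & PTR_MASK]);
        else
            doorbell_notify(write_count & PTR_MASK);
    }

    int main(void)
    {
        insert_count = 3;                       /* pretend three sends were queued */
        tx_write();
        printf("write_count now %u\n", write_count);
        return 0;
    }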
356 unsigned int efx_farch_tx_limit_len(struct efx_tx_queue *tx_queue, in efx_farch_tx_limit_len() argument
369 int efx_farch_tx_probe(struct efx_tx_queue *tx_queue) in efx_farch_tx_probe() argument
371 struct efx_nic *efx = tx_queue->efx; in efx_farch_tx_probe()
374 entries = tx_queue->ptr_mask + 1; in efx_farch_tx_probe()
375 return efx_alloc_special_buffer(efx, &tx_queue->txd, in efx_farch_tx_probe()
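efx_farch_tx_probe sizes the descriptor ring as ptr_mask + 1 entries, so the ring is always a power of two and the mask-based indexing above stays valid. A short sketch of that size/mask relationship; the calloc below is an ordinary allocation standing in for the DMA-able special buffer:

    #include <assert.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct txd { unsigned long long raw; };

    int main(void)
    {
        unsigned int ptr_mask = 1024 - 1;            /* analogue of tx_queue->ptr_mask */
        unsigned int entries  = ptr_mask + 1;        /* ring size, as in efx_farch_tx_probe() */

        /* entries must be a power of two for "& ptr_mask" indexing to work */
        assert((entries & (entries - 1)) == 0);

        struct txd *ring = calloc(entries, sizeof(*ring));  /* stand-in for the DMA buffer */
        if (!ring)
            return 1;
        printf("allocated %u descriptors (%zu bytes)\n",
               entries, entries * sizeof(*ring));
        free(ring);
        return 0;
    }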
379 void efx_farch_tx_init(struct efx_tx_queue *tx_queue) in efx_farch_tx_init() argument
381 int csum = tx_queue->queue & EFX_TXQ_TYPE_OFFLOAD; in efx_farch_tx_init()
382 struct efx_nic *efx = tx_queue->efx; in efx_farch_tx_init()
386 efx_init_special_buffer(efx, &tx_queue->txd); in efx_farch_tx_init()
393 FRF_AZ_TX_DESCQ_BUF_BASE_ID, tx_queue->txd.index, in efx_farch_tx_init()
395 tx_queue->channel->channel, in efx_farch_tx_init()
397 FRF_AZ_TX_DESCQ_LABEL, tx_queue->queue, in efx_farch_tx_init()
399 __ffs(tx_queue->txd.entries), in efx_farch_tx_init()
407 tx_queue->queue); in efx_farch_tx_init()
411 (tx_queue->queue & EFX_TXQ_TYPE_HIGHPRI) ? in efx_farch_tx_init()
414 efx_writeo_table(efx, &reg, FR_BZ_TX_PACE_TBL, tx_queue->queue); in efx_farch_tx_init()
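In efx_farch_tx_init the queue number doubles as a small bitmask of queue properties: the EFX_TXQ_TYPE_OFFLOAD bit selects checksum offload and EFX_TXQ_TYPE_HIGHPRI selects the high-priority pace-table setting. A standalone sketch of that flags-in-the-queue-number scheme; the bit values and queues-per-channel count are assumptions made for illustration:

    #include <stdbool.h>
    #include <stdio.h>

    /* Assumed values, for illustration only; the driver defines its own. */
    #define TXQ_TYPE_OFFLOAD 0x1    /* checksum-offload queue */
    #define TXQ_TYPE_HIGHPRI 0x2    /* high-priority (pace-table) queue */
    #define TXQ_TYPES        4u     /* queue types per channel */

    int main(void)
    {
        unsigned int queue = 4 * TXQ_TYPES + (TXQ_TYPE_OFFLOAD | TXQ_TYPE_HIGHPRI);

        bool csum    = queue & TXQ_TYPE_OFFLOAD;   /* as in efx_farch_tx_init() */
        bool highpri = queue & TXQ_TYPE_HIGHPRI;   /* selects the pace-table entry */

        printf("queue %u: channel %u, csum=%d, highpri=%d\n",
               queue, queue / TXQ_TYPES, csum, highpri);
        return 0;
    }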
417 static void efx_farch_flush_tx_queue(struct efx_tx_queue *tx_queue) in efx_farch_flush_tx_queue() argument
419 struct efx_nic *efx = tx_queue->efx; in efx_farch_flush_tx_queue()
422 WARN_ON(atomic_read(&tx_queue->flush_outstanding)); in efx_farch_flush_tx_queue()
423 atomic_set(&tx_queue->flush_outstanding, 1); in efx_farch_flush_tx_queue()
427 FRF_AZ_TX_FLUSH_DESCQ, tx_queue->queue); in efx_farch_flush_tx_queue()
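Before issuing the flush command the driver asserts that no flush is already pending on the queue and marks one outstanding; the completion paths further down clear the flag with cmpxchg so that only one path acts on it. A standalone C11 sketch of that handshake (illustrative names, plain C atomics instead of the kernel's):

    #include <assert.h>
    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_int flush_outstanding;    /* analogue of tx_queue->flush_outstanding */

    static void flush_tx_queue(void)
    {
        /* There must never be two flushes in flight for one queue. */
        assert(atomic_load(&flush_outstanding) == 0);
        atomic_store(&flush_outstanding, 1);

        /* ... write the TX flush command for this queue to the NIC here ... */
    }

    static void flush_done_event(void)
    {
        /* Only the first completion path (event or timeout fallback) wins. */
        int expected = 1;
        if (atomic_compare_exchange_strong(&flush_outstanding, &expected, 0))
            printf("flush completed\n");
    }

    int main(void)
    {
        flush_tx_queue();
        flush_done_event();
        flush_done_event();                 /* a second completion is ignored */
        return 0;
    }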
431 void efx_farch_tx_fini(struct efx_tx_queue *tx_queue) in efx_farch_tx_fini() argument
433 struct efx_nic *efx = tx_queue->efx; in efx_farch_tx_fini()
439 tx_queue->queue); in efx_farch_tx_fini()
442 efx_fini_special_buffer(efx, &tx_queue->txd); in efx_farch_tx_fini()
446 void efx_farch_tx_remove(struct efx_tx_queue *tx_queue) in efx_farch_tx_remove() argument
448 efx_free_special_buffer(tx_queue->efx, &tx_queue->txd); in efx_farch_tx_remove()
601 struct efx_tx_queue *tx_queue; in efx_check_tx_flush_complete() local
604 efx_for_each_channel_tx_queue(tx_queue, channel) { in efx_check_tx_flush_complete()
606 FR_BZ_TX_DESC_PTR_TBL, tx_queue->queue); in efx_check_tx_flush_complete()
613 tx_queue->queue); in efx_check_tx_flush_complete()
615 } else if (atomic_cmpxchg(&tx_queue->flush_outstanding, in efx_check_tx_flush_complete()
622 "the queue\n", tx_queue->queue); in efx_check_tx_flush_complete()
629 tx_queue)); in efx_check_tx_flush_complete()
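When a flush times out, efx_check_tx_flush_complete re-reads the queue's TX_DESC_PTR_TBL entry to decide whether the queue has actually drained, and uses flush_outstanding to recover from a flush that completed without its event. A hedged sketch of that reconciliation; the hardware state is reduced to two illustrative booleans rather than the real register fields:

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative snapshot of one descriptor-pointer-table entry; the
     * real code reads the hardware register and extracts equivalent bits. */
    struct descq_state {
        bool enabled;           /* queue still enabled? */
        bool flush_pending;     /* flush bit still set? */
    };

    static int flush_outstanding = 1;   /* set when the flush was issued */

    static bool check_tx_flush_complete(struct descq_state hw)
    {
        if (!hw.enabled && !hw.flush_pending) {
            /* Queue is already gone: the flush finished but its event
             * was lost, so drain the queue ourselves. */
            if (flush_outstanding) {
                flush_outstanding = 0;
                printf("flush complete without an event, drain the queue\n");
            }
            return true;
        }
        printf("queue still busy, flush genuinely incomplete\n");
        return false;
    }

    int main(void)
    {
        struct descq_state hw = { .enabled = false, .flush_pending = false };
        return check_tx_flush_complete(hw) ? 0 : 1;
    }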
645 struct efx_tx_queue *tx_queue; in efx_farch_do_flush() local
649 efx_for_each_channel_tx_queue(tx_queue, channel) { in efx_farch_do_flush()
650 efx_farch_flush_tx_queue(tx_queue); in efx_farch_do_flush()
713 struct efx_tx_queue *tx_queue; in efx_farch_fini_dmaq() local
729 efx_for_each_channel_tx_queue(tx_queue, channel) in efx_farch_fini_dmaq()
730 efx_farch_tx_fini(tx_queue); in efx_farch_fini_dmaq()
823 struct efx_tx_queue *tx_queue; in efx_farch_handle_tx_event() local
833 tx_queue = efx_channel_get_tx_queue( in efx_farch_handle_tx_event()
835 efx_xmit_done(tx_queue, tx_ev_desc_ptr); in efx_farch_handle_tx_event()
839 tx_queue = efx_channel_get_tx_queue( in efx_farch_handle_tx_event()
843 efx_farch_notify_tx_desc(tx_queue); in efx_farch_handle_tx_event()
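A TX completion event reports the last descriptor the NIC has finished with, and efx_xmit_done then retires every buffer from the queue's read position up to that point. A standalone sketch of that completion walk (illustrative names, and assuming fewer than a ring's worth of outstanding descriptors):

    #include <stdio.h>

    #define RING_SIZE 512u
    #define PTR_MASK  (RING_SIZE - 1u)

    static unsigned int read_count;            /* completed so far (free-running) */
    static unsigned int write_count = 7;       /* descriptors handed to the NIC */

    /* Process a completion event that names the last finished ring slot,
     * roughly what efx_xmit_done() is given above (names illustrative). */
    static void xmit_done(unsigned int tx_ev_desc_ptr)
    {
        unsigned int stop = (tx_ev_desc_ptr + 1) & PTR_MASK;

        while ((read_count & PTR_MASK) != stop) {
            unsigned int slot = read_count & PTR_MASK;
            /* unmap/free the buffer that lived in ring slot "slot" here */
            printf("completed slot %u\n", slot);
            ++read_count;
        }
    }

    int main(void)
    {
        xmit_done(4);                          /* NIC reports slots 0..4 done */
        printf("read_count now %u (write_count %u)\n", read_count, write_count);
        return 0;
    }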
1081 struct efx_tx_queue *tx_queue; in efx_farch_handle_tx_flush_done() local
1086 tx_queue = efx_get_tx_queue(efx, qid / EFX_TXQ_TYPES, in efx_farch_handle_tx_flush_done()
1088 if (atomic_cmpxchg(&tx_queue->flush_outstanding, 1, 0)) { in efx_farch_handle_tx_flush_done()
1089 efx_farch_magic_event(tx_queue->channel, in efx_farch_handle_tx_flush_done()
1090 EFX_CHANNEL_MAGIC_TX_DRAIN(tx_queue)); in efx_farch_handle_tx_flush_done()
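The flush-done event carries a queue id that encodes both the owning channel and the queue label, which the handler splits using EFX_TXQ_TYPES before clearing flush_outstanding and signalling the drain. A small sketch of that decoding; the queues-per-channel value is an assumption for illustration:

    #include <stdio.h>

    #define TXQ_TYPES 4u    /* assumed queues per channel, standing in for EFX_TXQ_TYPES */

    int main(void)
    {
        unsigned int qid = 18;                    /* example id from a flush-done event */
        unsigned int channel = qid / TXQ_TYPES;   /* which channel owns the queue */
        unsigned int label   = qid % TXQ_TYPES;   /* queue type/label within the channel */

        printf("qid %u -> channel %u, label %u\n", qid, channel, label);
        return 0;
    }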