Searched refs: sw_tail (Results 1 – 10 of 10) sorted by relevance
/Linux-v5.4/drivers/crypto/cavium/zip/ |
D | zip_device.c | 59 return ((zip_dev->iq[queue].sw_head - zip_dev->iq[queue].sw_tail) * in zip_cmd_queue_consumed() 110 zip_dbg("sw_tail : %lx", zip_dev->iq[queue].sw_tail); in zip_load_instr() 128 zip_dev->iq[queue].sw_head = zip_dev->iq[queue].sw_tail; in zip_load_instr() 151 zip_dev->iq[queue].sw_head, zip_dev->iq[queue].sw_tail, in zip_load_instr() 197 zip_dev->iq[queue].sw_head, zip_dev->iq[queue].sw_tail, in zip_update_cmd_bufs()
|
D | zip_mem.c | 78 zip_dbg("Freeing cmd_qbuf 0x%lx\n", zip->iq[q].sw_tail); in zip_cmd_qbuf_free() 80 free_pages((u64)zip->iq[q].sw_tail, get_order(ZIP_CMD_QBUF_SIZE)); in zip_cmd_qbuf_free()
|
D | zip_main.h | 87 u64 *sw_tail; member
|
D | zip_main.c | 180 zip->iq[q].sw_tail = zip->iq[q].sw_head; in zip_init_hw() 198 zip->iq[q].sw_head, zip->iq[q].sw_tail, in zip_init_hw()
|
/Linux-v5.4/drivers/net/ethernet/aquantia/atlantic/ |
D | aq_ring.h | 118 unsigned int sw_tail; member 151 return (((self->sw_tail >= self->sw_head)) ? in aq_ring_avail_dx() 152 (self->size - 1) - self->sw_tail + self->sw_head : in aq_ring_avail_dx() 153 self->sw_head - self->sw_tail - 1); in aq_ring_avail_dx()
|
D | aq_ring.c | 184 self->sw_tail = 0; in aq_ring_init() 449 self->sw_tail = aq_ring_next_dx(self, self->sw_tail)) { in aq_ring_rx_fill() 450 buff = &self->buff_ring[self->sw_tail]; in aq_ring_rx_fill() 472 for (; self->sw_head != self->sw_tail; in aq_ring_rx_deinit()
|
D | aq_vec.c | 75 sw_tail_old = ring[AQ_VEC_RX_ID].sw_tail; in aq_vec_poll()
|
D | aq_nic.c | 430 unsigned int dx = ring->sw_tail; in aq_nic_map_skb() 547 for (dx = ring->sw_tail; in aq_nic_map_skb()
|
/Linux-v5.4/drivers/net/ethernet/aquantia/atlantic/hw_atl/ |
D | hw_atl_a0.c | 427 hw_atl_reg_tx_dma_desc_tail_ptr_set(self, ring->sw_tail, ring->idx); in hw_atl_a0_hw_tx_ring_tail_update() 442 buff = &ring->buff_ring[ring->sw_tail]; in hw_atl_a0_hw_ring_tx_xmit() 446 txd = (struct hw_atl_txd_s *)&ring->dx_ring[ring->sw_tail * in hw_atl_a0_hw_ring_tx_xmit() 452 buff = &ring->buff_ring[ring->sw_tail]; in hw_atl_a0_hw_ring_tx_xmit() 499 ring->sw_tail = aq_ring_next_dx(ring, ring->sw_tail); in hw_atl_a0_hw_ring_tx_xmit() 582 for (; sw_tail_old != ring->sw_tail; in hw_atl_a0_hw_ring_rx_fill() 619 for (; ring->hw_head != ring->sw_tail; in hw_atl_a0_hw_ring_rx_receive()
|
D | hw_atl_b0.c | 482 hw_atl_reg_tx_dma_desc_tail_ptr_set(self, ring->sw_tail, ring->idx); in hw_atl_b0_hw_tx_ring_tail_update() 498 buff = &ring->buff_ring[ring->sw_tail]; in hw_atl_b0_hw_ring_tx_xmit() 502 txd = (struct hw_atl_txd_s *)&ring->dx_ring[ring->sw_tail * in hw_atl_b0_hw_ring_tx_xmit() 508 buff = &ring->buff_ring[ring->sw_tail]; in hw_atl_b0_hw_ring_tx_xmit() 566 ring->sw_tail = aq_ring_next_dx(ring, ring->sw_tail); in hw_atl_b0_hw_ring_tx_xmit() 650 for (; sw_tail_old != ring->sw_tail; in hw_atl_b0_hw_ring_rx_fill() 687 for (; ring->hw_head != ring->sw_tail; in hw_atl_b0_hw_ring_rx_receive()
|