Lines matching refs: tpd_ring
795 adapter->tpd_ring[0].count = 1024; in atl1c_sw_init()
882 struct atl1c_tpd_ring *tpd_ring = &adapter->tpd_ring[queue]; in atl1c_clean_tx_ring() local
887 ring_count = tpd_ring->count; in atl1c_clean_tx_ring()
889 buffer_info = &tpd_ring->buffer_info[index]; in atl1c_clean_tx_ring()
896 memset(tpd_ring->desc, 0, sizeof(struct atl1c_tpd_desc) * in atl1c_clean_tx_ring()
898 atomic_set(&tpd_ring->next_to_clean, 0); in atl1c_clean_tx_ring()
899 tpd_ring->next_to_use = 0; in atl1c_clean_tx_ring()
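
atl1c_clean_tx_ring() tears down one TX queue: it releases every pending buffer, wipes the descriptor memory and rewinds both ring indices. A stand-alone sketch of that teardown (simplified types, not the kernel code itself; the driver frees buffers through atl1c_clean_buffer() and uses atomic_set() for next_to_clean):

    #include <stdlib.h>
    #include <string.h>

    struct tx_buffer { void *skb; };            /* stands in for struct atl1c_buffer */
    struct tx_ring {
        unsigned int count;                     /* descriptors in the ring */
        unsigned int next_to_use;               /* producer index */
        unsigned int next_to_clean;             /* consumer index */
        struct tx_buffer *buffer_info;          /* one bookkeeping entry per descriptor */
        void *desc;                             /* descriptor memory (DMA-coherent in the driver) */
        size_t desc_size;                       /* bytes per descriptor */
    };

    /* Free every queued buffer, clear all descriptors, rewind both indices. */
    static void clean_tx_ring(struct tx_ring *ring)
    {
        for (unsigned int i = 0; i < ring->count; i++) {
            free(ring->buffer_info[i].skb);
            ring->buffer_info[i].skb = NULL;
        }
        memset(ring->desc, 0, ring->desc_size * ring->count);
        ring->next_to_clean = 0;
        ring->next_to_use = 0;
    }
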
932 struct atl1c_tpd_ring *tpd_ring = adapter->tpd_ring; in atl1c_init_ring_ptrs() local
939 tpd_ring[i].next_to_use = 0; in atl1c_init_ring_ptrs()
940 atomic_set(&tpd_ring[i].next_to_clean, 0); in atl1c_init_ring_ptrs()
941 buffer_info = tpd_ring[i].buffer_info; in atl1c_init_ring_ptrs()
942 for (j = 0; j < tpd_ring->count; j++) in atl1c_init_ring_ptrs()
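
atl1c_init_ring_ptrs() resets only software state: for each TX queue it rewinds next_to_use/next_to_clean and marks every buffer_info entry free. (The inner loop bound at line 942 is tpd_ring->count, i.e. queue 0's count; that is safe because atl1c_setup_ring_resources() copies tpd_ring[0].count to every queue, see line 1015.) A minimal stand-alone model:

    struct tx_buffer { int state; };            /* stands in for the buffer state field */
    struct tx_ring {
        unsigned int count;
        unsigned int next_to_use;
        unsigned int next_to_clean;
        struct tx_buffer *buffer_info;
    };

    /* Reset the software view of every TX ring; no descriptor memory is touched. */
    static void init_ring_ptrs(struct tx_ring *rings, int nr_tx_queues)
    {
        for (int i = 0; i < nr_tx_queues; i++) {
            rings[i].next_to_use = 0;
            rings[i].next_to_clean = 0;          /* atomic_set() in the driver */
            for (unsigned int j = 0; j < rings[i].count; j++)
                rings[i].buffer_info[j].state = 0;   /* ATL1C_BUFFER_FREE */
        }
    }
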
976 if (adapter->tpd_ring[0].buffer_info) { in atl1c_free_ring_resources()
977 kfree(adapter->tpd_ring[0].buffer_info); in atl1c_free_ring_resources()
978 adapter->tpd_ring[0].buffer_info = NULL; in atl1c_free_ring_resources()
997 struct atl1c_tpd_ring *tpd_ring = adapter->tpd_ring; in atl1c_setup_ring_resources() local
1015 tpd_ring[i].count = tpd_ring[0].count; in atl1c_setup_ring_resources()
1017 size = sizeof(struct atl1c_buffer) * (tpd_ring->count * tqc + in atl1c_setup_ring_resources()
1019 tpd_ring->buffer_info = kzalloc(size, GFP_KERNEL); in atl1c_setup_ring_resources()
1020 if (unlikely(!tpd_ring->buffer_info)) in atl1c_setup_ring_resources()
1024 tpd_ring[i].adapter = adapter; in atl1c_setup_ring_resources()
1025 tpd_ring[i].num = i; in atl1c_setup_ring_resources()
1026 tpd_ring[i].buffer_info = (tpd_ring->buffer_info + count); in atl1c_setup_ring_resources()
1027 count += tpd_ring[i].count; in atl1c_setup_ring_resources()
1035 rfd_ring[i].buffer_info = (tpd_ring->buffer_info + count); in atl1c_setup_ring_resources()
1045 sizeof(struct atl1c_tpd_desc) * tpd_ring->count * tqc + in atl1c_setup_ring_resources()
1058 tpd_ring[0].dma = roundup(ring_header->dma, 8); in atl1c_setup_ring_resources()
1059 offset = tpd_ring[0].dma - ring_header->dma; in atl1c_setup_ring_resources()
1061 tpd_ring[i].dma = ring_header->dma + offset; in atl1c_setup_ring_resources()
1062 tpd_ring[i].desc = (u8 *)ring_header->desc + offset; in atl1c_setup_ring_resources()
1063 tpd_ring[i].size = in atl1c_setup_ring_resources()
1064 sizeof(struct atl1c_tpd_desc) * tpd_ring[i].count; in atl1c_setup_ring_resources()
1065 offset += roundup(tpd_ring[i].size, 8); in atl1c_setup_ring_resources()
1086 kfree(tpd_ring->buffer_info); in atl1c_setup_ring_resources()
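
atl1c_setup_ring_resources() allocates bookkeeping and descriptor memory in two big chunks: one kzalloc() whose atl1c_buffer entries are carved into per-TX-queue and per-RX-queue slices (lines 1017-1035), and one shared descriptor block (ring_header) from which every tpd_ring gets an 8-byte-aligned slice of dma/desc/size (lines 1058-1065). A hedged, user-space model of the descriptor carving:

    #include <stddef.h>

    #define ROUNDUP8(x) (((x) + 7ULL) & ~7ULL)   /* stands in for roundup(x, 8) */

    struct tx_ring {
        unsigned int count;                      /* descriptors in this ring */
        size_t size;                             /* bytes of descriptor memory */
        unsigned long long dma;                  /* device-visible address of the slice */
        unsigned char *desc;                     /* CPU address of the slice */
    };

    /* Carve one shared descriptor block into per-queue, 8-byte aligned slices,
     * mirroring how tpd_ring[i].dma/desc/size are assigned above. */
    static size_t carve_tx_rings(struct tx_ring *rings, int nr_tx_queues,
                                 size_t desc_sz, unsigned char *block_cpu,
                                 unsigned long long block_dma)
    {
        unsigned long long first = ROUNDUP8(block_dma);   /* tpd_ring[0].dma */
        size_t offset = (size_t)(first - block_dma);

        for (int i = 0; i < nr_tx_queues; i++) {
            rings[i].dma  = block_dma + offset;
            rings[i].desc = block_cpu + offset;
            rings[i].size = desc_sz * rings[i].count;
            offset += ROUNDUP8(rings[i].size);
        }
        return offset;                           /* where the RX descriptors would start */
    }
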
1095 struct atl1c_tpd_ring *tpd_ring = adapter->tpd_ring; in atl1c_configure_des_ring() local
1104 (u32)((tpd_ring[0].dma & AT_DMA_HI_ADDR_MASK) >> 32)); in atl1c_configure_des_ring()
1108 (u32)(tpd_ring[i].dma & AT_DMA_LO_ADDR_MASK)); in atl1c_configure_des_ring()
1111 (u32)(tpd_ring[0].count & TPD_RING_SIZE_MASK)); in atl1c_configure_des_ring()
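
atl1c_configure_des_ring() then publishes that layout to the NIC: the upper 32 bits of the descriptor DMA address are written once (every ring lives in the same shared block, line 1104), each TX queue's lower 32 bits go to its own address register (line 1108), and the ring size is programmed from tpd_ring[0].count (line 1111). A sketch with a stub register write; the register names and offsets here are illustrative placeholders, not the driver's real REG_* values:

    #include <stdint.h>

    /* Stub standing in for AT_WRITE_REG()/AT_WRITE_REGW(). */
    static void write_reg(uint32_t reg, uint32_t val) { (void)reg; (void)val; }

    #define REG_DESC_BASE_ADDR_HI 0x1500u        /* illustrative offset only */
    #define REG_TPD_RING_SIZE     0x1504u        /* illustrative offset only */

    struct tx_ring { uint64_t dma; unsigned int count; };

    static void configure_tx_rings(const struct tx_ring *rings, int nr_tx_queues,
                                   const uint32_t *tpd_addr_lo_regs)
    {
        /* High 32 bits written once: all rings share the same upper address. */
        write_reg(REG_DESC_BASE_ADDR_HI, (uint32_t)(rings[0].dma >> 32));

        for (int i = 0; i < nr_tx_queues; i++)   /* per-queue low 32 bits */
            write_reg(tpd_addr_lo_regs[i], (uint32_t)(rings[i].dma & 0xffffffffu));

        /* All TX rings share one size register; their counts are identical anyway. */
        write_reg(REG_TPD_RING_SIZE, rings[0].count & 0xffffu);
    }
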
1601 struct atl1c_tpd_ring *tpd_ring = in atl1c_clean_tx() local
1603 struct atl1c_adapter *adapter = tpd_ring->adapter; in atl1c_clean_tx()
1605 netdev_get_tx_queue(napi->dev, tpd_ring->num); in atl1c_clean_tx()
1608 u16 next_to_clean = atomic_read(&tpd_ring->next_to_clean); in atl1c_clean_tx()
1613 AT_READ_REGW(&adapter->hw, atl1c_qregs[tpd_ring->num].tpd_cons, in atl1c_clean_tx()
1617 buffer_info = &tpd_ring->buffer_info[next_to_clean]; in atl1c_clean_tx()
1623 if (++next_to_clean == tpd_ring->count) in atl1c_clean_tx()
1625 atomic_set(&tpd_ring->next_to_clean, next_to_clean); in atl1c_clean_tx()
1636 adapter->hw.intr_mask |= atl1c_qregs[tpd_ring->num].tx_isr; in atl1c_clean_tx()
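
atl1c_clean_tx() is the per-queue NAPI poll routine for TX completions: it reads the hardware consumer index from the queue's tpd_cons register (line 1613), releases every buffer between next_to_clean and that index, publishes the new next_to_clean with atomic_set() (line 1625), and, once the work is done, re-enables the queue's TX interrupt bit (line 1636). A stand-alone model of the completion walk (no NAPI budget, locking or byte accounting):

    #include <stdlib.h>

    struct tx_buffer { void *skb; };             /* stands in for struct atl1c_buffer */
    struct tx_ring {
        unsigned int count;
        unsigned int next_to_clean;
        struct tx_buffer *buffer_info;
    };

    /* Release everything the NIC has consumed, up to hw_cons (the value the
     * driver reads from atl1c_qregs[queue].tpd_cons), wrapping at the ring end. */
    static unsigned int clean_tx(struct tx_ring *ring, unsigned int hw_cons)
    {
        unsigned int ntc = ring->next_to_clean;  /* atomic_read() in the driver */
        unsigned int cleaned = 0;

        while (ntc != hw_cons) {
            free(ring->buffer_info[ntc].skb);    /* atl1c_clean_buffer() in the driver */
            ring->buffer_info[ntc].skb = NULL;
            if (++ntc == ring->count)
                ntc = 0;
            cleaned++;
        }
        ring->next_to_clean = ntc;               /* atomic_set() in the driver */
        return cleaned;                          /* the driver also wakes the netdev queue */
    }
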
1663 if (napi_schedule_prep(&adapter->tpd_ring[i].napi)) { in atl1c_intr_rx_tx()
1665 __napi_schedule(&adapter->tpd_ring[i].napi); in atl1c_intr_rx_tx()
2015 struct atl1c_tpd_ring *tpd_ring = &adapter->tpd_ring[queue]; in atl1c_tpd_avail() local
2019 next_to_clean = atomic_read(&tpd_ring->next_to_clean); in atl1c_tpd_avail()
2020 next_to_use = tpd_ring->next_to_use; in atl1c_tpd_avail()
2024 (tpd_ring->count + next_to_clean - next_to_use - 1); in atl1c_tpd_avail()
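
The expression at line 2024 is the standard producer/consumer ring arithmetic: one slot is always kept unused so that next_to_use == next_to_clean can only mean "empty", never "full". Written as a stand-alone helper:

    struct tx_ring {
        unsigned int count;
        unsigned int next_to_use;                /* producer (transmit path) */
        unsigned int next_to_clean;              /* consumer (completion path) */
    };

    /* Free descriptors left in the ring, mirroring atl1c_tpd_avail(): the -1
     * sacrifices one slot so a full ring never looks identical to an empty one. */
    static unsigned int tpd_avail(const struct tx_ring *ring)
    {
        unsigned int ntu = ring->next_to_use;
        unsigned int ntc = ring->next_to_clean;  /* atomic_read() in the driver */

        return ntc > ntu ? ntc - ntu - 1
                         : ring->count + ntc - ntu - 1;
    }
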
2035 struct atl1c_tpd_ring *tpd_ring = &adapter->tpd_ring[queue]; in atl1c_get_tpd() local
2039 next_to_use = tpd_ring->next_to_use; in atl1c_get_tpd()
2040 if (++tpd_ring->next_to_use == tpd_ring->count) in atl1c_get_tpd()
2041 tpd_ring->next_to_use = 0; in atl1c_get_tpd()
2042 tpd_desc = ATL1C_TPD_DESC(tpd_ring, next_to_use); in atl1c_get_tpd()
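
atl1c_get_tpd() hands out the descriptor at next_to_use, advances the producer index with wraparound, and clears the descriptor before it is filled in; ATL1C_TPD_DESC() is simply index arithmetic into tpd_ring->desc. A minimal model:

    #include <string.h>

    struct tpd_desc { unsigned char raw[16]; };  /* placeholder for struct atl1c_tpd_desc */
    struct tx_ring {
        unsigned int count;
        unsigned int next_to_use;
        struct tpd_desc *desc;
    };

    /* Claim the next free descriptor and advance the producer index. */
    static struct tpd_desc *get_tpd(struct tx_ring *ring)
    {
        struct tpd_desc *tpd = &ring->desc[ring->next_to_use];

        if (++ring->next_to_use == ring->count)  /* wrap at the end of the ring */
            ring->next_to_use = 0;
        memset(tpd, 0, sizeof(*tpd));            /* the driver also zeroes the new TPD */
        return tpd;
    }
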
2050 struct atl1c_tpd_ring *tpd_ring = adapter->tpd_ring; in atl1c_get_tx_buffer() local
2052 return &tpd_ring->buffer_info[tpd - in atl1c_get_tx_buffer()
2053 (struct atl1c_tpd_desc *)tpd_ring->desc]; in atl1c_get_tx_buffer()
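
atl1c_get_tx_buffer() maps a descriptor pointer back to its bookkeeping entry with plain pointer arithmetic: the descriptor and buffer_info arrays are parallel, so a descriptor's index is also the index of its buffer entry. Modeled stand-alone:

    struct tpd_desc { unsigned char raw[16]; };  /* placeholder descriptor */
    struct tx_buffer { void *skb; };
    struct tx_ring {
        struct tpd_desc *desc;                   /* descriptor array */
        struct tx_buffer *buffer_info;           /* parallel bookkeeping array */
    };

    /* tpd - desc gives the descriptor's index, which indexes buffer_info too. */
    static struct tx_buffer *get_tx_buffer(struct tx_ring *ring,
                                           struct tpd_desc *tpd)
    {
        return &ring->buffer_info[tpd - ring->desc];
    }
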
2179 struct atl1c_tpd_ring *tpd_ring = &adpt->tpd_ring[queue]; in atl1c_tx_rollback() local
2184 first_index = first_tpd - (struct atl1c_tpd_desc *)tpd_ring->desc; in atl1c_tx_rollback()
2186 while (index != tpd_ring->next_to_use) { in atl1c_tx_rollback()
2187 tpd = ATL1C_TPD_DESC(tpd_ring, index); in atl1c_tx_rollback()
2188 buffer_info = &tpd_ring->buffer_info[index]; in atl1c_tx_rollback()
2191 if (++index == tpd_ring->count) in atl1c_tx_rollback()
2194 tpd_ring->next_to_use = first_index; in atl1c_tx_rollback()
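
If DMA mapping fails while a frame is being built, atl1c_tx_rollback() walks from the frame's first descriptor up to next_to_use, releasing whatever was already mapped, and then rewinds next_to_use to that first index so the slots are reused. A model of the unwind:

    #include <string.h>

    struct tpd_desc { unsigned char raw[16]; };  /* placeholder descriptor */
    struct tx_buffer { void *mapping; };         /* stands in for the DMA-mapped fragment */
    struct tx_ring {
        unsigned int count;
        unsigned int next_to_use;
        struct tpd_desc *desc;
        struct tx_buffer *buffer_info;
    };

    /* Undo a partially built frame: release every slot from its first
     * descriptor up to next_to_use, then rewind the producer index. */
    static void tx_rollback(struct tx_ring *ring, struct tpd_desc *first_tpd)
    {
        unsigned int first_index = (unsigned int)(first_tpd - ring->desc);
        unsigned int index = first_index;

        while (index != ring->next_to_use) {
            ring->buffer_info[index].mapping = NULL;      /* driver unmaps and clears state */
            memset(&ring->desc[index], 0, sizeof(ring->desc[index]));
            if (++index == ring->count)                   /* wrap while walking forward */
                index = 0;
        }
        ring->next_to_use = first_index;
    }
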
2297 struct atl1c_tpd_ring *tpd_ring = &adapter->tpd_ring[queue]; in atl1c_tx_queue() local
2300 tpd_ring->next_to_use); in atl1c_tx_queue()
2439 napi_enable(&adapter->tpd_ring[i].napi); in atl1c_up()
2464 napi_disable(&adapter->tpd_ring[i].napi); in atl1c_down()
2730 netif_napi_add_tx(netdev, &adapter->tpd_ring[i].napi, in atl1c_probe()
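
Because the TX path is multi-queue, each tpd_ring carries its own napi_struct: atl1c_probe() registers a TX NAPI per queue with netif_napi_add_tx() (line 2730), and atl1c_up()/atl1c_down() enable and disable them (lines 2439/2464), with atl1c_clean_tx serving as the poll callback shown earlier in the listing. A condensed kernel-style sketch of that pattern; tx_queue_count is assumed here as the adapter's TX queue count, and the fragment is not compilable on its own:

    /* In atl1c_probe(): one TX NAPI context per transmit queue. */
    for (i = 0; i < adapter->tx_queue_count; i++)
        netif_napi_add_tx(netdev, &adapter->tpd_ring[i].napi, atl1c_clean_tx);

    /* In atl1c_up() / atl1c_down(): enabled and disabled alongside the RX NAPIs. */
    for (i = 0; i < adapter->tx_queue_count; i++)
        napi_enable(&adapter->tpd_ring[i].napi);

    for (i = 0; i < adapter->tx_queue_count; i++)
        napi_disable(&adapter->tpd_ring[i].napi);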