Lines Matching full:rp
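(Stand-alone sketches of the page-hash, RBR-refill, and TX-kick patterns that these matches trace follow the listing.)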

3289 static unsigned int niu_hash_rxaddr(struct rx_ring_info *rp, u64 a)  in niu_hash_rxaddr()  argument
3297 static struct page *niu_find_rxpage(struct rx_ring_info *rp, u64 addr, in niu_find_rxpage() argument
3300 unsigned int h = niu_hash_rxaddr(rp, addr); in niu_find_rxpage()
3304 pp = &rp->rxhash[h]; in niu_find_rxpage()
3317 static void niu_hash_page(struct rx_ring_info *rp, struct page *page, u64 base) in niu_hash_page() argument
3319 unsigned int h = niu_hash_rxaddr(rp, base); in niu_hash_page()
3322 niu_next_page(page) = rp->rxhash[h]; in niu_hash_page()
3323 rp->rxhash[h] = page; in niu_hash_page()
3326 static int niu_rbr_add_page(struct niu *np, struct rx_ring_info *rp, in niu_rbr_add_page() argument
3344 niu_hash_page(rp, page, addr); in niu_rbr_add_page()
3345 if (rp->rbr_blocks_per_page > 1) in niu_rbr_add_page()
3346 page_ref_add(page, rp->rbr_blocks_per_page - 1); in niu_rbr_add_page()
3348 for (i = 0; i < rp->rbr_blocks_per_page; i++) { in niu_rbr_add_page()
3349 __le32 *rbr = &rp->rbr[start_index + i]; in niu_rbr_add_page()
3352 addr += rp->rbr_block_size; in niu_rbr_add_page()
3358 static void niu_rbr_refill(struct niu *np, struct rx_ring_info *rp, gfp_t mask) in niu_rbr_refill() argument
3360 int index = rp->rbr_index; in niu_rbr_refill()
3362 rp->rbr_pending++; in niu_rbr_refill()
3363 if ((rp->rbr_pending % rp->rbr_blocks_per_page) == 0) { in niu_rbr_refill()
3364 int err = niu_rbr_add_page(np, rp, mask, index); in niu_rbr_refill()
3367 rp->rbr_pending--; in niu_rbr_refill()
3371 rp->rbr_index += rp->rbr_blocks_per_page; in niu_rbr_refill()
3372 BUG_ON(rp->rbr_index > rp->rbr_table_size); in niu_rbr_refill()
3373 if (rp->rbr_index == rp->rbr_table_size) in niu_rbr_refill()
3374 rp->rbr_index = 0; in niu_rbr_refill()
3376 if (rp->rbr_pending >= rp->rbr_kick_thresh) { in niu_rbr_refill()
3377 nw64(RBR_KICK(rp->rx_channel), rp->rbr_pending); in niu_rbr_refill()
3378 rp->rbr_pending = 0; in niu_rbr_refill()
3383 static int niu_rx_pkt_ignore(struct niu *np, struct rx_ring_info *rp) in niu_rx_pkt_ignore() argument
3385 unsigned int index = rp->rcr_index; in niu_rx_pkt_ignore()
3388 rp->rx_dropped++; in niu_rx_pkt_ignore()
3396 val = le64_to_cpup(&rp->rcr[index]); in niu_rx_pkt_ignore()
3399 page = niu_find_rxpage(rp, addr, &link); in niu_rx_pkt_ignore()
3401 rcr_size = rp->rbr_sizes[(val & RCR_ENTRY_PKTBUFSZ) >> in niu_rx_pkt_ignore()
3410 rp->rbr_refill_pending++; in niu_rx_pkt_ignore()
3413 index = NEXT_RCR(rp, index); in niu_rx_pkt_ignore()
3418 rp->rcr_index = index; in niu_rx_pkt_ignore()
3424 struct rx_ring_info *rp) in niu_process_rx_pkt() argument
3426 unsigned int index = rp->rcr_index; in niu_process_rx_pkt()
3433 return niu_rx_pkt_ignore(np, rp); in niu_process_rx_pkt()
3443 val = le64_to_cpup(&rp->rcr[index]); in niu_process_rx_pkt()
3451 page = niu_find_rxpage(rp, addr, &link); in niu_process_rx_pkt()
3453 rcr_size = rp->rbr_sizes[(val & RCR_ENTRY_PKTBUFSZ) >> in niu_process_rx_pkt()
3472 if ((page->index + rp->rbr_block_size) - rcr_size == addr) { in niu_process_rx_pkt()
3478 rp->rbr_refill_pending++; in niu_process_rx_pkt()
3482 index = NEXT_RCR(rp, index); in niu_process_rx_pkt()
3487 rp->rcr_index = index; in niu_process_rx_pkt()
3503 rp->rx_packets++; in niu_process_rx_pkt()
3504 rp->rx_bytes += skb->len; in niu_process_rx_pkt()
3507 skb_record_rx_queue(skb, rp->rx_channel); in niu_process_rx_pkt()
3513 static int niu_rbr_fill(struct niu *np, struct rx_ring_info *rp, gfp_t mask) in niu_rbr_fill() argument
3515 int blocks_per_page = rp->rbr_blocks_per_page; in niu_rbr_fill()
3516 int err, index = rp->rbr_index; in niu_rbr_fill()
3519 while (index < (rp->rbr_table_size - blocks_per_page)) { in niu_rbr_fill()
3520 err = niu_rbr_add_page(np, rp, mask, index); in niu_rbr_fill()
3527 rp->rbr_index = index; in niu_rbr_fill()
3531 static void niu_rbr_free(struct niu *np, struct rx_ring_info *rp) in niu_rbr_free() argument
3538 page = rp->rxhash[i]; in niu_rbr_free()
3554 for (i = 0; i < rp->rbr_table_size; i++) in niu_rbr_free()
3555 rp->rbr[i] = cpu_to_le32(0); in niu_rbr_free()
3556 rp->rbr_index = 0; in niu_rbr_free()
3559 static int release_tx_packet(struct niu *np, struct tx_ring_info *rp, int idx) in release_tx_packet() argument
3561 struct tx_buff_info *tb = &rp->tx_buffs[idx]; in release_tx_packet()
3570 rp->tx_packets++; in release_tx_packet()
3571 rp->tx_bytes += (((tx_flags & TXHDR_LEN) >> TXHDR_LEN_SHIFT) - in release_tx_packet()
3578 if (le64_to_cpu(rp->descr[idx]) & TX_DESC_MARK) in release_tx_packet()
3579 rp->mark_pending--; in release_tx_packet()
3583 idx = NEXT_TX(rp, idx); in release_tx_packet()
3588 tb = &rp->tx_buffs[idx]; in release_tx_packet()
3593 idx = NEXT_TX(rp, idx); in release_tx_packet()
3601 #define NIU_TX_WAKEUP_THRESH(rp) ((rp)->pending / 4) argument
3603 static void niu_tx_work(struct niu *np, struct tx_ring_info *rp) in niu_tx_work() argument
3610 index = (rp - np->tx_rings); in niu_tx_work()
3613 cs = rp->tx_cs; in niu_tx_work()
3618 pkt_cnt = (pkt_cnt - rp->last_pkt_cnt) & in niu_tx_work()
3621 rp->last_pkt_cnt = tmp; in niu_tx_work()
3623 cons = rp->cons; in niu_tx_work()
3629 cons = release_tx_packet(np, rp, cons); in niu_tx_work()
3631 rp->cons = cons; in niu_tx_work()
3636 (niu_tx_avail(rp) > NIU_TX_WAKEUP_THRESH(rp)))) { in niu_tx_work()
3639 (niu_tx_avail(rp) > NIU_TX_WAKEUP_THRESH(rp))) in niu_tx_work()
3646 struct rx_ring_info *rp, in niu_sync_rx_discard_stats() argument
3660 int rx_channel = rp->rx_channel; in niu_sync_rx_discard_stats()
3671 rp->rx_errors += misc & RXMISC_COUNT; in niu_sync_rx_discard_stats()
3686 rp->rx_dropped += wred & RED_DIS_CNT_COUNT; in niu_sync_rx_discard_stats()
3698 struct rx_ring_info *rp, int budget) in niu_rx_work() argument
3701 struct rxdma_mailbox *mbox = rp->mbox; in niu_rx_work()
3705 stat = nr64(RX_DMA_CTL_STAT(rp->rx_channel)); in niu_rx_work()
3706 qlen = nr64(RCRSTAT_A(rp->rx_channel)) & RCRSTAT_A_QLEN; in niu_rx_work()
3716 __func__, rp->rx_channel, (unsigned long long)stat, qlen); in niu_rx_work()
3721 rcr_done += niu_process_rx_pkt(napi, np, rp); in niu_rx_work()
3725 if (rp->rbr_refill_pending >= rp->rbr_kick_thresh) { in niu_rx_work()
3728 for (i = 0; i < rp->rbr_refill_pending; i++) in niu_rx_work()
3729 niu_rbr_refill(np, rp, GFP_ATOMIC); in niu_rx_work()
3730 rp->rbr_refill_pending = 0; in niu_rx_work()
3737 nw64(RX_DMA_CTL_STAT(rp->rx_channel), stat); in niu_rx_work()
3741 niu_sync_rx_discard_stats(np, rp, 0x7FFF); in niu_rx_work()
3757 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_poll_core() local
3758 if (tx_vec & (1 << rp->tx_channel)) in niu_poll_core()
3759 niu_tx_work(np, rp); in niu_poll_core()
3760 nw64(LD_IM0(LDN_TXDMA(rp->tx_channel)), 0); in niu_poll_core()
3764 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_poll_core() local
3766 if (rx_vec & (1 << rp->rx_channel)) { in niu_poll_core()
3769 this_work_done = niu_rx_work(&lp->napi, np, rp, in niu_poll_core()
3775 nw64(LD_IM0(LDN_RXDMA(rp->rx_channel)), 0); in niu_poll_core()
3796 static void niu_log_rxchan_errors(struct niu *np, struct rx_ring_info *rp, in niu_log_rxchan_errors() argument
3799 netdev_err(np->dev, "RX channel %u errors ( ", rp->rx_channel); in niu_log_rxchan_errors()
3833 static int niu_rx_error(struct niu *np, struct rx_ring_info *rp) in niu_rx_error() argument
3835 u64 stat = nr64(RX_DMA_CTL_STAT(rp->rx_channel)); in niu_rx_error()
3845 rp->rx_channel, in niu_rx_error()
3848 niu_log_rxchan_errors(np, rp, stat); in niu_rx_error()
3851 nw64(RX_DMA_CTL_STAT(rp->rx_channel), in niu_rx_error()
3857 static void niu_log_txchan_errors(struct niu *np, struct tx_ring_info *rp, in niu_log_txchan_errors() argument
3860 netdev_err(np->dev, "TX channel %u errors ( ", rp->tx_channel); in niu_log_txchan_errors()
3882 static int niu_tx_error(struct niu *np, struct tx_ring_info *rp) in niu_tx_error() argument
3886 cs = nr64(TX_CS(rp->tx_channel)); in niu_tx_error()
3887 logh = nr64(TX_RNG_ERR_LOGH(rp->tx_channel)); in niu_tx_error()
3888 logl = nr64(TX_RNG_ERR_LOGL(rp->tx_channel)); in niu_tx_error()
3891 rp->tx_channel, in niu_tx_error()
3896 niu_log_txchan_errors(np, rp, cs); in niu_tx_error()
4091 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_slowpath_interrupt() local
4093 if (rx_vec & (1 << rp->rx_channel)) { in niu_slowpath_interrupt()
4094 int r = niu_rx_error(np, rp); in niu_slowpath_interrupt()
4099 nw64(RX_DMA_CTL_STAT(rp->rx_channel), in niu_slowpath_interrupt()
4109 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_slowpath_interrupt() local
4111 if (tx_vec & (1 << rp->tx_channel)) { in niu_slowpath_interrupt()
4112 int r = niu_tx_error(np, rp); in niu_slowpath_interrupt()
4142 static void niu_rxchan_intr(struct niu *np, struct rx_ring_info *rp, in niu_rxchan_intr() argument
4145 struct rxdma_mailbox *mbox = rp->mbox; in niu_rxchan_intr()
4150 nw64(RX_DMA_CTL_STAT(rp->rx_channel), stat_write); in niu_rxchan_intr()
4156 static void niu_txchan_intr(struct niu *np, struct tx_ring_info *rp, in niu_txchan_intr() argument
4159 rp->tx_cs = nr64(TX_CS(rp->tx_channel)); in niu_txchan_intr()
4162 "%s() cs[%llx]\n", __func__, (unsigned long long)rp->tx_cs); in niu_txchan_intr()
4175 struct rx_ring_info *rp = &np->rx_rings[i]; in __niu_fastpath_interrupt() local
4176 int ldn = LDN_RXDMA(rp->rx_channel); in __niu_fastpath_interrupt()
4182 if (rx_vec & (1 << rp->rx_channel)) in __niu_fastpath_interrupt()
4183 niu_rxchan_intr(np, rp, ldn); in __niu_fastpath_interrupt()
4187 struct tx_ring_info *rp = &np->tx_rings[i]; in __niu_fastpath_interrupt() local
4188 int ldn = LDN_TXDMA(rp->tx_channel); in __niu_fastpath_interrupt()
4194 if (tx_vec & (1 << rp->tx_channel)) in __niu_fastpath_interrupt()
4195 niu_txchan_intr(np, rp, ldn); in __niu_fastpath_interrupt()
4255 static void niu_free_rx_ring_info(struct niu *np, struct rx_ring_info *rp) in niu_free_rx_ring_info() argument
4257 if (rp->mbox) { in niu_free_rx_ring_info()
4260 rp->mbox, rp->mbox_dma); in niu_free_rx_ring_info()
4261 rp->mbox = NULL; in niu_free_rx_ring_info()
4263 if (rp->rcr) { in niu_free_rx_ring_info()
4266 rp->rcr, rp->rcr_dma); in niu_free_rx_ring_info()
4267 rp->rcr = NULL; in niu_free_rx_ring_info()
4268 rp->rcr_table_size = 0; in niu_free_rx_ring_info()
4269 rp->rcr_index = 0; in niu_free_rx_ring_info()
4271 if (rp->rbr) { in niu_free_rx_ring_info()
4272 niu_rbr_free(np, rp); in niu_free_rx_ring_info()
4276 rp->rbr, rp->rbr_dma); in niu_free_rx_ring_info()
4277 rp->rbr = NULL; in niu_free_rx_ring_info()
4278 rp->rbr_table_size = 0; in niu_free_rx_ring_info()
4279 rp->rbr_index = 0; in niu_free_rx_ring_info()
4281 kfree(rp->rxhash); in niu_free_rx_ring_info()
4282 rp->rxhash = NULL; in niu_free_rx_ring_info()
4285 static void niu_free_tx_ring_info(struct niu *np, struct tx_ring_info *rp) in niu_free_tx_ring_info() argument
4287 if (rp->mbox) { in niu_free_tx_ring_info()
4290 rp->mbox, rp->mbox_dma); in niu_free_tx_ring_info()
4291 rp->mbox = NULL; in niu_free_tx_ring_info()
4293 if (rp->descr) { in niu_free_tx_ring_info()
4297 if (rp->tx_buffs[i].skb) in niu_free_tx_ring_info()
4298 (void) release_tx_packet(np, rp, i); in niu_free_tx_ring_info()
4303 rp->descr, rp->descr_dma); in niu_free_tx_ring_info()
4304 rp->descr = NULL; in niu_free_tx_ring_info()
4305 rp->pending = 0; in niu_free_tx_ring_info()
4306 rp->prod = 0; in niu_free_tx_ring_info()
4307 rp->cons = 0; in niu_free_tx_ring_info()
4308 rp->wrap_bit = 0; in niu_free_tx_ring_info()
4318 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_free_channels() local
4320 niu_free_rx_ring_info(np, rp); in niu_free_channels()
4329 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_free_channels() local
4331 niu_free_tx_ring_info(np, rp); in niu_free_channels()
4340 struct rx_ring_info *rp) in niu_alloc_rx_ring_info() argument
4344 rp->rxhash = kcalloc(MAX_RBR_RING_SIZE, sizeof(struct page *), in niu_alloc_rx_ring_info()
4346 if (!rp->rxhash) in niu_alloc_rx_ring_info()
4349 rp->mbox = np->ops->alloc_coherent(np->device, in niu_alloc_rx_ring_info()
4351 &rp->mbox_dma, GFP_KERNEL); in niu_alloc_rx_ring_info()
4352 if (!rp->mbox) in niu_alloc_rx_ring_info()
4354 if ((unsigned long)rp->mbox & (64UL - 1)) { in niu_alloc_rx_ring_info()
4356 rp->mbox); in niu_alloc_rx_ring_info()
4360 rp->rcr = np->ops->alloc_coherent(np->device, in niu_alloc_rx_ring_info()
4362 &rp->rcr_dma, GFP_KERNEL); in niu_alloc_rx_ring_info()
4363 if (!rp->rcr) in niu_alloc_rx_ring_info()
4365 if ((unsigned long)rp->rcr & (64UL - 1)) { in niu_alloc_rx_ring_info()
4367 rp->rcr); in niu_alloc_rx_ring_info()
4370 rp->rcr_table_size = MAX_RCR_RING_SIZE; in niu_alloc_rx_ring_info()
4371 rp->rcr_index = 0; in niu_alloc_rx_ring_info()
4373 rp->rbr = np->ops->alloc_coherent(np->device, in niu_alloc_rx_ring_info()
4375 &rp->rbr_dma, GFP_KERNEL); in niu_alloc_rx_ring_info()
4376 if (!rp->rbr) in niu_alloc_rx_ring_info()
4378 if ((unsigned long)rp->rbr & (64UL - 1)) { in niu_alloc_rx_ring_info()
4380 rp->rbr); in niu_alloc_rx_ring_info()
4383 rp->rbr_table_size = MAX_RBR_RING_SIZE; in niu_alloc_rx_ring_info()
4384 rp->rbr_index = 0; in niu_alloc_rx_ring_info()
4385 rp->rbr_pending = 0; in niu_alloc_rx_ring_info()
4390 static void niu_set_max_burst(struct niu *np, struct tx_ring_info *rp) in niu_set_max_burst() argument
4397 rp->max_burst = mtu + 32; in niu_set_max_burst()
4398 if (rp->max_burst > 4096) in niu_set_max_burst()
4399 rp->max_burst = 4096; in niu_set_max_burst()
4403 struct tx_ring_info *rp) in niu_alloc_tx_ring_info() argument
4407 rp->mbox = np->ops->alloc_coherent(np->device, in niu_alloc_tx_ring_info()
4409 &rp->mbox_dma, GFP_KERNEL); in niu_alloc_tx_ring_info()
4410 if (!rp->mbox) in niu_alloc_tx_ring_info()
4412 if ((unsigned long)rp->mbox & (64UL - 1)) { in niu_alloc_tx_ring_info()
4414 rp->mbox); in niu_alloc_tx_ring_info()
4418 rp->descr = np->ops->alloc_coherent(np->device, in niu_alloc_tx_ring_info()
4420 &rp->descr_dma, GFP_KERNEL); in niu_alloc_tx_ring_info()
4421 if (!rp->descr) in niu_alloc_tx_ring_info()
4423 if ((unsigned long)rp->descr & (64UL - 1)) { in niu_alloc_tx_ring_info()
4425 rp->descr); in niu_alloc_tx_ring_info()
4429 rp->pending = MAX_TX_RING_SIZE; in niu_alloc_tx_ring_info()
4430 rp->prod = 0; in niu_alloc_tx_ring_info()
4431 rp->cons = 0; in niu_alloc_tx_ring_info()
4432 rp->wrap_bit = 0; in niu_alloc_tx_ring_info()
4435 rp->mark_freq = rp->pending / 4; in niu_alloc_tx_ring_info()
4437 niu_set_max_burst(np, rp); in niu_alloc_tx_ring_info()
4442 static void niu_size_rbr(struct niu *np, struct rx_ring_info *rp) in niu_size_rbr() argument
4448 rp->rbr_block_size = 1 << bss; in niu_size_rbr()
4449 rp->rbr_blocks_per_page = 1 << (PAGE_SHIFT-bss); in niu_size_rbr()
4451 rp->rbr_sizes[0] = 256; in niu_size_rbr()
4452 rp->rbr_sizes[1] = 1024; in niu_size_rbr()
4456 rp->rbr_sizes[2] = 4096; in niu_size_rbr()
4460 rp->rbr_sizes[2] = 8192; in niu_size_rbr()
4464 rp->rbr_sizes[2] = 2048; in niu_size_rbr()
4466 rp->rbr_sizes[3] = rp->rbr_block_size; in niu_size_rbr()
4501 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_alloc_channels() local
4503 rp->np = np; in niu_alloc_channels()
4504 rp->rx_channel = first_rx_channel + i; in niu_alloc_channels()
4506 err = niu_alloc_rx_ring_info(np, rp); in niu_alloc_channels()
4510 niu_size_rbr(np, rp); in niu_alloc_channels()
4513 rp->nonsyn_window = 64; in niu_alloc_channels()
4514 rp->nonsyn_threshold = rp->rcr_table_size - 64; in niu_alloc_channels()
4515 rp->syn_window = 64; in niu_alloc_channels()
4516 rp->syn_threshold = rp->rcr_table_size - 64; in niu_alloc_channels()
4517 rp->rcr_pkt_threshold = 16; in niu_alloc_channels()
4518 rp->rcr_timeout = 8; in niu_alloc_channels()
4519 rp->rbr_kick_thresh = RBR_REFILL_MIN; in niu_alloc_channels()
4520 if (rp->rbr_kick_thresh < rp->rbr_blocks_per_page) in niu_alloc_channels()
4521 rp->rbr_kick_thresh = rp->rbr_blocks_per_page; in niu_alloc_channels()
4523 err = niu_rbr_fill(np, rp, GFP_KERNEL); in niu_alloc_channels()
4541 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_alloc_channels() local
4543 rp->np = np; in niu_alloc_channels()
4544 rp->tx_channel = first_tx_channel + i; in niu_alloc_channels()
4546 err = niu_alloc_tx_ring_info(np, rp); in niu_alloc_channels()
4672 static int niu_init_one_tx_channel(struct niu *np, struct tx_ring_info *rp) in niu_init_one_tx_channel() argument
4674 int err, channel = rp->tx_channel; in niu_init_one_tx_channel()
4689 nw64(TXC_DMA_MAX(channel), rp->max_burst); in niu_init_one_tx_channel()
4692 if (rp->descr_dma & ~(TX_RNG_CFIG_STADDR_BASE | in niu_init_one_tx_channel()
4695 channel, (unsigned long long)rp->descr_dma); in niu_init_one_tx_channel()
4700 * blocks. rp->pending is the number of TX descriptors in in niu_init_one_tx_channel()
4704 ring_len = (rp->pending / 8); in niu_init_one_tx_channel()
4707 rp->descr_dma); in niu_init_one_tx_channel()
4710 if (((rp->mbox_dma >> 32) & ~TXDMA_MBH_MBADDR) || in niu_init_one_tx_channel()
4711 ((u32)rp->mbox_dma & ~TXDMA_MBL_MBADDR)) { in niu_init_one_tx_channel()
4713 channel, (unsigned long long)rp->mbox_dma); in niu_init_one_tx_channel()
4716 nw64(TXDMA_MBH(channel), rp->mbox_dma >> 32); in niu_init_one_tx_channel()
4717 nw64(TXDMA_MBL(channel), rp->mbox_dma & TXDMA_MBL_MBADDR); in niu_init_one_tx_channel()
4721 rp->last_pkt_cnt = 0; in niu_init_one_tx_channel()
4812 static void niu_rx_channel_wred_init(struct niu *np, struct rx_ring_info *rp) in niu_rx_channel_wred_init() argument
4816 val = (((u64)rp->nonsyn_window << RDC_RED_PARA_WIN_SHIFT) | in niu_rx_channel_wred_init()
4817 ((u64)rp->nonsyn_threshold << RDC_RED_PARA_THRE_SHIFT) | in niu_rx_channel_wred_init()
4818 ((u64)rp->syn_window << RDC_RED_PARA_WIN_SYN_SHIFT) | in niu_rx_channel_wred_init()
4819 ((u64)rp->syn_threshold << RDC_RED_PARA_THRE_SYN_SHIFT)); in niu_rx_channel_wred_init()
4820 nw64(RDC_RED_PARA(rp->rx_channel), val); in niu_rx_channel_wred_init()
4823 static int niu_compute_rbr_cfig_b(struct rx_ring_info *rp, u64 *ret) in niu_compute_rbr_cfig_b() argument
4828 switch (rp->rbr_block_size) { in niu_compute_rbr_cfig_b()
4845 switch (rp->rbr_sizes[2]) { in niu_compute_rbr_cfig_b()
4863 switch (rp->rbr_sizes[1]) { in niu_compute_rbr_cfig_b()
4881 switch (rp->rbr_sizes[0]) { in niu_compute_rbr_cfig_b()
4925 static int niu_init_one_rx_channel(struct niu *np, struct rx_ring_info *rp) in niu_init_one_rx_channel() argument
4927 int err, channel = rp->rx_channel; in niu_init_one_rx_channel()
4938 niu_rx_channel_wred_init(np, rp); in niu_init_one_rx_channel()
4946 nw64(RXDMA_CFIG1(channel), rp->mbox_dma >> 32); in niu_init_one_rx_channel()
4948 ((rp->mbox_dma & RXDMA_CFIG2_MBADDR_L) | in niu_init_one_rx_channel()
4951 ((u64)rp->rbr_table_size << RBR_CFIG_A_LEN_SHIFT) | in niu_init_one_rx_channel()
4952 (rp->rbr_dma & (RBR_CFIG_A_STADDR_BASE | RBR_CFIG_A_STADDR))); in niu_init_one_rx_channel()
4953 err = niu_compute_rbr_cfig_b(rp, &val); in niu_init_one_rx_channel()
4958 ((u64)rp->rcr_table_size << RCRCFIG_A_LEN_SHIFT) | in niu_init_one_rx_channel()
4959 (rp->rcr_dma & (RCRCFIG_A_STADDR_BASE | RCRCFIG_A_STADDR))); in niu_init_one_rx_channel()
4961 ((u64)rp->rcr_pkt_threshold << RCRCFIG_B_PTHRES_SHIFT) | in niu_init_one_rx_channel()
4963 ((u64)rp->rcr_timeout << RCRCFIG_B_TIMEOUT_SHIFT)); in niu_init_one_rx_channel()
4969 nw64(RBR_KICK(channel), rp->rbr_index); in niu_init_one_rx_channel()
4999 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_init_rx_channels() local
5001 err = niu_init_one_rx_channel(np, rp); in niu_init_rx_channels()
5838 static void niu_stop_one_tx_channel(struct niu *np, struct tx_ring_info *rp) in niu_stop_one_tx_channel() argument
5840 (void) niu_tx_channel_stop(np, rp->tx_channel); in niu_stop_one_tx_channel()
5848 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_stop_tx_channels() local
5850 niu_stop_one_tx_channel(np, rp); in niu_stop_tx_channels()
5854 static void niu_reset_one_tx_channel(struct niu *np, struct tx_ring_info *rp) in niu_reset_one_tx_channel() argument
5856 (void) niu_tx_channel_reset(np, rp->tx_channel); in niu_reset_one_tx_channel()
5864 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_reset_tx_channels() local
5866 niu_reset_one_tx_channel(np, rp); in niu_reset_tx_channels()
5870 static void niu_stop_one_rx_channel(struct niu *np, struct rx_ring_info *rp) in niu_stop_one_rx_channel() argument
5872 (void) niu_enable_rx_channel(np, rp->rx_channel, 0); in niu_stop_one_rx_channel()
5880 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_stop_rx_channels() local
5882 niu_stop_one_rx_channel(np, rp); in niu_stop_rx_channels()
5886 static void niu_reset_one_rx_channel(struct niu *np, struct rx_ring_info *rp) in niu_reset_one_rx_channel() argument
5888 int channel = rp->rx_channel; in niu_reset_one_rx_channel()
5901 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_reset_rx_channels() local
5903 niu_reset_one_rx_channel(np, rp); in niu_reset_rx_channels()
5947 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_init_hw() local
5949 err = niu_init_one_tx_channel(np, rp); in niu_init_hw()
6250 struct rx_ring_info *rp = &rx_rings[i]; in niu_get_rx_stats() local
6252 niu_sync_rx_discard_stats(np, rp, 0); in niu_get_rx_stats()
6254 pkts += rp->rx_packets; in niu_get_rx_stats()
6255 bytes += rp->rx_bytes; in niu_get_rx_stats()
6256 dropped += rp->rx_dropped; in niu_get_rx_stats()
6257 errors += rp->rx_errors; in niu_get_rx_stats()
6281 struct tx_ring_info *rp = &tx_rings[i]; in niu_get_tx_stats() local
6283 pkts += rp->tx_packets; in niu_get_tx_stats()
6284 bytes += rp->tx_bytes; in niu_get_tx_stats()
6285 errors += rp->tx_errors; in niu_get_tx_stats()
6455 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_reset_buffers() local
6460 page = rp->rxhash[j]; in niu_reset_buffers()
6465 rp->rbr[k++] = cpu_to_le32(base); in niu_reset_buffers()
6470 err = niu_rbr_add_page(np, rp, GFP_ATOMIC, k); in niu_reset_buffers()
6475 rp->rbr_index = rp->rbr_table_size - 1; in niu_reset_buffers()
6476 rp->rcr_index = 0; in niu_reset_buffers()
6477 rp->rbr_pending = 0; in niu_reset_buffers()
6478 rp->rbr_refill_pending = 0; in niu_reset_buffers()
6483 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_reset_buffers() local
6486 if (rp->tx_buffs[j].skb) in niu_reset_buffers()
6487 (void) release_tx_packet(np, rp, j); in niu_reset_buffers()
6490 rp->pending = MAX_TX_RING_SIZE; in niu_reset_buffers()
6491 rp->prod = 0; in niu_reset_buffers()
6492 rp->cons = 0; in niu_reset_buffers()
6493 rp->wrap_bit = 0; in niu_reset_buffers()
6546 static void niu_set_txd(struct tx_ring_info *rp, int index, in niu_set_txd() argument
6550 __le64 *desc = &rp->descr[index]; in niu_set_txd()
6629 struct tx_ring_info *rp; in niu_start_xmit() local
6637 rp = &np->tx_rings[i]; in niu_start_xmit()
6640 if (niu_tx_avail(rp) <= (skb_shinfo(skb)->nr_frags + 1)) { in niu_start_xmit()
6643 rp->tx_errors++; in niu_start_xmit()
6676 prod = rp->prod; in niu_start_xmit()
6678 rp->tx_buffs[prod].skb = skb; in niu_start_xmit()
6679 rp->tx_buffs[prod].mapping = mapping; in niu_start_xmit()
6682 if (++rp->mark_counter == rp->mark_freq) { in niu_start_xmit()
6683 rp->mark_counter = 0; in niu_start_xmit()
6685 rp->mark_pending++; in niu_start_xmit()
6701 niu_set_txd(rp, prod, mapping, this_len, mrk, nfg); in niu_start_xmit()
6704 prod = NEXT_TX(rp, prod); in niu_start_xmit()
6717 rp->tx_buffs[prod].skb = NULL; in niu_start_xmit()
6718 rp->tx_buffs[prod].mapping = mapping; in niu_start_xmit()
6720 niu_set_txd(rp, prod, mapping, len, 0, 0); in niu_start_xmit()
6722 prod = NEXT_TX(rp, prod); in niu_start_xmit()
6725 if (prod < rp->prod) in niu_start_xmit()
6726 rp->wrap_bit ^= TX_RING_KICK_WRAP; in niu_start_xmit()
6727 rp->prod = prod; in niu_start_xmit()
6729 nw64(TX_RING_KICK(rp->tx_channel), rp->wrap_bit | (prod << 3)); in niu_start_xmit()
6731 if (unlikely(niu_tx_avail(rp) <= (MAX_SKB_FRAGS + 1))) { in niu_start_xmit()
6733 if (niu_tx_avail(rp) > NIU_TX_WAKEUP_THRESH(rp)) in niu_start_xmit()
6741 rp->tx_errors++; in niu_start_xmit()
7808 struct rx_ring_info *rp = &np->rx_rings[i]; in niu_get_ethtool_stats() local
7810 niu_sync_rx_discard_stats(np, rp, 0); in niu_get_ethtool_stats()
7812 data[0] = rp->rx_channel; in niu_get_ethtool_stats()
7813 data[1] = rp->rx_packets; in niu_get_ethtool_stats()
7814 data[2] = rp->rx_bytes; in niu_get_ethtool_stats()
7815 data[3] = rp->rx_dropped; in niu_get_ethtool_stats()
7816 data[4] = rp->rx_errors; in niu_get_ethtool_stats()
7820 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_get_ethtool_stats() local
7822 data[0] = rp->tx_channel; in niu_get_ethtool_stats()
7823 data[1] = rp->tx_packets; in niu_get_ethtool_stats()
7824 data[2] = rp->tx_bytes; in niu_get_ethtool_stats()
7825 data[3] = rp->tx_errors; in niu_get_ethtool_stats()
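
The niu_hash_rxaddr()/niu_find_rxpage()/niu_hash_page() matches at the top of the listing show receive pages being pushed onto rp->rxhash[] chains keyed by their DMA address and walked again later. Below is a minimal stand-alone model of that chained insert/lookup, assuming a 4 KB-aligned block base; the hash function body, the MODEL_* constants, struct model_page and main() are illustrative stand-ins, not the driver's code.

#include <stdint.h>
#include <stdio.h>

#define MODEL_HASH_SIZE   16U   /* assumed bucket count, not the driver's */
#define MODEL_BLOCK_SHIFT 12    /* assumed 4 KB-aligned page base */

struct model_page {
        uint64_t base;                  /* DMA base address of the page */
        struct model_page *next;        /* stands in for niu_next_page(page) */
};

static unsigned int hash_rxaddr(uint64_t addr)
{
        /* Assumed hash: fold the block-aligned address into the table. */
        return (unsigned int)((addr >> MODEL_BLOCK_SHIFT) % MODEL_HASH_SIZE);
}

/* Mirrors the insert in niu_hash_page(): push the page onto its bucket. */
static void hash_page(struct model_page **rxhash, struct model_page *page)
{
        unsigned int h = hash_rxaddr(page->base);

        page->next = rxhash[h];
        rxhash[h] = page;
}

/* Mirrors the walk in niu_find_rxpage(): find the page owning an address. */
static struct model_page *find_rxpage(struct model_page **rxhash, uint64_t addr)
{
        unsigned int h = hash_rxaddr(addr);
        uint64_t base = addr & ~((1ULL << MODEL_BLOCK_SHIFT) - 1);
        struct model_page *p;

        for (p = rxhash[h]; p; p = p->next)
                if (p->base == base)
                        return p;
        return NULL;
}

int main(void)
{
        struct model_page *rxhash[MODEL_HASH_SIZE] = { NULL };
        struct model_page pages[4];

        for (int i = 0; i < 4; i++) {
                pages[i].base = 0x100000ULL + (uint64_t)i * 4096;
                hash_page(rxhash, &pages[i]);
        }

        /* An RCR entry points somewhere inside the second page. */
        struct model_page *hit = find_rxpage(rxhash, 0x100000ULL + 4096 + 200);

        printf("found page with base 0x%llx\n",
               hit ? (unsigned long long)hit->base : 0ULL);
        return 0;
}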
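
The niu_rbr_refill() matches trace the receive-block-ring (RBR) bookkeeping: rbr_pending counts blocks queued since the last kick, a fresh page is added once pending reaches a multiple of rbr_blocks_per_page, rbr_index wraps at rbr_table_size, and the channel is kicked when pending reaches rbr_kick_thresh. The sketch below models only that bookkeeping in user space; struct rx_ring_model, kick_hw() and the initializer values are illustrative, and the allocation-failure path (which decrements rbr_pending) is left out.

#include <assert.h>
#include <stdio.h>

struct rx_ring_model {
        unsigned int rbr_index;
        unsigned int rbr_pending;
        unsigned int rbr_blocks_per_page;
        unsigned int rbr_table_size;
        unsigned int rbr_kick_thresh;
};

/* Stand-in for nw64(RBR_KICK(rp->rx_channel), rp->rbr_pending). */
static void kick_hw(unsigned int pending)
{
        printf("kick hardware with %u new blocks\n", pending);
}

static void model_rbr_refill(struct rx_ring_model *rp)
{
        rp->rbr_pending++;

        /* A page is added only once a full page's worth of blocks is pending. */
        if ((rp->rbr_pending % rp->rbr_blocks_per_page) == 0) {
                rp->rbr_index += rp->rbr_blocks_per_page;
                assert(rp->rbr_index <= rp->rbr_table_size);
                if (rp->rbr_index == rp->rbr_table_size)
                        rp->rbr_index = 0;

                if (rp->rbr_pending >= rp->rbr_kick_thresh) {
                        kick_hw(rp->rbr_pending);
                        rp->rbr_pending = 0;
                }
        }
}

int main(void)
{
        struct rx_ring_model rp = {
                .rbr_blocks_per_page = 4,       /* illustrative values only */
                .rbr_table_size = 16,
                .rbr_kick_thresh = 8,
        };

        for (int i = 0; i < 32; i++)
                model_rbr_refill(&rp);

        printf("final rbr_index=%u rbr_pending=%u\n",
               rp.rbr_index, rp.rbr_pending);
        return 0;
}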
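
The NIU_TX_WAKEUP_THRESH and niu_start_xmit() matches show the transmit producer index advancing with NEXT_TX(), the wrap bit toggling whenever the index wraps past the end of the ring, and wrap_bit | (prod << 3) being written to the kick register. The stand-alone model below covers only that arithmetic; the ring size, the wrap-bit value, the modulo form of next_tx() and model_tx_kick() itself are assumptions for illustration.

#include <stdint.h>
#include <stdio.h>

#define MODEL_TX_RING_SIZE 256U              /* stand-in for MAX_TX_RING_SIZE */
#define MODEL_KICK_WRAP    (1ULL << 19)      /* stand-in for TX_RING_KICK_WRAP */

struct tx_ring_model {
        unsigned int prod;
        unsigned int pending;   /* total descriptors in the ring */
        uint64_t wrap_bit;
};

/* Matches the "(rp)->pending / 4" wake-up threshold macro in the listing. */
static unsigned int wakeup_thresh(const struct tx_ring_model *rp)
{
        return rp->pending / 4;
}

/* Assumed definition of NEXT_TX(): advance modulo the ring size. */
static unsigned int next_tx(const struct tx_ring_model *rp, unsigned int idx)
{
        return (idx + 1) % rp->pending;
}

static void model_tx_kick(struct tx_ring_model *rp, unsigned int ndesc)
{
        unsigned int prod = rp->prod;

        for (unsigned int i = 0; i < ndesc; i++)
                prod = next_tx(rp, prod);

        if (prod < rp->prod)                 /* producer wrapped past the end */
                rp->wrap_bit ^= MODEL_KICK_WRAP;
        rp->prod = prod;

        /* Stand-in for nw64(TX_RING_KICK(channel), wrap_bit | (prod << 3)). */
        printf("kick = 0x%llx\n",
               (unsigned long long)(rp->wrap_bit | ((uint64_t)prod << 3)));
}

int main(void)
{
        struct tx_ring_model rp = { .pending = MODEL_TX_RING_SIZE };

        for (int i = 0; i < 5; i++)
                model_tx_kick(&rp, 100);     /* queue 100 descriptors per call */

        printf("wakeup threshold = %u descriptors\n", wakeup_thresh(&rp));
        return 0;
}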