Lines matching refs: rcb
255 bnad_rxq_alloc_uninit(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_alloc_uninit() argument
257 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_alloc_uninit()
267 bnad_rxq_alloc_init(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_alloc_init() argument
269 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_alloc_init()
272 bnad_rxq_alloc_uninit(bnad, rcb); in bnad_rxq_alloc_init()
274 order = get_order(rcb->rxq->buffer_size); in bnad_rxq_alloc_init()
278 if (bna_is_small_rxq(rcb->id)) { in bnad_rxq_alloc_init()
280 unmap_q->map_size = rcb->rxq->buffer_size; in bnad_rxq_alloc_init()
282 if (rcb->rxq->multi_buffer) { in bnad_rxq_alloc_init()
284 unmap_q->map_size = rcb->rxq->buffer_size; in bnad_rxq_alloc_init()
289 (rcb->rxq->buffer_size > 2048) ? in bnad_rxq_alloc_init()
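
Taken together, the bnad_rxq_alloc_init() fragments above show the unmap queue being re-derived from the rxq geometry: it is first reset via bnad_rxq_alloc_uninit(), then map_size is chosen per queue type. A condensed sketch of that branch; the value used when buffer_size exceeds 2048 is not visible in this listing and is an assumption:

	bnad_rxq_alloc_uninit(bnad, rcb);

	order = get_order(rcb->rxq->buffer_size);

	if (bna_is_small_rxq(rcb->id)) {
		/* small (header) rxq: each entry maps exactly buffer_size */
		unmap_q->map_size = rcb->rxq->buffer_size;
	} else if (rcb->rxq->multi_buffer) {
		/* multi-buffer receive: entries also map at buffer_size */
		unmap_q->map_size = rcb->rxq->buffer_size;
	} else {
		/* single-buffer page mode: large frames take a whole
		 * higher-order page, otherwise a 2048-byte slice (assumed) */
		unmap_q->map_size = (rcb->rxq->buffer_size > 2048) ?
					PAGE_SIZE << order : 2048;
	}
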
330 bnad_rxq_cleanup(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_cleanup() argument
332 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_cleanup()
335 for (i = 0; i < rcb->q_depth; i++) { in bnad_rxq_cleanup()
343 bnad_rxq_alloc_uninit(bnad, rcb); in bnad_rxq_cleanup()
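
bnad_rxq_cleanup() walks every descriptor slot and releases whatever buffer is still posted before resetting the unmap queue. A sketch of that loop, assuming per-entry helpers along the lines of bnad_rxq_cleanup_skb()/bnad_rxq_cleanup_page() and an unmap[] array on the unmap queue; neither is visible in this listing:

	for (i = 0; i < rcb->q_depth; i++) {
		struct bnad_rx_unmap *unmap = &unmap_q->unmap[i];	/* type and field assumed */

		if (BNAD_RXBUF_IS_SK_BUFF(unmap_q->type))	/* predicate assumed */
			bnad_rxq_cleanup_skb(bnad, unmap);	/* helper assumed */
		else
			bnad_rxq_cleanup_page(bnad, unmap);	/* helper assumed */
	}
	bnad_rxq_alloc_uninit(bnad, rcb);
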
347 bnad_rxq_refill_page(struct bnad *bnad, struct bna_rcb *rcb, u32 nalloc) in bnad_rxq_refill_page() argument
350 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_refill_page()
357 prod = rcb->producer_index; in bnad_rxq_refill_page()
358 q_depth = rcb->q_depth; in bnad_rxq_refill_page()
379 rcb->rxq->rxbuf_alloc_failed++; in bnad_rxq_refill_page()
388 rcb->rxq->rxbuf_map_failed++; in bnad_rxq_refill_page()
403 rxent = &((struct bna_rxq_entry *)rcb->sw_q)[prod]; in bnad_rxq_refill_page()
411 rcb->producer_index = prod; in bnad_rxq_refill_page()
413 if (likely(test_bit(BNAD_RXQ_POST_OK, &rcb->flags))) in bnad_rxq_refill_page()
414 bna_rxq_prod_indx_doorbell(rcb); in bnad_rxq_refill_page()
421 bnad_rxq_refill_skb(struct bnad *bnad, struct bna_rcb *rcb, u32 nalloc) in bnad_rxq_refill_skb() argument
424 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_refill_skb()
430 buff_sz = rcb->rxq->buffer_size; in bnad_rxq_refill_skb()
431 prod = rcb->producer_index; in bnad_rxq_refill_skb()
432 q_depth = rcb->q_depth; in bnad_rxq_refill_skb()
442 rcb->rxq->rxbuf_alloc_failed++; in bnad_rxq_refill_skb()
451 rcb->rxq->rxbuf_map_failed++; in bnad_rxq_refill_skb()
459 rxent = &((struct bna_rxq_entry *)rcb->sw_q)[prod]; in bnad_rxq_refill_skb()
467 rcb->producer_index = prod; in bnad_rxq_refill_skb()
469 if (likely(test_bit(BNAD_RXQ_POST_OK, &rcb->flags))) in bnad_rxq_refill_skb()
470 bna_rxq_prod_indx_doorbell(rcb); in bnad_rxq_refill_skb()
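
Both refill paths above, bnad_rxq_refill_page() and bnad_rxq_refill_skb(), follow the same producer-index pattern: allocate and DMA-map a buffer, write its address into the descriptor at rcb->sw_q[prod], advance prod modulo q_depth, then publish the new producer index and ring the doorbell only while BNAD_RXQ_POST_OK is set. A stripped-down sketch of that shared loop, with the allocation/mapping step collapsed into a hypothetical helper alloc_and_map(); the failure counters shown above are where the real paths record errors:

	u32 prod = rcb->producer_index;
	u32 q_depth = rcb->q_depth;
	struct bna_rxq_entry *rxent;

	while (nalloc--) {
		if (alloc_and_map(bnad, rcb, prod)) {	/* hypothetical helper: allocate a
							 * page or skb and DMA-map it; failures
							 * bump rxbuf_alloc_failed or
							 * rxbuf_map_failed and end the loop */
			break;
		}
		rxent = &((struct bna_rxq_entry *)rcb->sw_q)[prod];
		/* the new buffer's DMA address is written into *rxent here */
		BNA_QE_INDX_INC(prod, q_depth);
	}

	rcb->producer_index = prod;
	if (likely(test_bit(BNAD_RXQ_POST_OK, &rcb->flags)))
		bna_rxq_prod_indx_doorbell(rcb);
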
477 bnad_rxq_post(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_post() argument
479 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_post()
482 to_alloc = BNA_QE_FREE_CNT(rcb, rcb->q_depth); in bnad_rxq_post()
487 bnad_rxq_refill_skb(bnad, rcb, to_alloc); in bnad_rxq_post()
489 bnad_rxq_refill_page(bnad, rcb, to_alloc); in bnad_rxq_post()
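
bnad_rxq_post() is where the two refill paths are selected. The listing shows only the free-entry count and the two calls; a plausible reconstruction of the dispatch, in which the refill-threshold test and the unmap-type predicate are inferred rather than visible above:

	u32 to_alloc = BNA_QE_FREE_CNT(rcb, rcb->q_depth);

	if (!(to_alloc >> BNAD_RXQ_REFILL_THRESHOLD_SHIFT))	/* threshold assumed */
		return;

	if (BNAD_RXBUF_IS_SK_BUFF(unmap_q->type))		/* predicate assumed */
		bnad_rxq_refill_skb(bnad, rcb, to_alloc);
	else
		bnad_rxq_refill_page(bnad, rcb, to_alloc);
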
507 bnad_cq_drop_packet(struct bnad *bnad, struct bna_rcb *rcb, in bnad_cq_drop_packet() argument
514 unmap_q = rcb->unmap_q; in bnad_cq_drop_packet()
517 BNA_QE_INDX_INC(ci, rcb->q_depth); in bnad_cq_drop_packet()
529 struct bna_rcb *rcb; in bnad_cq_setup_skb_frags() local
539 rcb = bna_is_small_rxq(cmpl->rxq_id) ? ccb->rcb[1] : ccb->rcb[0]; in bnad_cq_setup_skb_frags()
540 unmap_q = rcb->unmap_q; in bnad_cq_setup_skb_frags()
541 bnad = rcb->bnad; in bnad_cq_setup_skb_frags()
542 ci = rcb->consumer_index; in bnad_cq_setup_skb_frags()
553 BNA_QE_INDX_INC(ci, rcb->q_depth); in bnad_cq_setup_skb_frags()
598 struct bna_rcb *rcb = NULL; in bnad_cq_process() local
628 rcb = ccb->rcb[1]; in bnad_cq_process()
630 rcb = ccb->rcb[0]; in bnad_cq_process()
632 unmap_q = rcb->unmap_q; in bnad_cq_process()
635 sop_ci = rcb->consumer_index; in bnad_cq_process()
691 bnad_cq_drop_packet(bnad, rcb, sop_ci, nvecs); in bnad_cq_process()
692 rcb->rxq->rx_packets_with_error++; in bnad_cq_process()
702 rcb->rxq->rx_packets++; in bnad_cq_process()
703 rcb->rxq->rx_bytes += totlen; in bnad_cq_process()
728 BNA_QE_INDX_ADD(rcb->consumer_index, nvecs, rcb->q_depth); in bnad_cq_process()
737 if (likely(test_bit(BNAD_RXQ_STARTED, &ccb->rcb[0]->flags))) in bnad_cq_process()
740 bnad_rxq_post(bnad, ccb->rcb[0]); in bnad_cq_process()
741 if (ccb->rcb[1]) in bnad_cq_process()
742 bnad_rxq_post(bnad, ccb->rcb[1]); in bnad_cq_process()
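
The bnad_cq_process() fragments outline the per-packet bookkeeping: the completion's rxq_id selects the small (rcb[1]) or large (rcb[0]) queue, sop_ci is taken from its consumer index, an errored packet is dropped via bnad_cq_drop_packet() and counted in rx_packets_with_error, a good one is accounted in rx_packets/rx_bytes, and the consumer index is advanced by the number of vectors consumed before both rcbs are replenished with bnad_rxq_post(). A compressed sketch of the accounting step; the actual error test on the completion flags is not visible in this listing and appears here as a placeholder:

	if (packet_has_error) {		/* placeholder for the real completion-flag test */
		bnad_cq_drop_packet(bnad, rcb, sop_ci, nvecs);
		rcb->rxq->rx_packets_with_error++;
	} else {
		rcb->rxq->rx_packets++;
		rcb->rxq->rx_bytes += totlen;
	}

	/* consume all vectors of this packet whether it was dropped or delivered */
	BNA_QE_INDX_ADD(rcb->consumer_index, nvecs, rcb->q_depth);
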
1171 clear_bit(BNAD_RXQ_POST_OK, &ccb->rcb[0]->flags); in bnad_cb_rx_stall()
1173 if (ccb->rcb[1]) in bnad_cb_rx_stall()
1174 clear_bit(BNAD_RXQ_POST_OK, &ccb->rcb[1]->flags); in bnad_cb_rx_stall()
1206 bnad_rxq_cleanup(bnad, rx_ctrl->ccb->rcb[0]); in bnad_rx_cleanup()
1207 if (rx_ctrl->ccb->rcb[1]) in bnad_rx_cleanup()
1208 bnad_rxq_cleanup(bnad, rx_ctrl->ccb->rcb[1]); in bnad_rx_cleanup()
1230 clear_bit(BNAD_RXQ_STARTED, &ccb->rcb[0]->flags); in bnad_cb_rx_cleanup()
1232 if (ccb->rcb[1]) in bnad_cb_rx_cleanup()
1233 clear_bit(BNAD_RXQ_STARTED, &ccb->rcb[1]->flags); in bnad_cb_rx_cleanup()
1244 struct bna_rcb *rcb; in bnad_cb_rx_post() local
1257 rcb = ccb->rcb[j]; in bnad_cb_rx_post()
1258 if (!rcb) in bnad_cb_rx_post()
1261 bnad_rxq_alloc_init(bnad, rcb); in bnad_cb_rx_post()
1262 set_bit(BNAD_RXQ_STARTED, &rcb->flags); in bnad_cb_rx_post()
1263 set_bit(BNAD_RXQ_POST_OK, &rcb->flags); in bnad_cb_rx_post()
1264 bnad_rxq_post(bnad, rcb); in bnad_cb_rx_post()
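
bnad_cb_rx_post() brings each receive queue up in a fixed order: (re)initialize the unmap queue, mark the rxq started and postable, then do the initial buffer post. A sketch of the inner loop over ccb->rcb[], assembled from the fragments above; the loop bound is assumed, since the listing only shows the index j:

	for (j = 0; j < 2; j++) {	/* ccb->rcb[] holds at most a large and a
					 * small rxq; the exact bound macro is not shown */
		rcb = ccb->rcb[j];
		if (!rcb)
			continue;

		bnad_rxq_alloc_init(bnad, rcb);
		set_bit(BNAD_RXQ_STARTED, &rcb->flags);
		set_bit(BNAD_RXQ_POST_OK, &rcb->flags);
		bnad_rxq_post(bnad, rcb);
	}
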
2402 rx_ctrl[j].ccb->rcb[0]->rxq->rx_packets; in bnad_netdev_qstats_fill()
2404 rx_ctrl[j].ccb->rcb[0]->rxq->rx_bytes; in bnad_netdev_qstats_fill()
2405 if (bnad->rx_info[i].rx_ctrl[j].ccb->rcb[1] && in bnad_netdev_qstats_fill()
2407 rcb[1]->rxq) { in bnad_netdev_qstats_fill()
2410 ccb->rcb[1]->rxq->rx_packets; in bnad_netdev_qstats_fill()
2413 ccb->rcb[1]->rxq->rx_bytes; in bnad_netdev_qstats_fill()
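
The bnad_netdev_qstats_fill() fragments aggregate per-rxq counters into the netdev statistics, summing the large rxq (rcb[0]) unconditionally and the small rxq (rcb[1]) only when both it and its rxq exist. A sketch of the accumulation for one ccb, with the destination written as a generic stats pointer since the exact target struct is not shown in this listing:

	stats->rx_packets += ccb->rcb[0]->rxq->rx_packets;	/* destination assumed */
	stats->rx_bytes   += ccb->rcb[0]->rxq->rx_bytes;
	if (ccb->rcb[1] && ccb->rcb[1]->rxq) {
		stats->rx_packets += ccb->rcb[1]->rxq->rx_packets;
		stats->rx_bytes   += ccb->rcb[1]->rxq->rx_bytes;
	}
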