Searched refs:rx_bd_num (Results 1 – 6 of 6) sorted by relevance
239  priv->rx_bd_num * sizeof(struct qtnf_pearl_rx_bd);  in pearl_alloc_bd_table()
275  writel(priv->rx_bd_num | (sizeof(struct qtnf_pearl_rx_bd)) << 16,  in pearl_alloc_bd_table()
333  ps->base.rx_bd_num * sizeof(struct qtnf_pearl_rx_bd));  in pearl_alloc_rx_buffers()
335  for (i = 0; i < ps->base.rx_bd_num; i++) {  in pearl_alloc_rx_buffers()
355  for (i = 0; i < priv->rx_bd_num; i++) {  in qtnf_pearl_free_xfer_buffers()
396  writel(ps->base.rx_bd_num, PCIE_HHBM_Q_LIMIT_REG(ps->pcie_reg_base));  in pearl_hhbm_init()
423  if (!priv->rx_bd_num || !is_power_of_2(priv->rx_bd_num)) {  in qtnf_pcie_pearl_init_xfer()
425  priv->rx_bd_num);  in qtnf_pcie_pearl_init_xfer()
429  val = priv->rx_bd_num * sizeof(dma_addr_t);  in qtnf_pcie_pearl_init_xfer()
432  priv->rx_bd_num);  in qtnf_pcie_pearl_init_xfer()
[all …]
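These hits appear to come from the Quantenna qtnfmac "Pearl" PCIe datapath: rx_bd_num is rejected unless it is a non-zero power of two, and is then used to size the RX buffer-descriptor table and related per-descriptor arrays. A minimal user-space sketch of that validate-then-allocate pattern follows; the names rx_bd and alloc_rx_bd_table are hypothetical stand-ins, and plain calloc() replaces dma_alloc_coherent().

/* Sketch only: mirrors the "!rx_bd_num || !is_power_of_2(rx_bd_num)" check
 * and the rx_bd_num * sizeof(descriptor) table sizing seen in the hits above. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct rx_bd {                      /* stand-in for struct qtnf_pearl_rx_bd */
	uint32_t addr;
	uint32_t info;
};

static int is_power_of_2(unsigned long n)
{
	return n != 0 && (n & (n - 1)) == 0;
}

static struct rx_bd *alloc_rx_bd_table(uint16_t rx_bd_num)
{
	if (!rx_bd_num || !is_power_of_2(rx_bd_num)) {
		fprintf(stderr, "rx_bd_num=%u is not a power of 2\n", rx_bd_num);
		return NULL;
	}
	/* descriptor table size = rx_bd_num * sizeof(descriptor) */
	return calloc(rx_bd_num, sizeof(struct rx_bd));
}

int main(void)
{
	struct rx_bd *tbl = alloc_rx_bd_table(256);

	printf("table %sallocated\n", tbl ? "" : "not ");
	free(tbl);
	return 0;
}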
195  priv->rx_bd_num * sizeof(struct qtnf_topaz_rx_bd) +  in topaz_alloc_bd_table()
229  vaddr = ((struct qtnf_topaz_rx_bd *)vaddr) + priv->rx_bd_num;  in topaz_alloc_bd_table()
230  paddr += priv->rx_bd_num * sizeof(struct qtnf_topaz_rx_bd);  in topaz_alloc_bd_table()
280  ts->base.rx_bd_num * sizeof(struct qtnf_topaz_rx_bd));  in topaz_alloc_rx_buffers()
282  for (i = 0; i < ts->base.rx_bd_num; i++) {  in topaz_alloc_rx_buffers()
288  ts->rx_bd_vbase[ts->base.rx_bd_num - 1].info |=  in topaz_alloc_rx_buffers()
305  for (i = 0; i < priv->rx_bd_num; i++) {  in qtnf_topaz_free_xfer_buffers()
354  qtnf_non_posted_write(priv->rx_bd_num, &bda->bda_rc_rx_bd_num);  in qtnf_pcie_topaz_init_xfer()
681  if (++r_idx >= priv->rx_bd_num)  in qtnf_topaz_rx_poll()
689  priv->rx_bd_num) > 0) {  in qtnf_topaz_rx_poll()
[all …]
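The "Topaz" hits suggest the descriptor rings are carved out of one contiguous allocation: both the virtual cursor (vaddr) and the bus-address cursor (paddr) are advanced by rx_bd_num descriptors to reach the next region. A small sketch of that cursor arithmetic, under the assumption of a two-ring layout; the names bd and layout_bd_table are hypothetical and calloc() stands in for the DMA allocator.

/* Sketch only: one contiguous block split into two rings by advancing a
 * cursor rx_bd_num descriptors at a time, as in the 229/230 hits above. */
#include <stdint.h>
#include <stdlib.h>

struct bd { uint32_t addr; uint32_t info; };

struct bd_table {
	struct bd *ring_a;
	struct bd *ring_b;
};

static int layout_bd_table(struct bd_table *t, uint16_t a_num, uint16_t b_num)
{
	struct bd *vaddr = calloc((size_t)a_num + b_num, sizeof(struct bd));

	if (!vaddr)
		return -1;

	t->ring_a = vaddr;
	/* advance the cursor past the first ring to reach the second one */
	t->ring_b = vaddr + a_num;
	return 0;
}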
46 u16 rx_bd_num; member
71   priv->rx_bd_num * sizeof(*priv->rx_skb);  in qtnf_pcie_alloc_skb_array()
340  pcie_priv->rx_bd_num = rx_bd_size_param;  in qtnf_pcie_probe()
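These two hits show the shared qtnfmac PCIe code sizing a host-side skb-pointer array from rx_bd_num and initializing rx_bd_num from a ring-size parameter at probe time. A brief sketch of that companion-array pattern, with pcie_priv fields simplified and void * standing in for struct sk_buff *:

/* Sketch only: one host-side pointer slot per RX buffer descriptor. */
#include <stdint.h>
#include <stdlib.h>

struct pcie_priv {
	uint16_t rx_bd_num;
	void   **rx_skb;      /* one entry per RX descriptor */
};

static int alloc_skb_array(struct pcie_priv *priv, uint16_t rx_bd_num)
{
	priv->rx_bd_num = rx_bd_num;   /* e.g. taken from a module parameter */
	priv->rx_skb = calloc(rx_bd_num, sizeof(*priv->rx_skb));
	return priv->rx_skb ? 0 : -1;
}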
163   for (i = 0; i < lp->rx_bd_num; i++) {  in axienet_dma_bd_release()
171   sizeof(*lp->rx_bd_v) * lp->rx_bd_num,  in axienet_dma_bd_release()
213   sizeof(*lp->rx_bd_v) * lp->rx_bd_num,  in axienet_dma_bd_init()
224   for (i = 0; i < lp->rx_bd_num; i++) {  in axienet_dma_bd_init()
227   ((i + 1) % lp->rx_bd_num);  in axienet_dma_bd_init()
275   (sizeof(*lp->rx_bd_v) * (lp->rx_bd_num - 1)));  in axienet_dma_bd_init()
755   if (++lp->rx_bd_ci >= lp->rx_bd_num)  in axienet_recv()
1201  ering->rx_pending = lp->rx_bd_num;  in axienet_ethtools_get_ringparam()
1221  lp->rx_bd_num = ering->rx_pending;  in axienet_ethtools_set_ringparam()
1556  for (i = 0; i < lp->rx_bd_num; i++) {  in axienet_dma_err_handler()
[all …]
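The Xilinx AXI Ethernet hits show the usual circular-ring bookkeeping around rx_bd_num: next-descriptor links computed modulo the ring size, a consumer index (rx_bd_ci) that wraps when it reaches rx_bd_num, and ethtool ringparam get/set reading and writing the same field. A self-contained sketch of that index arithmetic; the ring struct and helper names here are simplified stand-ins, not the axienet layout.

/* Sketch only: "(i + 1) % rx_bd_num" link chaining and "++ci >= rx_bd_num"
 * wrap-around, as in the 227 and 755 hits above. */
#include <stdint.h>
#include <stdio.h>

struct ring {
	uint32_t rx_bd_ci;   /* consumer index, like lp->rx_bd_ci */
	uint32_t rx_bd_num;  /* number of RX descriptors, like lp->rx_bd_num */
};

/* Index of the descriptor that follows i, wrapping around the ring. */
static uint32_t next_bd(const struct ring *r, uint32_t i)
{
	return (i + 1) % r->rx_bd_num;
}

/* Advance the consumer index after completing one descriptor. */
static void advance_ci(struct ring *r)
{
	if (++r->rx_bd_ci >= r->rx_bd_num)
		r->rx_bd_ci = 0;
}

int main(void)
{
	struct ring r = { .rx_bd_ci = 0, .rx_bd_num = 8 };

	for (int i = 0; i < 10; i++) {
		printf("ci=%u next=%u\n", r.rx_bd_ci, next_bd(&r, r.rx_bd_ci));
		advance_ci(&r);
	}
	return 0;
}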
454 u32 rx_bd_num; member