Lines Matching refs: iwdev
15 struct irdma_device *iwdev = to_iwdev(ibdev); in irdma_query_device() local
16 struct irdma_pci_f *rf = iwdev->rf; in irdma_query_device()
17 struct pci_dev *pcidev = iwdev->rf->pcidev; in irdma_query_device()
24 ether_addr_copy((u8 *)&props->sys_image_guid, iwdev->netdev->dev_addr); in irdma_query_device()
27 props->device_cap_flags = iwdev->device_cap_flags; in irdma_query_device()
99 struct irdma_device *iwdev = to_iwdev(ibdev); in irdma_query_port() local
100 struct net_device *netdev = iwdev->netdev; in irdma_query_port()
129 props->max_msg_sz = iwdev->rf->sc_dev.hw_attrs.max_hw_outbound_msg_size; in irdma_query_port()
151 pfn = ((uintptr_t)ucontext->iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET] + in irdma_mmap_legacy()
152 pci_resource_start(ucontext->iwdev->rf->pcidev, 0)) >> PAGE_SHIFT; in irdma_mmap_legacy()
210 ibdev_dbg(&ucontext->iwdev->ibdev, in irdma_mmap()
217 ibdev_dbg(&ucontext->iwdev->ibdev, in irdma_mmap()
222 pci_resource_start(ucontext->iwdev->rf->pcidev, 0)) >> PAGE_SHIFT; in irdma_mmap()
240 ibdev_dbg(&ucontext->iwdev->ibdev, in irdma_mmap()
256 struct irdma_device *iwdev = iwqp->iwdev; in irdma_alloc_push_page() local
260 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_alloc_push_page()
272 cqp_info->in.u.manage_push_page.cqp = &iwdev->rf->cqp.sc_cqp; in irdma_alloc_push_page()
275 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_alloc_push_page()
277 iwdev->rf->sc_dev.hw_attrs.max_hw_device_pages) { in irdma_alloc_push_page()
282 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_alloc_push_page()
297 struct irdma_device *iwdev = to_iwdev(ibdev); in irdma_alloc_ucontext() local
309 ucontext->iwdev = iwdev; in irdma_alloc_ucontext()
312 uk_attrs = &iwdev->rf->sc_dev.hw_attrs.uk_attrs; in irdma_alloc_ucontext()
319 uresp.max_qps = iwdev->rf->max_qp; in irdma_alloc_ucontext()
320 uresp.max_pds = iwdev->rf->sc_dev.hw_attrs.max_hw_pds; in irdma_alloc_ucontext()
321 uresp.wq_size = iwdev->rf->sc_dev.hw_attrs.max_qp_wr * 2; in irdma_alloc_ucontext()
327 u64 bar_off = (uintptr_t)iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET]; in irdma_alloc_ucontext()
362 ibdev_err(&iwdev->ibdev, in irdma_alloc_ucontext()
387 struct irdma_device *iwdev = to_iwdev(pd->device); in irdma_alloc_pd() local
388 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_alloc_pd()
389 struct irdma_pci_f *rf = iwdev->rf; in irdma_alloc_pd()
431 struct irdma_device *iwdev = to_iwdev(ibpd->device); in irdma_dealloc_pd() local
433 irdma_free_rsrc(iwdev->rf, iwdev->rf->allocated_pds, iwpd->sc_pd.pd_id); in irdma_dealloc_pd()
492 struct irdma_device *iwdev = ucontext->iwdev; in irdma_setup_push_mmap_entries() local
496 bar_off = (uintptr_t)iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET]; in irdma_setup_push_mmap_entries()
528 struct irdma_device *iwdev = iwqp->iwdev; in irdma_destroy_qp() local
538 irdma_cqp_qp_destroy_cmd(&iwdev->rf->sc_dev, &iwqp->sc_qp); in irdma_destroy_qp()
559 static void irdma_setup_virt_qp(struct irdma_device *iwdev, in irdma_setup_virt_qp() argument
585 static int irdma_setup_kmode_qp(struct irdma_device *iwdev, in irdma_setup_kmode_qp() argument
596 struct irdma_uk_attrs *uk_attrs = &iwdev->rf->sc_dev.hw_attrs.uk_attrs; in irdma_setup_kmode_qp()
638 mem->va = dma_alloc_coherent(iwdev->rf->hw.device, mem->size, in irdma_setup_kmode_qp()
666 struct irdma_pci_f *rf = iwqp->iwdev->rf; in irdma_cqp_create_qp_cmd()
696 struct irdma_device *iwdev = iwqp->iwdev; in irdma_roce_fill_and_set_qpctx_info() local
697 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_roce_fill_and_set_qpctx_info()
702 udp_info->snd_mss = ib_mtu_enum_to_int(ib_mtu_int_to_enum(iwdev->vsi.mtu)); in irdma_roce_fill_and_set_qpctx_info()
703 udp_info->cwnd = iwdev->roce_cwnd; in irdma_roce_fill_and_set_qpctx_info()
709 ether_addr_copy(roce_info->mac_addr, iwdev->netdev->dev_addr); in irdma_roce_fill_and_set_qpctx_info()
717 roce_info->ack_credits = iwdev->roce_ackcreds; in irdma_roce_fill_and_set_qpctx_info()
736 struct irdma_device *iwdev = iwqp->iwdev; in irdma_iw_fill_and_set_qpctx_info() local
737 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_iw_fill_and_set_qpctx_info()
741 ether_addr_copy(iwarp_info->mac_addr, iwdev->netdev->dev_addr); in irdma_iw_fill_and_set_qpctx_info()
764 struct irdma_device *iwdev) in irdma_validate_qp_attrs() argument
766 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_validate_qp_attrs()
777 if (rdma_protocol_roce(&iwdev->ibdev, 1)) { in irdma_validate_qp_attrs()
802 struct irdma_device *iwdev = to_iwdev(ibpd->device); in irdma_create_qp() local
803 struct irdma_pci_f *rf = iwdev->rf; in irdma_create_qp()
819 err_code = irdma_validate_qp_attrs(init_attr, iwdev); in irdma_create_qp()
826 init_info.vsi = &iwdev->vsi; in irdma_create_qp()
839 iwqp->iwdev = iwdev; in irdma_create_qp()
873 if (!rdma_protocol_roce(&iwdev->ibdev, 1)) in irdma_create_qp()
883 ibdev_dbg(&iwdev->ibdev, in irdma_create_qp()
904 ibdev_dbg(&iwdev->ibdev, "VERBS: no pbl info\n"); in irdma_create_qp()
909 irdma_setup_virt_qp(iwdev, iwqp, &init_info); in irdma_create_qp()
912 err_code = irdma_setup_kmode_qp(iwdev, iwqp, &init_info, init_attr); in irdma_create_qp()
916 ibdev_dbg(&iwdev->ibdev, "VERBS: setup qp failed\n"); in irdma_create_qp()
920 if (rdma_protocol_roce(&iwdev->ibdev, 1)) { in irdma_create_qp()
942 ibdev_dbg(&iwdev->ibdev, "VERBS: qp_init fail\n"); in irdma_create_qp()
950 if (rdma_protocol_roce(&iwdev->ibdev, 1)) in irdma_create_qp()
967 if (rdma_protocol_roce(&iwdev->ibdev, 1)) { in irdma_create_qp()
968 if (dev->ws_add(&iwdev->vsi, 0)) { in irdma_create_qp()
983 if (rdma_protocol_iwarp(&iwdev->ibdev, 1)) in irdma_create_qp()
994 ibdev_dbg(&iwdev->ibdev, "VERBS: copy_to_udata failed\n"); in irdma_create_qp()
1110 struct irdma_device *iwdev = iwqp->iwdev; in irdma_modify_qp_roce() local
1111 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_modify_qp_roce()
1188 if (vlan_id >= VLAN_N_VID && iwdev->dcb) in irdma_modify_qp_roce()
1214 udp_info->arp_idx = irdma_arp_table(iwdev->rf, in irdma_modify_qp_roce()
1236 irdma_add_arp(iwdev->rf, local_ip, udp_info->ipv4, in irdma_modify_qp_roce()
1242 ibdev_err(&iwdev->ibdev, in irdma_modify_qp_roce()
1255 ibdev_err(&iwdev->ibdev, in irdma_modify_qp_roce()
1276 ibdev_dbg(&iwdev->ibdev, in irdma_modify_qp_roce()
1285 ibdev_warn(&iwdev->ibdev, "modify_qp invalid for qp_id=%d, old_state=0x%x, new_state=0x%x\n", in irdma_modify_qp_roce()
1327 if (iwdev->push_mode && udata && in irdma_modify_qp_roce()
1353 irdma_hw_modify_qp(iwdev, iwqp, &info, true); in irdma_modify_qp_roce()
1391 if (irdma_hw_modify_qp(iwdev, iwqp, &info, true)) in irdma_modify_qp_roce()
1427 ibdev_dbg(&iwdev->ibdev, in irdma_modify_qp_roce()
1452 struct irdma_device *iwdev = iwqp->iwdev; in irdma_modify_qp() local
1453 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_modify_qp()
1472 ibdev_dbg(&iwdev->ibdev, in irdma_modify_qp()
1493 if (iwdev->push_mode && udata && in irdma_modify_qp()
1570 iwdev->iw_status && in irdma_modify_qp()
1606 if (irdma_hw_modify_qp(iwdev, iwqp, &info, true)) in irdma_modify_qp()
1630 spin_lock_irqsave(&iwdev->cm_core.ht_lock, flags); in irdma_modify_qp()
1634 spin_unlock_irqrestore(&iwdev->cm_core.ht_lock, flags); in irdma_modify_qp()
1643 spin_unlock_irqrestore(&iwdev->cm_core.ht_lock, flags); in irdma_modify_qp()
1665 ibdev_dbg(&iwdev->ibdev, in irdma_modify_qp()
1721 struct irdma_device *iwdev, in irdma_process_resize_list() argument
1734 queue_work(iwdev->cleanup_wq, &cq_buf->work); in irdma_process_resize_list()
1748 struct irdma_device *iwdev = to_iwdev(ib_cq->device); in irdma_destroy_cq() local
1758 irdma_process_resize_list(iwcq, iwdev, NULL); in irdma_destroy_cq()
1761 irdma_cq_wq_destroy(iwdev->rf, cq); in irdma_destroy_cq()
1762 irdma_cq_free_rsrc(iwdev->rf, iwcq); in irdma_destroy_cq()
1789 struct irdma_device *iwdev; in irdma_resize_cq() local
1796 iwdev = to_iwdev(ibcq->device); in irdma_resize_cq()
1797 rf = iwdev->rf; in irdma_resize_cq()
1938 struct irdma_device *iwdev = to_iwdev(ibdev); in irdma_create_cq() local
1939 struct irdma_pci_f *rf = iwdev->rf; in irdma_create_cq()
1974 info.vsi = &iwdev->vsi; in irdma_create_cq()
2078 ibdev_dbg(&iwdev->ibdev, "VERBS: init cq fail\n"); in irdma_create_cq()
2109 ibdev_dbg(&iwdev->ibdev, in irdma_create_cq()
2152 static void irdma_free_stag(struct irdma_device *iwdev, u32 stag) in irdma_free_stag() argument
2156 stag_idx = (stag & iwdev->rf->mr_stagmask) >> IRDMA_CQPSQ_STAG_IDX_S; in irdma_free_stag()
2157 irdma_free_rsrc(iwdev->rf, iwdev->rf->allocated_mrs, stag_idx); in irdma_free_stag()
2164 static u32 irdma_create_stag(struct irdma_device *iwdev) in irdma_create_stag() argument
2177 driver_key = random & ~iwdev->rf->mr_stagmask; in irdma_create_stag()
2178 next_stag_index = (random & iwdev->rf->mr_stagmask) >> 8; in irdma_create_stag()
2179 next_stag_index %= iwdev->rf->max_mr; in irdma_create_stag()
2181 ret = irdma_alloc_rsrc(iwdev->rf, iwdev->rf->allocated_mrs, in irdma_create_stag()
2182 iwdev->rf->max_mr, &stag_index, in irdma_create_stag()
2342 static int irdma_handle_q_mem(struct irdma_device *iwdev, in irdma_handle_q_mem() argument
2357 err = irdma_setup_pbles(iwdev->rf, iwmr, use_pbles); in irdma_handle_q_mem()
2362 irdma_free_pble(iwdev->rf->pble_rsrc, palloc); in irdma_handle_q_mem()
2411 ibdev_dbg(&iwdev->ibdev, "VERBS: MR type error\n"); in irdma_handle_q_mem()
2416 irdma_free_pble(iwdev->rf->pble_rsrc, palloc); in irdma_handle_q_mem()
2428 static int irdma_hw_alloc_mw(struct irdma_device *iwdev, struct irdma_mr *iwmr) in irdma_hw_alloc_mw() argument
2436 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_hw_alloc_mw()
2452 cqp_info->in.u.mw_alloc.dev = &iwdev->rf->sc_dev; in irdma_hw_alloc_mw()
2454 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_hw_alloc_mw()
2455 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_hw_alloc_mw()
2467 struct irdma_device *iwdev = to_iwdev(ibmw->device); in irdma_alloc_mw() local
2472 stag = irdma_create_stag(iwdev); in irdma_alloc_mw()
2479 err_code = irdma_hw_alloc_mw(iwdev, iwmr); in irdma_alloc_mw()
2481 irdma_free_stag(iwdev, stag); in irdma_alloc_mw()
2497 struct irdma_device *iwdev = to_iwdev(ibmw->device); in irdma_dealloc_mw() local
2502 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_dealloc_mw()
2514 cqp_info->in.u.dealloc_stag.dev = &iwdev->rf->sc_dev; in irdma_dealloc_mw()
2516 irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_dealloc_mw()
2517 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_dealloc_mw()
2518 irdma_free_stag(iwdev, iwmr->stag); in irdma_dealloc_mw()
2528 static int irdma_hw_alloc_stag(struct irdma_device *iwdev, in irdma_hw_alloc_stag() argument
2538 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_hw_alloc_stag()
2552 cqp_info->in.u.alloc_stag.dev = &iwdev->rf->sc_dev; in irdma_hw_alloc_stag()
2554 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_hw_alloc_stag()
2555 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_hw_alloc_stag()
2571 struct irdma_device *iwdev = to_iwdev(pd->device); in irdma_alloc_mr() local
2583 stag = irdma_create_stag(iwdev); in irdma_alloc_mr()
2599 status = irdma_get_pble(iwdev->rf->pble_rsrc, palloc, iwmr->page_cnt, in irdma_alloc_mr()
2604 err_code = irdma_hw_alloc_stag(iwdev, iwmr); in irdma_alloc_mr()
2612 irdma_free_pble(iwdev->rf->pble_rsrc, palloc); in irdma_alloc_mr()
2614 irdma_free_stag(iwdev, stag); in irdma_alloc_mr()
2665 static int irdma_hwreg_mr(struct irdma_device *iwdev, struct irdma_mr *iwmr, in irdma_hwreg_mr() argument
2677 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_hwreg_mr()
2710 cqp_info->in.u.mr_reg_non_shared.dev = &iwdev->rf->sc_dev; in irdma_hwreg_mr()
2712 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_hwreg_mr()
2713 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_hwreg_mr()
2733 struct irdma_device *iwdev = to_iwdev(pd->device); in irdma_reg_user_mr() local
2747 if (len > iwdev->rf->sc_dev.hw_attrs.max_mr_size) in irdma_reg_user_mr()
2753 ibdev_dbg(&iwdev->ibdev, in irdma_reg_user_mr()
2802 err = irdma_handle_q_mem(iwdev, &req, iwpbl, use_pbles); in irdma_reg_user_mr()
2814 if (iwdev->rf->sc_dev.hw_attrs.uk_attrs.feature_flags & IRDMA_FEATURE_CQ_RESIZE) in irdma_reg_user_mr()
2823 err = irdma_handle_q_mem(iwdev, &req, iwpbl, use_pbles); in irdma_reg_user_mr()
2837 err = irdma_setup_pbles(iwdev->rf, iwmr, use_pbles); in irdma_reg_user_mr()
2845 irdma_free_pble(iwdev->rf->pble_rsrc, palloc); in irdma_reg_user_mr()
2850 stag = irdma_create_stag(iwdev); in irdma_reg_user_mr()
2859 err = irdma_hwreg_mr(iwdev, iwmr, access); in irdma_reg_user_mr()
2861 irdma_free_stag(iwdev, stag); in irdma_reg_user_mr()
2876 irdma_free_pble(iwdev->rf->pble_rsrc, palloc); in irdma_reg_user_mr()
2894 struct irdma_device *iwdev = to_iwdev(pd->device); in irdma_reg_phys_mr() local
2911 stag = irdma_create_stag(iwdev); in irdma_reg_phys_mr()
2925 status = irdma_hwreg_mr(iwdev, iwmr, access); in irdma_reg_phys_mr()
2927 irdma_free_stag(iwdev, stag); in irdma_reg_phys_mr()
2995 struct irdma_device *iwdev = to_iwdev(ib_mr->device); in irdma_dereg_mr() local
3014 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_dereg_mr()
3029 cqp_info->in.u.dealloc_stag.dev = &iwdev->rf->sc_dev; in irdma_dereg_mr()
3031 irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_dereg_mr()
3032 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_dereg_mr()
3033 irdma_free_stag(iwdev, iwmr->stag); in irdma_dereg_mr()
3036 irdma_free_pble(iwdev->rf->pble_rsrc, palloc); in irdma_dereg_mr()
3084 dev = &iwqp->iwdev->rf->sc_dev; in irdma_post_send()
3250 ibdev_dbg(&iwqp->iwdev->ibdev, in irdma_post_send()
3309 ibdev_dbg(&iwqp->iwdev->ibdev, in irdma_post_recv()
3446 ibdev_err(&iwqp->iwdev->ibdev, in irdma_process_cqe()
3502 struct irdma_device *iwdev; in __irdma_poll_cq() local
3508 iwdev = to_iwdev(iwcq->ibcq.device); in __irdma_poll_cq()
3558 resized_bufs = irdma_process_resize_list(iwcq, iwdev, NULL); in __irdma_poll_cq()
3561 resized_bufs = irdma_process_resize_list(iwcq, iwdev, last_buf); in __irdma_poll_cq()
3568 ibdev_dbg(&iwdev->ibdev, "%s: Error polling CQ, irdma_err: %d\n", in __irdma_poll_cq()
3741 struct irdma_device *iwdev = to_iwdev(dev); in irdma_get_dev_fw_str() local
3744 irdma_fw_major_ver(&iwdev->rf->sc_dev), in irdma_get_dev_fw_str()
3745 irdma_fw_minor_ver(&iwdev->rf->sc_dev)); in irdma_get_dev_fw_str()
3778 struct irdma_device *iwdev = to_iwdev(ibdev); in irdma_get_hw_stats() local
3779 struct irdma_dev_hw_stats *hw_stats = &iwdev->vsi.pestat->hw_stats; in irdma_get_hw_stats()
3781 if (iwdev->rf->rdma_ver >= IRDMA_GEN_2) in irdma_get_hw_stats()
3782 irdma_cqp_gather_stats_cmd(&iwdev->rf->sc_dev, iwdev->vsi.pestat, true); in irdma_get_hw_stats()
3784 irdma_cqp_gather_stats_gen1(&iwdev->rf->sc_dev, iwdev->vsi.pestat); in irdma_get_hw_stats()
3801 struct irdma_device *iwdev = to_iwdev(ibdev); in irdma_query_gid() local
3804 ether_addr_copy(gid->raw, iwdev->netdev->dev_addr); in irdma_query_gid()
3859 static int irdma_mcast_cqp_op(struct irdma_device *iwdev, in irdma_mcast_cqp_op() argument
3866 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_mcast_cqp_op()
3875 cqp_info->in.u.mc_create.cqp = &iwdev->rf->cqp.sc_cqp; in irdma_mcast_cqp_op()
3876 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_mcast_cqp_op()
3877 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_mcast_cqp_op()
3926 struct irdma_device *iwdev = iwqp->iwdev; in irdma_attach_mcast() local
3927 struct irdma_pci_f *rf = iwdev->rf; in irdma_attach_mcast()
3951 ibdev_dbg(&iwdev->ibdev, in irdma_attach_mcast()
3960 ibdev_dbg(&iwdev->ibdev, in irdma_attach_mcast()
4006 mc_qht_elem->mc_grp_ctx.hmc_fcn_id = iwdev->vsi.fcn_id; in irdma_attach_mcast()
4028 ret = irdma_mcast_cqp_op(iwdev, &mc_qht_elem->mc_grp_ctx, in irdma_attach_mcast()
4031 ret = irdma_mcast_cqp_op(iwdev, &mc_qht_elem->mc_grp_ctx, in irdma_attach_mcast()
4070 struct irdma_device *iwdev = iwqp->iwdev; in irdma_detach_mcast() local
4071 struct irdma_pci_f *rf = iwdev->rf; in irdma_detach_mcast()
4094 ibdev_dbg(&iwdev->ibdev, in irdma_detach_mcast()
4104 ret = irdma_mcast_cqp_op(iwdev, &mc_qht_elem->mc_grp_ctx, in irdma_detach_mcast()
4107 ibdev_dbg(&iwdev->ibdev, in irdma_detach_mcast()
4125 ret = irdma_mcast_cqp_op(iwdev, &mc_qht_elem->mc_grp_ctx, in irdma_detach_mcast()
4128 ibdev_dbg(&iwdev->ibdev, in irdma_detach_mcast()
4153 struct irdma_device *iwdev = to_iwdev(ibah->pd->device); in irdma_create_ah() local
4154 struct irdma_pci_f *rf = iwdev->rf; in irdma_create_ah()
4175 sc_ah->ah_info.vsi = &iwdev->vsi; in irdma_create_ah()
4226 ah_info->dst_arpindex = irdma_add_arp(iwdev->rf, ah_info->dest_ip_addr, in irdma_create_ah()
4234 if (ah_info->vlan_tag >= VLAN_N_VID && iwdev->dcb) in irdma_create_ah()
4243 err = irdma_ah_cqp_op(iwdev->rf, sc_ah, IRDMA_OP_AH_CREATE, in irdma_create_ah()
4248 ibdev_dbg(&iwdev->ibdev, in irdma_create_ah()
4262 ibdev_dbg(&iwdev->ibdev, in irdma_create_ah()
4277 irdma_free_rsrc(iwdev->rf, iwdev->rf->allocated_ahs, ah_id); in irdma_create_ah()
4289 struct irdma_device *iwdev = to_iwdev(ibah->device); in irdma_destroy_ah() local
4292 irdma_ah_cqp_op(iwdev->rf, &ah->sc_ah, IRDMA_OP_AH_DESTROY, in irdma_destroy_ah()
4295 irdma_free_rsrc(iwdev->rf, iwdev->rf->allocated_ahs, in irdma_destroy_ah()
4415 static void irdma_init_roce_device(struct irdma_device *iwdev) in irdma_init_roce_device() argument
4417 iwdev->ibdev.node_type = RDMA_NODE_IB_CA; in irdma_init_roce_device()
4418 iwdev->ibdev.node_guid = irdma_mac_to_guid(iwdev->netdev); in irdma_init_roce_device()
4419 ib_set_device_ops(&iwdev->ibdev, &irdma_roce_dev_ops); in irdma_init_roce_device()
4426 static int irdma_init_iw_device(struct irdma_device *iwdev) in irdma_init_iw_device() argument
4428 struct net_device *netdev = iwdev->netdev; in irdma_init_iw_device()
4430 iwdev->ibdev.node_type = RDMA_NODE_RNIC; in irdma_init_iw_device()
4431 ether_addr_copy((u8 *)&iwdev->ibdev.node_guid, netdev->dev_addr); in irdma_init_iw_device()
4432 iwdev->ibdev.ops.iw_add_ref = irdma_qp_add_ref; in irdma_init_iw_device()
4433 iwdev->ibdev.ops.iw_rem_ref = irdma_qp_rem_ref; in irdma_init_iw_device()
4434 iwdev->ibdev.ops.iw_get_qp = irdma_get_qp; in irdma_init_iw_device()
4435 iwdev->ibdev.ops.iw_connect = irdma_connect; in irdma_init_iw_device()
4436 iwdev->ibdev.ops.iw_accept = irdma_accept; in irdma_init_iw_device()
4437 iwdev->ibdev.ops.iw_reject = irdma_reject; in irdma_init_iw_device()
4438 iwdev->ibdev.ops.iw_create_listen = irdma_create_listen; in irdma_init_iw_device()
4439 iwdev->ibdev.ops.iw_destroy_listen = irdma_destroy_listen; in irdma_init_iw_device()
4440 memcpy(iwdev->ibdev.iw_ifname, netdev->name, in irdma_init_iw_device()
4441 sizeof(iwdev->ibdev.iw_ifname)); in irdma_init_iw_device()
4442 ib_set_device_ops(&iwdev->ibdev, &irdma_iw_dev_ops); in irdma_init_iw_device()
4451 static int irdma_init_rdma_device(struct irdma_device *iwdev) in irdma_init_rdma_device() argument
4453 struct pci_dev *pcidev = iwdev->rf->pcidev; in irdma_init_rdma_device()
4456 if (iwdev->roce_mode) { in irdma_init_rdma_device()
4457 irdma_init_roce_device(iwdev); in irdma_init_rdma_device()
4459 ret = irdma_init_iw_device(iwdev); in irdma_init_rdma_device()
4463 iwdev->ibdev.phys_port_cnt = 1; in irdma_init_rdma_device()
4464 iwdev->ibdev.num_comp_vectors = iwdev->rf->ceqs_count; in irdma_init_rdma_device()
4465 iwdev->ibdev.dev.parent = &pcidev->dev; in irdma_init_rdma_device()
4466 ib_set_device_ops(&iwdev->ibdev, &irdma_dev_ops); in irdma_init_rdma_device()
4475 void irdma_port_ibevent(struct irdma_device *iwdev) in irdma_port_ibevent() argument
4479 event.device = &iwdev->ibdev; in irdma_port_ibevent()
4482 iwdev->iw_status ? IB_EVENT_PORT_ACTIVE : IB_EVENT_PORT_ERR; in irdma_port_ibevent()
4491 void irdma_ib_unregister_device(struct irdma_device *iwdev) in irdma_ib_unregister_device() argument
4493 iwdev->iw_status = 0; in irdma_ib_unregister_device()
4494 irdma_port_ibevent(iwdev); in irdma_ib_unregister_device()
4495 ib_unregister_device(&iwdev->ibdev); in irdma_ib_unregister_device()
4502 int irdma_ib_register_device(struct irdma_device *iwdev) in irdma_ib_register_device() argument
4506 ret = irdma_init_rdma_device(iwdev); in irdma_ib_register_device()
4510 ret = ib_device_set_netdev(&iwdev->ibdev, iwdev->netdev, 1); in irdma_ib_register_device()
4513 dma_set_max_seg_size(iwdev->rf->hw.device, UINT_MAX); in irdma_ib_register_device()
4514 ret = ib_register_device(&iwdev->ibdev, "irdma%d", iwdev->rf->hw.device); in irdma_ib_register_device()
4518 iwdev->iw_status = 1; in irdma_ib_register_device()
4519 irdma_port_ibevent(iwdev); in irdma_ib_register_device()
4525 ibdev_dbg(&iwdev->ibdev, "VERBS: Register RDMA device fail\n"); in irdma_ib_register_device()
4539 struct irdma_device *iwdev = to_iwdev(ibdev); in irdma_ib_dealloc_device() local
4541 irdma_rt_deinit_hw(iwdev); in irdma_ib_dealloc_device()
4542 irdma_ctrl_deinit_hw(iwdev->rf); in irdma_ib_dealloc_device()
4543 kfree(iwdev->rf); in irdma_ib_dealloc_device()
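
Taken together, the references follow one pattern: each verbs entry point recovers the per-port iwdev (via to_iwdev(), iwqp->iwdev, or ucontext->iwdev) and then walks iwdev->rf to reach the shared PCI-function state (sc_dev hardware attributes, the CQP, resource bitmaps). The following is a minimal sketch of that containment, not the driver's actual definitions: only fields that appear in the listing are shown, the layout is illustrative, and the container_of()-based accessor is the conventional kernel idiom assumed here.

/*
 * Illustrative sketch of the objects the listing keeps dereferencing.
 * Field sets are trimmed to what the matched lines actually touch.
 */
struct irdma_pci_f {                    /* one per PCI function ("rf") */
	struct pci_dev *pcidev;
	struct irdma_sc_dev sc_dev;     /* hw_attrs, hw_regs, FW version */
	struct irdma_cqp cqp;           /* control QP used for admin ops */
	/* ... allocated_pds, allocated_mrs, allocated_ahs, pble_rsrc ... */
};

struct irdma_device {                   /* one per RDMA port ("iwdev") */
	struct ib_device ibdev;         /* embedded core device */
	struct irdma_pci_f *rf;         /* shared PCI-function state */
	struct net_device *netdev;
	struct irdma_vsi vsi;
	/* ... roce_mode, push_mode, iw_status, cm_core, cleanup_wq ... */
};

/* Assumed accessor pattern behind the to_iwdev() calls in the listing. */
static inline struct irdma_device *to_iwdev(struct ib_device *ibdev)
{
	return container_of(ibdev, struct irdma_device, ibdev);
}

A call such as irdma_query_device() therefore resolves iwdev from the ib_device it is handed, then reads iwdev->rf->sc_dev.hw_attrs for capabilities, while command-issuing paths (e.g. irdma_alloc_push_page(), irdma_hwreg_mr()) go one step further to iwdev->rf->cqp.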