Searched refs:sc_dev (Results 1 – 6 of 6) sorted by relevance
/Linux-v5.15/drivers/infiniband/hw/irdma/
hw.c
    75  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_puda_ce_handler()
   104  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_process_ceq()
   199  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_process_aeq()
   431  irdma_ena_intr(&rf->sc_dev, rf->iw_msixtbl[0].idx);  in irdma_dpc()
   444  irdma_ena_intr(&rf->sc_dev, iwceq->msix_idx);  in irdma_ceq_dpc()
   527  ibdev_err(to_ibdev(&iwceq->rf->sc_dev), "expected irq = %d received irq = %d\n",  in irdma_ceq_handler()
   545  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_destroy_irq()
   563  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_destroy_cqp()
   605  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_destroy_aeq()
   609  rf->sc_dev.irq_ops->irdma_cfg_aeq(&rf->sc_dev, rf->iw_msixtbl->idx, false);  in irdma_destroy_aeq()
   [all …]
|
verbs.c
    18  struct irdma_hw_attrs *hw_attrs = &rf->sc_dev.hw_attrs;  in irdma_query_device()
    25  props->fw_ver = (u64)irdma_fw_major_ver(&rf->sc_dev) << 32 |  in irdma_query_device()
    26  irdma_fw_minor_ver(&rf->sc_dev);  in irdma_query_device()
   129  props->max_msg_sz = iwdev->rf->sc_dev.hw_attrs.max_hw_outbound_msg_size;  in irdma_query_port()
   151  pfn = ((uintptr_t)ucontext->iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET] +  in irdma_mmap_legacy()
   277  iwdev->rf->sc_dev.hw_attrs.max_hw_device_pages) {  in irdma_alloc_push_page()
   312  uk_attrs = &iwdev->rf->sc_dev.hw_attrs.uk_attrs;  in irdma_alloc_ucontext()
   320  uresp.max_pds = iwdev->rf->sc_dev.hw_attrs.max_hw_pds;  in irdma_alloc_ucontext()
   321  uresp.wq_size = iwdev->rf->sc_dev.hw_attrs.max_qp_wr * 2;  in irdma_alloc_ucontext()
   327  u64 bar_off = (uintptr_t)iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET];  in irdma_alloc_ucontext()
   [all …]
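The verbs.c hit at line 25 is worth a closer look: irdma_query_device() reports the firmware version as a single 64-bit fw_ver value, with the major number in the upper 32 bits and the minor number in the lower 32. Below is a minimal userspace sketch of that packing; the version numbers are made up, and the driver's accessors irdma_fw_major_ver()/irdma_fw_minor_ver() are replaced by plain variables.

```c
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Hypothetical firmware version numbers, for illustration only. */
	uint32_t fw_major = 1;
	uint32_t fw_minor = 57;

	/* Same packing as irdma_query_device(): major in bits 63..32, minor in bits 31..0. */
	uint64_t fw_ver = (uint64_t)fw_major << 32 | fw_minor;

	printf("fw_ver = 0x%016" PRIx64 " (major %u, minor %u)\n",
	       fw_ver,
	       (uint32_t)(fw_ver >> 32),
	       (uint32_t)(fw_ver & 0xffffffff));
	return 0;
}
```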
|
main.c
    94  irdma_log_invalid_mtu(l2params.mtu, &iwdev->rf->sc_dev);  in irdma_iidc_event_handler()
   120  pe_criterr = readl(iwdev->rf->sc_dev.hw_regs[IRDMA_GLPE_CRITERR]);  in irdma_iidc_event_handler()
|
main.h
   296  struct irdma_sc_dev sc_dev;  member
   404  return container_of(dev, struct irdma_pci_f, sc_dev);  in dev_to_rf()
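The main.h hits show how sc_dev fits into the driver's layering: struct irdma_sc_dev is embedded by value in struct irdma_pci_f (line 296), so dev_to_rf() (line 404) can recover the containing irdma_pci_f from a bare sc_dev pointer with container_of(); to_ibdev() in utils.c line 752 performs the same recovery and then follows rf->iwdev->ibdev. A minimal userspace sketch of that pattern follows; the stand-in container_of() macro and the heavily simplified struct layouts are illustrative only, not the driver's real definitions.

```c
#include <stddef.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's container_of() macro. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Drastically simplified stand-ins for the irdma structures. */
struct irdma_sc_dev {
	int hw_rev;                     /* placeholder field */
};

struct irdma_pci_f {
	int msix_count;                 /* placeholder field */
	struct irdma_sc_dev sc_dev;     /* embedded by value, as in main.h:296 */
};

/* Mirrors the shape of dev_to_rf() from main.h:404. */
static struct irdma_pci_f *dev_to_rf(struct irdma_sc_dev *dev)
{
	return container_of(dev, struct irdma_pci_f, sc_dev);
}

int main(void)
{
	struct irdma_pci_f rf = { .msix_count = 4 };
	struct irdma_sc_dev *dev = &rf.sc_dev;  /* what most call sites pass around */

	/* Recover the parent irdma_pci_f from the embedded sc_dev pointer. */
	struct irdma_pci_f *rf_again = dev_to_rf(dev);
	printf("recovered rf? %s (msix_count=%d)\n",
	       rf_again == &rf ? "yes" : "no", rf_again->msix_count);
	return 0;
}
```

This is why most call sites in the listing pass &rf->sc_dev or &iwdev->rf->sc_dev: the sc_dev pointer alone is enough for lower layers, which can climb back to the PCI-function context when they need it.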
|
utils.c
   524  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_cleanup_pending_cqp_op()
   561  cqp_timeout.compl_cqp_cmds = rf->sc_dev.cqp_cmd_stats[IRDMA_OP_CMPL_CMDS];  in irdma_wait_event()
   569  irdma_check_cqp_progress(&cqp_timeout, &rf->sc_dev);  in irdma_wait_event()
   686  struct irdma_sc_dev *dev = &rf->sc_dev;  in irdma_handle_cqp_op()
   752  return &(container_of(dev, struct irdma_pci_f, sc_dev))->iwdev->ibdev;  in to_ibdev()
  1137  dma_free_coherent(rf->sc_dev.hw->device, iwqp->q2_ctx_mem.size,  in irdma_free_qp_rsrc()
  1140  dma_free_coherent(rf->sc_dev.hw->device, iwqp->kqp.dma_mem.size,  in irdma_free_qp_rsrc()
  2024  if (!rf->sc_dev.ceq_valid)  in irdma_cqp_ws_node_cmd()
|
cm.c
  2156  if (irdma_puda_create_ah(&iwdev->rf->sc_dev, &ah_info, wait,  in irdma_cm_create_ah()
  2174  irdma_puda_free_ah(&iwdev->rf->sc_dev, cm_node->ah);  in irdma_cm_free_ah()
  2228  cm_node->dev = &iwdev->rf->sc_dev;  in irdma_make_cm_node()
  3098  if (iwdev->rf->sc_dev.hw_attrs.uk_attrs.hw_rev >= IRDMA_GEN_2) {  in irdma_receive_ilq()
  3210  cm_core->dev = &iwdev->rf->sc_dev;  in irdma_setup_cm_core()
  3579  dma_free_coherent(iwdev->rf->sc_dev.hw->device,  in irdma_free_lsmm_rsrc()
  3615  dev = &iwdev->rf->sc_dev;  in irdma_accept()
  4087  dev = &iwdev->rf->sc_dev;  in irdma_cm_event_connected()
|