Lines matching refs:hr_dev: every line that references hr_dev in the hns RoCE driver (drivers/infiniband/hw/hns/hns_roce_main.c). Each entry shows the source line number, the matching line, and the enclosing function; lines that declare hr_dev are tagged "argument" (function parameter) or "local" (local variable).

44 static int hns_roce_set_mac(struct hns_roce_dev *hr_dev, u32 port,  in hns_roce_set_mac()  argument
50 if (hr_dev->pci_dev->revision >= PCI_REVISION_ID_HIP09) in hns_roce_set_mac()
53 if (!memcmp(hr_dev->dev_addr[port], addr, ETH_ALEN)) in hns_roce_set_mac()
57 hr_dev->dev_addr[port][i] = addr[i]; in hns_roce_set_mac()
59 phy_port = hr_dev->iboe.phy_port[port]; in hns_roce_set_mac()
60 return hr_dev->hw->set_mac(hr_dev, phy_port, addr); in hns_roce_set_mac()
65 struct hns_roce_dev *hr_dev = to_hr_dev(attr->device); in hns_roce_add_gid() local
69 if (port >= hr_dev->caps.num_ports) in hns_roce_add_gid()
72 ret = hr_dev->hw->set_gid(hr_dev, attr->index, &attr->gid, attr); in hns_roce_add_gid()
79 struct hns_roce_dev *hr_dev = to_hr_dev(attr->device); in hns_roce_del_gid() local
83 if (port >= hr_dev->caps.num_ports) in hns_roce_del_gid()
86 ret = hr_dev->hw->set_gid(hr_dev, attr->index, NULL, NULL); in hns_roce_del_gid()
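The set_mac and GID entries above share one shape: check the port index against hr_dev->caps.num_ports, update any cached state, then delegate to the hardware-revision-specific ops table behind hr_dev->hw. A minimal standalone sketch of that shape; the fake_* names and trimmed structures are invented for illustration and are not the driver's types:

    #include <stdio.h>
    #include <string.h>

    #define ETH_ALEN 6

    /* Hypothetical, heavily trimmed stand-ins for hns_roce_dev and its hw ops. */
    struct fake_hw_ops {
        int (*set_mac)(void *dev, unsigned int phy_port, const unsigned char *addr);
    };

    struct fake_dev {
        unsigned int num_ports;
        unsigned char dev_addr[2][ETH_ALEN];  /* cached per-port MAC */
        const struct fake_hw_ops *hw;         /* revision-specific ops table */
    };

    static int fake_set_mac(struct fake_dev *dev, unsigned int port,
                            const unsigned char *addr)
    {
        if (port >= dev->num_ports)
            return -1;                        /* bounds check, as in the driver */

        /* Skip the hardware call when the cached MAC is already current. */
        if (!memcmp(dev->dev_addr[port], addr, ETH_ALEN))
            return 0;

        memcpy(dev->dev_addr[port], addr, ETH_ALEN);
        return dev->hw->set_mac(dev, port, addr);  /* delegate to the ops table */
    }

    static int dummy_hw_set_mac(void *dev, unsigned int phy_port,
                                const unsigned char *addr)
    {
        (void)dev;
        printf("hw: program %02x:..:%02x on phy port %u\n",
               addr[0], addr[ETH_ALEN - 1], phy_port);
        return 0;
    }

    int main(void)
    {
        static const struct fake_hw_ops ops = { .set_mac = dummy_hw_set_mac };
        struct fake_dev dev = { .num_ports = 2, .hw = &ops };
        unsigned char mac[ETH_ALEN] = { 0x02, 0, 0, 0, 0, 0x01 };

        fake_set_mac(&dev, 0, mac);  /* programs the hardware */
        fake_set_mac(&dev, 0, mac);  /* no-op: MAC unchanged */
        return 0;
    }
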
91 static int handle_en_event(struct hns_roce_dev *hr_dev, u32 port, in handle_en_event() argument
94 struct device *dev = hr_dev->dev; in handle_en_event()
98 netdev = hr_dev->iboe.netdevs[port]; in handle_en_event()
109 ret = hns_roce_set_mac(hr_dev, port, netdev->dev_addr); in handle_en_event()
129 struct hns_roce_dev *hr_dev = NULL; in hns_roce_netdev_event() local
133 hr_dev = container_of(self, struct hns_roce_dev, iboe.nb); in hns_roce_netdev_event()
134 iboe = &hr_dev->iboe; in hns_roce_netdev_event()
136 for (port = 0; port < hr_dev->caps.num_ports; port++) { in hns_roce_netdev_event()
138 ret = handle_en_event(hr_dev, port, event); in hns_roce_netdev_event()
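hns_roce_netdev_event() receives only the notifier_block embedded in iboe and recovers the owning device with container_of(self, struct hns_roce_dev, iboe.nb) before scanning the ports. The same idiom in standalone form, with made-up structure names in place of the kernel ones:

    #include <stddef.h>
    #include <stdio.h>

    /* Userspace equivalent of the kernel's container_of() helper. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    /* Made-up stand-ins: a callback block embedded inside a larger device struct. */
    struct fake_notifier_block {
        int (*notifier_call)(struct fake_notifier_block *self, unsigned long event);
    };

    struct fake_iboe {
        struct fake_notifier_block nb;
        int num_ports;
    };

    struct fake_roce_dev {
        const char *name;
        struct fake_iboe iboe;
    };

    /* The callback only receives the embedded block, so it recovers the
     * enclosing device the same way hns_roce_netdev_event() does. */
    static int fake_netdev_event(struct fake_notifier_block *self, unsigned long event)
    {
        struct fake_roce_dev *dev =
            container_of(self, struct fake_roce_dev, iboe.nb);

        printf("%s: event %lu, %d port(s) to scan\n",
               dev->name, event, dev->iboe.num_ports);
        return 0;
    }

    int main(void)
    {
        struct fake_roce_dev dev = {
            .name = "hns-sketch",
            .iboe = { .nb = { .notifier_call = fake_netdev_event },
                      .num_ports = 1 },
        };

        /* 1UL stands in for a real netdev event code such as NETDEV_UP. */
        return dev.iboe.nb.notifier_call(&dev.iboe.nb, 1UL);
    }
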
148 static int hns_roce_setup_mtu_mac(struct hns_roce_dev *hr_dev) in hns_roce_setup_mtu_mac() argument
153 for (i = 0; i < hr_dev->caps.num_ports; i++) { in hns_roce_setup_mtu_mac()
154 ret = hns_roce_set_mac(hr_dev, i, in hns_roce_setup_mtu_mac()
155 hr_dev->iboe.netdevs[i]->dev_addr); in hns_roce_setup_mtu_mac()
167 struct hns_roce_dev *hr_dev = to_hr_dev(ib_dev); in hns_roce_query_device() local
171 props->fw_ver = hr_dev->caps.fw_ver; in hns_roce_query_device()
172 props->sys_image_guid = cpu_to_be64(hr_dev->sys_image_guid); in hns_roce_query_device()
174 props->page_size_cap = hr_dev->caps.page_size_cap; in hns_roce_query_device()
175 props->vendor_id = hr_dev->vendor_id; in hns_roce_query_device()
176 props->vendor_part_id = hr_dev->vendor_part_id; in hns_roce_query_device()
177 props->hw_ver = hr_dev->hw_rev; in hns_roce_query_device()
178 props->max_qp = hr_dev->caps.num_qps; in hns_roce_query_device()
179 props->max_qp_wr = hr_dev->caps.max_wqes; in hns_roce_query_device()
182 props->max_send_sge = hr_dev->caps.max_sq_sg; in hns_roce_query_device()
183 props->max_recv_sge = hr_dev->caps.max_rq_sg; in hns_roce_query_device()
185 props->max_cq = hr_dev->caps.num_cqs; in hns_roce_query_device()
186 props->max_cqe = hr_dev->caps.max_cqes; in hns_roce_query_device()
187 props->max_mr = hr_dev->caps.num_mtpts; in hns_roce_query_device()
188 props->max_pd = hr_dev->caps.num_pds; in hns_roce_query_device()
189 props->max_qp_rd_atom = hr_dev->caps.max_qp_dest_rdma; in hns_roce_query_device()
190 props->max_qp_init_rd_atom = hr_dev->caps.max_qp_init_rdma; in hns_roce_query_device()
191 props->atomic_cap = hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_ATOMIC ? in hns_roce_query_device()
194 props->local_ca_ack_delay = hr_dev->caps.local_ca_ack_delay; in hns_roce_query_device()
195 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_SRQ) { in hns_roce_query_device()
196 props->max_srq = hr_dev->caps.num_srqs; in hns_roce_query_device()
197 props->max_srq_wr = hr_dev->caps.max_srq_wrs; in hns_roce_query_device()
198 props->max_srq_sge = hr_dev->caps.max_srq_sges; in hns_roce_query_device()
201 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_FRMR && in hns_roce_query_device()
202 hr_dev->pci_dev->revision >= PCI_REVISION_ID_HIP09) { in hns_roce_query_device()
207 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_XRC) in hns_roce_query_device()
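Most of hns_roce_query_device() is a direct copy from hr_dev->caps into the device attribute fields; optional features (atomics, SRQ, FRMR, XRC) are reported only when the matching bit is set in caps.flags. A compact sketch of that flag-gated reporting, with invented flag names and a cut-down attribute struct rather than the real ib_device_attr:

    #include <stdint.h>
    #include <stdio.h>

    /* Invented capability bits, in the style of HNS_ROCE_CAP_FLAG_*. */
    #define CAP_FLAG_ATOMIC (1u << 0)
    #define CAP_FLAG_SRQ    (1u << 1)
    #define CAP_FLAG_XRC    (1u << 2)

    struct fake_caps {
        uint32_t flags;
        int num_qps;
        int num_srqs;
        int max_srq_wrs;
    };

    struct fake_attr {
        int max_qp;
        int max_srq;
        int max_srq_wr;
        int atomic_cap;  /* 0 = none, 1 = HCA-level atomics */
        int xrc;         /* reported via a device capability bit in the real API */
    };

    static void fake_query_device(const struct fake_caps *caps, struct fake_attr *attr)
    {
        attr->max_qp = caps->num_qps;

        /* Mirrors the atomic_cap ternary in hns_roce_query_device(). */
        attr->atomic_cap = (caps->flags & CAP_FLAG_ATOMIC) ? 1 : 0;

        /* SRQ limits are only meaningful when the SRQ capability is present. */
        if (caps->flags & CAP_FLAG_SRQ) {
            attr->max_srq = caps->num_srqs;
            attr->max_srq_wr = caps->max_srq_wrs;
        }

        attr->xrc = !!(caps->flags & CAP_FLAG_XRC);
    }

    int main(void)
    {
        struct fake_caps caps = { .flags = CAP_FLAG_SRQ, .num_qps = 256,
                                  .num_srqs = 64, .max_srq_wrs = 512 };
        struct fake_attr attr = { 0 };

        fake_query_device(&caps, &attr);
        printf("max_qp=%d max_srq=%d atomics=%d xrc=%d\n",
               attr.max_qp, attr.max_srq, attr.atomic_cap, attr.xrc);
        return 0;
    }
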
216 struct hns_roce_dev *hr_dev = to_hr_dev(ib_dev); in hns_roce_query_port() local
217 struct device *dev = hr_dev->dev; in hns_roce_query_port()
228 props->max_mtu = hr_dev->caps.max_mtu; in hns_roce_query_port()
229 props->gid_tbl_len = hr_dev->caps.gid_table_len[port]; in hns_roce_query_port()
240 spin_lock_irqsave(&hr_dev->iboe.lock, flags); in hns_roce_query_port()
242 net_dev = hr_dev->iboe.netdevs[port]; in hns_roce_query_port()
244 spin_unlock_irqrestore(&hr_dev->iboe.lock, flags); in hns_roce_query_port()
258 spin_unlock_irqrestore(&hr_dev->iboe.lock, flags); in hns_roce_query_port()
361 struct hns_roce_dev *hr_dev = to_hr_dev(uctx->device); in hns_roce_alloc_ucontext() local
366 if (!hr_dev->active) in hns_roce_alloc_ucontext()
369 resp.qp_tab_size = hr_dev->caps.num_qps; in hns_roce_alloc_ucontext()
370 resp.srq_tab_size = hr_dev->caps.num_srqs; in hns_roce_alloc_ucontext()
377 if (hr_dev->pci_dev->revision >= PCI_REVISION_ID_HIP09) in hns_roce_alloc_ucontext()
382 resp.max_inline_data = hr_dev->caps.max_sq_inline; in hns_roce_alloc_ucontext()
385 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_RQ_INLINE) { in hns_roce_alloc_ucontext()
391 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_CQE_INLINE) { in hns_roce_alloc_ucontext()
397 ret = hns_roce_uar_alloc(hr_dev, &context->uar); in hns_roce_alloc_ucontext()
405 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_CQ_RECORD_DB || in hns_roce_alloc_ucontext()
406 hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_QP_RECORD_DB) { in hns_roce_alloc_ucontext()
411 resp.cqe_size = hr_dev->caps.cqe_sz; in hns_roce_alloc_ucontext()
424 ida_free(&hr_dev->uar_ida.ida, (int)context->uar.logic_idx); in hns_roce_alloc_ucontext()
433 struct hns_roce_dev *hr_dev = to_hr_dev(ibcontext->device); in hns_roce_dealloc_ucontext() local
437 ida_free(&hr_dev->uar_ida.ida, (int)context->uar.logic_idx); in hns_roce_dealloc_ucontext()
549 struct hns_roce_dev *hr_dev = to_hr_dev(device); in hns_roce_alloc_hw_port_stats() local
552 if (port > hr_dev->caps.num_ports) { in hns_roce_alloc_hw_port_stats()
557 if (hr_dev->pci_dev->revision <= PCI_REVISION_ID_HIP08 || in hns_roce_alloc_hw_port_stats()
558 hr_dev->is_vf) in hns_roce_alloc_hw_port_stats()
570 struct hns_roce_dev *hr_dev = to_hr_dev(device); in hns_roce_get_hw_stats() local
577 if (port > hr_dev->caps.num_ports) in hns_roce_get_hw_stats()
580 if (hr_dev->pci_dev->revision <= PCI_REVISION_ID_HIP08 || in hns_roce_get_hw_stats()
581 hr_dev->is_vf) in hns_roce_get_hw_stats()
584 ret = hr_dev->hw->query_hw_counter(hr_dev, stats->value, port, in hns_roce_get_hw_stats()
595 static void hns_roce_unregister_device(struct hns_roce_dev *hr_dev) in hns_roce_unregister_device() argument
597 struct hns_roce_ib_iboe *iboe = &hr_dev->iboe; in hns_roce_unregister_device()
599 hr_dev->active = false; in hns_roce_unregister_device()
601 ib_unregister_device(&hr_dev->ib_dev); in hns_roce_unregister_device()
686 static int hns_roce_register_device(struct hns_roce_dev *hr_dev) in hns_roce_register_device() argument
691 struct device *dev = hr_dev->dev; in hns_roce_register_device()
694 iboe = &hr_dev->iboe; in hns_roce_register_device()
697 ib_dev = &hr_dev->ib_dev; in hns_roce_register_device()
702 ib_dev->phys_port_cnt = hr_dev->caps.num_ports; in hns_roce_register_device()
703 ib_dev->local_dma_lkey = hr_dev->caps.reserved_lkey; in hns_roce_register_device()
704 ib_dev->num_comp_vectors = hr_dev->caps.num_comp_vectors; in hns_roce_register_device()
706 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_REREG_MR) in hns_roce_register_device()
709 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_MW) in hns_roce_register_device()
712 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_FRMR) in hns_roce_register_device()
715 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_SRQ) { in hns_roce_register_device()
717 ib_set_device_ops(ib_dev, hr_dev->hw->hns_roce_dev_srq_ops); in hns_roce_register_device()
720 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_XRC) in hns_roce_register_device()
723 ib_set_device_ops(ib_dev, hr_dev->hw->hns_roce_dev_ops); in hns_roce_register_device()
726 for (i = 0; i < hr_dev->caps.num_ports; i++) { in hns_roce_register_device()
727 if (!hr_dev->iboe.netdevs[i]) in hns_roce_register_device()
730 ret = ib_device_set_netdev(ib_dev, hr_dev->iboe.netdevs[i], in hns_roce_register_device()
742 ret = hns_roce_setup_mtu_mac(hr_dev); in hns_roce_register_device()
755 hr_dev->active = true; in hns_roce_register_device()
764 static int hns_roce_init_hem(struct hns_roce_dev *hr_dev) in hns_roce_init_hem() argument
766 struct device *dev = hr_dev->dev; in hns_roce_init_hem()
769 ret = hns_roce_init_hem_table(hr_dev, &hr_dev->mr_table.mtpt_table, in hns_roce_init_hem()
770 HEM_TYPE_MTPT, hr_dev->caps.mtpt_entry_sz, in hns_roce_init_hem()
771 hr_dev->caps.num_mtpts); in hns_roce_init_hem()
777 ret = hns_roce_init_hem_table(hr_dev, &hr_dev->qp_table.qp_table, in hns_roce_init_hem()
778 HEM_TYPE_QPC, hr_dev->caps.qpc_sz, in hns_roce_init_hem()
779 hr_dev->caps.num_qps); in hns_roce_init_hem()
785 ret = hns_roce_init_hem_table(hr_dev, &hr_dev->qp_table.irrl_table, in hns_roce_init_hem()
787 hr_dev->caps.irrl_entry_sz * in hns_roce_init_hem()
788 hr_dev->caps.max_qp_init_rdma, in hns_roce_init_hem()
789 hr_dev->caps.num_qps); in hns_roce_init_hem()
795 if (hr_dev->caps.trrl_entry_sz) { in hns_roce_init_hem()
796 ret = hns_roce_init_hem_table(hr_dev, in hns_roce_init_hem()
797 &hr_dev->qp_table.trrl_table, in hns_roce_init_hem()
799 hr_dev->caps.trrl_entry_sz * in hns_roce_init_hem()
800 hr_dev->caps.max_qp_dest_rdma, in hns_roce_init_hem()
801 hr_dev->caps.num_qps); in hns_roce_init_hem()
809 ret = hns_roce_init_hem_table(hr_dev, &hr_dev->cq_table.table, in hns_roce_init_hem()
810 HEM_TYPE_CQC, hr_dev->caps.cqc_entry_sz, in hns_roce_init_hem()
811 hr_dev->caps.num_cqs); in hns_roce_init_hem()
817 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_SRQ) { in hns_roce_init_hem()
818 ret = hns_roce_init_hem_table(hr_dev, &hr_dev->srq_table.table, in hns_roce_init_hem()
820 hr_dev->caps.srqc_entry_sz, in hns_roce_init_hem()
821 hr_dev->caps.num_srqs); in hns_roce_init_hem()
829 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_QP_FLOW_CTRL) { in hns_roce_init_hem()
830 ret = hns_roce_init_hem_table(hr_dev, in hns_roce_init_hem()
831 &hr_dev->qp_table.sccc_table, in hns_roce_init_hem()
833 hr_dev->caps.sccc_sz, in hns_roce_init_hem()
834 hr_dev->caps.num_qps); in hns_roce_init_hem()
842 if (hr_dev->caps.qpc_timer_entry_sz) { in hns_roce_init_hem()
843 ret = hns_roce_init_hem_table(hr_dev, &hr_dev->qpc_timer_table, in hns_roce_init_hem()
845 hr_dev->caps.qpc_timer_entry_sz, in hns_roce_init_hem()
846 hr_dev->caps.qpc_timer_bt_num); in hns_roce_init_hem()
854 if (hr_dev->caps.cqc_timer_entry_sz) { in hns_roce_init_hem()
855 ret = hns_roce_init_hem_table(hr_dev, &hr_dev->cqc_timer_table, in hns_roce_init_hem()
857 hr_dev->caps.cqc_timer_entry_sz, in hns_roce_init_hem()
858 hr_dev->caps.cqc_timer_bt_num); in hns_roce_init_hem()
866 if (hr_dev->caps.gmv_entry_sz) { in hns_roce_init_hem()
867 ret = hns_roce_init_hem_table(hr_dev, &hr_dev->gmv_table, in hns_roce_init_hem()
869 hr_dev->caps.gmv_entry_sz, in hns_roce_init_hem()
870 hr_dev->caps.gmv_entry_num); in hns_roce_init_hem()
882 if (hr_dev->caps.cqc_timer_entry_sz) in hns_roce_init_hem()
883 hns_roce_cleanup_hem_table(hr_dev, &hr_dev->cqc_timer_table); in hns_roce_init_hem()
886 if (hr_dev->caps.qpc_timer_entry_sz) in hns_roce_init_hem()
887 hns_roce_cleanup_hem_table(hr_dev, &hr_dev->qpc_timer_table); in hns_roce_init_hem()
890 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_QP_FLOW_CTRL) in hns_roce_init_hem()
891 hns_roce_cleanup_hem_table(hr_dev, in hns_roce_init_hem()
892 &hr_dev->qp_table.sccc_table); in hns_roce_init_hem()
894 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_SRQ) in hns_roce_init_hem()
895 hns_roce_cleanup_hem_table(hr_dev, &hr_dev->srq_table.table); in hns_roce_init_hem()
898 hns_roce_cleanup_hem_table(hr_dev, &hr_dev->cq_table.table); in hns_roce_init_hem()
901 if (hr_dev->caps.trrl_entry_sz) in hns_roce_init_hem()
902 hns_roce_cleanup_hem_table(hr_dev, in hns_roce_init_hem()
903 &hr_dev->qp_table.trrl_table); in hns_roce_init_hem()
906 hns_roce_cleanup_hem_table(hr_dev, &hr_dev->qp_table.irrl_table); in hns_roce_init_hem()
909 hns_roce_cleanup_hem_table(hr_dev, &hr_dev->qp_table.qp_table); in hns_roce_init_hem()
912 hns_roce_cleanup_hem_table(hr_dev, &hr_dev->mr_table.mtpt_table); in hns_roce_init_hem()
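hns_roce_init_hem() brings up the HEM tables one after another (MTPT, QPC, IRRL, CQC and then, depending on capabilities, TRRL, SRQC, SCCC, the QPC/CQC timer tables and GMV) and, when any step fails, unwinds the already-initialised tables through a chain of error labels. The driver's exact labels are not visible in this listing, so the sketch below only reproduces the general goto-unwind idiom with hypothetical table names:

    #include <stdio.h>

    /* Hypothetical table initialisers standing in for hns_roce_init_hem_table(). */
    static int init_table(const char *name)
    {
        printf("init %s\n", name);
        return 0;  /* flip to nonzero to exercise the unwind path */
    }

    static void cleanup_table(const char *name)
    {
        printf("cleanup %s\n", name);
    }

    static int init_all_tables(void)
    {
        int ret;

        ret = init_table("mtpt");
        if (ret)
            return ret;

        ret = init_table("qpc");
        if (ret)
            goto err_mtpt;

        ret = init_table("cqc");
        if (ret)
            goto err_qpc;

        return 0;

        /* Unwind strictly in reverse order of initialisation. */
    err_qpc:
        cleanup_table("qpc");
    err_mtpt:
        cleanup_table("mtpt");
        return ret;
    }

    int main(void)
    {
        return init_all_tables();
    }
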
922 static int hns_roce_setup_hca(struct hns_roce_dev *hr_dev) in hns_roce_setup_hca() argument
924 struct device *dev = hr_dev->dev; in hns_roce_setup_hca()
927 spin_lock_init(&hr_dev->sm_lock); in hns_roce_setup_hca()
929 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_CQ_RECORD_DB || in hns_roce_setup_hca()
930 hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_QP_RECORD_DB) { in hns_roce_setup_hca()
931 INIT_LIST_HEAD(&hr_dev->pgdir_list); in hns_roce_setup_hca()
932 mutex_init(&hr_dev->pgdir_mutex); in hns_roce_setup_hca()
935 hns_roce_init_uar_table(hr_dev); in hns_roce_setup_hca()
937 ret = hns_roce_uar_alloc(hr_dev, &hr_dev->priv_uar); in hns_roce_setup_hca()
943 ret = hns_roce_init_qp_table(hr_dev); in hns_roce_setup_hca()
949 hns_roce_init_pd_table(hr_dev); in hns_roce_setup_hca()
951 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_XRC) in hns_roce_setup_hca()
952 hns_roce_init_xrcd_table(hr_dev); in hns_roce_setup_hca()
954 hns_roce_init_mr_table(hr_dev); in hns_roce_setup_hca()
956 hns_roce_init_cq_table(hr_dev); in hns_roce_setup_hca()
958 if (hr_dev->caps.flags & HNS_ROCE_CAP_FLAG_SRQ) in hns_roce_setup_hca()
959 hns_roce_init_srq_table(hr_dev); in hns_roce_setup_hca()
964 ida_destroy(&hr_dev->uar_ida.ida); in hns_roce_setup_hca()
983 void hns_roce_handle_device_err(struct hns_roce_dev *hr_dev) in hns_roce_handle_device_err() argument
993 spin_lock_irqsave(&hr_dev->qp_list_lock, flags); in hns_roce_handle_device_err()
994 list_for_each_entry(hr_qp, &hr_dev->qp_list, node) { in hns_roce_handle_device_err()
1007 hns_roce_cq_completion(hr_dev, hr_cq->cqn); in hns_roce_handle_device_err()
1009 spin_unlock_irqrestore(&hr_dev->qp_list_lock, flags); in hns_roce_handle_device_err()
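hns_roce_handle_device_err() walks hr_dev->qp_list under qp_list_lock and signals completion on the associated CQs (hns_roce_cq_completion()) so consumers can drain after a fatal device error. Below is the same "iterate a list under a lock" shape in standalone form, with a hypothetical userspace list and a plain mutex standing in for the kernel list and spinlock:

    #include <pthread.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for hr_qp / hr_dev: QPs on a singly linked list. */
    struct fake_qp {
        int qpn;
        struct fake_qp *next;
    };

    struct fake_dev {
        pthread_mutex_t qp_list_lock;
        struct fake_qp *qp_list;
    };

    /* Walk every QP under the lock and "complete" its CQs, in the spirit of
     * the error handler shown in the listing. */
    static void fake_handle_device_err(struct fake_dev *dev)
    {
        struct fake_qp *qp;

        pthread_mutex_lock(&dev->qp_list_lock);
        for (qp = dev->qp_list; qp; qp = qp->next)
            printf("flush completions for qpn %d\n", qp->qpn);
        pthread_mutex_unlock(&dev->qp_list_lock);
    }

    int main(void)
    {
        struct fake_qp q1 = { .qpn = 2, .next = NULL };
        struct fake_qp q0 = { .qpn = 1, .next = &q1 };
        struct fake_dev dev = { .qp_list_lock = PTHREAD_MUTEX_INITIALIZER,
                                .qp_list = &q0 };

        fake_handle_device_err(&dev);
        return 0;
    }
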
1012 int hns_roce_init(struct hns_roce_dev *hr_dev) in hns_roce_init() argument
1014 struct device *dev = hr_dev->dev; in hns_roce_init()
1017 hr_dev->is_reset = false; in hns_roce_init()
1019 if (hr_dev->hw->cmq_init) { in hns_roce_init()
1020 ret = hr_dev->hw->cmq_init(hr_dev); in hns_roce_init()
1027 ret = hr_dev->hw->hw_profile(hr_dev); in hns_roce_init()
1033 ret = hns_roce_cmd_init(hr_dev); in hns_roce_init()
1040 ret = hr_dev->hw->init_eq(hr_dev); in hns_roce_init()
1046 if (hr_dev->cmd_mod) { in hns_roce_init()
1047 ret = hns_roce_cmd_use_events(hr_dev); in hns_roce_init()
1053 ret = hns_roce_init_hem(hr_dev); in hns_roce_init()
1059 ret = hns_roce_setup_hca(hr_dev); in hns_roce_init()
1065 if (hr_dev->hw->hw_init) { in hns_roce_init()
1066 ret = hr_dev->hw->hw_init(hr_dev); in hns_roce_init()
1073 INIT_LIST_HEAD(&hr_dev->qp_list); in hns_roce_init()
1074 spin_lock_init(&hr_dev->qp_list_lock); in hns_roce_init()
1075 INIT_LIST_HEAD(&hr_dev->dip_list); in hns_roce_init()
1076 spin_lock_init(&hr_dev->dip_list_lock); in hns_roce_init()
1078 ret = hns_roce_register_device(hr_dev); in hns_roce_init()
1085 if (hr_dev->hw->hw_exit) in hns_roce_init()
1086 hr_dev->hw->hw_exit(hr_dev); in hns_roce_init()
1089 hns_roce_cleanup_bitmap(hr_dev); in hns_roce_init()
1092 hns_roce_cleanup_hem(hr_dev); in hns_roce_init()
1095 if (hr_dev->cmd_mod) in hns_roce_init()
1096 hns_roce_cmd_use_polling(hr_dev); in hns_roce_init()
1097 hr_dev->hw->cleanup_eq(hr_dev); in hns_roce_init()
1100 hns_roce_cmd_cleanup(hr_dev); in hns_roce_init()
1103 if (hr_dev->hw->cmq_exit) in hns_roce_init()
1104 hr_dev->hw->cmq_exit(hr_dev); in hns_roce_init()
1109 void hns_roce_exit(struct hns_roce_dev *hr_dev) in hns_roce_exit() argument
1111 hns_roce_unregister_device(hr_dev); in hns_roce_exit()
1113 if (hr_dev->hw->hw_exit) in hns_roce_exit()
1114 hr_dev->hw->hw_exit(hr_dev); in hns_roce_exit()
1115 hns_roce_cleanup_bitmap(hr_dev); in hns_roce_exit()
1116 hns_roce_cleanup_hem(hr_dev); in hns_roce_exit()
1118 if (hr_dev->cmd_mod) in hns_roce_exit()
1119 hns_roce_cmd_use_polling(hr_dev); in hns_roce_exit()
1121 hr_dev->hw->cleanup_eq(hr_dev); in hns_roce_exit()
1122 hns_roce_cmd_cleanup(hr_dev); in hns_roce_exit()
1123 if (hr_dev->hw->cmq_exit) in hns_roce_exit()
1124 hr_dev->hw->cmq_exit(hr_dev); in hns_roce_exit()