Lines matching refs: hba (drivers/scsi/bnx2fc/bnx2fc_hwi.c)
20 static void bnx2fc_fastpath_notification(struct bnx2fc_hba *hba,
22 static void bnx2fc_process_ofld_cmpl(struct bnx2fc_hba *hba,
24 static void bnx2fc_process_enable_conn_cmpl(struct bnx2fc_hba *hba,
26 static void bnx2fc_init_failure(struct bnx2fc_hba *hba, u32 err_code);
27 static void bnx2fc_process_conn_destroy_cmpl(struct bnx2fc_hba *hba,
30 int bnx2fc_send_stat_req(struct bnx2fc_hba *hba) in bnx2fc_send_stat_req() argument
42 stat_req.stat_params_addr_lo = (u32) hba->stats_buf_dma; in bnx2fc_send_stat_req()
43 stat_req.stat_params_addr_hi = (u32) ((u64)hba->stats_buf_dma >> 32); in bnx2fc_send_stat_req()
47 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_stat_req()
48 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_stat_req()
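All of the request-builder functions matched in this listing follow the same submission pattern: populate a KWQE on the stack, point a one-element kwqe array at it, and hand it to the cnic layer only when the ops pointer is present. A minimal sketch of that pattern, reconstructed from the bnx2fc_send_stat_req() matches above; constant and struct names follow the 57xx HSI headers and should be treated as assumptions:

	struct kwqe *kwqe_arr[2];
	struct fcoe_kwqe_stat stat_req;
	int num_kwqes = 1;
	int rc = 0;

	memset(&stat_req, 0x00, sizeof(struct fcoe_kwqe_stat));
	stat_req.hdr.op_code = FCOE_KWQE_OPCODE_STAT;
	stat_req.hdr.flags =
		(FCOE_KWQE_LAYER_CODE << FCOE_KWQE_HEADER_LAYER_CODE_SHIFT);

	/* the 64-bit stats buffer address is carried as two 32-bit halves */
	stat_req.stat_params_addr_lo = (u32) hba->stats_buf_dma;
	stat_req.stat_params_addr_hi = (u32) ((u64)hba->stats_buf_dma >> 32);

	kwqe_arr[0] = (struct kwqe *)&stat_req;

	/* submit only if the cnic device and its op are actually registered */
	if (hba->cnic && hba->cnic->submit_kwqes)
		rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes);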
62 int bnx2fc_send_fw_fcoe_init_msg(struct bnx2fc_hba *hba) in bnx2fc_send_fw_fcoe_init_msg() argument
71 if (!hba->cnic) { in bnx2fc_send_fw_fcoe_init_msg()
82 fcoe_init1.num_tasks = hba->max_tasks; in bnx2fc_send_fw_fcoe_init_msg()
87 fcoe_init1.dummy_buffer_addr_lo = (u32) hba->dummy_buf_dma; in bnx2fc_send_fw_fcoe_init_msg()
88 fcoe_init1.dummy_buffer_addr_hi = (u32) ((u64)hba->dummy_buf_dma >> 32); in bnx2fc_send_fw_fcoe_init_msg()
89 fcoe_init1.task_list_pbl_addr_lo = (u32) hba->task_ctx_bd_dma; in bnx2fc_send_fw_fcoe_init_msg()
91 (u32) ((u64) hba->task_ctx_bd_dma >> 32); in bnx2fc_send_fw_fcoe_init_msg()
109 fcoe_init2.hash_tbl_pbl_addr_lo = (u32) hba->hash_tbl_pbl_dma; in bnx2fc_send_fw_fcoe_init_msg()
111 ((u64) hba->hash_tbl_pbl_dma >> 32); in bnx2fc_send_fw_fcoe_init_msg()
113 fcoe_init2.t2_hash_tbl_addr_lo = (u32) hba->t2_hash_tbl_dma; in bnx2fc_send_fw_fcoe_init_msg()
115 ((u64) hba->t2_hash_tbl_dma >> 32); in bnx2fc_send_fw_fcoe_init_msg()
117 fcoe_init2.t2_ptr_hash_tbl_addr_lo = (u32) hba->t2_hash_tbl_ptr_dma; in bnx2fc_send_fw_fcoe_init_msg()
119 ((u64) hba->t2_hash_tbl_ptr_dma >> 32); in bnx2fc_send_fw_fcoe_init_msg()
141 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_fw_fcoe_init_msg()
142 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_fw_fcoe_init_msg()
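The fcoe_init1/fcoe_init2 matches above all encode 64-bit DMA addresses as a lo/hi pair of u32 fields, and the split is open-coded the same way each time. A minimal illustration using the task-list PBL address shown above:

	struct fcoe_kwqe_init1 fcoe_init1;
	dma_addr_t dma = hba->task_ctx_bd_dma;	/* example: task-list PBL base */

	fcoe_init1.task_list_pbl_addr_lo = (u32) dma;			/* low 32 bits */
	fcoe_init1.task_list_pbl_addr_hi = (u32) ((u64) dma >> 32);	/* high 32 bits */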
146 int bnx2fc_send_fw_fcoe_destroy_msg(struct bnx2fc_hba *hba) in bnx2fc_send_fw_fcoe_destroy_msg() argument
160 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_fw_fcoe_destroy_msg()
161 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_fw_fcoe_destroy_msg()
177 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_send_session_ofld_req() local
344 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_session_ofld_req()
345 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_session_ofld_req()
362 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_send_session_enable_req() local
415 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_session_enable_req()
416 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_session_enable_req()
431 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_send_session_disable_req() local
479 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_session_disable_req()
480 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_session_disable_req()
491 int bnx2fc_send_session_destroy_req(struct bnx2fc_hba *hba, in bnx2fc_send_session_destroy_req() argument
509 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_session_destroy_req()
510 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_session_destroy_req()
515 static bool is_valid_lport(struct bnx2fc_hba *hba, struct fc_lport *lport) in is_valid_lport() argument
519 spin_lock_bh(&hba->hba_lock); in is_valid_lport()
520 list_for_each_entry(blport, &hba->vports, list) { in is_valid_lport()
522 spin_unlock_bh(&hba->hba_lock); in is_valid_lport()
526 spin_unlock_bh(&hba->hba_lock); in is_valid_lport()
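is_valid_lport() can be reconstructed almost completely from its matches: a walk of hba->vports under hba_lock that reports whether the fc_lport is still registered on this adapter. A sketch, assuming the bnx2fc_lport list node carries an lport back-pointer as in the driver headers:

	static bool is_valid_lport(struct bnx2fc_hba *hba, struct fc_lport *lport)
	{
		struct bnx2fc_lport *blport;

		spin_lock_bh(&hba->hba_lock);
		list_for_each_entry(blport, &hba->vports, list) {
			if (blport->lport == lport) {
				spin_unlock_bh(&hba->hba_lock);
				return true;
			}
		}
		spin_unlock_bh(&hba->hba_lock);

		return false;
	}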
536 struct bnx2fc_hba *hba; in bnx2fc_unsol_els_work() local
542 hba = unsol_els->hba; in bnx2fc_unsol_els_work()
543 if (is_valid_lport(hba, lport)) in bnx2fc_unsol_els_work()
615 unsol_els->hba = interface->hba; in bnx2fc_process_l2_frame_compl()
638 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_process_unsol_compl() local
706 if (xid > hba->max_xid) { in bnx2fc_process_unsol_compl()
715 hba->task_ctx[task_idx]; in bnx2fc_process_unsol_compl()
718 io_req = (struct bnx2fc_cmd *)hba->cmd_mgr->cmds[xid]; in bnx2fc_process_unsol_compl()
824 if (xid > hba->max_xid) { in bnx2fc_process_unsol_compl()
843 interface->hba->task_ctx[task_idx]; in bnx2fc_process_unsol_compl()
845 io_req = (struct bnx2fc_cmd *)hba->cmd_mgr->cmds[xid]; in bnx2fc_process_unsol_compl()
879 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_process_cq_compl() local
889 if (xid >= hba->max_tasks) { in bnx2fc_process_cq_compl()
896 task_page = (struct fcoe_task_ctx_entry *)hba->task_ctx[task_idx]; in bnx2fc_process_cq_compl()
903 io_req = (struct bnx2fc_cmd *)hba->cmd_mgr->cmds[xid]; in bnx2fc_process_cq_compl()
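Both the unsolicited and the regular CQ completion paths resolve an exchange id (xid) to its task context and command the same way: bound-check the xid, locate the per-page context array entry, then index into the page. A sketch of that lookup, with BNX2FC_TASKS_PER_PAGE taken from the driver headers and the error message text purely illustrative:

	struct fcoe_task_ctx_entry *task, *task_page;
	struct bnx2fc_cmd *io_req;
	int task_idx, index;

	if (xid >= hba->max_tasks) {
		pr_err("xid 0x%x out of range\n", xid);
		return;
	}

	task_idx = xid / BNX2FC_TASKS_PER_PAGE;		/* which context page */
	index = xid % BNX2FC_TASKS_PER_PAGE;		/* slot within that page */

	task_page = (struct fcoe_task_ctx_entry *)hba->task_ctx[task_idx];
	task = &task_page[index];

	io_req = (struct bnx2fc_cmd *)hba->cmd_mgr->cmds[xid];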
1099 static void bnx2fc_fastpath_notification(struct bnx2fc_hba *hba, in bnx2fc_fastpath_notification() argument
1103 struct bnx2fc_rport *tgt = hba->tgt_ofld_list[conn_id]; in bnx2fc_fastpath_notification()
1122 static void bnx2fc_process_ofld_cmpl(struct bnx2fc_hba *hba, in bnx2fc_process_ofld_cmpl() argument
1133 tgt = hba->tgt_ofld_list[conn_id]; in bnx2fc_process_ofld_cmpl()
1142 if (hba != interface->hba) { in bnx2fc_process_ofld_cmpl()
1176 static void bnx2fc_process_enable_conn_cmpl(struct bnx2fc_hba *hba, in bnx2fc_process_enable_conn_cmpl() argument
1186 tgt = hba->tgt_ofld_list[conn_id]; in bnx2fc_process_enable_conn_cmpl()
1204 if (hba != interface->hba) { in bnx2fc_process_enable_conn_cmpl()
1217 static void bnx2fc_process_conn_disable_cmpl(struct bnx2fc_hba *hba, in bnx2fc_process_conn_disable_cmpl() argument
1225 tgt = hba->tgt_ofld_list[conn_id]; in bnx2fc_process_conn_disable_cmpl()
1250 static void bnx2fc_process_conn_destroy_cmpl(struct bnx2fc_hba *hba, in bnx2fc_process_conn_destroy_cmpl() argument
1257 tgt = hba->tgt_ofld_list[conn_id]; in bnx2fc_process_conn_destroy_cmpl()
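The connection-completion handlers above share an identical lookup and sanity-check prologue: fetch the bnx2fc_rport from tgt_ofld_list by the connection id carried in the KCQE, then verify that the completion really belongs to this adapter instance. A sketch; the fcoe_conn_id field name follows the 57xx HSI headers and is an assumption here:

	struct bnx2fc_rport *tgt;
	struct bnx2fc_interface *interface;
	u32 conn_id = ofld_kcqe->fcoe_conn_id;

	tgt = hba->tgt_ofld_list[conn_id];
	if (!tgt) {
		pr_err("cmpl: no tgt for conn_id %d\n", conn_id);
		return;
	}

	interface = tgt->port->priv;
	if (hba != interface->hba) {
		/* completion routed to the wrong adapter instance; drop it */
		pr_err("cmpl: hba mismatch for conn_id %d\n", conn_id);
		return;
	}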
1279 static void bnx2fc_init_failure(struct bnx2fc_hba *hba, u32 err_code) in bnx2fc_init_failure() argument
1316 struct bnx2fc_hba *hba = (struct bnx2fc_hba *)context; in bnx2fc_indicate_kcqe() local
1325 bnx2fc_fastpath_notification(hba, kcqe); in bnx2fc_indicate_kcqe()
1329 bnx2fc_process_ofld_cmpl(hba, kcqe); in bnx2fc_indicate_kcqe()
1333 bnx2fc_process_enable_conn_cmpl(hba, kcqe); in bnx2fc_indicate_kcqe()
1339 bnx2fc_init_failure(hba, in bnx2fc_indicate_kcqe()
1342 set_bit(ADAPTER_STATE_UP, &hba->adapter_state); in bnx2fc_indicate_kcqe()
1343 bnx2fc_get_link_state(hba); in bnx2fc_indicate_kcqe()
1345 (u8)hba->pcidev->bus->number); in bnx2fc_indicate_kcqe()
1357 set_bit(BNX2FC_FLAG_DESTROY_CMPL, &hba->flags); in bnx2fc_indicate_kcqe()
1358 wake_up_interruptible(&hba->destroy_wait); in bnx2fc_indicate_kcqe()
1362 bnx2fc_process_conn_disable_cmpl(hba, kcqe); in bnx2fc_indicate_kcqe()
1366 bnx2fc_process_conn_destroy_cmpl(hba, kcqe); in bnx2fc_indicate_kcqe()
1373 complete(&hba->stat_req_done); in bnx2fc_indicate_kcqe()
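bnx2fc_indicate_kcqe() is the cnic-facing completion entry point; the matches above trace a per-opcode dispatch to the static handlers declared at the top of the file. A condensed sketch of that switch; opcode names follow the 57xx HSI headers and the exact set handled is an assumption:

	struct bnx2fc_hba *hba = (struct bnx2fc_hba *)context;
	struct fcoe_kcqe *kcqe = (struct fcoe_kcqe *)kcq[0];

	switch (kcqe->op_code) {
	case FCOE_KCQE_OPCODE_CQ_EVENT_NOTIFICATION:
		bnx2fc_fastpath_notification(hba, kcqe);	/* fast path: new CQEs */
		break;
	case FCOE_KCQE_OPCODE_OFFLOAD_CONN:
		bnx2fc_process_ofld_cmpl(hba, kcqe);
		break;
	case FCOE_KCQE_OPCODE_ENABLE_CONN:
		bnx2fc_process_enable_conn_cmpl(hba, kcqe);
		break;
	case FCOE_KCQE_OPCODE_INIT_FUNC:
		if (kcqe->completion_status != FCOE_KCQE_COMPLETION_STATUS_SUCCESS) {
			bnx2fc_init_failure(hba, kcqe->completion_status);
		} else {
			set_bit(ADAPTER_STATE_UP, &hba->adapter_state);
			bnx2fc_get_link_state(hba);
			pr_info("[%.2x]: FCOE_INIT passed\n",
				(u8)hba->pcidev->bus->number);
		}
		break;
	case FCOE_KCQE_OPCODE_DESTROY_FUNC:
		set_bit(BNX2FC_FLAG_DESTROY_CMPL, &hba->flags);
		wake_up_interruptible(&hba->destroy_wait);
		break;
	case FCOE_KCQE_OPCODE_DISABLE_CONN:
		bnx2fc_process_conn_disable_cmpl(hba, kcqe);
		break;
	case FCOE_KCQE_OPCODE_DESTROY_CONN:
		bnx2fc_process_conn_destroy_cmpl(hba, kcqe);
		break;
	case FCOE_KCQE_OPCODE_STAT_FUNC:
		complete(&hba->stat_req_done);	/* wakes the waiter in bnx2fc_send_stat_req() */
		break;
	default:
		break;
	}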
1423 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_map_doorbell() local
1425 reg_base = pci_resource_start(hba->pcidev, in bnx2fc_map_doorbell()
1534 interface->hba->task_ctx[orig_task_idx]; in bnx2fc_init_seq_cleanup_task()
1846 int bnx2fc_setup_task_ctx(struct bnx2fc_hba *hba) in bnx2fc_setup_task_ctx() argument
1860 hba->task_ctx_bd_tbl = dma_zalloc_coherent(&hba->pcidev->dev, in bnx2fc_setup_task_ctx()
1862 &hba->task_ctx_bd_dma, in bnx2fc_setup_task_ctx()
1864 if (!hba->task_ctx_bd_tbl) { in bnx2fc_setup_task_ctx()
1874 task_ctx_arr_sz = (hba->max_tasks / BNX2FC_TASKS_PER_PAGE); in bnx2fc_setup_task_ctx()
1875 hba->task_ctx = kzalloc((task_ctx_arr_sz * sizeof(void *)), in bnx2fc_setup_task_ctx()
1877 if (!hba->task_ctx) { in bnx2fc_setup_task_ctx()
1886 hba->task_ctx_dma = kmalloc((task_ctx_arr_sz * in bnx2fc_setup_task_ctx()
1888 if (!hba->task_ctx_dma) { in bnx2fc_setup_task_ctx()
1894 task_ctx_bdt = (struct regpair *)hba->task_ctx_bd_tbl; in bnx2fc_setup_task_ctx()
1897 hba->task_ctx[i] = dma_zalloc_coherent(&hba->pcidev->dev, in bnx2fc_setup_task_ctx()
1899 &hba->task_ctx_dma[i], in bnx2fc_setup_task_ctx()
1901 if (!hba->task_ctx[i]) { in bnx2fc_setup_task_ctx()
1906 addr = (u64)hba->task_ctx_dma[i]; in bnx2fc_setup_task_ctx()
1915 if (hba->task_ctx[i]) { in bnx2fc_setup_task_ctx()
1917 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_setup_task_ctx()
1918 hba->task_ctx[i], hba->task_ctx_dma[i]); in bnx2fc_setup_task_ctx()
1919 hba->task_ctx[i] = NULL; in bnx2fc_setup_task_ctx()
1923 kfree(hba->task_ctx_dma); in bnx2fc_setup_task_ctx()
1924 hba->task_ctx_dma = NULL; in bnx2fc_setup_task_ctx()
1926 kfree(hba->task_ctx); in bnx2fc_setup_task_ctx()
1927 hba->task_ctx = NULL; in bnx2fc_setup_task_ctx()
1929 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_setup_task_ctx()
1930 hba->task_ctx_bd_tbl, hba->task_ctx_bd_dma); in bnx2fc_setup_task_ctx()
1931 hba->task_ctx_bd_tbl = NULL; in bnx2fc_setup_task_ctx()
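bnx2fc_setup_task_ctx() allocates one coherent page per group of task contexts and records each page's DMA address in a buffer-descriptor table of regpairs that the firmware walks. The core loop, reconstructed from the matches above with the error unwinding left out; dma_zalloc_coherent() reflects the kernel this listing was taken from (current kernels use dma_alloc_coherent(), which already zeroes):

	struct regpair *task_ctx_bdt;
	int task_ctx_arr_sz = hba->max_tasks / BNX2FC_TASKS_PER_PAGE;
	u64 addr;
	int i;

	task_ctx_bdt = (struct regpair *)hba->task_ctx_bd_tbl;
	for (i = 0; i < task_ctx_arr_sz; i++) {
		hba->task_ctx[i] = dma_zalloc_coherent(&hba->pcidev->dev,
						       PAGE_SIZE,
						       &hba->task_ctx_dma[i],
						       GFP_KERNEL);
		if (!hba->task_ctx[i])
			break;	/* allocation failure; unwind omitted in this sketch */

		/* publish the page address to the firmware-visible BD table */
		addr = (u64)hba->task_ctx_dma[i];
		task_ctx_bdt->hi = cpu_to_le32((u64)addr >> 32);
		task_ctx_bdt->lo = cpu_to_le32((u32)addr);
		task_ctx_bdt++;
	}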
1936 void bnx2fc_free_task_ctx(struct bnx2fc_hba *hba) in bnx2fc_free_task_ctx() argument
1941 if (hba->task_ctx_bd_tbl) { in bnx2fc_free_task_ctx()
1942 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_free_task_ctx()
1943 hba->task_ctx_bd_tbl, in bnx2fc_free_task_ctx()
1944 hba->task_ctx_bd_dma); in bnx2fc_free_task_ctx()
1945 hba->task_ctx_bd_tbl = NULL; in bnx2fc_free_task_ctx()
1948 task_ctx_arr_sz = (hba->max_tasks / BNX2FC_TASKS_PER_PAGE); in bnx2fc_free_task_ctx()
1949 if (hba->task_ctx) { in bnx2fc_free_task_ctx()
1951 if (hba->task_ctx[i]) { in bnx2fc_free_task_ctx()
1952 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_free_task_ctx()
1953 hba->task_ctx[i], in bnx2fc_free_task_ctx()
1954 hba->task_ctx_dma[i]); in bnx2fc_free_task_ctx()
1955 hba->task_ctx[i] = NULL; in bnx2fc_free_task_ctx()
1958 kfree(hba->task_ctx); in bnx2fc_free_task_ctx()
1959 hba->task_ctx = NULL; in bnx2fc_free_task_ctx()
1962 kfree(hba->task_ctx_dma); in bnx2fc_free_task_ctx()
1963 hba->task_ctx_dma = NULL; in bnx2fc_free_task_ctx()
1966 static void bnx2fc_free_hash_table(struct bnx2fc_hba *hba) in bnx2fc_free_hash_table() argument
1972 if (hba->hash_tbl_segments) { in bnx2fc_free_hash_table()
1974 pbl = hba->hash_tbl_pbl; in bnx2fc_free_hash_table()
1976 segment_count = hba->hash_tbl_segment_count; in bnx2fc_free_hash_table()
1984 dma_free_coherent(&hba->pcidev->dev, in bnx2fc_free_hash_table()
1986 hba->hash_tbl_segments[i], in bnx2fc_free_hash_table()
1991 kfree(hba->hash_tbl_segments); in bnx2fc_free_hash_table()
1992 hba->hash_tbl_segments = NULL; in bnx2fc_free_hash_table()
1995 if (hba->hash_tbl_pbl) { in bnx2fc_free_hash_table()
1996 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_free_hash_table()
1997 hba->hash_tbl_pbl, in bnx2fc_free_hash_table()
1998 hba->hash_tbl_pbl_dma); in bnx2fc_free_hash_table()
1999 hba->hash_tbl_pbl = NULL; in bnx2fc_free_hash_table()
2003 static int bnx2fc_allocate_hash_table(struct bnx2fc_hba *hba) in bnx2fc_allocate_hash_table() argument
2018 hba->hash_tbl_segment_count = segment_count; in bnx2fc_allocate_hash_table()
2020 segment_array_size = segment_count * sizeof(*hba->hash_tbl_segments); in bnx2fc_allocate_hash_table()
2021 hba->hash_tbl_segments = kzalloc(segment_array_size, GFP_KERNEL); in bnx2fc_allocate_hash_table()
2022 if (!hba->hash_tbl_segments) { in bnx2fc_allocate_hash_table()
2034 hba->hash_tbl_segments[i] = dma_zalloc_coherent(&hba->pcidev->dev, in bnx2fc_allocate_hash_table()
2038 if (!hba->hash_tbl_segments[i]) { in bnx2fc_allocate_hash_table()
2044 hba->hash_tbl_pbl = dma_zalloc_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_allocate_hash_table()
2045 &hba->hash_tbl_pbl_dma, in bnx2fc_allocate_hash_table()
2047 if (!hba->hash_tbl_pbl) { in bnx2fc_allocate_hash_table()
2052 pbl = hba->hash_tbl_pbl; in bnx2fc_allocate_hash_table()
2060 pbl = hba->hash_tbl_pbl; in bnx2fc_allocate_hash_table()
2076 if (hba->hash_tbl_segments[i]) in bnx2fc_allocate_hash_table()
2077 dma_free_coherent(&hba->pcidev->dev, in bnx2fc_allocate_hash_table()
2079 hba->hash_tbl_segments[i], in bnx2fc_allocate_hash_table()
2086 kfree(hba->hash_tbl_segments); in bnx2fc_allocate_hash_table()
2087 hba->hash_tbl_segments = NULL; in bnx2fc_allocate_hash_table()
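bnx2fc_allocate_hash_table() allocates the hash table in page-sized segments and then fills a physical buffer list (PBL) page with each segment's DMA address as a lo/hi pair of 32-bit words, the layout the firmware expects. A sketch of the PBL fill under the assumption that dma_segment_array is a local array holding the per-segment DMA handles:

	u32 *pbl = hba->hash_tbl_pbl;
	int i;

	for (i = 0; i < hba->hash_tbl_segment_count; i++) {
		u64 paddr = dma_segment_array[i];	/* assumed local array of DMA handles */

		*pbl = cpu_to_le32((u32) paddr);		/* low 32 bits first */
		++pbl;
		*pbl = cpu_to_le32((u32) (paddr >> 32));	/* then the high 32 bits */
		++pbl;
	}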
2097 int bnx2fc_setup_fw_resc(struct bnx2fc_hba *hba) in bnx2fc_setup_fw_resc() argument
2103 if (bnx2fc_allocate_hash_table(hba)) in bnx2fc_setup_fw_resc()
2107 hba->t2_hash_tbl_ptr = dma_zalloc_coherent(&hba->pcidev->dev, in bnx2fc_setup_fw_resc()
2109 &hba->t2_hash_tbl_ptr_dma, in bnx2fc_setup_fw_resc()
2111 if (!hba->t2_hash_tbl_ptr) { in bnx2fc_setup_fw_resc()
2113 bnx2fc_free_fw_resc(hba); in bnx2fc_setup_fw_resc()
2119 hba->t2_hash_tbl = dma_zalloc_coherent(&hba->pcidev->dev, mem_size, in bnx2fc_setup_fw_resc()
2120 &hba->t2_hash_tbl_dma, in bnx2fc_setup_fw_resc()
2122 if (!hba->t2_hash_tbl) { in bnx2fc_setup_fw_resc()
2124 bnx2fc_free_fw_resc(hba); in bnx2fc_setup_fw_resc()
2128 addr = (unsigned long) hba->t2_hash_tbl_dma + in bnx2fc_setup_fw_resc()
2130 hba->t2_hash_tbl[i].next.lo = addr & 0xffffffff; in bnx2fc_setup_fw_resc()
2131 hba->t2_hash_tbl[i].next.hi = addr >> 32; in bnx2fc_setup_fw_resc()
2134 hba->dummy_buffer = dma_alloc_coherent(&hba->pcidev->dev, in bnx2fc_setup_fw_resc()
2135 PAGE_SIZE, &hba->dummy_buf_dma, in bnx2fc_setup_fw_resc()
2137 if (!hba->dummy_buffer) { in bnx2fc_setup_fw_resc()
2139 bnx2fc_free_fw_resc(hba); in bnx2fc_setup_fw_resc()
2143 hba->stats_buffer = dma_zalloc_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_setup_fw_resc()
2144 &hba->stats_buf_dma, in bnx2fc_setup_fw_resc()
2146 if (!hba->stats_buffer) { in bnx2fc_setup_fw_resc()
2148 bnx2fc_free_fw_resc(hba); in bnx2fc_setup_fw_resc()
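The t2 hash table initialised in bnx2fc_setup_fw_resc() is chained through firmware-visible next pointers: entry i stores the DMA address of entry i+1, split into lo/hi halves exactly as the matches show. A sketch, assuming the table has BNX2FC_NUM_MAX_SESS entries of type struct fcoe_t2_hash_table_entry:

	u64 addr;
	int i;

	for (i = 0; i < BNX2FC_NUM_MAX_SESS; i++) {
		/* each entry's "next" points at the DMA address of the following entry */
		addr = (unsigned long) hba->t2_hash_tbl_dma +
		       ((i + 1) * sizeof(struct fcoe_t2_hash_table_entry));
		hba->t2_hash_tbl[i].next.lo = addr & 0xffffffff;
		hba->t2_hash_tbl[i].next.hi = addr >> 32;
	}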
2155 void bnx2fc_free_fw_resc(struct bnx2fc_hba *hba) in bnx2fc_free_fw_resc() argument
2159 if (hba->stats_buffer) { in bnx2fc_free_fw_resc()
2160 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_free_fw_resc()
2161 hba->stats_buffer, hba->stats_buf_dma); in bnx2fc_free_fw_resc()
2162 hba->stats_buffer = NULL; in bnx2fc_free_fw_resc()
2165 if (hba->dummy_buffer) { in bnx2fc_free_fw_resc()
2166 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_free_fw_resc()
2167 hba->dummy_buffer, hba->dummy_buf_dma); in bnx2fc_free_fw_resc()
2168 hba->dummy_buffer = NULL; in bnx2fc_free_fw_resc()
2171 if (hba->t2_hash_tbl_ptr) { in bnx2fc_free_fw_resc()
2173 dma_free_coherent(&hba->pcidev->dev, mem_size, in bnx2fc_free_fw_resc()
2174 hba->t2_hash_tbl_ptr, in bnx2fc_free_fw_resc()
2175 hba->t2_hash_tbl_ptr_dma); in bnx2fc_free_fw_resc()
2176 hba->t2_hash_tbl_ptr = NULL; in bnx2fc_free_fw_resc()
2179 if (hba->t2_hash_tbl) { in bnx2fc_free_fw_resc()
2182 dma_free_coherent(&hba->pcidev->dev, mem_size, in bnx2fc_free_fw_resc()
2183 hba->t2_hash_tbl, hba->t2_hash_tbl_dma); in bnx2fc_free_fw_resc()
2184 hba->t2_hash_tbl = NULL; in bnx2fc_free_fw_resc()
2186 bnx2fc_free_hash_table(hba); in bnx2fc_free_fw_resc()