Lines Matching refs:hba

20 static void bnx2fc_fastpath_notification(struct bnx2fc_hba *hba,
22 static void bnx2fc_process_ofld_cmpl(struct bnx2fc_hba *hba,
24 static void bnx2fc_process_enable_conn_cmpl(struct bnx2fc_hba *hba,
26 static void bnx2fc_init_failure(struct bnx2fc_hba *hba, u32 err_code);
27 static void bnx2fc_process_conn_destroy_cmpl(struct bnx2fc_hba *hba,
30 int bnx2fc_send_stat_req(struct bnx2fc_hba *hba) in bnx2fc_send_stat_req() argument
42 stat_req.stat_params_addr_lo = (u32) hba->stats_buf_dma; in bnx2fc_send_stat_req()
43 stat_req.stat_params_addr_hi = (u32) ((u64)hba->stats_buf_dma >> 32); in bnx2fc_send_stat_req()
47 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_stat_req()
48 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_stat_req()
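
The two stat_params_addr assignments above (file lines 42-43) show the lo/hi split that every request structure in this file uses for 64-bit DMA addresses. A minimal standalone sketch of that split, with a hypothetical stand-in for the KWQE layout:

/*
 * Standalone sketch (not driver code): splitting a 64-bit DMA/bus address
 * into the 32-bit lo/hi fields a firmware request structure expects.
 * The struct and field names here are illustrative only.
 */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

struct fw_stat_req {                    /* hypothetical KWQE-like layout */
	uint32_t stat_params_addr_lo;
	uint32_t stat_params_addr_hi;
};

int main(void)
{
	uint64_t stats_buf_dma = 0x0000000123456000ULL;  /* example bus address */
	struct fw_stat_req req;

	req.stat_params_addr_lo = (uint32_t)stats_buf_dma;          /* low 32 bits  */
	req.stat_params_addr_hi = (uint32_t)(stats_buf_dma >> 32);  /* high 32 bits */

	printf("lo=0x%08" PRIx32 " hi=0x%08" PRIx32 "\n",
	       req.stat_params_addr_lo, req.stat_params_addr_hi);
	return 0;
}
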
62 int bnx2fc_send_fw_fcoe_init_msg(struct bnx2fc_hba *hba) in bnx2fc_send_fw_fcoe_init_msg() argument
71 if (!hba->cnic) { in bnx2fc_send_fw_fcoe_init_msg()
82 fcoe_init1.num_tasks = hba->max_tasks; in bnx2fc_send_fw_fcoe_init_msg()
87 fcoe_init1.dummy_buffer_addr_lo = (u32) hba->dummy_buf_dma; in bnx2fc_send_fw_fcoe_init_msg()
88 fcoe_init1.dummy_buffer_addr_hi = (u32) ((u64)hba->dummy_buf_dma >> 32); in bnx2fc_send_fw_fcoe_init_msg()
89 fcoe_init1.task_list_pbl_addr_lo = (u32) hba->task_ctx_bd_dma; in bnx2fc_send_fw_fcoe_init_msg()
91 (u32) ((u64) hba->task_ctx_bd_dma >> 32); in bnx2fc_send_fw_fcoe_init_msg()
109 fcoe_init2.hash_tbl_pbl_addr_lo = (u32) hba->hash_tbl_pbl_dma; in bnx2fc_send_fw_fcoe_init_msg()
111 ((u64) hba->hash_tbl_pbl_dma >> 32); in bnx2fc_send_fw_fcoe_init_msg()
113 fcoe_init2.t2_hash_tbl_addr_lo = (u32) hba->t2_hash_tbl_dma; in bnx2fc_send_fw_fcoe_init_msg()
115 ((u64) hba->t2_hash_tbl_dma >> 32); in bnx2fc_send_fw_fcoe_init_msg()
117 fcoe_init2.t2_ptr_hash_tbl_addr_lo = (u32) hba->t2_hash_tbl_ptr_dma; in bnx2fc_send_fw_fcoe_init_msg()
119 ((u64) hba->t2_hash_tbl_ptr_dma >> 32); in bnx2fc_send_fw_fcoe_init_msg()
141 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_fw_fcoe_init_msg()
142 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_fw_fcoe_init_msg()
146 int bnx2fc_send_fw_fcoe_destroy_msg(struct bnx2fc_hba *hba) in bnx2fc_send_fw_fcoe_destroy_msg() argument
160 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_fw_fcoe_destroy_msg()
161 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_fw_fcoe_destroy_msg()
177 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_send_session_ofld_req() local
344 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_session_ofld_req()
345 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_session_ofld_req()
362 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_send_session_enable_req() local
415 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_session_enable_req()
416 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_session_enable_req()
431 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_send_session_disable_req() local
479 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_session_disable_req()
480 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_session_disable_req()
491 int bnx2fc_send_session_destroy_req(struct bnx2fc_hba *hba, in bnx2fc_send_session_destroy_req() argument
509 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_session_destroy_req()
510 rc = hba->cnic->submit_kwqes(hba->cnic, kwqe_arr, num_kwqes); in bnx2fc_send_session_destroy_req()
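
Every send routine above guards the indirect call the same way (file lines 47-48, 141-142, 160-161, 344-345, 415-416, 479-480, 509-510): both the cnic handle and its submit_kwqes hook are checked before the call, so a half-torn-down device is never dereferenced. A compilable sketch of that guard, with illustrative types:

/*
 * Sketch of the repeated guard pattern before submit_kwqes. The struct
 * definitions are stand-ins, not the driver's real cnic interface.
 */
#include <stddef.h>

struct kwqe;                                    /* opaque work-queue element */

struct cnic_dev {
	int (*submit_kwqes)(struct cnic_dev *dev, struct kwqe *wqes[], int n);
};

int submit_if_ready(struct cnic_dev *cnic, struct kwqe *wqes[], int n)
{
	int rc = -1;                            /* stands in for an error code */

	if (cnic && cnic->submit_kwqes)         /* both pointers must be live  */
		rc = cnic->submit_kwqes(cnic, wqes, n);

	return rc;
}
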
515 static bool is_valid_lport(struct bnx2fc_hba *hba, struct fc_lport *lport) in is_valid_lport() argument
519 spin_lock_bh(&hba->hba_lock); in is_valid_lport()
520 list_for_each_entry(blport, &hba->vports, list) { in is_valid_lport()
522 spin_unlock_bh(&hba->hba_lock); in is_valid_lport()
526 spin_unlock_bh(&hba->hba_lock); in is_valid_lport()
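
is_valid_lport() (file lines 515-526) walks hba->vports under hba_lock with bottom halves disabled and returns as soon as a match is found. A kernel-style sketch of that lookup pattern; the entry structure shown is an assumed shape, not the driver's actual definition:

/*
 * Kernel-style sketch of a spinlock-protected list lookup. The entry
 * layout is assumed for illustration.
 */
#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct fc_lport;

struct vport_entry {                    /* assumed shape of a vports entry */
	struct list_head list;
	struct fc_lport *lport;
};

static bool lport_on_list(spinlock_t *lock, struct list_head *vports,
			  struct fc_lport *lport)
{
	struct vport_entry *ent;

	spin_lock_bh(lock);
	list_for_each_entry(ent, vports, list) {
		if (ent->lport == lport) {
			spin_unlock_bh(lock);
			return true;
		}
	}
	spin_unlock_bh(lock);
	return false;
}
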
536 struct bnx2fc_hba *hba; in bnx2fc_unsol_els_work() local
542 hba = unsol_els->hba; in bnx2fc_unsol_els_work()
543 if (is_valid_lport(hba, lport)) in bnx2fc_unsol_els_work()
615 unsol_els->hba = interface->hba; in bnx2fc_process_l2_frame_compl()
637 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_process_unsol_compl() local
703 if (xid > hba->max_xid) { in bnx2fc_process_unsol_compl()
710 io_req = (struct bnx2fc_cmd *)hba->cmd_mgr->cmds[xid]; in bnx2fc_process_unsol_compl()
816 if (xid > hba->max_xid) { in bnx2fc_process_unsol_compl()
832 io_req = (struct bnx2fc_cmd *)hba->cmd_mgr->cmds[xid]; in bnx2fc_process_unsol_compl()
866 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_process_cq_compl() local
876 io_req = (struct bnx2fc_cmd *)hba->cmd_mgr->cmds[xid]; in bnx2fc_process_cq_compl()
1001 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_pending_work() local
1010 if (xid >= hba->max_tasks) { in bnx2fc_pending_work()
1017 task_page = (struct fcoe_task_ctx_entry *)hba->task_ctx[task_idx]; in bnx2fc_pending_work()
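
bnx2fc_pending_work() (file lines 1010-1017) first rejects out-of-range exchange IDs, then indexes into the per-page task-context array. A standalone sketch of that bounds check and page/offset lookup; the tasks-per-page constant and all names are illustrative:

/*
 * Standalone sketch: bounds-checked lookup of a per-task entry by page
 * and offset. Constants and struct contents are placeholders.
 */
#include <stddef.h>
#include <stdint.h>

#define TASKS_PER_PAGE 64               /* assumed value for illustration */

struct task_ctx_entry { uint8_t raw[64]; };

struct task_ctx_entry *lookup_task(struct task_ctx_entry **task_pages,
				   uint16_t xid, uint16_t max_tasks)
{
	if (xid >= max_tasks)           /* out-of-range xid: nothing to do */
		return NULL;

	return &task_pages[xid / TASKS_PER_PAGE][xid % TASKS_PER_PAGE];
}
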
1131 static void bnx2fc_fastpath_notification(struct bnx2fc_hba *hba, in bnx2fc_fastpath_notification() argument
1135 struct bnx2fc_rport *tgt = hba->tgt_ofld_list[conn_id]; in bnx2fc_fastpath_notification()
1154 static void bnx2fc_process_ofld_cmpl(struct bnx2fc_hba *hba, in bnx2fc_process_ofld_cmpl() argument
1164 tgt = hba->tgt_ofld_list[conn_id]; in bnx2fc_process_ofld_cmpl()
1172 if (hba != interface->hba) { in bnx2fc_process_ofld_cmpl()
1206 static void bnx2fc_process_enable_conn_cmpl(struct bnx2fc_hba *hba, in bnx2fc_process_enable_conn_cmpl() argument
1216 tgt = hba->tgt_ofld_list[conn_id]; in bnx2fc_process_enable_conn_cmpl()
1234 if (hba != interface->hba) { in bnx2fc_process_enable_conn_cmpl()
1247 static void bnx2fc_process_conn_disable_cmpl(struct bnx2fc_hba *hba, in bnx2fc_process_conn_disable_cmpl() argument
1255 tgt = hba->tgt_ofld_list[conn_id]; in bnx2fc_process_conn_disable_cmpl()
1280 static void bnx2fc_process_conn_destroy_cmpl(struct bnx2fc_hba *hba, in bnx2fc_process_conn_destroy_cmpl() argument
1287 tgt = hba->tgt_ofld_list[conn_id]; in bnx2fc_process_conn_destroy_cmpl()
1309 static void bnx2fc_init_failure(struct bnx2fc_hba *hba, u32 err_code) in bnx2fc_init_failure() argument
1346 struct bnx2fc_hba *hba = (struct bnx2fc_hba *)context; in bnx2fc_indicate_kcqe() local
1355 bnx2fc_fastpath_notification(hba, kcqe); in bnx2fc_indicate_kcqe()
1359 bnx2fc_process_ofld_cmpl(hba, kcqe); in bnx2fc_indicate_kcqe()
1363 bnx2fc_process_enable_conn_cmpl(hba, kcqe); in bnx2fc_indicate_kcqe()
1369 bnx2fc_init_failure(hba, in bnx2fc_indicate_kcqe()
1372 set_bit(ADAPTER_STATE_UP, &hba->adapter_state); in bnx2fc_indicate_kcqe()
1373 bnx2fc_get_link_state(hba); in bnx2fc_indicate_kcqe()
1375 (u8)hba->pcidev->bus->number); in bnx2fc_indicate_kcqe()
1387 set_bit(BNX2FC_FLAG_DESTROY_CMPL, &hba->flags); in bnx2fc_indicate_kcqe()
1388 wake_up_interruptible(&hba->destroy_wait); in bnx2fc_indicate_kcqe()
1392 bnx2fc_process_conn_disable_cmpl(hba, kcqe); in bnx2fc_indicate_kcqe()
1396 bnx2fc_process_conn_destroy_cmpl(hba, kcqe); in bnx2fc_indicate_kcqe()
1403 complete(&hba->stat_req_done); in bnx2fc_indicate_kcqe()
1451 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_map_doorbell() local
1453 reg_base = pci_resource_start(hba->pcidev, in bnx2fc_map_doorbell()
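
bnx2fc_map_doorbell() (file line 1453) starts from pci_resource_start() on the doorbell BAR. A kernel-style sketch of the BAR-lookup-plus-ioremap step; the offset and length arguments here are placeholders, not the driver's actual computation:

/*
 * Kernel-style sketch: map a small doorbell window out of a PCI BAR.
 * Offset and length are caller-supplied placeholders.
 */
#include <linux/io.h>
#include <linux/pci.h>

static void __iomem *map_doorbell(struct pci_dev *pdev, int bar,
				  unsigned long offset, unsigned long len)
{
	resource_size_t reg_base = pci_resource_start(pdev, bar);

	return ioremap(reg_base + offset, len);
}
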
1864 int bnx2fc_setup_task_ctx(struct bnx2fc_hba *hba) in bnx2fc_setup_task_ctx() argument
1878 hba->task_ctx_bd_tbl = dma_alloc_coherent(&hba->pcidev->dev, in bnx2fc_setup_task_ctx()
1880 &hba->task_ctx_bd_dma, in bnx2fc_setup_task_ctx()
1882 if (!hba->task_ctx_bd_tbl) { in bnx2fc_setup_task_ctx()
1892 task_ctx_arr_sz = (hba->max_tasks / BNX2FC_TASKS_PER_PAGE); in bnx2fc_setup_task_ctx()
1893 hba->task_ctx = kzalloc((task_ctx_arr_sz * sizeof(void *)), in bnx2fc_setup_task_ctx()
1895 if (!hba->task_ctx) { in bnx2fc_setup_task_ctx()
1904 hba->task_ctx_dma = kmalloc((task_ctx_arr_sz * in bnx2fc_setup_task_ctx()
1906 if (!hba->task_ctx_dma) { in bnx2fc_setup_task_ctx()
1912 task_ctx_bdt = (struct regpair *)hba->task_ctx_bd_tbl; in bnx2fc_setup_task_ctx()
1915 hba->task_ctx[i] = dma_alloc_coherent(&hba->pcidev->dev, in bnx2fc_setup_task_ctx()
1917 &hba->task_ctx_dma[i], in bnx2fc_setup_task_ctx()
1919 if (!hba->task_ctx[i]) { in bnx2fc_setup_task_ctx()
1924 addr = (u64)hba->task_ctx_dma[i]; in bnx2fc_setup_task_ctx()
1933 if (hba->task_ctx[i]) { in bnx2fc_setup_task_ctx()
1935 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_setup_task_ctx()
1936 hba->task_ctx[i], hba->task_ctx_dma[i]); in bnx2fc_setup_task_ctx()
1937 hba->task_ctx[i] = NULL; in bnx2fc_setup_task_ctx()
1941 kfree(hba->task_ctx_dma); in bnx2fc_setup_task_ctx()
1942 hba->task_ctx_dma = NULL; in bnx2fc_setup_task_ctx()
1944 kfree(hba->task_ctx); in bnx2fc_setup_task_ctx()
1945 hba->task_ctx = NULL; in bnx2fc_setup_task_ctx()
1947 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_setup_task_ctx()
1948 hba->task_ctx_bd_tbl, hba->task_ctx_bd_dma); in bnx2fc_setup_task_ctx()
1949 hba->task_ctx_bd_tbl = NULL; in bnx2fc_setup_task_ctx()
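
bnx2fc_setup_task_ctx() (file lines 1878-1949) allocates a BD table page plus one coherent page per task-context block, and every failure path frees whatever was already set up. A simplified kernel-style sketch of that allocate-then-unwind pattern, with assumed field names:

/*
 * Kernel-style sketch of the allocate-then-unwind pattern. Sizes, names
 * and the container struct are simplified for illustration.
 */
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/slab.h>

struct task_tbl {
	void		*bd_tbl;	/* descriptor table page          */
	dma_addr_t	 bd_dma;
	void		**pages;	/* per-page task context buffers  */
	dma_addr_t	*page_dma;
	int		 npages;
};

static int task_tbl_setup(struct device *dev, struct task_tbl *t, int npages)
{
	int i;

	t->npages = npages;
	t->bd_tbl = dma_alloc_coherent(dev, PAGE_SIZE, &t->bd_dma, GFP_KERNEL);
	if (!t->bd_tbl)
		return -ENOMEM;

	t->pages = kcalloc(npages, sizeof(*t->pages), GFP_KERNEL);
	t->page_dma = kcalloc(npages, sizeof(*t->page_dma), GFP_KERNEL);
	if (!t->pages || !t->page_dma)
		goto err;

	for (i = 0; i < npages; i++) {
		t->pages[i] = dma_alloc_coherent(dev, PAGE_SIZE,
						 &t->page_dma[i], GFP_KERNEL);
		if (!t->pages[i])
			goto err;
	}
	return 0;

err:	/* unwind everything allocated so far */
	if (t->pages && t->page_dma)
		for (i = 0; i < npages; i++)
			if (t->pages[i])
				dma_free_coherent(dev, PAGE_SIZE,
						  t->pages[i], t->page_dma[i]);
	kfree(t->page_dma);
	kfree(t->pages);
	dma_free_coherent(dev, PAGE_SIZE, t->bd_tbl, t->bd_dma);
	return -ENOMEM;
}
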
1954 void bnx2fc_free_task_ctx(struct bnx2fc_hba *hba) in bnx2fc_free_task_ctx() argument
1959 if (hba->task_ctx_bd_tbl) { in bnx2fc_free_task_ctx()
1960 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_free_task_ctx()
1961 hba->task_ctx_bd_tbl, in bnx2fc_free_task_ctx()
1962 hba->task_ctx_bd_dma); in bnx2fc_free_task_ctx()
1963 hba->task_ctx_bd_tbl = NULL; in bnx2fc_free_task_ctx()
1966 task_ctx_arr_sz = (hba->max_tasks / BNX2FC_TASKS_PER_PAGE); in bnx2fc_free_task_ctx()
1967 if (hba->task_ctx) { in bnx2fc_free_task_ctx()
1969 if (hba->task_ctx[i]) { in bnx2fc_free_task_ctx()
1970 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_free_task_ctx()
1971 hba->task_ctx[i], in bnx2fc_free_task_ctx()
1972 hba->task_ctx_dma[i]); in bnx2fc_free_task_ctx()
1973 hba->task_ctx[i] = NULL; in bnx2fc_free_task_ctx()
1976 kfree(hba->task_ctx); in bnx2fc_free_task_ctx()
1977 hba->task_ctx = NULL; in bnx2fc_free_task_ctx()
1980 kfree(hba->task_ctx_dma); in bnx2fc_free_task_ctx()
1981 hba->task_ctx_dma = NULL; in bnx2fc_free_task_ctx()
1984 static void bnx2fc_free_hash_table(struct bnx2fc_hba *hba) in bnx2fc_free_hash_table() argument
1990 if (hba->hash_tbl_segments) { in bnx2fc_free_hash_table()
1992 pbl = hba->hash_tbl_pbl; in bnx2fc_free_hash_table()
1994 segment_count = hba->hash_tbl_segment_count; in bnx2fc_free_hash_table()
2002 dma_free_coherent(&hba->pcidev->dev, in bnx2fc_free_hash_table()
2004 hba->hash_tbl_segments[i], in bnx2fc_free_hash_table()
2009 kfree(hba->hash_tbl_segments); in bnx2fc_free_hash_table()
2010 hba->hash_tbl_segments = NULL; in bnx2fc_free_hash_table()
2013 if (hba->hash_tbl_pbl) { in bnx2fc_free_hash_table()
2014 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_free_hash_table()
2015 hba->hash_tbl_pbl, in bnx2fc_free_hash_table()
2016 hba->hash_tbl_pbl_dma); in bnx2fc_free_hash_table()
2017 hba->hash_tbl_pbl = NULL; in bnx2fc_free_hash_table()
2021 static int bnx2fc_allocate_hash_table(struct bnx2fc_hba *hba) in bnx2fc_allocate_hash_table() argument
2036 hba->hash_tbl_segment_count = segment_count; in bnx2fc_allocate_hash_table()
2038 segment_array_size = segment_count * sizeof(*hba->hash_tbl_segments); in bnx2fc_allocate_hash_table()
2039 hba->hash_tbl_segments = kzalloc(segment_array_size, GFP_KERNEL); in bnx2fc_allocate_hash_table()
2040 if (!hba->hash_tbl_segments) { in bnx2fc_allocate_hash_table()
2052 hba->hash_tbl_segments[i] = dma_alloc_coherent(&hba->pcidev->dev, in bnx2fc_allocate_hash_table()
2056 if (!hba->hash_tbl_segments[i]) { in bnx2fc_allocate_hash_table()
2062 hba->hash_tbl_pbl = dma_alloc_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_allocate_hash_table()
2063 &hba->hash_tbl_pbl_dma, in bnx2fc_allocate_hash_table()
2065 if (!hba->hash_tbl_pbl) { in bnx2fc_allocate_hash_table()
2070 pbl = hba->hash_tbl_pbl; in bnx2fc_allocate_hash_table()
2078 pbl = hba->hash_tbl_pbl; in bnx2fc_allocate_hash_table()
2090 if (hba->hash_tbl_segments[i]) in bnx2fc_allocate_hash_table()
2091 dma_free_coherent(&hba->pcidev->dev, in bnx2fc_allocate_hash_table()
2093 hba->hash_tbl_segments[i], in bnx2fc_allocate_hash_table()
2100 kfree(hba->hash_tbl_segments); in bnx2fc_allocate_hash_table()
2101 hba->hash_tbl_segments = NULL; in bnx2fc_allocate_hash_table()
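
bnx2fc_allocate_hash_table() (file lines 2052-2078) records each hash-table segment's DMA address in a page buffer list (PBL), low word then high word. A standalone sketch of that PBL fill; the regpair layout mirrors the lo/hi convention used throughout this file, and the names are illustrative:

/*
 * Standalone sketch: fill a page buffer list with lo/hi halves of each
 * segment's bus address. Layout and names are assumptions.
 */
#include <stdint.h>

struct regpair { uint32_t lo; uint32_t hi; };   /* assumed firmware layout */

void fill_pbl(struct regpair *pbl, const uint64_t *seg_dma, int nsegs)
{
	int i;

	for (i = 0; i < nsegs; i++) {
		pbl[i].lo = (uint32_t)seg_dma[i];         /* low 32 bits  */
		pbl[i].hi = (uint32_t)(seg_dma[i] >> 32); /* high 32 bits */
	}
}
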
2111 int bnx2fc_setup_fw_resc(struct bnx2fc_hba *hba) in bnx2fc_setup_fw_resc() argument
2117 if (bnx2fc_allocate_hash_table(hba)) in bnx2fc_setup_fw_resc()
2121 hba->t2_hash_tbl_ptr = dma_alloc_coherent(&hba->pcidev->dev, mem_size, in bnx2fc_setup_fw_resc()
2122 &hba->t2_hash_tbl_ptr_dma, in bnx2fc_setup_fw_resc()
2124 if (!hba->t2_hash_tbl_ptr) { in bnx2fc_setup_fw_resc()
2126 bnx2fc_free_fw_resc(hba); in bnx2fc_setup_fw_resc()
2132 hba->t2_hash_tbl = dma_alloc_coherent(&hba->pcidev->dev, mem_size, in bnx2fc_setup_fw_resc()
2133 &hba->t2_hash_tbl_dma, in bnx2fc_setup_fw_resc()
2135 if (!hba->t2_hash_tbl) { in bnx2fc_setup_fw_resc()
2137 bnx2fc_free_fw_resc(hba); in bnx2fc_setup_fw_resc()
2141 addr = (unsigned long) hba->t2_hash_tbl_dma + in bnx2fc_setup_fw_resc()
2143 hba->t2_hash_tbl[i].next.lo = addr & 0xffffffff; in bnx2fc_setup_fw_resc()
2144 hba->t2_hash_tbl[i].next.hi = addr >> 32; in bnx2fc_setup_fw_resc()
2147 hba->dummy_buffer = dma_alloc_coherent(&hba->pcidev->dev, in bnx2fc_setup_fw_resc()
2148 PAGE_SIZE, &hba->dummy_buf_dma, in bnx2fc_setup_fw_resc()
2150 if (!hba->dummy_buffer) { in bnx2fc_setup_fw_resc()
2152 bnx2fc_free_fw_resc(hba); in bnx2fc_setup_fw_resc()
2156 hba->stats_buffer = dma_alloc_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_setup_fw_resc()
2157 &hba->stats_buf_dma, in bnx2fc_setup_fw_resc()
2159 if (!hba->stats_buffer) { in bnx2fc_setup_fw_resc()
2161 bnx2fc_free_fw_resc(hba); in bnx2fc_setup_fw_resc()
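
bnx2fc_setup_fw_resc() (file lines 2141-2144) seeds each T2 hash-table entry's "next" field with the bus address of the following entry, again as a lo/hi pair, so the firmware can walk the table as a linked list. A standalone sketch of that chaining; the entry size and field names are assumptions:

/*
 * Standalone sketch: chain table entries by writing the bus address of
 * the next entry into each entry's lo/hi "next" pair. Entry layout is
 * assumed; the last entry simply points one past the end here.
 */
#include <stdint.h>

struct t2_entry {
	struct { uint32_t lo; uint32_t hi; } next;
	uint8_t payload[56];            /* assumed remainder of the entry */
};

void chain_t2_entries(struct t2_entry *tbl, uint64_t tbl_dma, int n)
{
	int i;

	for (i = 0; i < n; i++) {
		uint64_t next = tbl_dma + (uint64_t)(i + 1) * sizeof(*tbl);

		tbl[i].next.lo = (uint32_t)(next & 0xffffffff);
		tbl[i].next.hi = (uint32_t)(next >> 32);
	}
}
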
2168 void bnx2fc_free_fw_resc(struct bnx2fc_hba *hba) in bnx2fc_free_fw_resc() argument
2172 if (hba->stats_buffer) { in bnx2fc_free_fw_resc()
2173 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_free_fw_resc()
2174 hba->stats_buffer, hba->stats_buf_dma); in bnx2fc_free_fw_resc()
2175 hba->stats_buffer = NULL; in bnx2fc_free_fw_resc()
2178 if (hba->dummy_buffer) { in bnx2fc_free_fw_resc()
2179 dma_free_coherent(&hba->pcidev->dev, PAGE_SIZE, in bnx2fc_free_fw_resc()
2180 hba->dummy_buffer, hba->dummy_buf_dma); in bnx2fc_free_fw_resc()
2181 hba->dummy_buffer = NULL; in bnx2fc_free_fw_resc()
2184 if (hba->t2_hash_tbl_ptr) { in bnx2fc_free_fw_resc()
2186 dma_free_coherent(&hba->pcidev->dev, mem_size, in bnx2fc_free_fw_resc()
2187 hba->t2_hash_tbl_ptr, in bnx2fc_free_fw_resc()
2188 hba->t2_hash_tbl_ptr_dma); in bnx2fc_free_fw_resc()
2189 hba->t2_hash_tbl_ptr = NULL; in bnx2fc_free_fw_resc()
2192 if (hba->t2_hash_tbl) { in bnx2fc_free_fw_resc()
2195 dma_free_coherent(&hba->pcidev->dev, mem_size, in bnx2fc_free_fw_resc()
2196 hba->t2_hash_tbl, hba->t2_hash_tbl_dma); in bnx2fc_free_fw_resc()
2197 hba->t2_hash_tbl = NULL; in bnx2fc_free_fw_resc()
2199 bnx2fc_free_hash_table(hba); in bnx2fc_free_fw_resc()