Lines Matching refs:vf
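
The matches below appear to come from the bnx2x driver's SR-IOV support code (the bnx2x_vf_*, bnx2x_iov_* and bnx2x_vfop_* functions, apparently bnx2x_sriov.c). Each entry shows the source line number, the matched fragment, the enclosing function, and the indexer's note on whether vf is a function argument or a local variable at that line.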

71 static void bnx2x_vf_igu_ack_sb(struct bnx2x *bp, struct bnx2x_virtf *vf,  in bnx2x_vf_igu_ack_sb()  argument
79 u32 func_encode = vf->abs_vfid; in bnx2x_vf_igu_ack_sb()
173 #define bnx2x_vfop_reset_wq(vf) atomic_set(&vf->op_in_progress, 0) argument
175 void bnx2x_vfop_qctor_dump_tx(struct bnx2x *bp, struct bnx2x_virtf *vf, in bnx2x_vfop_qctor_dump_tx() argument
182 vf->abs_vfid, in bnx2x_vfop_qctor_dump_tx()
191 void bnx2x_vfop_qctor_dump_rx(struct bnx2x *bp, struct bnx2x_virtf *vf, in bnx2x_vfop_qctor_dump_rx() argument
200 vf->abs_vfid, in bnx2x_vfop_qctor_dump_rx()
216 struct bnx2x_virtf *vf, in bnx2x_vfop_qctor_prep() argument
234 init_p->rx.fw_sb_id = vf_igu_sb(vf, q->sb_idx); in bnx2x_vfop_qctor_prep()
235 init_p->tx.fw_sb_id = vf_igu_sb(vf, q->sb_idx); in bnx2x_vfop_qctor_prep()
243 setup_p->gen_params.spcl_id = vf->sp_cl_id; in bnx2x_vfop_qctor_prep()
244 setup_p->gen_params.stat_id = vfq_stat_id(vf, q); in bnx2x_vfop_qctor_prep()
251 if (vf->cfg_flags & VF_CFG_FW_FC) in bnx2x_vfop_qctor_prep()
253 vf->abs_vfid); in bnx2x_vfop_qctor_prep()
277 rxq_p->cl_qzone_id = vfq_qzone_id(vf, q); in bnx2x_vfop_qctor_prep()
278 rxq_p->fw_sb_id = vf_igu_sb(vf, q->sb_idx); in bnx2x_vfop_qctor_prep()
279 rxq_p->rss_engine_id = FW_VF_HANDLE(vf->abs_vfid); in bnx2x_vfop_qctor_prep()
287 setup_p->txq_params.tss_leading_cl_id = vf->leading_rss; in bnx2x_vfop_qctor_prep()
288 setup_p->txq_params.fw_sb_id = vf_igu_sb(vf, q->sb_idx); in bnx2x_vfop_qctor_prep()
293 static void bnx2x_vfop_qctor(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_qctor() argument
295 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_qctor()
300 bnx2x_vfop_reset_wq(vf); in bnx2x_vfop_qctor()
305 DP(BNX2X_MSG_IOV, "vf[%d] STATE: %d\n", vf->abs_vfid, state); in bnx2x_vfop_qctor()
324 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_CONT); in bnx2x_vfop_qctor()
337 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_CONT); in bnx2x_vfop_qctor()
342 bnx2x_vf_igu_ack_sb(bp, vf, vf_igu_sb(vf, args->sb_idx), in bnx2x_vfop_qctor()
350 vf->abs_vfid, args->qid, q_params->cmd, vfop->rc); in bnx2x_vfop_qctor()
352 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_qctor()
358 struct bnx2x_virtf *vf, in bnx2x_vfop_qctor_cmd() argument
362 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_qctor_cmd()
365 vf->op_params.qctor.qstate.q_obj = &bnx2x_vfq(vf, qid, sp_obj); in bnx2x_vfop_qctor_cmd()
368 vfop->args.qctor.sb_idx = bnx2x_vfq(vf, qid, sb_idx); in bnx2x_vfop_qctor_cmd()
372 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_qctor, in bnx2x_vfop_qctor_cmd()
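
The bnx2x_vfop_* fragments above all follow one recurring shape: a per-VF operation object carries a state and a return code, bnx2x_vfop_reset_wq() clears the op_in_progress flag, and bnx2x_vfop_finalize() appears to either continue to the next state, wait for a pending completion, or end the op (the VFOP_CONT/VFOP_DONE/VFOP_VERIFY_PEND arguments seen throughout this listing). Below is a minimal, self-contained model of that pattern, not the driver code itself; every name in it is invented for illustration.

    #include <stdio.h>

    /* Minimal model of the vfop pattern: a per-VF operation object holds a
     * state and a return code, and the same transition function is
     * re-entered each time the previous step completes.
     */
    enum verdict { OP_CONT, OP_PEND, OP_DONE };

    struct vfop_model {
            int state;          /* next state to run on re-entry */
            int rc;             /* result of the last step       */
            int in_progress;    /* ~ vf->op_in_progress          */
    };

    /* ~ bnx2x_vfop_finalize(): an error ends the op, otherwise follow the
     * caller's verdict (continue, wait for a completion, or finish).
     */
    static enum verdict finalize(struct vfop_model *op, enum verdict next)
    {
            return op->rc ? OP_DONE : next;
    }

    static void transition(struct vfop_model *op)
    {
            enum verdict v = OP_CONT;

            while (v == OP_CONT) {
                    op->in_progress = 0;        /* ~ bnx2x_vfop_reset_wq() */
                    switch (op->state) {
                    case 0:                     /* e.g. post the INIT ramrod */
                            op->state = 1;
                            v = finalize(op, OP_PEND);  /* wait for completion */
                            break;
                    case 1:                     /* e.g. post the SETUP ramrod */
                            op->state = 2;
                            v = finalize(op, OP_CONT);  /* fall through */
                            break;
                    default:
                            v = OP_DONE;        /* ~ bnx2x_vfop_end() */
                    }
            }
            printf("stopped at state=%d rc=%d\n", op->state, op->rc);
    }

    int main(void)
    {
            struct vfop_model op = { .state = 0 };

            transition(&op);    /* first pass posts work and goes pending  */
            transition(&op);    /* "worker" re-enters it after completion  */
            return 0;
    }

In the driver, judging by the matches around line 2596, the re-entry is driven by bnx2x_iov_sp_task(), which checks each VF's op_list_head and op_in_progress and then calls the current op's transition callback.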
379 static void bnx2x_vfop_qdtor(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_qdtor() argument
381 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_qdtor()
386 bnx2x_vfop_reset_wq(vf); in bnx2x_vfop_qdtor()
391 DP(BNX2X_MSG_IOV, "vf[%d] STATE: %d\n", vf->abs_vfid, state); in bnx2x_vfop_qdtor()
410 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_CONT); in bnx2x_vfop_qdtor()
419 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_CONT); in bnx2x_vfop_qdtor()
428 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_DONE); in bnx2x_vfop_qdtor()
431 vf->abs_vfid, qdtor->qid, q_params->cmd, vfop->rc); in bnx2x_vfop_qdtor()
437 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_qdtor()
447 struct bnx2x_virtf *vf, in bnx2x_vfop_qdtor_cmd() argument
451 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_qdtor_cmd()
455 &vf->op_params.qctor.qstate; in bnx2x_vfop_qdtor_cmd()
458 qstate->q_obj = &bnx2x_vfq(vf, qid, sp_obj); in bnx2x_vfop_qdtor_cmd()
461 vfop->args.qdtor.cxt = bnx2x_vfq(vf, qid, cxt); in bnx2x_vfop_qdtor_cmd()
465 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_qdtor, in bnx2x_vfop_qdtor_cmd()
468 DP(BNX2X_MSG_IOV, "VF[%d] failed to add a vfop.\n", vf->abs_vfid); in bnx2x_vfop_qdtor_cmd()
475 struct bnx2x_virtf *vf = bnx2x_vf_by_abs_fid(bp, abs_vfid); in bnx2x_vf_set_igu_info() local
476 if (vf) { in bnx2x_vf_set_igu_info()
477 if (!vf_sb_count(vf)) in bnx2x_vf_set_igu_info()
478 vf->igu_base_id = igu_sb_id; in bnx2x_vf_set_igu_info()
479 ++vf_sb_count(vf); in bnx2x_vf_set_igu_info()
588 static void bnx2x_vfop_vlan_mac(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_vlan_mac() argument
590 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_vlan_mac()
600 DP(BNX2X_MSG_IOV, "vf[%d] STATE: %d\n", vf->abs_vfid, state); in bnx2x_vfop_vlan_mac()
602 bnx2x_vfop_reset_wq(vf); in bnx2x_vfop_vlan_mac()
614 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_DONE); in bnx2x_vfop_vlan_mac()
625 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_DONE); in bnx2x_vfop_vlan_mac()
629 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_DONE); in bnx2x_vfop_vlan_mac()
642 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_DONE); in bnx2x_vfop_vlan_mac()
661 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_CONT); /* fall-through */ in bnx2x_vfop_vlan_mac()
670 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_DONE); in bnx2x_vfop_vlan_mac()
680 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_vlan_mac()
723 struct bnx2x_virtf *vf, in bnx2x_vfop_mac_delall_cmd() argument
727 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_mac_delall_cmd()
741 &vf->op_params.vlan_mac; in bnx2x_vfop_mac_delall_cmd()
747 ramrod->vlan_mac_obj = &bnx2x_vfq(vf, qid, mac_obj); in bnx2x_vfop_mac_delall_cmd()
754 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_vlan_mac, in bnx2x_vfop_mac_delall_cmd()
761 struct bnx2x_virtf *vf, in bnx2x_vfop_mac_list_cmd() argument
766 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_mac_list_cmd()
783 &vf->op_params.vlan_mac; in bnx2x_vfop_mac_list_cmd()
789 ramrod->vlan_mac_obj = &bnx2x_vfq(vf, qid, mac_obj); in bnx2x_vfop_mac_list_cmd()
797 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_vlan_mac, in bnx2x_vfop_mac_list_cmd()
804 struct bnx2x_virtf *vf, in bnx2x_vfop_vlan_set_cmd() argument
808 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_vlan_set_cmd()
813 .credit = &bnx2x_vfq(vf, qid, vlan_count), in bnx2x_vfop_vlan_set_cmd()
822 &vf->op_params.vlan_mac; in bnx2x_vfop_vlan_set_cmd()
829 ramrod->vlan_mac_obj = &bnx2x_vfq(vf, qid, vlan_obj); in bnx2x_vfop_vlan_set_cmd()
836 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_vlan_mac, in bnx2x_vfop_vlan_set_cmd()
843 struct bnx2x_virtf *vf, in bnx2x_vfop_vlan_delall_cmd() argument
847 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_vlan_delall_cmd()
852 .credit = &bnx2x_vfq(vf, qid, vlan_count), in bnx2x_vfop_vlan_delall_cmd()
861 &vf->op_params.vlan_mac; in bnx2x_vfop_vlan_delall_cmd()
867 ramrod->vlan_mac_obj = &bnx2x_vfq(vf, qid, vlan_obj); in bnx2x_vfop_vlan_delall_cmd()
874 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_vlan_mac, in bnx2x_vfop_vlan_delall_cmd()
881 struct bnx2x_virtf *vf, in bnx2x_vfop_vlan_list_cmd() argument
886 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_vlan_list_cmd()
891 .credit = &bnx2x_vfq(vf, qid, vlan_count), in bnx2x_vfop_vlan_list_cmd()
900 &vf->op_params.vlan_mac; in bnx2x_vfop_vlan_list_cmd()
906 ramrod->vlan_mac_obj = &bnx2x_vfq(vf, qid, vlan_obj); in bnx2x_vfop_vlan_list_cmd()
909 filters.multi_filter->add_cnt = vf_vlan_rules_cnt(vf) - in bnx2x_vfop_vlan_list_cmd()
916 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_vlan_mac, in bnx2x_vfop_vlan_list_cmd()
923 static void bnx2x_vfop_qsetup(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_qsetup() argument
925 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_qsetup()
936 DP(BNX2X_MSG_IOV, "vf[%d] STATE: %d\n", vf->abs_vfid, state); in bnx2x_vfop_qsetup()
942 vfop->rc = bnx2x_vfop_qctor_cmd(bp, vf, &cmd, qid); in bnx2x_vfop_qsetup()
954 vfop->rc = bnx2x_vfop_vlan_set_cmd(bp, vf, &cmd, qid, 0, true); in bnx2x_vfop_qsetup()
959 BNX2X_ERR("QSETUP[%d:%d] error: rc %d\n", vf->abs_vfid, qid, vfop->rc); in bnx2x_vfop_qsetup()
962 vf->cfg_flags |= VF_CFG_VLAN; in bnx2x_vfop_qsetup()
968 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_qsetup()
976 struct bnx2x_virtf *vf, in bnx2x_vfop_qsetup_cmd() argument
980 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_qsetup_cmd()
987 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_qsetup, in bnx2x_vfop_qsetup_cmd()
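
bnx2x_vfop_qsetup() above chains sub-operations: it issues bnx2x_vfop_qctor_cmd() and then bnx2x_vfop_vlan_set_cmd(), each with a &cmd argument that appears to carry a completion hook which re-enters the parent state machine (compare bnx2x_vfop_flr_cmd(bp, vf, bnx2x_vf_flr_clnup) further down, where the callback is passed directly). The sketch below is a toy, self-contained model of that chaining idea, with invented names, not the driver's types.

    #include <stdio.h>

    /* Toy model of the chaining used by bnx2x_vfop_qsetup(): a parent
     * operation launches a child and hands it a "done" hook that advances
     * the parent to its next state.
     */
    struct op;
    typedef void (*done_fn)(struct op *parent);

    struct op {
            const char *name;
            int state;
    };

    static void parent_transition(struct op *parent);

    static void run_child(const char *name, done_fn done, struct op *parent)
    {
            printf("child %s runs\n", name);
            done(parent);               /* completion resumes the parent */
    }

    static void parent_resume(struct op *parent)
    {
            parent->state++;
            parent_transition(parent);
    }

    static void parent_transition(struct op *parent)
    {
            switch (parent->state) {
            case 0:                     /* ~ bnx2x_vfop_qctor_cmd()    */
                    run_child("qctor", parent_resume, parent);
                    break;
            case 1:                     /* ~ bnx2x_vfop_vlan_set_cmd() */
                    run_child("vlan_set", parent_resume, parent);
                    break;
            default:                    /* ~ bnx2x_vfop_end()          */
                    printf("%s done\n", parent->name);
            }
    }

    int main(void)
    {
            struct op qsetup = { .name = "qsetup", .state = 0 };

            parent_transition(&qsetup);
            return 0;
    }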
994 static void bnx2x_vfop_qflr(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_qflr() argument
996 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_qflr()
1002 bnx2x_vfop_reset_wq(vf); in bnx2x_vfop_qflr()
1007 DP(BNX2X_MSG_IOV, "VF[%d] STATE: %d\n", vf->abs_vfid, state); in bnx2x_vfop_qflr()
1016 vfop->rc = bnx2x_vfop_vlan_delall_cmd(bp, vf, &cmd, qid, true); in bnx2x_vfop_qflr()
1024 vfop->rc = bnx2x_vfop_mac_delall_cmd(bp, vf, &cmd, qid, true); in bnx2x_vfop_qflr()
1027 vf->abs_vfid, vfop->rc); in bnx2x_vfop_qflr()
1035 qstate->q_obj = &bnx2x_vfq(vf, qid, sp_obj); in bnx2x_vfop_qflr()
1039 vf->abs_vfid, qstate->q_obj->state); in bnx2x_vfop_qflr()
1045 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_VERIFY_PEND); in bnx2x_vfop_qflr()
1052 vf->abs_vfid, qid, vfop->rc); in bnx2x_vfop_qflr()
1055 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_qflr()
1065 struct bnx2x_virtf *vf, in bnx2x_vfop_qflr_cmd() argument
1069 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_qflr_cmd()
1075 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_qflr, in bnx2x_vfop_qflr_cmd()
1082 static void bnx2x_vfop_mcast(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_mcast() argument
1084 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_mcast()
1091 bnx2x_vfop_reset_wq(vf); in bnx2x_vfop_mcast()
1096 DP(BNX2X_MSG_IOV, "vf[%d] STATE: %d\n", vf->abs_vfid, state); in bnx2x_vfop_mcast()
1103 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_CONT); in bnx2x_vfop_mcast()
1120 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_DONE); in bnx2x_vfop_mcast()
1124 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_DONE); in bnx2x_vfop_mcast()
1132 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_mcast()
1138 struct bnx2x_virtf *vf, in bnx2x_vfop_mcast_cmd() argument
1149 vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_mcast_cmd()
1153 &vf->op_params.mcast; in bnx2x_vfop_mcast_cmd()
1157 ramrod->mcast_obj = &vf->mcast_obj; in bnx2x_vfop_mcast_cmd()
1170 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_mcast, in bnx2x_vfop_mcast_cmd()
1180 static void bnx2x_vfop_rxmode(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_rxmode() argument
1182 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_rxmode()
1186 bnx2x_vfop_reset_wq(vf); in bnx2x_vfop_rxmode()
1191 DP(BNX2X_MSG_IOV, "vf[%d] STATE: %d\n", vf->abs_vfid, state); in bnx2x_vfop_rxmode()
1199 bnx2x_vfop_finalize(vf, vfop->rc, VFOP_DONE); in bnx2x_vfop_rxmode()
1204 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_rxmode()
1214 struct bnx2x_virtf *vf, in bnx2x_vfop_rxmode_cmd() argument
1218 struct bnx2x_vf_queue *vfq = vfq_get(vf, qid); in bnx2x_vfop_rxmode_cmd()
1219 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_rxmode_cmd()
1223 &vf->op_params.rx_mode; in bnx2x_vfop_rxmode_cmd()
1229 ramrod->cl_id = vfq_cl_id(vf, vfq); in bnx2x_vfop_rxmode_cmd()
1231 ramrod->func_id = FW_VF_HANDLE(vf->abs_vfid); in bnx2x_vfop_rxmode_cmd()
1235 ramrod->pstate = &vf->filter_state; in bnx2x_vfop_rxmode_cmd()
1238 set_bit(BNX2X_FILTER_RX_MODE_PENDING, &vf->filter_state); in bnx2x_vfop_rxmode_cmd()
1243 bnx2x_vf_sp(bp, vf, rx_mode_rdata.e2); in bnx2x_vfop_rxmode_cmd()
1245 bnx2x_vf_sp_map(bp, vf, rx_mode_rdata.e2); in bnx2x_vfop_rxmode_cmd()
1249 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_rxmode, in bnx2x_vfop_rxmode_cmd()
1258 static void bnx2x_vfop_qdown(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_qdown() argument
1260 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_qdown()
1268 DP(BNX2X_MSG_IOV, "vf[%d] STATE: %d\n", vf->abs_vfid, state); in bnx2x_vfop_qdown()
1277 vfop->rc = bnx2x_vfop_rxmode_cmd(bp, vf, &cmd, qid, 0); in bnx2x_vfop_qdown()
1285 vfop->rc = bnx2x_vfop_vlan_delall_cmd(bp, vf, &cmd, qid, false); in bnx2x_vfop_qdown()
1293 vfop->rc = bnx2x_vfop_mac_delall_cmd(bp, vf, &cmd, qid, false); in bnx2x_vfop_qdown()
1303 vfop->rc = bnx2x_vfop_qdtor_cmd(bp, vf, &cmd, qid); in bnx2x_vfop_qdown()
1310 vf->abs_vfid, qid, vfop->rc); in bnx2x_vfop_qdown()
1313 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_qdown()
1321 struct bnx2x_virtf *vf, in bnx2x_vfop_qdown_cmd() argument
1325 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_qdown_cmd()
1331 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_qdown, in bnx2x_vfop_qdown_cmd()
1382 static void bnx2x_vf_igu_reset(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vf_igu_reset() argument
1388 bnx2x_pretend_func(bp, HW_VF_HANDLE(bp, vf->abs_vfid)); in bnx2x_vf_igu_reset()
1399 if (vf->cfg_flags & VF_CFG_INT_SIMD) in bnx2x_vf_igu_reset()
1407 vf->abs_vfid, REG_RD(bp, IGU_REG_VF_CONFIGURATION)); in bnx2x_vf_igu_reset()
1412 for (i = 0; i < vf_sb_count(vf); i++) { in bnx2x_vf_igu_reset()
1413 u8 igu_sb_id = vf_igu_sb(vf, i); in bnx2x_vf_igu_reset()
1419 bnx2x_igu_clear_sb_gen(bp, vf->abs_vfid, igu_sb_id, in bnx2x_vf_igu_reset()
1423 bnx2x_vf_igu_ack_sb(bp, vf, igu_sb_id, USTORM_ID, 0, in bnx2x_vf_igu_reset()
1445 static void bnx2x_vf_enable_traffic(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vf_enable_traffic() argument
1448 bnx2x_vf_igu_reset(bp, vf); in bnx2x_vf_enable_traffic()
1451 bnx2x_pretend_func(bp, HW_VF_HANDLE(bp, vf->abs_vfid)); in bnx2x_vf_enable_traffic()
1459 struct bnx2x_virtf *vf = bnx2x_vf_by_abs_fid(bp, abs_vfid); in bnx2x_vf_is_pcie_pending() local
1461 if (!vf) in bnx2x_vf_is_pcie_pending()
1464 dev = pci_get_bus_and_slot(vf->bus, vf->devfn); in bnx2x_vf_is_pcie_pending()
1511 static void bnx2x_vf_free_resc(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vf_free_resc() argument
1514 bnx2x_iov_static_resc(bp, &vf->alloc_resc); in bnx2x_vf_free_resc()
1515 vf->state = VF_FREE; in bnx2x_vf_free_resc()
1518 static void bnx2x_vf_flr_clnup_hw(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vf_flr_clnup_hw() argument
1523 bnx2x_pretend_func(bp, HW_VF_HANDLE(bp, vf->abs_vfid)); in bnx2x_vf_flr_clnup_hw()
1530 if (bnx2x_send_final_clnup(bp, (u8)FW_VF_HANDLE(vf->abs_vfid), in bnx2x_vf_flr_clnup_hw()
1532 BNX2X_ERR("VF[%d] Final cleanup timed-out\n", vf->abs_vfid); in bnx2x_vf_flr_clnup_hw()
1538 static void bnx2x_vfop_flr(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_flr() argument
1540 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_flr()
1551 DP(BNX2X_MSG_IOV, "vf[%d] STATE: %d\n", vf->abs_vfid, state); in bnx2x_vfop_flr()
1558 if (++(qx->qid) < vf_rxq_count(vf)) { in bnx2x_vfop_flr()
1559 vfop->rc = bnx2x_vfop_qflr_cmd(bp, vf, &cmd, in bnx2x_vfop_flr()
1567 vfop->rc = bnx2x_vfop_mcast_cmd(bp, vf, &cmd, NULL, in bnx2x_vfop_flr()
1575 bnx2x_vf_flr_clnup_hw(bp, vf); in bnx2x_vfop_flr()
1578 bnx2x_vf_free_resc(bp, vf); in bnx2x_vfop_flr()
1581 bnx2x_vf_enable_mbx(bp, vf->abs_vfid); in bnx2x_vfop_flr()
1588 BNX2X_ERR("VF[%d] FLR error: rc %d\n", vf->abs_vfid, vfop->rc); in bnx2x_vfop_flr()
1590 vf->flr_clnup_stage = VF_FLR_ACK; in bnx2x_vfop_flr()
1591 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_flr()
1592 bnx2x_unlock_vf_pf_channel(bp, vf, CHANNEL_TLV_FLR); in bnx2x_vfop_flr()
1596 struct bnx2x_virtf *vf, in bnx2x_vfop_flr_cmd() argument
1599 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_flr_cmd()
1604 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_flr, false); in bnx2x_vfop_flr_cmd()
1612 struct bnx2x_virtf *vf; in bnx2x_vf_flr_clnup() local
1627 vf = BP_VF(bp, i); in bnx2x_vf_flr_clnup()
1630 bnx2x_lock_vf_pf_channel(bp, vf, CHANNEL_TLV_FLR); in bnx2x_vf_flr_clnup()
1633 if (bnx2x_vfop_flr_cmd(bp, vf, bnx2x_vf_flr_clnup)) { in bnx2x_vf_flr_clnup()
1635 vf->abs_vfid); in bnx2x_vf_flr_clnup()
1638 vf->flr_clnup_stage = VF_FLR_ACK; in bnx2x_vf_flr_clnup()
1646 vf = BP_VF(bp, i); in bnx2x_vf_flr_clnup()
1648 if (vf->flr_clnup_stage != VF_FLR_ACK) in bnx2x_vf_flr_clnup()
1651 vf->flr_clnup_stage = VF_FLR_EPILOG; in bnx2x_vf_flr_clnup()
1689 struct bnx2x_virtf *vf = BP_VF(bp, i); in bnx2x_vf_handle_flr_event() local
1692 if (vf->abs_vfid < 32) in bnx2x_vf_handle_flr_event()
1693 reset = bp->vfdb->flrd_vfs[0] & (1 << vf->abs_vfid); in bnx2x_vf_handle_flr_event()
1696 (1 << (vf->abs_vfid - 32)); in bnx2x_vf_handle_flr_event()
1700 vf->state = VF_RESET; in bnx2x_vf_handle_flr_event()
1701 vf->flr_clnup_stage = VF_FLR_CLN; in bnx2x_vf_handle_flr_event()
1705 vf->abs_vfid); in bnx2x_vf_handle_flr_event()
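
bnx2x_vf_handle_flr_event() above tests the FLR'd-VF bitmap in bp->vfdb->flrd_vfs[], which is delivered as two 32-bit words: VFs 0..31 in flrd_vfs[0] and VFs 32 and up in flrd_vfs[1]. A small self-contained sketch of that decode (the array name mirrors the fragment above; everything else is illustrative):

    #include <stdint.h>
    #include <stdio.h>

    /* Check whether a given absolute VF id is marked in a 64-bit FLR bitmap
     * that arrives as two 32-bit words, as in the fragments above.
     */
    static int vf_was_flred(const uint32_t flrd_vfs[2], unsigned int abs_vfid)
    {
            if (abs_vfid < 32)
                    return !!(flrd_vfs[0] & (1u << abs_vfid));
            return !!(flrd_vfs[1] & (1u << (abs_vfid - 32)));
    }

    int main(void)
    {
            uint32_t flrd_vfs[2] = { 0x00000004u, 0x00000001u }; /* VF 2 and VF 32 */

            printf("vf2: %d  vf3: %d  vf32: %d\n",
                   vf_was_flred(flrd_vfs, 2),
                   vf_was_flred(flrd_vfs, 3),
                   vf_was_flred(flrd_vfs, 32));
            return 0;
    }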
1779 static void bnx2x_vf_set_bars(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vf_set_bars() argument
1790 vf->bars[n].bar = start + size * vf->abs_vfid; in bnx2x_vf_set_bars()
1791 vf->bars[n].size = size; in bnx2x_vf_set_bars()
2110 static void bnx2x_vfq_init(struct bnx2x *bp, struct bnx2x_virtf *vf, in bnx2x_vfq_init() argument
2113 u8 cl_id = vfq_cl_id(vf, q); in bnx2x_vfq_init()
2114 u8 func_id = FW_VF_HANDLE(vf->abs_vfid); in bnx2x_vfq_init()
2123 bnx2x_vf_sp(bp, vf, q_data), in bnx2x_vfq_init()
2124 bnx2x_vf_sp_map(bp, vf, q_data), in bnx2x_vfq_init()
2129 vf->abs_vfid, q->sp_obj.func_id); in bnx2x_vfq_init()
2138 bnx2x_vf_sp(bp, vf, mac_rdata), in bnx2x_vfq_init()
2139 bnx2x_vf_sp_map(bp, vf, mac_rdata), in bnx2x_vfq_init()
2141 &vf->filter_state, in bnx2x_vfq_init()
2147 bnx2x_vf_sp(bp, vf, vlan_rdata), in bnx2x_vfq_init()
2148 bnx2x_vf_sp_map(bp, vf, vlan_rdata), in bnx2x_vfq_init()
2150 &vf->filter_state, in bnx2x_vfq_init()
2155 bnx2x_init_mcast_obj(bp, &vf->mcast_obj, cl_id, in bnx2x_vfq_init()
2157 bnx2x_vf_sp(bp, vf, mcast_rdata), in bnx2x_vfq_init()
2158 bnx2x_vf_sp_map(bp, vf, mcast_rdata), in bnx2x_vfq_init()
2160 &vf->filter_state, in bnx2x_vfq_init()
2163 vf->leading_rss = cl_id; in bnx2x_vfq_init()
2181 struct bnx2x_virtf *vf = BP_VF(bp, vfid); in bnx2x_iov_nic_init() local
2192 vf->abs_vfid, vf_sb_count(vf), base_vf_cid, in bnx2x_iov_nic_init()
2196 bnx2x_iov_static_resc(bp, &vf->alloc_resc); in bnx2x_iov_nic_init()
2201 bp->vlans_pool.get(&bp->vlans_pool, vf_vlan_rules_cnt(vf)); in bnx2x_iov_nic_init()
2203 vf->filter_state = 0; in bnx2x_iov_nic_init()
2204 vf->sp_cl_id = bnx2x_fp(bp, 0, cl_id); in bnx2x_iov_nic_init()
2211 bnx2x_init_mcast_obj(bp, &vf->mcast_obj, 0xFF, in bnx2x_iov_nic_init()
2213 bnx2x_vf_sp(bp, vf, mcast_rdata), in bnx2x_iov_nic_init()
2214 bnx2x_vf_sp_map(bp, vf, mcast_rdata), in bnx2x_iov_nic_init()
2216 &vf->filter_state, in bnx2x_iov_nic_init()
2228 bnx2x_vf_enable_mbx(bp, vf->abs_vfid); in bnx2x_iov_nic_init()
2234 struct bnx2x_virtf *vf = BP_VF(bp, i); in bnx2x_iov_nic_init() local
2237 vf->bus = bnx2x_vf_bus(bp, i); in bnx2x_iov_nic_init()
2238 vf->devfn = bnx2x_vf_devfn(bp, i); in bnx2x_iov_nic_init()
2239 bnx2x_vf_set_bars(bp, vf); in bnx2x_iov_nic_init()
2243 vf->abs_vfid, vf->bus, vf->devfn, in bnx2x_iov_nic_init()
2244 (unsigned)vf->bars[0].bar, vf->bars[0].size, in bnx2x_iov_nic_init()
2245 (unsigned)vf->bars[1].bar, vf->bars[1].size, in bnx2x_iov_nic_init()
2246 (unsigned)vf->bars[2].bar, vf->bars[2].size); in bnx2x_iov_nic_init()
2249 vf->vfqs = &bp->vfdb->vfqs[qcount]; in bnx2x_iov_nic_init()
2330 struct bnx2x_virtf *vf) in bnx2x_vf_handle_mcast_eqe() argument
2335 rparam.mcast_obj = &vf->mcast_obj; in bnx2x_vf_handle_mcast_eqe()
2336 vf->mcast_obj.raw.clear_pending(&vf->mcast_obj.raw); in bnx2x_vf_handle_mcast_eqe()
2339 if (vf->mcast_obj.check_pending(&vf->mcast_obj)) { in bnx2x_vf_handle_mcast_eqe()
2349 struct bnx2x_virtf *vf) in bnx2x_vf_handle_filters_eqe() argument
2352 clear_bit(BNX2X_FILTER_RX_MODE_PENDING, &vf->filter_state); in bnx2x_vf_handle_filters_eqe()
2358 struct bnx2x_virtf *vf; in bnx2x_iov_eq_sp_event() local
2411 vf = bnx2x_vf_by_abs_fid(bp, abs_vfid); in bnx2x_iov_eq_sp_event()
2413 if (!vf) { in bnx2x_iov_eq_sp_event()
2422 vf->abs_vfid, qidx); in bnx2x_iov_eq_sp_event()
2423 vfq_get(vf, qidx)->sp_obj.complete_cmd(bp, in bnx2x_iov_eq_sp_event()
2424 &vfq_get(vf, in bnx2x_iov_eq_sp_event()
2430 vf->abs_vfid, qidx); in bnx2x_iov_eq_sp_event()
2431 bnx2x_vf_handle_classification_eqe(bp, vfq_get(vf, qidx), elem); in bnx2x_iov_eq_sp_event()
2435 vf->abs_vfid, qidx); in bnx2x_iov_eq_sp_event()
2436 bnx2x_vf_handle_mcast_eqe(bp, vf); in bnx2x_iov_eq_sp_event()
2440 vf->abs_vfid, qidx); in bnx2x_iov_eq_sp_event()
2441 bnx2x_vf_handle_filters_eqe(bp, vf); in bnx2x_iov_eq_sp_event()
2445 vf->abs_vfid); in bnx2x_iov_eq_sp_event()
2473 struct bnx2x_virtf *vf; in bnx2x_iov_set_queue_sp_obj() local
2478 vf = bnx2x_vf_by_cid(bp, vf_cid); in bnx2x_iov_set_queue_sp_obj()
2480 if (vf) { in bnx2x_iov_set_queue_sp_obj()
2486 *q_obj = &bnx2x_vfq(vf, q_index, sp_obj); in bnx2x_iov_set_queue_sp_obj()
2494 struct bnx2x_virtf *vf; in bnx2x_iov_sp_event() local
2500 vf = bnx2x_vf_by_cid(bp, vf_cid); in bnx2x_iov_sp_event()
2501 if (vf) { in bnx2x_iov_sp_event()
2503 atomic_set(&vf->op_in_progress, 1); in bnx2x_iov_sp_event()
2543 struct bnx2x_virtf *vf = BP_VF(bp, i); in bnx2x_iov_adjust_stats_req() local
2545 if (vf->state != VF_ENABLED) { in bnx2x_iov_adjust_stats_req()
2548 vf->abs_vfid); in bnx2x_iov_adjust_stats_req()
2552 DP(BNX2X_MSG_IOV, "add addresses for vf %d\n", vf->abs_vfid); in bnx2x_iov_adjust_stats_req()
2553 for_each_vfq(vf, j) { in bnx2x_iov_adjust_stats_req()
2554 struct bnx2x_vf_queue *rxq = vfq_get(vf, j); in bnx2x_iov_adjust_stats_req()
2563 cur_query_entry->index = vfq_cl_id(vf, rxq); in bnx2x_iov_adjust_stats_req()
2565 cpu_to_le16(FW_VF_HANDLE(vf->abs_vfid)); in bnx2x_iov_adjust_stats_req()
2567 cpu_to_le32(U64_HI(vf->fw_stat_map)); in bnx2x_iov_adjust_stats_req()
2569 cpu_to_le32(U64_LO(vf->fw_stat_map)); in bnx2x_iov_adjust_stats_req()
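
bnx2x_iov_adjust_stats_req() above programs each VF queue's statistics query entry with the VF's DMA-mapped stats buffer, splitting the 64-bit bus address into high and low 32-bit words with U64_HI()/U64_LO(). A self-contained sketch of that split (plain C, not the driver macros):

    #include <stdint.h>
    #include <stdio.h>
    #include <inttypes.h>

    /* Split a 64-bit DMA address into the two 32-bit halves a firmware
     * descriptor typically expects, mirroring the U64_HI()/U64_LO() usage.
     */
    static uint32_t addr_hi(uint64_t addr) { return (uint32_t)(addr >> 32); }
    static uint32_t addr_lo(uint64_t addr) { return (uint32_t)(addr & 0xffffffffu); }

    int main(void)
    {
            uint64_t fw_stat_map = 0x0000001234abcd00ull;  /* example bus address */

            printf("hi=0x%08" PRIx32 " lo=0x%08" PRIx32 "\n",
                   addr_hi(fw_stat_map), addr_lo(fw_stat_map));
            return 0;
    }

In the driver each half is additionally converted with cpu_to_le32() before being handed to the firmware, as the fragments above show.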
2594 struct bnx2x_virtf *vf = BP_VF(bp, i); in bnx2x_iov_sp_task() local
2596 if (!list_empty(&vf->op_list_head) && in bnx2x_iov_sp_task()
2597 atomic_read(&vf->op_in_progress)) { in bnx2x_iov_sp_task()
2599 bnx2x_vfop_cur(bp, vf)->transition(bp, vf); in bnx2x_iov_sp_task()
2608 struct bnx2x_virtf *vf = NULL; in __vf_from_stat_id() local
2611 vf = BP_VF(bp, i); in __vf_from_stat_id()
2612 if (stat_id >= vf->igu_base_id && in __vf_from_stat_id()
2613 stat_id < vf->igu_base_id + vf_sb_count(vf)) in __vf_from_stat_id()
2616 return vf; in __vf_from_stat_id()
2629 static void bnx2x_vf_clr_qtbl(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vf_clr_qtbl() argument
2633 for_each_vfq(vf, i) in bnx2x_vf_clr_qtbl()
2634 bnx2x_vf_qtbl_set_q(bp, vf->abs_vfid, in bnx2x_vf_clr_qtbl()
2635 vfq_qzone_id(vf, vfq_get(vf, i)), false); in bnx2x_vf_clr_qtbl()
2638 static void bnx2x_vf_igu_disable(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vf_igu_disable() argument
2643 bnx2x_pretend_func(bp, HW_VF_HANDLE(bp, vf->abs_vfid)); in bnx2x_vf_igu_disable()
2651 u8 bnx2x_vf_max_queue_cnt(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vf_max_queue_cnt() argument
2653 return min_t(u8, min_t(u8, vf_sb_count(vf), BNX2X_CIDS_PER_VF), in bnx2x_vf_max_queue_cnt()
2658 int bnx2x_vf_chk_avail_resc(struct bnx2x *bp, struct bnx2x_virtf *vf, in bnx2x_vf_chk_avail_resc() argument
2661 u8 rxq_cnt = vf_rxq_count(vf) ? : bnx2x_vf_max_queue_cnt(bp, vf); in bnx2x_vf_chk_avail_resc()
2662 u8 txq_cnt = vf_txq_count(vf) ? : bnx2x_vf_max_queue_cnt(bp, vf); in bnx2x_vf_chk_avail_resc()
2666 (req_resc->num_sbs <= vf_sb_count(vf)) && in bnx2x_vf_chk_avail_resc()
2667 (req_resc->num_mac_filters <= vf_mac_rules_cnt(vf)) && in bnx2x_vf_chk_avail_resc()
2668 (req_resc->num_vlan_filters <= vf_vlan_rules_cnt(vf))); in bnx2x_vf_chk_avail_resc()
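
bnx2x_vf_max_queue_cnt() and bnx2x_vf_chk_avail_resc() above bound a VF's queue count by both its status-block count and the per-VF CID budget, then verify that a requested resource set fits inside what the VF already holds. A hedged, self-contained model of that logic follows; the struct fields and the constant value are invented.

    #include <stdio.h>

    #define CIDS_PER_VF 4   /* illustrative budget, not the driver's value */

    /* Invented stand-ins for a VF's allocated resources and for a request. */
    struct vf_resc {
            unsigned char rxqs, txqs, sbs, macs, vlans;
    };

    static unsigned char max_queue_cnt(const struct vf_resc *vf)
    {
            /* queues are limited by both status blocks and connection ids */
            return vf->sbs < CIDS_PER_VF ? vf->sbs : CIDS_PER_VF;
    }

    static int chk_avail_resc(const struct vf_resc *vf, const struct vf_resc *req)
    {
            unsigned char rxq = vf->rxqs ? vf->rxqs : max_queue_cnt(vf);
            unsigned char txq = vf->txqs ? vf->txqs : max_queue_cnt(vf);

            return req->rxqs <= rxq && req->txqs <= txq &&
                   req->sbs <= vf->sbs && req->macs <= vf->macs &&
                   req->vlans <= vf->vlans;
    }

    int main(void)
    {
            struct vf_resc vf  = { .rxqs = 0, .txqs = 0, .sbs = 2, .macs = 2, .vlans = 2 };
            struct vf_resc req = { .rxqs = 2, .txqs = 2, .sbs = 2, .macs = 1, .vlans = 1 };

            printf("request fits: %s\n", chk_avail_resc(&vf, &req) ? "yes" : "no");
            return 0;
    }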
2672 int bnx2x_vf_acquire(struct bnx2x *bp, struct bnx2x_virtf *vf, in bnx2x_vf_acquire() argument
2675 int base_vf_cid = (BP_VFDB(bp)->sriov.first_vf_in_pf + vf->index) * in bnx2x_vf_acquire()
2688 if (vf->state == VF_ACQUIRED) { in bnx2x_vf_acquire()
2690 vf->abs_vfid); in bnx2x_vf_acquire()
2692 if (!bnx2x_vf_chk_avail_resc(bp, vf, resc)) { in bnx2x_vf_acquire()
2694 vf->abs_vfid); in bnx2x_vf_acquire()
2701 if (vf->state != VF_FREE && vf->state != VF_RESET) { in bnx2x_vf_acquire()
2703 vf->abs_vfid, vf->state); in bnx2x_vf_acquire()
2711 if (!bnx2x_vf_chk_avail_resc(bp, vf, resc)) { in bnx2x_vf_acquire()
2719 vf_sb_count(vf) = resc->num_sbs; in bnx2x_vf_acquire()
2720 vf_rxq_count(vf) = resc->num_rxqs ? : bnx2x_vf_max_queue_cnt(bp, vf); in bnx2x_vf_acquire()
2721 vf_txq_count(vf) = resc->num_txqs ? : bnx2x_vf_max_queue_cnt(bp, vf); in bnx2x_vf_acquire()
2723 vf_mac_rules_cnt(vf) = resc->num_mac_filters; in bnx2x_vf_acquire()
2725 vf_vlan_rules_cnt(vf) = resc->num_vlan_filters; in bnx2x_vf_acquire()
2729 vf_sb_count(vf), vf_rxq_count(vf), in bnx2x_vf_acquire()
2730 vf_txq_count(vf), vf_mac_rules_cnt(vf), in bnx2x_vf_acquire()
2731 vf_vlan_rules_cnt(vf)); in bnx2x_vf_acquire()
2734 if (!vf->vfqs) { in bnx2x_vf_acquire()
2739 for_each_vfq(vf, i) { in bnx2x_vf_acquire()
2740 struct bnx2x_vf_queue *q = vfq_get(vf, i); in bnx2x_vf_acquire()
2752 vf->abs_vfid, i, q->index, q->cid, q->cxt); in bnx2x_vf_acquire()
2755 bnx2x_vfq_init(bp, vf, q); in bnx2x_vf_acquire()
2757 vf->state = VF_ACQUIRED; in bnx2x_vf_acquire()
2761 int bnx2x_vf_init(struct bnx2x *bp, struct bnx2x_virtf *vf, dma_addr_t *sb_map) in bnx2x_vf_init() argument
2770 for_each_vf_sb(vf, i) in bnx2x_vf_init()
2771 bnx2x_init_sb(bp, (dma_addr_t)sb_map[i], vf->abs_vfid, true, in bnx2x_vf_init()
2772 vf_igu_sb(vf, i), vf_igu_sb(vf, i)); in bnx2x_vf_init()
2775 if (vf->state != VF_ACQUIRED) { in bnx2x_vf_init()
2777 vf->abs_vfid, vf->state); in bnx2x_vf_init()
2781 if (bnx2x_vf_flr_clnup_epilog(bp, vf->abs_vfid)) in bnx2x_vf_init()
2785 REG_WR(bp, IGU_REG_STATISTIC_NUM_MESSAGE_SENT + vf->abs_vfid * 4 , 0); in bnx2x_vf_init()
2788 if (vf->cfg_flags & VF_CFG_STATS) in bnx2x_vf_init()
2791 if (vf->cfg_flags & VF_CFG_TPA) in bnx2x_vf_init()
2794 if (is_vf_multi(vf)) in bnx2x_vf_init()
2800 func_init.func_id = FW_VF_HANDLE(vf->abs_vfid); in bnx2x_vf_init()
2801 func_init.fw_stat_map = vf->fw_stat_map; in bnx2x_vf_init()
2802 func_init.spq_map = vf->spq_map; in bnx2x_vf_init()
2807 bnx2x_vf_enable_access(bp, vf->abs_vfid); in bnx2x_vf_init()
2808 bnx2x_vf_enable_traffic(bp, vf); in bnx2x_vf_init()
2811 for_each_vfq(vf, i) in bnx2x_vf_init()
2812 bnx2x_vf_qtbl_set_q(bp, vf->abs_vfid, in bnx2x_vf_init()
2813 vfq_qzone_id(vf, vfq_get(vf, i)), true); in bnx2x_vf_init()
2815 vf->state = VF_ENABLED; in bnx2x_vf_init()
2818 bnx2x_post_vf_bulletin(bp, vf->index); in bnx2x_vf_init()
2824 static void bnx2x_vfop_close(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_close() argument
2826 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_close()
2837 DP(BNX2X_MSG_IOV, "vf[%d] STATE: %d\n", vf->abs_vfid, state); in bnx2x_vfop_close()
2842 if (++(qx->qid) < vf_rxq_count(vf)) { in bnx2x_vfop_close()
2843 vfop->rc = bnx2x_vfop_qdown_cmd(bp, vf, &cmd, qx->qid); in bnx2x_vfop_close()
2851 vfop->rc = bnx2x_vfop_mcast_cmd(bp, vf, &cmd, NULL, 0, false); in bnx2x_vfop_close()
2860 bnx2x_vf_igu_disable(bp, vf); in bnx2x_vfop_close()
2864 bnx2x_vf_clr_qtbl(bp, vf); in bnx2x_vfop_close()
2871 BNX2X_ERR("VF[%d] CLOSE error: rc %d\n", vf->abs_vfid, vfop->rc); in bnx2x_vfop_close()
2873 vf->state = VF_ACQUIRED; in bnx2x_vfop_close()
2875 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_close()
2879 struct bnx2x_virtf *vf, in bnx2x_vfop_close_cmd() argument
2882 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_close_cmd()
2887 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_close, in bnx2x_vfop_close_cmd()
2897 static void bnx2x_vfop_release(struct bnx2x *bp, struct bnx2x_virtf *vf) in bnx2x_vfop_release() argument
2899 struct bnx2x_vfop *vfop = bnx2x_vfop_cur(bp, vf); in bnx2x_vfop_release()
2910 DP(BNX2X_MSG_IOV, "VF[%d] STATE: %s\n", vf->abs_vfid, in bnx2x_vfop_release()
2911 vf->state == VF_FREE ? "Free" : in bnx2x_vfop_release()
2912 vf->state == VF_ACQUIRED ? "Acquired" : in bnx2x_vfop_release()
2913 vf->state == VF_ENABLED ? "Enabled" : in bnx2x_vfop_release()
2914 vf->state == VF_RESET ? "Reset" : in bnx2x_vfop_release()
2917 switch (vf->state) { in bnx2x_vfop_release()
2919 vfop->rc = bnx2x_vfop_close_cmd(bp, vf, &cmd); in bnx2x_vfop_release()
2926 bnx2x_vf_free_resc(bp, vf); in bnx2x_vfop_release()
2935 bnx2x_vfop_default(vf->state); in bnx2x_vfop_release()
2938 BNX2X_ERR("VF[%d] RELEASE error: rc %d\n", vf->abs_vfid, vfop->rc); in bnx2x_vfop_release()
2940 bnx2x_vfop_end(bp, vf, vfop); in bnx2x_vfop_release()
2944 struct bnx2x_virtf *vf, in bnx2x_vfop_release_cmd() argument
2947 struct bnx2x_vfop *vfop = bnx2x_vfop_add(bp, vf); in bnx2x_vfop_release_cmd()
2951 return bnx2x_vfop_transition(bp, vf, bnx2x_vfop_release, in bnx2x_vfop_release_cmd()
2961 void bnx2x_vf_release(struct bnx2x *bp, struct bnx2x_virtf *vf, bool block) in bnx2x_vf_release() argument
2968 bnx2x_lock_vf_pf_channel(bp, vf, CHANNEL_TLV_PF_RELEASE_VF); in bnx2x_vf_release()
2970 rc = bnx2x_vfop_release_cmd(bp, vf, &cmd); in bnx2x_vf_release()
2974 vf->abs_vfid, rc); in bnx2x_vf_release()
2978 struct bnx2x_virtf *vf, u32 *sbdf) in bnx2x_vf_get_sbdf() argument
2980 *sbdf = vf->devfn | (vf->bus << 8); in bnx2x_vf_get_sbdf()
2983 static inline void bnx2x_vf_get_bars(struct bnx2x *bp, struct bnx2x_virtf *vf, in bnx2x_vf_get_bars() argument
2990 bar_info->bars[n] = vf->bars[n]; in bnx2x_vf_get_bars()
2993 void bnx2x_lock_vf_pf_channel(struct bnx2x *bp, struct bnx2x_virtf *vf, in bnx2x_lock_vf_pf_channel() argument
2997 mutex_lock(&vf->op_mutex); in bnx2x_lock_vf_pf_channel()
3000 vf->op_current = tlv; in bnx2x_lock_vf_pf_channel()
3004 vf->abs_vfid, tlv); in bnx2x_lock_vf_pf_channel()
3007 void bnx2x_unlock_vf_pf_channel(struct bnx2x *bp, struct bnx2x_virtf *vf, in bnx2x_unlock_vf_pf_channel() argument
3010 WARN(expected_tlv != vf->op_current, in bnx2x_unlock_vf_pf_channel()
3012 vf->op_current); in bnx2x_unlock_vf_pf_channel()
3015 mutex_unlock(&vf->op_mutex); in bnx2x_unlock_vf_pf_channel()
3019 vf->abs_vfid, vf->op_current); in bnx2x_unlock_vf_pf_channel()
3022 vf->op_current = CHANNEL_TLV_NONE; in bnx2x_unlock_vf_pf_channel()
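
bnx2x_lock_vf_pf_channel()/bnx2x_unlock_vf_pf_channel() above serialize one operation per VF on vf->op_mutex and record which channel TLV currently owns it, so that unlock can WARN() if the wrong TLV is released. Judging from the call sites in this listing (FLR, PF_RELEASE_VF, PF_SET_MAC, PF_SET_VLAN), a caller brackets its work roughly as in the sketch below. This is an illustration of the usage pattern, not verbatim driver code, and the wrapper function name is invented.

    /* Hedged usage sketch, based only on the fragments above. */
    static int example_set_vf_mac(struct bnx2x *bp, struct bnx2x_virtf *vf)
    {
            int rc;

            /* take the VF<->PF channel for this VF and tag the operation */
            bnx2x_lock_vf_pf_channel(bp, vf, CHANNEL_TLV_PF_SET_MAC);

            rc = 0;   /* ... configure the classification object here ... */

            /* release with the same TLV; a mismatch triggers the WARN() above */
            bnx2x_unlock_vf_pf_channel(bp, vf, CHANNEL_TLV_PF_SET_MAC);
            return rc;
    }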
3087 struct bnx2x_virtf *vf) in bnx2x_vf_ndo_sanity() argument
3100 if (!vf) { in bnx2x_vf_ndo_sanity()
3113 struct bnx2x_virtf *vf = BP_VF(bp, vfidx); in bnx2x_get_vf_config() local
3114 struct bnx2x_vlan_mac_obj *mac_obj = &bnx2x_vfq(vf, 0, mac_obj); in bnx2x_get_vf_config()
3115 struct bnx2x_vlan_mac_obj *vlan_obj = &bnx2x_vfq(vf, 0, vlan_obj); in bnx2x_get_vf_config()
3120 rc = bnx2x_vf_ndo_sanity(bp, vfidx, vf); in bnx2x_get_vf_config()
3128 ivi->vf = vfidx; in bnx2x_get_vf_config()
3132 if (vf->state == VF_ENABLED) { in bnx2x_get_vf_config()
3180 struct bnx2x_virtf *vf = BP_VF(bp, vfidx); in bnx2x_set_vf_mac() local
3184 rc = bnx2x_vf_ndo_sanity(bp, vfidx, vf); in bnx2x_set_vf_mac()
3207 bnx2x_get_q_logical_state(bp, &bnx2x_vfq(vf, 0, sp_obj)); in bnx2x_set_vf_mac()
3208 if (vf->state == VF_ENABLED && in bnx2x_set_vf_mac()
3212 struct bnx2x_vlan_mac_obj *mac_obj = &bnx2x_vfq(vf, 0, mac_obj); in bnx2x_set_vf_mac()
3215 bnx2x_lock_vf_pf_channel(bp, vf, CHANNEL_TLV_PF_SET_MAC); in bnx2x_set_vf_mac()
3236 bnx2x_unlock_vf_pf_channel(bp, vf, CHANNEL_TLV_PF_SET_MAC); in bnx2x_set_vf_mac()
3246 struct bnx2x_virtf *vf = BP_VF(bp, vfidx); in bnx2x_set_vf_vlan() local
3250 rc = bnx2x_vf_ndo_sanity(bp, vfidx, vf); in bnx2x_set_vf_vlan()
3272 bnx2x_get_q_logical_state(bp, &bnx2x_vfq(vf, 0, sp_obj)); in bnx2x_set_vf_vlan()
3273 if (vf->state == VF_ENABLED && in bnx2x_set_vf_vlan()
3279 &bnx2x_vfq(vf, 0, vlan_obj); in bnx2x_set_vf_vlan()
3287 bnx2x_lock_vf_pf_channel(bp, vf, CHANNEL_TLV_PF_SET_VLAN); in bnx2x_set_vf_vlan()
3303 q_params.q_obj = &bnx2x_vfq(vf, 0, sp_obj); in bnx2x_set_vf_vlan()
3354 vf->cfg_flags &= ~VF_CFG_VLAN; in bnx2x_set_vf_vlan()
3356 bnx2x_unlock_vf_pf_channel(bp, vf, CHANNEL_TLV_PF_SET_VLAN); in bnx2x_set_vf_vlan()
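
bnx2x_get_vf_config(), bnx2x_set_vf_mac() and bnx2x_set_vf_vlan() above are the PF-side handlers behind the standard per-VF controls (for example, ip link set <pf-ifname> vf 0 mac 02:00:00:00:00:01 or ... vf 0 vlan 100). They are normally wired into the netdev's ops table; the sketch below shows that hookup under the assumption that the ops structure is named bnx2x_netdev_ops, and the handler prototypes must match the struct net_device_ops of the kernel being built against.

    /* Hedged sketch: how the ndo handlers above are typically registered. */
    static const struct net_device_ops bnx2x_netdev_ops = {
            /* ... the usual .ndo_open/.ndo_stop/.ndo_start_xmit entries ... */
            .ndo_set_vf_mac         = bnx2x_set_vf_mac,
            .ndo_set_vf_vlan        = bnx2x_set_vf_vlan,
            .ndo_get_vf_config      = bnx2x_get_vf_config,
    };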