Lines matching refs:rx (lines that reference the identifier rx)
184 bfi_msgq_mhdr_set(req->mh, BFI_MC_ENET, req_type, 0, rxf->rx->rid); in bna_bfi_ucast_req()
190 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_ucast_req()
200 0, rxf->rx->rid); in bna_bfi_mcast_add_req()
206 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_mcast_add_req()
216 0, rxf->rx->rid); in bna_bfi_mcast_del_req()
222 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_mcast_del_req()
231 BFI_ENET_H2I_MAC_MCAST_FILTER_REQ, 0, rxf->rx->rid); in bna_bfi_mcast_filter_req()
237 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_mcast_filter_req()
246 BFI_ENET_H2I_RX_PROMISCUOUS_REQ, 0, rxf->rx->rid); in bna_bfi_rx_promisc_req()
252 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_rx_promisc_req()
263 BFI_ENET_H2I_RX_VLAN_SET_REQ, 0, rxf->rx->rid); in bna_bfi_rx_vlan_filter_set()
277 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_rx_vlan_filter_set()
286 BFI_ENET_H2I_RX_VLAN_STRIP_ENABLE_REQ, 0, rxf->rx->rid); in bna_bfi_vlan_strip_enable()
292 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_vlan_strip_enable()
301 BFI_ENET_H2I_RIT_CFG_REQ, 0, rxf->rx->rid); in bna_bfi_rit_cfg()
308 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_rit_cfg()
318 BFI_ENET_H2I_RSS_CFG_REQ, 0, rxf->rx->rid); in bna_bfi_rss_cfg()
328 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_rss_cfg()
337 BFI_ENET_H2I_RSS_ENABLE_REQ, 0, rxf->rx->rid); in bna_bfi_rss_enable()
343 bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); in bna_bfi_rss_enable()
384 mchandle = bna_mcam_mod_handle_get(&rxf->rx->bna->mcam_mod); in bna_rxf_mchandle_attach()
411 bna_mcam_mod_handle_put(&rxf->rx->bna->mcam_mod, mchandle); in bna_rxf_mcast_del()
429 list_move_tail(&mac->qe, bna_mcam_mod_del_q(rxf->rx->bna)); in bna_rxf_mcast_cfg_apply()
477 list_move_tail(&mac->qe, bna_mcam_mod_del_q(rxf->rx->bna)); in bna_rxf_mcast_cfg_reset()
561 struct bna_rx *rx = rxf->rx; in bna_rit_init() local
566 list_for_each_entry(rxp, &rx->rxp_q, qe) { in bna_rit_init()
609 struct bna_rx *rx, in bna_rxf_init() argument
613 rxf->rx = rx; in bna_rxf_init()
661 list_move_tail(&mac->qe, bna_ucam_mod_free_q(rxf->rx->bna)); in bna_rxf_uninit()
666 bna_ucam_mod_free_q(rxf->rx->bna)); in bna_rxf_uninit()
673 list_move_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); in bna_rxf_uninit()
678 if (rxf->rx->bna->promisc_rid == rxf->rx->rid) in bna_rxf_uninit()
679 rxf->rx->bna->promisc_rid = BFI_INVALID_RID; in bna_rxf_uninit()
680 if (rxf->rx->bna->default_mode_rid == rxf->rx->rid) in bna_rxf_uninit()
681 rxf->rx->bna->default_mode_rid = BFI_INVALID_RID; in bna_rxf_uninit()
686 rxf->rx = NULL; in bna_rxf_uninit()
690 bna_rx_cb_rxf_started(struct bna_rx *rx) in bna_rx_cb_rxf_started() argument
692 bfa_fsm_send_event(rx, RX_E_RXF_STARTED); in bna_rx_cb_rxf_started()
699 rxf->start_cbarg = rxf->rx; in bna_rxf_start()
704 bna_rx_cb_rxf_stopped(struct bna_rx *rx) in bna_rx_cb_rxf_stopped() argument
706 bfa_fsm_send_event(rx, RX_E_RXF_STOPPED); in bna_rx_cb_rxf_stopped()
713 rxf->stop_cbarg = rxf->rx; in bna_rxf_stop()
724 bna_rx_ucast_set(struct bna_rx *rx, const u8 *ucmac) in bna_rx_ucast_set() argument
726 struct bna_rxf *rxf = &rx->rxf; in bna_rx_ucast_set()
730 bna_cam_mod_mac_get(bna_ucam_mod_free_q(rxf->rx->bna)); in bna_rx_ucast_set()
738 rxf->cam_fltr_cbarg = rx->bna->bnad; in bna_rx_ucast_set()
746 bna_rx_mcast_add(struct bna_rx *rx, const u8 *addr, in bna_rx_mcast_add() argument
749 struct bna_rxf *rxf = &rx->rxf; in bna_rx_mcast_add()
756 cbfn(rx->bna->bnad, rx); in bna_rx_mcast_add()
760 mac = bna_cam_mod_mac_get(bna_mcam_mod_free_q(rxf->rx->bna)); in bna_rx_mcast_add()
767 rxf->cam_fltr_cbarg = rx->bna->bnad; in bna_rx_mcast_add()
775 bna_rx_ucast_listset(struct bna_rx *rx, int count, const u8 *uclist) in bna_rx_ucast_listset() argument
777 struct bna_ucam_mod *ucam_mod = &rx->bna->ucam_mod; in bna_rx_ucast_listset()
778 struct bna_rxf *rxf = &rx->rxf; in bna_rx_ucast_listset()
833 bna_rx_mcast_listset(struct bna_rx *rx, int count, const u8 *mclist) in bna_rx_mcast_listset() argument
835 struct bna_mcam_mod *mcam_mod = &rx->bna->mcam_mod; in bna_rx_mcast_listset()
836 struct bna_rxf *rxf = &rx->rxf; in bna_rx_mcast_listset()
893 bna_rx_mcast_delall(struct bna_rx *rx) in bna_rx_mcast_delall() argument
895 struct bna_rxf *rxf = &rx->rxf; in bna_rx_mcast_delall()
903 list_move_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); in bna_rx_mcast_delall()
911 del_mac = bna_cam_mod_mac_get(bna_mcam_mod_del_q(rxf->rx->bna)); in bna_rx_mcast_delall()
915 list_add_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); in bna_rx_mcast_delall()
924 bna_rx_vlan_add(struct bna_rx *rx, int vlan_id) in bna_rx_vlan_add() argument
926 struct bna_rxf *rxf = &rx->rxf; in bna_rx_vlan_add()
939 bna_rx_vlan_del(struct bna_rx *rx, int vlan_id) in bna_rx_vlan_del() argument
941 struct bna_rxf *rxf = &rx->rxf; in bna_rx_vlan_del()
963 list_move_tail(&mac->qe, bna_ucam_mod_del_q(rxf->rx->bna)); in bna_rxf_ucast_cfg_apply()
1001 bna_ucam_mod_del_q(rxf->rx->bna)); in bna_rxf_ucast_cfg_reset()
1006 bna_ucam_mod_del_q(rxf->rx->bna)); in bna_rxf_ucast_cfg_reset()
1039 struct bna *bna = rxf->rx->bna; in bna_rxf_promisc_cfg_apply()
1067 struct bna *bna = rxf->rx->bna; in bna_rxf_promisc_cfg_reset()
1153 struct bna *bna = rxf->rx->bna; in bna_rxf_promisc_enable()
1169 bna->promisc_rid = rxf->rx->rid; in bna_rxf_promisc_enable()
1179 struct bna *bna = rxf->rx->bna; in bna_rxf_promisc_disable()
1270 #define call_rx_stop_cbfn(rx) \ argument
1272 if ((rx)->stop_cbfn) { \
1275 cbfn = (rx)->stop_cbfn; \
1276 cbarg = (rx)->stop_cbarg; \
1277 (rx)->stop_cbfn = NULL; \
1278 (rx)->stop_cbarg = NULL; \
1279 cbfn(cbarg, rx); \
1283 #define call_rx_stall_cbfn(rx) \ argument
1285 if ((rx)->rx_stall_cbfn) \
1286 (rx)->rx_stall_cbfn((rx)->bna->bnad, (rx)); \
1301 static void bna_bfi_rx_enet_start(struct bna_rx *rx);
1302 static void bna_rx_enet_stop(struct bna_rx *rx);
1303 static void bna_rx_mod_cb_rx_stopped(void *arg, struct bna_rx *rx);
1326 static void bna_rx_sm_stopped_entry(struct bna_rx *rx) in bna_rx_sm_stopped_entry() argument
1328 call_rx_stop_cbfn(rx); in bna_rx_sm_stopped_entry()
1331 static void bna_rx_sm_stopped(struct bna_rx *rx, in bna_rx_sm_stopped() argument
1336 bfa_fsm_set_state(rx, bna_rx_sm_start_wait); in bna_rx_sm_stopped()
1340 call_rx_stop_cbfn(rx); in bna_rx_sm_stopped()
1353 static void bna_rx_sm_start_wait_entry(struct bna_rx *rx) in bna_rx_sm_start_wait_entry() argument
1355 bna_bfi_rx_enet_start(rx); in bna_rx_sm_start_wait_entry()
1359 bna_rx_sm_stop_wait_entry(struct bna_rx *rx) in bna_rx_sm_stop_wait_entry() argument
1364 bna_rx_sm_stop_wait(struct bna_rx *rx, enum bna_rx_event event) in bna_rx_sm_stop_wait() argument
1369 bfa_fsm_set_state(rx, bna_rx_sm_cleanup_wait); in bna_rx_sm_stop_wait()
1370 rx->rx_cleanup_cbfn(rx->bna->bnad, rx); in bna_rx_sm_stop_wait()
1374 bna_rx_enet_stop(rx); in bna_rx_sm_stop_wait()
1383 static void bna_rx_sm_start_wait(struct bna_rx *rx, in bna_rx_sm_start_wait() argument
1388 bfa_fsm_set_state(rx, bna_rx_sm_start_stop_wait); in bna_rx_sm_start_wait()
1392 bfa_fsm_set_state(rx, bna_rx_sm_stopped); in bna_rx_sm_start_wait()
1396 bfa_fsm_set_state(rx, bna_rx_sm_rxf_start_wait); in bna_rx_sm_start_wait()
1405 static void bna_rx_sm_rxf_start_wait_entry(struct bna_rx *rx) in bna_rx_sm_rxf_start_wait_entry() argument
1407 rx->rx_post_cbfn(rx->bna->bnad, rx); in bna_rx_sm_rxf_start_wait_entry()
1408 bna_rxf_start(&rx->rxf); in bna_rx_sm_rxf_start_wait_entry()
1412 bna_rx_sm_rxf_stop_wait_entry(struct bna_rx *rx) in bna_rx_sm_rxf_stop_wait_entry() argument
1417 bna_rx_sm_rxf_stop_wait(struct bna_rx *rx, enum bna_rx_event event) in bna_rx_sm_rxf_stop_wait() argument
1421 bfa_fsm_set_state(rx, bna_rx_sm_cleanup_wait); in bna_rx_sm_rxf_stop_wait()
1422 bna_rxf_fail(&rx->rxf); in bna_rx_sm_rxf_stop_wait()
1423 call_rx_stall_cbfn(rx); in bna_rx_sm_rxf_stop_wait()
1424 rx->rx_cleanup_cbfn(rx->bna->bnad, rx); in bna_rx_sm_rxf_stop_wait()
1428 bna_rxf_stop(&rx->rxf); in bna_rx_sm_rxf_stop_wait()
1432 bfa_fsm_set_state(rx, bna_rx_sm_stop_wait); in bna_rx_sm_rxf_stop_wait()
1433 call_rx_stall_cbfn(rx); in bna_rx_sm_rxf_stop_wait()
1434 bna_rx_enet_stop(rx); in bna_rx_sm_rxf_stop_wait()
1445 bna_rx_sm_start_stop_wait_entry(struct bna_rx *rx) in bna_rx_sm_start_stop_wait_entry() argument
1450 bna_rx_sm_start_stop_wait(struct bna_rx *rx, enum bna_rx_event event) in bna_rx_sm_start_stop_wait() argument
1455 bfa_fsm_set_state(rx, bna_rx_sm_stopped); in bna_rx_sm_start_stop_wait()
1459 bna_rx_enet_stop(rx); in bna_rx_sm_start_stop_wait()
1468 bna_rx_sm_started_entry(struct bna_rx *rx) in bna_rx_sm_started_entry() argument
1471 int is_regular = (rx->type == BNA_RX_T_REGULAR); in bna_rx_sm_started_entry()
1474 list_for_each_entry(rxp, &rx->rxp_q, qe) in bna_rx_sm_started_entry()
1475 bna_ib_start(rx->bna, &rxp->cq.ib, is_regular); in bna_rx_sm_started_entry()
1477 bna_ethport_cb_rx_started(&rx->bna->ethport); in bna_rx_sm_started_entry()
1481 bna_rx_sm_started(struct bna_rx *rx, enum bna_rx_event event) in bna_rx_sm_started() argument
1485 bfa_fsm_set_state(rx, bna_rx_sm_rxf_stop_wait); in bna_rx_sm_started()
1486 bna_ethport_cb_rx_stopped(&rx->bna->ethport); in bna_rx_sm_started()
1487 bna_rxf_stop(&rx->rxf); in bna_rx_sm_started()
1491 bfa_fsm_set_state(rx, bna_rx_sm_failed); in bna_rx_sm_started()
1492 bna_ethport_cb_rx_stopped(&rx->bna->ethport); in bna_rx_sm_started()
1493 bna_rxf_fail(&rx->rxf); in bna_rx_sm_started()
1494 call_rx_stall_cbfn(rx); in bna_rx_sm_started()
1495 rx->rx_cleanup_cbfn(rx->bna->bnad, rx); in bna_rx_sm_started()
1504 static void bna_rx_sm_rxf_start_wait(struct bna_rx *rx, in bna_rx_sm_rxf_start_wait() argument
1509 bfa_fsm_set_state(rx, bna_rx_sm_rxf_stop_wait); in bna_rx_sm_rxf_start_wait()
1513 bfa_fsm_set_state(rx, bna_rx_sm_failed); in bna_rx_sm_rxf_start_wait()
1514 bna_rxf_fail(&rx->rxf); in bna_rx_sm_rxf_start_wait()
1515 call_rx_stall_cbfn(rx); in bna_rx_sm_rxf_start_wait()
1516 rx->rx_cleanup_cbfn(rx->bna->bnad, rx); in bna_rx_sm_rxf_start_wait()
1520 bfa_fsm_set_state(rx, bna_rx_sm_started); in bna_rx_sm_rxf_start_wait()
1530 bna_rx_sm_cleanup_wait_entry(struct bna_rx *rx) in bna_rx_sm_cleanup_wait_entry() argument
1535 bna_rx_sm_cleanup_wait(struct bna_rx *rx, enum bna_rx_event event) in bna_rx_sm_cleanup_wait() argument
1544 bfa_fsm_set_state(rx, bna_rx_sm_stopped); in bna_rx_sm_cleanup_wait()
1554 bna_rx_sm_failed_entry(struct bna_rx *rx) in bna_rx_sm_failed_entry() argument
1559 bna_rx_sm_failed(struct bna_rx *rx, enum bna_rx_event event) in bna_rx_sm_failed() argument
1563 bfa_fsm_set_state(rx, bna_rx_sm_quiesce_wait); in bna_rx_sm_failed()
1567 bfa_fsm_set_state(rx, bna_rx_sm_cleanup_wait); in bna_rx_sm_failed()
1577 bfa_fsm_set_state(rx, bna_rx_sm_stopped); in bna_rx_sm_failed()
1586 bna_rx_sm_quiesce_wait_entry(struct bna_rx *rx) in bna_rx_sm_quiesce_wait_entry() argument
1591 bna_rx_sm_quiesce_wait(struct bna_rx *rx, enum bna_rx_event event) in bna_rx_sm_quiesce_wait() argument
1595 bfa_fsm_set_state(rx, bna_rx_sm_cleanup_wait); in bna_rx_sm_quiesce_wait()
1599 bfa_fsm_set_state(rx, bna_rx_sm_failed); in bna_rx_sm_quiesce_wait()
1603 bfa_fsm_set_state(rx, bna_rx_sm_start_wait); in bna_rx_sm_quiesce_wait()
1613 bna_bfi_rx_enet_start(struct bna_rx *rx) in bna_bfi_rx_enet_start() argument
1615 struct bfi_enet_rx_cfg_req *cfg_req = &rx->bfi_enet_cmd.cfg_req; in bna_bfi_rx_enet_start()
1621 BFI_ENET_H2I_RX_CFG_SET_REQ, 0, rx->rid); in bna_bfi_rx_enet_start()
1625 cfg_req->rx_cfg.frame_size = bna_enet_mtu_get(&rx->bna->enet); in bna_bfi_rx_enet_start()
1626 cfg_req->num_queue_sets = rx->num_paths; in bna_bfi_rx_enet_start()
1627 for (i = 0; i < rx->num_paths; i++) { in bna_bfi_rx_enet_start()
1629 : list_first_entry(&rx->rxp_q, struct bna_rxp, qe); in bna_bfi_rx_enet_start()
1655 bna_enet_mtu_get(&rx->bna->enet); in bna_bfi_rx_enet_start()
1695 cfg_req->rx_cfg.hds.type = rx->hds_cfg.hdr_type; in bna_bfi_rx_enet_start()
1696 cfg_req->rx_cfg.hds.force_offset = rx->hds_cfg.forced_offset; in bna_bfi_rx_enet_start()
1697 cfg_req->rx_cfg.hds.max_header_size = rx->hds_cfg.forced_offset; in bna_bfi_rx_enet_start()
1707 cfg_req->rx_cfg.strip_vlan = rx->rxf.vlan_strip_status; in bna_bfi_rx_enet_start()
1709 bfa_msgq_cmd_set(&rx->msgq_cmd, NULL, NULL, in bna_bfi_rx_enet_start()
1711 bfa_msgq_cmd_post(&rx->bna->msgq, &rx->msgq_cmd); in bna_bfi_rx_enet_start()
1715 bna_bfi_rx_enet_stop(struct bna_rx *rx) in bna_bfi_rx_enet_stop() argument
1717 struct bfi_enet_req *req = &rx->bfi_enet_cmd.req; in bna_bfi_rx_enet_stop()
1720 BFI_ENET_H2I_RX_CFG_CLR_REQ, 0, rx->rid); in bna_bfi_rx_enet_stop()
1723 bfa_msgq_cmd_set(&rx->msgq_cmd, NULL, NULL, sizeof(struct bfi_enet_req), in bna_bfi_rx_enet_stop()
1725 bfa_msgq_cmd_post(&rx->bna->msgq, &rx->msgq_cmd); in bna_bfi_rx_enet_stop()
1729 bna_rx_enet_stop(struct bna_rx *rx) in bna_rx_enet_stop() argument
1734 list_for_each_entry(rxp, &rx->rxp_q, qe) in bna_rx_enet_stop()
1735 bna_ib_stop(rx->bna, &rxp->cq.ib); in bna_rx_enet_stop()
1737 bna_bfi_rx_enet_stop(rx); in bna_rx_enet_stop()
1802 struct bna_rx *rx = NULL; in bna_rx_get() local
1806 rx = list_first_entry(&rx_mod->rx_free_q, struct bna_rx, qe); in bna_rx_get()
1808 rx = list_last_entry(&rx_mod->rx_free_q, struct bna_rx, qe); in bna_rx_get()
1811 list_move_tail(&rx->qe, &rx_mod->rx_active_q); in bna_rx_get()
1812 rx->type = type; in bna_rx_get()
1814 return rx; in bna_rx_get()
1818 bna_rx_put(struct bna_rx_mod *rx_mod, struct bna_rx *rx) in bna_rx_put() argument
1823 if (((struct bna_rx *)qe)->rid < rx->rid) in bna_rx_put()
1826 list_add(&rx->qe, qe); in bna_rx_put()
1930 bna_rx_mod_cb_rx_stopped(void *arg, struct bna_rx *rx) in bna_rx_mod_cb_rx_stopped() argument
1948 bna_rx_start(struct bna_rx *rx) in bna_rx_start() argument
1950 rx->rx_flags |= BNA_RX_F_ENET_STARTED; in bna_rx_start()
1951 if (rx->rx_flags & BNA_RX_F_ENABLED) in bna_rx_start()
1952 bfa_fsm_send_event(rx, RX_E_START); in bna_rx_start()
1956 bna_rx_stop(struct bna_rx *rx) in bna_rx_stop() argument
1958 rx->rx_flags &= ~BNA_RX_F_ENET_STARTED; in bna_rx_stop()
1959 if (rx->fsm == (bfa_fsm_t) bna_rx_sm_stopped) in bna_rx_stop()
1960 bna_rx_mod_cb_rx_stopped(&rx->bna->rx_mod, rx); in bna_rx_stop()
1962 rx->stop_cbfn = bna_rx_mod_cb_rx_stopped; in bna_rx_stop()
1963 rx->stop_cbarg = &rx->bna->rx_mod; in bna_rx_stop()
1964 bfa_fsm_send_event(rx, RX_E_STOP); in bna_rx_stop()
1969 bna_rx_fail(struct bna_rx *rx) in bna_rx_fail() argument
1972 rx->rx_flags &= ~BNA_RX_F_ENET_STARTED; in bna_rx_fail()
1973 bfa_fsm_send_event(rx, RX_E_FAIL); in bna_rx_fail()
1979 struct bna_rx *rx; in bna_rx_mod_start() local
1985 list_for_each_entry(rx, &rx_mod->rx_active_q, qe) in bna_rx_mod_start()
1986 if (rx->type == type) in bna_rx_mod_start()
1987 bna_rx_start(rx); in bna_rx_mod_start()
1993 struct bna_rx *rx; in bna_rx_mod_stop() local
2002 list_for_each_entry(rx, &rx_mod->rx_active_q, qe) in bna_rx_mod_stop()
2003 if (rx->type == type) { in bna_rx_mod_stop()
2005 bna_rx_stop(rx); in bna_rx_mod_stop()
2014 struct bna_rx *rx; in bna_rx_mod_fail() local
2019 list_for_each_entry(rx, &rx_mod->rx_active_q, qe) in bna_rx_mod_fail()
2020 bna_rx_fail(rx); in bna_rx_mod_fail()
2034 rx_mod->rx = (struct bna_rx *) in bna_rx_mod_init()
2052 rx_ptr = &rx_mod->rx[index]; in bna_rx_mod_init()
2086 bna_bfi_rx_enet_start_rsp(struct bna_rx *rx, struct bfi_msgq_mhdr *msghdr) in bna_bfi_rx_enet_start_rsp() argument
2088 struct bfi_enet_rx_cfg_rsp *cfg_rsp = &rx->bfi_enet_cmd.cfg_rsp; in bna_bfi_rx_enet_start_rsp()
2093 bfa_msgq_rsp_copy(&rx->bna->msgq, (u8 *)cfg_rsp, in bna_bfi_rx_enet_start_rsp()
2096 rx->hw_id = cfg_rsp->hw_id; in bna_bfi_rx_enet_start_rsp()
2098 for (i = 0, rxp = list_first_entry(&rx->rxp_q, struct bna_rxp, qe); in bna_bfi_rx_enet_start_rsp()
2099 i < rx->num_paths; i++, rxp = list_next_entry(rxp, qe)) { in bna_bfi_rx_enet_start_rsp()
2104 rx->bna->pcidev.pci_bar_kva in bna_bfi_rx_enet_start_rsp()
2108 rx->bna->pcidev.pci_bar_kva in bna_bfi_rx_enet_start_rsp()
2113 rx->bna->pcidev.pci_bar_kva in bna_bfi_rx_enet_start_rsp()
2126 bfa_fsm_send_event(rx, RX_E_STARTED); in bna_bfi_rx_enet_start_rsp()
2130 bna_bfi_rx_enet_stop_rsp(struct bna_rx *rx, struct bfi_msgq_mhdr *msghdr) in bna_bfi_rx_enet_stop_rsp() argument
2132 bfa_fsm_send_event(rx, RX_E_STOPPED); in bna_bfi_rx_enet_stop_rsp()
2257 struct bna_rx *rx; in bna_rx_create() local
2307 rx = bna_rx_get(rx_mod, rx_cfg->rx_type); in bna_rx_create()
2308 rx->bna = bna; in bna_rx_create()
2309 rx->rx_flags = 0; in bna_rx_create()
2310 INIT_LIST_HEAD(&rx->rxp_q); in bna_rx_create()
2311 rx->stop_cbfn = NULL; in bna_rx_create()
2312 rx->stop_cbarg = NULL; in bna_rx_create()
2313 rx->priv = priv; in bna_rx_create()
2315 rx->rcb_setup_cbfn = rx_cbfn->rcb_setup_cbfn; in bna_rx_create()
2316 rx->rcb_destroy_cbfn = rx_cbfn->rcb_destroy_cbfn; in bna_rx_create()
2317 rx->ccb_setup_cbfn = rx_cbfn->ccb_setup_cbfn; in bna_rx_create()
2318 rx->ccb_destroy_cbfn = rx_cbfn->ccb_destroy_cbfn; in bna_rx_create()
2319 rx->rx_stall_cbfn = rx_cbfn->rx_stall_cbfn; in bna_rx_create()
2321 rx->rx_cleanup_cbfn = rx_cbfn->rx_cleanup_cbfn; in bna_rx_create()
2322 rx->rx_post_cbfn = rx_cbfn->rx_post_cbfn; in bna_rx_create()
2324 if (rx->bna->rx_mod.flags & BNA_RX_MOD_F_ENET_STARTED) { in bna_rx_create()
2325 switch (rx->type) { in bna_rx_create()
2327 if (!(rx->bna->rx_mod.flags & in bna_rx_create()
2329 rx->rx_flags |= BNA_RX_F_ENET_STARTED; in bna_rx_create()
2332 if (rx->bna->rx_mod.flags & BNA_RX_MOD_F_ENET_LOOPBACK) in bna_rx_create()
2333 rx->rx_flags |= BNA_RX_F_ENET_STARTED; in bna_rx_create()
2338 rx->num_paths = rx_cfg->num_paths; in bna_rx_create()
2340 i < rx->num_paths; i++) { in bna_rx_create()
2342 list_add_tail(&rxp->qe, &rx->rxp_q); in bna_rx_create()
2344 rxp->rx = rx; in bna_rx_create()
2345 rxp->cq.rx = rx; in bna_rx_create()
2379 q0->rx = rx; in bna_rx_create()
2400 if (rx->rcb_setup_cbfn) in bna_rx_create()
2401 rx->rcb_setup_cbfn(bnad, q0->rcb); in bna_rx_create()
2406 q1->rx = rx; in bna_rx_create()
2430 if (rx->rcb_setup_cbfn) in bna_rx_create()
2431 rx->rcb_setup_cbfn(bnad, q1->rcb); in bna_rx_create()
2467 if (rx->ccb_setup_cbfn) in bna_rx_create()
2468 rx->ccb_setup_cbfn(bnad, rxp->cq.ccb); in bna_rx_create()
2471 rx->hds_cfg = rx_cfg->hds_config; in bna_rx_create()
2473 bna_rxf_init(&rx->rxf, rx, rx_cfg, res_info); in bna_rx_create()
2475 bfa_fsm_set_state(rx, bna_rx_sm_stopped); in bna_rx_create()
2477 rx_mod->rid_mask |= BIT(rx->rid); in bna_rx_create()
2479 return rx; in bna_rx_create()
2483 bna_rx_destroy(struct bna_rx *rx) in bna_rx_destroy() argument
2485 struct bna_rx_mod *rx_mod = &rx->bna->rx_mod; in bna_rx_destroy()
2491 bna_rxf_uninit(&rx->rxf); in bna_rx_destroy()
2493 while (!list_empty(&rx->rxp_q)) { in bna_rx_destroy()
2494 rxp = list_first_entry(&rx->rxp_q, struct bna_rxp, qe); in bna_rx_destroy()
2497 if (rx->rcb_destroy_cbfn) in bna_rx_destroy()
2498 rx->rcb_destroy_cbfn(rx->bna->bnad, q0->rcb); in bna_rx_destroy()
2501 q0->rx = NULL; in bna_rx_destroy()
2505 if (rx->rcb_destroy_cbfn) in bna_rx_destroy()
2506 rx->rcb_destroy_cbfn(rx->bna->bnad, q1->rcb); in bna_rx_destroy()
2509 q1->rx = NULL; in bna_rx_destroy()
2515 if (rx->ccb_destroy_cbfn) in bna_rx_destroy()
2516 rx->ccb_destroy_cbfn(rx->bna->bnad, rxp->cq.ccb); in bna_rx_destroy()
2518 rxp->rx = NULL; in bna_rx_destroy()
2523 if (qe == &rx->qe) { in bna_rx_destroy()
2524 list_del(&rx->qe); in bna_rx_destroy()
2528 rx_mod->rid_mask &= ~BIT(rx->rid); in bna_rx_destroy()
2530 rx->bna = NULL; in bna_rx_destroy()
2531 rx->priv = NULL; in bna_rx_destroy()
2532 bna_rx_put(rx_mod, rx); in bna_rx_destroy()
2536 bna_rx_enable(struct bna_rx *rx) in bna_rx_enable() argument
2538 if (rx->fsm != (bfa_sm_t)bna_rx_sm_stopped) in bna_rx_enable()
2541 rx->rx_flags |= BNA_RX_F_ENABLED; in bna_rx_enable()
2542 if (rx->rx_flags & BNA_RX_F_ENET_STARTED) in bna_rx_enable()
2543 bfa_fsm_send_event(rx, RX_E_START); in bna_rx_enable()
2547 bna_rx_disable(struct bna_rx *rx, enum bna_cleanup_type type, in bna_rx_disable() argument
2552 (*cbfn)(rx->bna->bnad, rx); in bna_rx_disable()
2554 rx->stop_cbfn = cbfn; in bna_rx_disable()
2555 rx->stop_cbarg = rx->bna->bnad; in bna_rx_disable()
2557 rx->rx_flags &= ~BNA_RX_F_ENABLED; in bna_rx_disable()
2559 bfa_fsm_send_event(rx, RX_E_STOP); in bna_rx_disable()
2564 bna_rx_cleanup_complete(struct bna_rx *rx) in bna_rx_cleanup_complete() argument
2566 bfa_fsm_send_event(rx, RX_E_CLEANUP_DONE); in bna_rx_cleanup_complete()
2570 bna_rx_vlan_strip_enable(struct bna_rx *rx) in bna_rx_vlan_strip_enable() argument
2572 struct bna_rxf *rxf = &rx->rxf; in bna_rx_vlan_strip_enable()
2582 bna_rx_vlan_strip_disable(struct bna_rx *rx) in bna_rx_vlan_strip_disable() argument
2584 struct bna_rxf *rxf = &rx->rxf; in bna_rx_vlan_strip_disable()
2594 bna_rx_mode_set(struct bna_rx *rx, enum bna_rxmode new_mode, in bna_rx_mode_set() argument
2597 struct bna_rxf *rxf = &rx->rxf; in bna_rx_mode_set()
2604 if ((rx->bna->promisc_rid != BFI_INVALID_RID) && in bna_rx_mode_set()
2605 (rx->bna->promisc_rid != rxf->rx->rid)) in bna_rx_mode_set()
2609 if (rx->bna->default_mode_rid != BFI_INVALID_RID) in bna_rx_mode_set()
2619 if ((rx->bna->default_mode_rid != BFI_INVALID_RID) && in bna_rx_mode_set()
2620 (rx->bna->default_mode_rid != rxf->rx->rid)) { in bna_rx_mode_set()
2625 if (rx->bna->promisc_rid != BFI_INVALID_RID) in bna_rx_mode_set()
2651 rxf->cam_fltr_cbarg = rx->bna->bnad; in bna_rx_mode_set()
2662 bna_rx_vlanfilter_enable(struct bna_rx *rx) in bna_rx_vlanfilter_enable() argument
2664 struct bna_rxf *rxf = &rx->rxf; in bna_rx_vlanfilter_enable()
2674 bna_rx_coalescing_timeo_set(struct bna_rx *rx, int coalescing_timeo) in bna_rx_coalescing_timeo_set() argument
2678 list_for_each_entry(rxp, &rx->rxp_q, qe) { in bna_rx_coalescing_timeo_set()
2697 struct bna *bna = ccb->cq->rx->bna; in bna_rx_dim_update()
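
The bna_bfi_*_req() matches above (e.g. 184/190, 200/206, 246/252) all follow the same three-step host-to-IOC mailbox pattern: stamp the BFI message header with the Rx function's resource id via bfi_msgq_mhdr_set(), wrap the request in a message-queue command with bfa_msgq_cmd_set(), and post it with bfa_msgq_cmd_post(). The sketch below reassembles that pattern around the bna_bfi_ucast_req() fragments; it is a hedged reconstruction, not a quote of the source: the request struct (bfi_enet_ucast_req), the bfi_enet_cmd union member and struct bna_mac do not appear in the matched lines and are assumed from the driver's headers, and the header-size bookkeeping is omitted.

	/* Hedged sketch of the recurring H2I request pattern; names outside
	 * the matched lines (struct bfi_enet_ucast_req, rxf->bfi_enet_cmd,
	 * struct bna_mac) are assumptions based on the driver's headers. */
	static void
	bna_bfi_ucast_req(struct bna_rxf *rxf, struct bna_mac *mac,
			  enum bfi_enet_h2i_msgs req_type)
	{
		struct bfi_enet_ucast_req *req = &rxf->bfi_enet_cmd.ucast_req;

		/* 1. Stamp the BFI header: ENET message class, request type,
		 *    and this Rx function's resource id (rxf->rx->rid). */
		bfi_msgq_mhdr_set(req->mh, BFI_MC_ENET, req_type, 0, rxf->rx->rid);

		/* 2. Fill the payload, here the unicast MAC being added,
		 *    deleted or set. */
		memcpy(req->mac_addr, mac->addr, ETH_ALEN);

		/* 3. Wrap the request in a msgq command and post it to the
		 *    IOC message queue; completion arrives through the
		 *    corresponding *_rsp handlers listed above. */
		bfa_msgq_cmd_set(&rxf->msgq_cmd, NULL, NULL,
				 sizeof(struct bfi_enet_ucast_req), &req->mh);
		bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd);
	}

The multicast, promiscuous, VLAN and RSS request helpers in the listing differ only in the request struct they fill and the BFI_ENET_H2I_* opcode passed in place of req_type.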