Home
last modified time | relevance | path

Searched refs:nif (Results 1 – 16 of 16) sorted by relevance

/xnu-8796.101.5/bsd/skywalk/nexus/netif/
H A D  nx_netif_filter.c  83 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_filter_inject() local
84 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_filter_inject()
90 lck_mtx_lock(&nif->nif_filter_lock); in nx_netif_filter_inject()
91 if ((nif->nif_filter_flags & NETIF_FILTER_FLAG_ENABLED) == 0) { in nx_netif_filter_inject()
110 f = STAILQ_FIRST(&nif->nif_filter_list); in nx_netif_filter_inject()
129 lck_mtx_unlock(&nif->nif_filter_lock); in nx_netif_filter_inject()
166 lck_mtx_lock(&nif->nif_filter_lock); in nx_netif_filter_inject()
172 lck_mtx_unlock(&nif->nif_filter_lock); in nx_netif_filter_inject()
177 nx_netif_filter_add(struct nx_netif *nif, nexus_port_t port, void *cb_arg, in nx_netif_filter_add() argument
182 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_filter_add()
[all …]
H A D  nx_netif_flow.c  352 nx_netif_flow_deliver(struct nx_netif *nif, struct netif_flow *f, in nx_netif_flow_deliver() argument
355 #pragma unused(nif) in nx_netif_flow_deliver()
360 nx_netif_snoop(struct nx_netif *nif, struct __kern_packet *pkt, in nx_netif_snoop() argument
364 if (!NETIF_IS_LOW_LATENCY(nif)) { in nx_netif_snoop()
368 pktap_input_packet(nif->nif_ifp, AF_INET6, DLT_EN10MB, in nx_netif_snoop()
372 pktap_output_packet(nif->nif_ifp, AF_INET6, DLT_EN10MB, in nx_netif_snoop()
384 nx_netif_validate_macaddr(struct nx_netif *nif, struct __kern_packet *pkt, in nx_netif_validate_macaddr() argument
387 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_validate_macaddr()
388 struct ifnet *ifp = nif->nif_ifp; in nx_netif_validate_macaddr()
401 DTRACE_SKYWALK2(bad__pkt__sz, struct nx_netif *, nif, in nx_netif_validate_macaddr()
[all …]
H A D  nx_netif_netagent.c  46 get_mac_addr(struct nx_netif *nif, struct ether_addr *addr) in get_mac_addr() argument
48 struct ifnet *ifp = nif->nif_ifp; in get_mac_addr()
69 get_ipv6_ula(struct nx_netif *nif, struct in6_addr *addr) in get_ipv6_ula() argument
79 err = get_mac_addr(nif, &ether_addr); in get_ipv6_ula()
139 get_ipv6_sockaddr(struct nx_netif *nif, struct sockaddr_in6 *sin6) in get_ipv6_sockaddr() argument
145 err = get_ipv6_ula(nif, &sin6->sin6_addr); in get_ipv6_sockaddr()
243 nx_netif_netagent_fill_port_info(struct nx_netif *nif, struct nx_flow_req *nfr, in nx_netif_netagent_fill_port_info() argument
246 #pragma unused(nif) in nx_netif_netagent_fill_port_info()
249 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_netagent_fill_port_info()
299 nx_netif_netagent_flow_bind(struct nx_netif *nif, struct nx_flow_req *nfr) in nx_netif_netagent_flow_bind() argument
[all …]
H A D  nx_netif_llink.c  306 nx_netif_generate_internal_llink_id(struct nx_netif *nif) in nx_netif_generate_internal_llink_id() argument
309 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_generate_internal_llink_id()
314 STAILQ_FOREACH(llink, &nif->nif_llink_list, nll_link) { in nx_netif_generate_internal_llink_id()
329 nx_netif_llink_initialize(struct netif_llink *llink, struct nx_netif *nif, in nx_netif_llink_initialize() argument
333 struct ifnet *ifp = nif->nif_ifp; in nx_netif_llink_initialize()
335 LCK_RW_ASSERT(&nif->nif_llink_lock, LCK_RW_ASSERT_EXCLUSIVE); in nx_netif_llink_initialize()
337 llink->nll_nif = nif; in nx_netif_llink_initialize()
342 llink->nll_link_id_internal = nx_netif_generate_internal_llink_id(nif); in nx_netif_llink_initialize()
414 nx_netif_llink_create_locked(struct nx_netif *nif, in nx_netif_llink_create_locked() argument
418 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_llink_create_locked()
[all …]
H A D  nx_netif_vp.c  139 struct nx_netif *nif = nifna->nifna_netif; in netif_deliver_pkt() local
140 struct netif_stats *nifs = &nif->nif_stats; in netif_deliver_pkt()
173 if (NETIF_IS_LOW_LATENCY(nif)) { in netif_deliver_pkt()
320 netif_hwna_setup(struct nx_netif *nif) in netif_hwna_setup() argument
323 struct kern_nexus *nx = nif->nif_nx; in netif_hwna_setup()
328 ASSERT(NETIF_IS_LOW_LATENCY(nif)); in netif_hwna_setup()
329 if (nif->nif_hw_ch != NULL) { in netif_hwna_setup()
330 nif->nif_hw_ch_refcnt++; in netif_hwna_setup()
332 if_name(nif->nif_ifp), nif->nif_hw_ch_refcnt); in netif_hwna_setup()
335 ASSERT(nif->nif_hw_ch_refcnt == 0); in netif_hwna_setup()
[all …]
H A D  nx_netif.c  635 struct nx_netif *nif; in nx_netif_get_llink_info() local
641 nif = NX_NETIF_PRIVATE(nx); in nx_netif_get_llink_info()
642 if (!NETIF_LLINK_ENABLED(nif)) { in nx_netif_get_llink_info()
646 lck_rw_lock_shared(&nif->nif_llink_lock); in nx_netif_get_llink_info()
647 llink_cnt = nif->nif_llink_cnt; in nx_netif_get_llink_info()
680 STAILQ_FOREACH(llink, &nif->nif_llink_list, nll_link) { in nx_netif_get_llink_info()
717 lck_rw_unlock_shared(&nif->nif_llink_lock); in nx_netif_get_llink_info()
909 struct nx_netif *nif = NX_NETIF_PRIVATE(nx); in __netif_mib_get_stats() local
910 struct ifnet *ifp = nif->nif_ifp; in __netif_mib_get_stats()
919 sns->sns_nifs = nif->nif_stats; in __netif_mib_get_stats()
[all …]
H A D  nx_netif_filter_compat.c  37 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_filter_tx_mbuf_enqueue() local
38 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_filter_tx_mbuf_enqueue()
40 if (nif->nif_filter_cnt == 0) { in nx_netif_filter_tx_mbuf_enqueue()
45 DTRACE_SKYWALK2(mbuf__default__drop, struct nx_netif *, nif, in nx_netif_filter_tx_mbuf_enqueue()
81 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_filter_tx_processed_mbuf_dequeue() local
104 m = get_next_mbuf(nif->nif_tx_processed_mbq, &curr, end); in nx_netif_filter_tx_processed_mbuf_dequeue()
122 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_filter_tx_processed_mbuf_enqueue() local
123 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_filter_tx_processed_mbuf_enqueue()
137 q = &nif->nif_tx_processed_mbq[tc]; in nx_netif_filter_tx_processed_mbuf_enqueue()
162 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_tx_processed_mbuf_get_len() local
[all …]
H A D  nx_netif_filter_native.c  37 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_filter_tx_pkt_enqueue() local
38 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_filter_tx_pkt_enqueue()
40 if (nif->nif_filter_cnt == 0) { in nx_netif_filter_tx_pkt_enqueue()
44 DTRACE_SKYWALK2(pkt__default__drop, struct nx_netif *, nif, in nx_netif_filter_tx_pkt_enqueue()
80 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_filter_tx_processed_pkt_dequeue() local
103 p = get_next_pkt(nif->nif_tx_processed_pktq, &curr, end); in nx_netif_filter_tx_processed_pkt_dequeue()
122 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_filter_tx_processed_pkt_enqueue() local
123 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_filter_tx_processed_pkt_enqueue()
137 q = &nif->nif_tx_processed_pktq[tc]; in nx_netif_filter_tx_processed_pkt_enqueue()
164 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_tx_processed_pkt_get_len() local
[all …]
H A D  nx_netif_host.c  49 struct nx_netif *nif = ((struct nexus_netif_adapter *)na)->nifna_netif; in nx_netif_host_adjust_if_capabilities() local
56 nif->nif_hwassist = ifp->if_hwassist; in nx_netif_host_adjust_if_capabilities()
57 nif->nif_capabilities = ifp->if_capabilities; in nx_netif_host_adjust_if_capabilities()
58 nif->nif_capenable = ifp->if_capenable; in nx_netif_host_adjust_if_capabilities()
89 (nif->nif_hwassist & in nx_netif_host_adjust_if_capabilities()
92 (nif->nif_capabilities & in nx_netif_host_adjust_if_capabilities()
95 (nif->nif_capenable & in nx_netif_host_adjust_if_capabilities()
111 ifp->if_hwassist |= (nif->nif_hwassist & in nx_netif_host_adjust_if_capabilities()
114 (nif->nif_capabilities & (SK_IFCAP_CSUM | IFCAP_TSO)); in nx_netif_host_adjust_if_capabilities()
116 (nif->nif_capenable & (SK_IFCAP_CSUM | IFCAP_TSO)); in nx_netif_host_adjust_if_capabilities()
[all …]
H A D  nx_netif_filter_vp.c  311 struct nx_netif *nif = nx->nx_arg; in netif_filter_na_mem_new() local
315 NETIF_WLOCK_ASSERT_HELD(nif); in netif_filter_na_mem_new()
316 ASSERT(nif->nif_ifp != NULL); in netif_filter_na_mem_new()
327 if (nif->nif_filter_pp == NULL) { in netif_filter_na_mem_new()
333 if_name(nif->nif_ifp)); in netif_filter_na_mem_new()
343 nif->nif_filter_pp = pp; in netif_filter_na_mem_new()
346 &nif->nif_filter_pp, NULL, 0, NULL, &err); in netif_filter_na_mem_new()
485 struct nx_netif *nif = NX_NETIF_PRIVATE(nx); in netif_filter_na_dtor() local
488 NETIF_WLOCK(nif); in netif_filter_na_dtor()
495 nif->nif_filter_vp_cnt--; in netif_filter_na_dtor()
[all …]
H A D  nx_netif_util.c  156 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_mbuf_to_filter_pkt() local
157 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_mbuf_to_filter_pkt()
158 struct kern_pbufpool *pp = nif->nif_filter_pp; in nx_netif_mbuf_to_filter_pkt()
159 ifnet_t ifp = nif->nif_ifp; in nx_netif_mbuf_to_filter_pkt()
223 nif->nif_pkt_copy_from_mbuf(type, fph, off, m, 0, in nx_netif_mbuf_to_filter_pkt()
334 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_pkt_to_filter_pkt() local
335 struct netif_stats *nifs = &nif->nif_stats; in nx_netif_pkt_to_filter_pkt()
336 struct kern_pbufpool *pp = nif->nif_filter_pp; in nx_netif_pkt_to_filter_pkt()
337 ifnet_t ifp = nif->nif_ifp; in nx_netif_pkt_to_filter_pkt()
419 nif->nif_pkt_copy_from_mbuf(type, fph, off, m, 0, in nx_netif_pkt_to_filter_pkt()
[all …]
H A D  nx_netif_compat.c  395 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_compat_na_activate() local
503 nx_netif_mit_init(nif, na->na_ifp, in nx_netif_compat_na_activate()
516 nx_netif_mit_init(nif, na->na_ifp, in nx_netif_compat_na_activate()
1275 struct nx_netif *nif = nifna->nifna_netif; in nx_netif_compat_na_rxsync() local
1453 nif->nif_pkt_copy_from_mbuf(NR_RX, ph, 0, m, 0, in nx_netif_compat_na_rxsync()
1558 struct nx_netif *nif = NX_NETIF_PRIVATE(nx); in nx_netif_compat_attach() local
1600 devnca->nca_up.nifna_netif = nif; in nx_netif_compat_attach()
1601 nx_netif_retain(nif); in nx_netif_compat_attach()
1669 nif->nif_pkt_copy_from_mbuf = in nx_netif_compat_attach()
1671 nif->nif_pkt_copy_to_mbuf = in nx_netif_compat_attach()
[all …]
H A D  nx_netif_gso.c  150 struct nx_netif *nif = NA(ifp)->nifna_netif; in netif_gso_check_netif_active() local
151 struct netif_stats *nifs = &nif->nif_stats; in netif_gso_check_netif_active()
152 struct kern_nexus *nx = nif->nif_nx; in netif_gso_check_netif_active()
189 struct nx_netif *nif = NA(ifp)->nifna_netif; in netif_gso_send() local
190 struct netif_stats *nifs = &nif->nif_stats; in netif_gso_send()
196 if (NX_LLINK_PROV(nif->nif_nx) && in netif_gso_send()
200 qset = nx_netif_find_qset(nif, qset_id); in netif_gso_send()
H A D  nx_netif_mit.c  154 nx_netif_mit_init(struct nx_netif *nif, const struct ifnet *ifp, in nx_netif_mit_init() argument
158 #pragma unused(nif) in nx_netif_mit_init()
283 skoid_create(&mit->mit_skoid, SKOID_DNODE(nif->nif_skoid), oid_name, 0); in nx_netif_mit_init()
/xnu-8796.101.5/bsd/skywalk/nexus/flowswitch/flow/
H A D  flow_entry.c  370 struct nx_netif *nif; in flow_qset_select_dynamic() local
401 nif = NX_NETIF_PRIVATE(fsw->fsw_dev_ch->ch_na->na_nx); in flow_qset_select_dynamic()
403 fe->fe_qset = nx_netif_find_qset(nif, qset_id); in flow_qset_select_dynamic()
416 struct nx_netif *nif; in flow_entry_alloc() local
510 nif = NX_NETIF_PRIVATE(dev_na->na_nx); in flow_entry_alloc()
511 if (NX_LLINK_PROV(nif->nif_nx) && in flow_entry_alloc()
517 fe->fe_qset = nx_netif_find_qset(nif, req->nfr_qset_id); in flow_entry_alloc()
/xnu-8796.101.5/bsd/skywalk/nexus/
H A D  nexus_traffic_rule.c  1031 struct nx_netif *nif; in inet_traffic_rule_notify() local
1042 nif = NA(ifp)->nifna_netif; in inet_traffic_rule_notify()
1043 if (!NX_LLINK_PROV(nif->nif_nx)) { in inet_traffic_rule_notify()
1049 qset = nx_netif_find_qset(nif, ntri->ntri_ra.ras_qset_id); in inet_traffic_rule_notify()
1050 err = nx_netif_notify_steering_info(nif, qset, in inet_traffic_rule_notify()