Lines Matching full:vs

68 static inline bool vxlan_collect_metadata(struct vxlan_sock *vs)  in vxlan_collect_metadata()  argument
70 return vs->flags & VXLAN_F_COLLECT_METADATA || in vxlan_collect_metadata()
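The hit at line 70 is cut off at the trailing ||. For context, a reconstruction of the whole helper; the second operand (the global ip_tunnel_collect_metadata() static-key check) is taken from mainline and is an assumption here:

/* Reconstruction of lines 68-70: a socket collects tunnel metadata either
 * because it was created with VXLAN_F_COLLECT_METADATA or because the
 * global ip_tunnel_collect_metadata() static key is enabled (assumed from
 * mainline; e.g. tc/lwtunnel tunnel-key users flip it).
 */
static inline bool vxlan_collect_metadata(struct vxlan_sock *vs)
{
	return vs->flags & VXLAN_F_COLLECT_METADATA ||
	       ip_tunnel_collect_metadata();
}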
81 struct vxlan_sock *vs; in vxlan_find_sock() local
85 hlist_for_each_entry_rcu(vs, vs_head(net, port), hlist) { in vxlan_find_sock()
86 if (inet_sk(vs->sock->sk)->inet_sport == port && in vxlan_find_sock()
87 vxlan_get_sk_family(vs) == family && in vxlan_find_sock()
88 vs->flags == flags && in vxlan_find_sock()
89 vs->sock->sk->sk_bound_dev_if == ifindex) in vxlan_find_sock()
90 return vs; in vxlan_find_sock()
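The hits at lines 81-90 show the socket lookup. A reconstruction of the lookup pattern, assuming, as in mainline, that only the receive-side flags are compared and that vs_head() buckets sockets by UDP port inside the per-namespace vxlan_net:

/* Sketch of the lookup at lines 81-90: find the shared vxlan_sock bound to
 * a given UDP port, address family, receive flags and device binding.
 * Callers hold rcu_read_lock().
 */
static struct vxlan_sock *vxlan_find_sock(struct net *net, sa_family_t family,
					  __be16 port, u32 flags, int ifindex)
{
	struct vxlan_sock *vs;

	flags &= VXLAN_F_RCV_FLAGS;	/* assumed: only RX flags distinguish sockets */

	hlist_for_each_entry_rcu(vs, vs_head(net, port), hlist) {
		if (inet_sk(vs->sock->sk)->inet_sport == port &&
		    vxlan_get_sk_family(vs) == family &&
		    vs->flags == flags &&
		    vs->sock->sk->sk_bound_dev_if == ifindex)
			return vs;
	}
	return NULL;
}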
95 static struct vxlan_dev *vxlan_vs_find_vni(struct vxlan_sock *vs, in vxlan_vs_find_vni() argument
103 if (vs->flags & VXLAN_F_COLLECT_METADATA && in vxlan_vs_find_vni()
104 !(vs->flags & VXLAN_F_VNIFILTER)) in vxlan_vs_find_vni()
107 hlist_for_each_entry_rcu(node, vni_head(vs, vni), hlist) { in vxlan_vs_find_vni()
140 struct vxlan_sock *vs; in vxlan_find_vni() local
142 vs = vxlan_find_sock(net, family, port, flags, ifindex); in vxlan_find_vni()
143 if (!vs) in vxlan_find_vni()
146 return vxlan_vs_find_vni(vs, ifindex, vni, NULL); in vxlan_find_vni()
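Lines 95-146 show the two-step RX lookup: vxlan_find_sock() picks the shared socket, then vxlan_vs_find_vni() picks the device hashed under the VNI on that socket. A simplified sketch of the VNI match, keeping the rule visible at lines 103-104 (flow-based sockets without VNI filtering map every packet to the VNI 0 device) while omitting mainline's VNI-filter and IPv6 link-local handling, which is why the ifindex argument is dropped; the _sketch name is hypothetical:

/* Simplified sketch of vxlan_vs_find_vni() (lines 95-107). */
static struct vxlan_dev *vxlan_vs_find_vni_sketch(struct vxlan_sock *vs,
						  __be32 vni)
{
	struct vxlan_dev_node *node;

	/* Flow-based (collect-metadata) sockets demultiplex on tunnel
	 * metadata rather than on the VNI, so all packets land on VNI 0.
	 */
	if (vs->flags & VXLAN_F_COLLECT_METADATA &&
	    !(vs->flags & VXLAN_F_VNIFILTER))
		vni = 0;

	hlist_for_each_entry_rcu(node, vni_head(vs, vni), hlist) {
		if (node->vxlan &&
		    node->vxlan->default_dst.remote_vni == vni)
			return node->vxlan;
	}
	return NULL;
}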
686 struct vxlan_sock *vs = rcu_dereference_sk_user_data(sk); in vxlan_gro_prepare_receive() local
701 if ((flags & VXLAN_HF_RCO) && (vs->flags & VXLAN_F_REMCSUM_RX)) { in vxlan_gro_prepare_receive()
704 !!(vs->flags & in vxlan_gro_prepare_receive()
1500 static bool __vxlan_sock_release_prep(struct vxlan_sock *vs) in __vxlan_sock_release_prep() argument
1504 if (!vs) in __vxlan_sock_release_prep()
1506 if (!refcount_dec_and_test(&vs->refcnt)) in __vxlan_sock_release_prep()
1509 vn = net_generic(sock_net(vs->sock->sk), vxlan_net_id); in __vxlan_sock_release_prep()
1511 hlist_del_rcu(&vs->hlist); in __vxlan_sock_release_prep()
1512 udp_tunnel_notify_del_rx_port(vs->sock, in __vxlan_sock_release_prep()
1513 (vs->flags & VXLAN_F_GPE) ? in __vxlan_sock_release_prep()
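Lines 1500-1513 are the teardown half of socket sharing: only the last reference unhashes the socket and withdraws the UDP port from hardware RX offload. A reconstruction, assuming the socket hash is protected by the per-namespace sock_lock as in mainline:

/* Reconstruction of lines 1500-1513: drop one reference on the shared
 * socket.  Returns true only for the last holder, which must then finish
 * the release (close the underlying UDP socket) after an RCU grace period.
 */
static bool __vxlan_sock_release_prep(struct vxlan_sock *vs)
{
	struct vxlan_net *vn;

	if (!vs)
		return false;
	if (!refcount_dec_and_test(&vs->refcnt))
		return false;

	vn = net_generic(sock_net(vs->sock->sk), vxlan_net_id);
	spin_lock(&vn->sock_lock);		/* assumed: guards the socket hash */
	hlist_del_rcu(&vs->hlist);
	udp_tunnel_notify_del_rx_port(vs->sock,
				      (vs->flags & VXLAN_F_GPE) ?
				      UDP_TUNNEL_TYPE_VXLAN_GPE :
				      UDP_TUNNEL_TYPE_VXLAN);
	spin_unlock(&vn->sock_lock);

	return true;
}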
1604 struct vxlan_sock *vs, in vxlan_set_mac() argument
1619 if (vxlan_get_sk_family(vs) == AF_INET) { in vxlan_set_mac()
1636 static bool vxlan_ecn_decapsulate(struct vxlan_sock *vs, void *oiph, in vxlan_ecn_decapsulate() argument
1641 if (vxlan_get_sk_family(vs) == AF_INET) in vxlan_ecn_decapsulate()
1649 if (vxlan_get_sk_family(vs) == AF_INET) in vxlan_ecn_decapsulate()
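Lines 1636-1649 check the socket family twice: once to pick the ECN decapsulation helper for the outer header, and again when logging the offender. A simplified sketch that keeps only the dispatch; mainline additionally rate-limit-logs non-ECT senders, and the _sketch name is hypothetical:

/* Simplified sketch of lines 1636-1649: run the family-appropriate ECN
 * check on the outer IP header; a result > 1 means the frame is dropped.
 */
static bool vxlan_ecn_decapsulate_sketch(struct vxlan_sock *vs, void *oiph,
					 struct sk_buff *skb)
{
	int err;

	if (vxlan_get_sk_family(vs) == AF_INET)
		err = IP_ECN_decapsulate(oiph, skb);
	else
		err = IP6_ECN_decapsulate(oiph, skb);

	return err <= 1;
}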
1665 struct vxlan_sock *vs; in vxlan_rcv() local
1690 vs = rcu_dereference_sk_user_data(sk); in vxlan_rcv()
1691 if (!vs) in vxlan_rcv()
1696 vxlan = vxlan_vs_find_vni(vs, skb->dev->ifindex, vni, &vninode); in vxlan_rcv()
1703 if (vs->flags & VXLAN_F_GPE) { in vxlan_rcv()
1714 if (vs->flags & VXLAN_F_REMCSUM_RX) in vxlan_rcv()
1715 if (unlikely(!vxlan_remcsum(&unparsed, skb, vs->flags))) in vxlan_rcv()
1718 if (vxlan_collect_metadata(vs)) { in vxlan_rcv()
1721 tun_dst = udp_tun_rx_dst(skb, vxlan_get_sk_family(vs), TUNNEL_KEY, in vxlan_rcv()
1734 if (vs->flags & VXLAN_F_GBP) in vxlan_rcv()
1735 vxlan_parse_gbp_hdr(&unparsed, skb, vs->flags, md); in vxlan_rcv()
1753 if (!vxlan_set_mac(vxlan, vs, skb, vni)) in vxlan_rcv()
1764 if (!vxlan_ecn_decapsulate(vs, oiph, skb)) { in vxlan_rcv()
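Within vxlan_rcv() (lines 1665-1764), the branch at lines 1718-1735 is where flow-based RX materializes per-packet tunnel metadata. A sketch of that branch, pulled out into a hypothetical helper; the key32_to_tunnel_id() conversion and the memset of the caller's md in the non-metadata case are assumed from mainline:

/* Sketch of the collect-metadata branch of vxlan_rcv() (lines 1718-1735):
 * attach a metadata dst that carries the VNI as the tunnel key; the GBP
 * extension (lines 1734-1735) later fills its options via the returned md.
 * Returns NULL when the packet must be dropped.
 */
static struct vxlan_metadata *vxlan_rx_tun_md_sketch(struct vxlan_sock *vs,
						     struct sk_buff *skb,
						     __be32 vni,
						     struct vxlan_metadata *md)
{
	struct metadata_dst *tun_dst;

	if (!vxlan_collect_metadata(vs)) {
		memset(md, 0, sizeof(*md));	/* non-flow-based: use caller's md */
		return md;
	}

	tun_dst = udp_tun_rx_dst(skb, vxlan_get_sk_family(vs), TUNNEL_KEY,
				 key32_to_tunnel_id(vni), sizeof(*md));
	if (!tun_dst)
		return NULL;

	skb_dst_set(skb, (struct dst_entry *)tun_dst);

	/* for flow-based RX the options live inside the tunnel info */
	return ip_tunnel_info_opts(&tun_dst->u.tun_info);
}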
1800 struct vxlan_sock *vs; in vxlan_err_lookup() local
1812 vs = rcu_dereference_sk_user_data(sk); in vxlan_err_lookup()
1813 if (!vs) in vxlan_err_lookup()
1817 vxlan = vxlan_vs_find_vni(vs, skb->dev->ifindex, vni, NULL); in vxlan_err_lookup()
2823 static void vxlan_vs_add_dev(struct vxlan_sock *vs, struct vxlan_dev *vxlan, in vxlan_vs_add_dev() argument
2831 hlist_add_head_rcu(&node->hlist, vni_head(vs, vni)); in vxlan_vs_add_dev()
3266 struct vxlan_sock *vs; in vxlan_offload_rx_ports() local
3273 hlist_for_each_entry_rcu(vs, &vn->sock_list[i], hlist) { in vxlan_offload_rx_ports()
3276 if (vs->flags & VXLAN_F_GPE) in vxlan_offload_rx_ports()
3282 udp_tunnel_push_rx_port(dev, vs->sock, type); in vxlan_offload_rx_ports()
3284 udp_tunnel_drop_rx_port(dev, vs->sock, type); in vxlan_offload_rx_ports()
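Lines 3266-3284 replay the driver's open UDP ports to a NIC for RX tunnel offload; GPE sockets are announced with their own tunnel type. A reconstruction, assuming the per-namespace sock_list hash and sock_lock as in mainline, with push selecting between add and delete:

/* Reconstruction of lines 3266-3284: walk every open VXLAN socket in the
 * namespace and either push or drop its UDP port on the given device.
 */
static void vxlan_offload_rx_ports(struct net_device *dev, bool push)
{
	struct vxlan_net *vn = net_generic(dev_net(dev), vxlan_net_id);
	struct vxlan_sock *vs;
	unsigned int i;

	spin_lock(&vn->sock_lock);
	for (i = 0; i < PORT_HASH_SIZE; ++i) {
		hlist_for_each_entry_rcu(vs, &vn->sock_list[i], hlist) {
			unsigned short type;

			if (vs->flags & VXLAN_F_GPE)
				type = UDP_TUNNEL_TYPE_VXLAN_GPE;
			else
				type = UDP_TUNNEL_TYPE_VXLAN;

			if (push)
				udp_tunnel_push_rx_port(dev, vs->sock, type);
			else
				udp_tunnel_drop_rx_port(dev, vs->sock, type);
		}
	}
	spin_unlock(&vn->sock_lock);
}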
3518 struct vxlan_sock *vs; in vxlan_socket_create() local
3523 vs = kzalloc(sizeof(*vs), GFP_KERNEL); in vxlan_socket_create()
3524 if (!vs) in vxlan_socket_create()
3528 INIT_HLIST_HEAD(&vs->vni_list[h]); in vxlan_socket_create()
3532 kfree(vs); in vxlan_socket_create()
3536 vs->sock = sock; in vxlan_socket_create()
3537 refcount_set(&vs->refcnt, 1); in vxlan_socket_create()
3538 vs->flags = (flags & VXLAN_F_RCV_FLAGS); in vxlan_socket_create()
3541 hlist_add_head_rcu(&vs->hlist, vs_head(net, port)); in vxlan_socket_create()
3543 (vs->flags & VXLAN_F_GPE) ? in vxlan_socket_create()
3550 tunnel_cfg.sk_user_data = vs; in vxlan_socket_create()
3555 if (vs->flags & VXLAN_F_GPE) { in vxlan_socket_create()
3565 return vs; in vxlan_socket_create()
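Lines 3518-3565 are the socket constructor: allocate the vxlan_sock, initialize its per-VNI hash, open the kernel UDP socket, hash and announce the port, and register the encapsulation callbacks with sk_user_data pointing back at the vxlan_sock (which is what rcu_dereference_sk_user_data() recovers at lines 686, 1690 and 1812). A condensed reconstruction; the GPE-specific GRO callbacks and some error handling are simplified:

/* Condensed reconstruction of vxlan_socket_create() (lines 3518-3565). */
static struct vxlan_sock *vxlan_socket_create(struct net *net, bool ipv6,
					      __be16 port, u32 flags,
					      int ifindex)
{
	struct vxlan_net *vn = net_generic(net, vxlan_net_id);
	struct udp_tunnel_sock_cfg tunnel_cfg;
	struct vxlan_sock *vs;
	struct socket *sock;
	unsigned int h;

	vs = kzalloc(sizeof(*vs), GFP_KERNEL);
	if (!vs)
		return ERR_PTR(-ENOMEM);

	for (h = 0; h < VNI_HASH_SIZE; ++h)
		INIT_HLIST_HEAD(&vs->vni_list[h]);

	/* open the underlying kernel UDP socket (helper as in mainline) */
	sock = vxlan_create_sock(net, ipv6, port, flags, ifindex);
	if (IS_ERR(sock)) {
		kfree(vs);
		return ERR_CAST(sock);
	}

	vs->sock = sock;
	refcount_set(&vs->refcnt, 1);		/* first user of the shared socket */
	vs->flags = (flags & VXLAN_F_RCV_FLAGS);

	spin_lock(&vn->sock_lock);
	hlist_add_head_rcu(&vs->hlist, vs_head(net, port));
	udp_tunnel_notify_add_rx_port(sock,
				      (vs->flags & VXLAN_F_GPE) ?
				      UDP_TUNNEL_TYPE_VXLAN_GPE :
				      UDP_TUNNEL_TYPE_VXLAN);
	spin_unlock(&vn->sock_lock);

	/* hand the socket to the UDP tunnel core; sk_user_data lets
	 * vxlan_rcv()/vxlan_err_lookup() get back to this vxlan_sock */
	memset(&tunnel_cfg, 0, sizeof(tunnel_cfg));
	tunnel_cfg.sk_user_data = vs;
	tunnel_cfg.encap_type = 1;
	tunnel_cfg.encap_rcv = vxlan_rcv;
	tunnel_cfg.encap_err_lookup = vxlan_err_lookup;
	tunnel_cfg.gro_receive = vxlan_gro_receive;	/* GPE variants in mainline */
	tunnel_cfg.gro_complete = vxlan_gro_complete;

	setup_udp_tunnel_sock(net, sock, &tunnel_cfg);

	return vs;
}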
3572 struct vxlan_sock *vs = NULL; in __vxlan_sock_add() local
3582 vs = vxlan_find_sock(vxlan->net, ipv6 ? AF_INET6 : AF_INET, in __vxlan_sock_add()
3585 if (vs && !refcount_inc_not_zero(&vs->refcnt)) { in __vxlan_sock_add()
3591 if (!vs) in __vxlan_sock_add()
3592 vs = vxlan_socket_create(vxlan->net, ipv6, in __vxlan_sock_add()
3595 if (IS_ERR(vs)) in __vxlan_sock_add()
3596 return PTR_ERR(vs); in __vxlan_sock_add()
3599 rcu_assign_pointer(vxlan->vn6_sock, vs); in __vxlan_sock_add()
3604 rcu_assign_pointer(vxlan->vn4_sock, vs); in __vxlan_sock_add()
3609 vxlan_vs_add_vnigrp(vxlan, vs, ipv6); in __vxlan_sock_add()
3611 vxlan_vs_add_dev(vs, vxlan, node); in __vxlan_sock_add()
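Lines 3572-3612 close the loop on socket sharing: __vxlan_sock_add() first tries to reuse an existing socket via refcount_inc_not_zero() (so a socket racing toward release is not revived), falls back to creating one, publishes it on the device with rcu_assign_pointer(), and finally hashes the device into the socket's VNI table (lines 2823-2831). A condensed reconstruction; the l3mdev ifindex lookup, the VNI-filter path (vxlan_vs_add_vnigrp) and the CONFIG_IPV6 guards of mainline are simplified:

/* Condensed reconstruction of __vxlan_sock_add() (lines 3572-3612). */
static int __vxlan_sock_add(struct vxlan_dev *vxlan, bool ipv6)
{
	struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id);
	struct vxlan_sock *vs = NULL;
	struct vxlan_dev_node *node;

	if (!vxlan->cfg.no_share) {
		spin_lock(&vn->sock_lock);
		vs = vxlan_find_sock(vxlan->net, ipv6 ? AF_INET6 : AF_INET,
				     vxlan->cfg.dst_port, vxlan->cfg.flags, 0);
		/* a socket already racing toward release must not be revived */
		if (vs && !refcount_inc_not_zero(&vs->refcnt)) {
			spin_unlock(&vn->sock_lock);
			return -EBUSY;
		}
		spin_unlock(&vn->sock_lock);
	}
	if (!vs)
		vs = vxlan_socket_create(vxlan->net, ipv6,
					 vxlan->cfg.dst_port,
					 vxlan->cfg.flags, 0);
	if (IS_ERR(vs))
		return PTR_ERR(vs);

	if (ipv6) {
		rcu_assign_pointer(vxlan->vn6_sock, vs);
		node = &vxlan->hlist6;
	} else {
		rcu_assign_pointer(vxlan->vn4_sock, vs);
		node = &vxlan->hlist4;
	}

	/* lines 2823-2831: hash the device into the socket's VNI table */
	vxlan_vs_add_dev(vs, vxlan, node);
	return 0;
}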