Lines Matching refs:vn
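All entries below reference the vxlan module's per-network-namespace private data. For context, here is a minimal sketch of that structure, reconstructed only from the fields the listing actually touches (vxlan_list, sock_list, sock_lock); the exact definition, field order, and the PORT_HASH_BITS value are assumptions.

/* Sketch of the per-netns state that "vn" points to in the lines below.
 * Field set is taken from the usages in this listing; PORT_HASH_BITS
 * and the precise layout are assumptions.
 */
#define PORT_HASH_BITS	8
#define PORT_HASH_SIZE	(1 << PORT_HASH_BITS)

struct vxlan_net {
	struct list_head  vxlan_list;			/* all vxlan devices in this netns */
	struct hlist_head sock_list[PORT_HASH_SIZE];	/* open VXLAN UDP sockets, hashed by port */
	spinlock_t	  sock_lock;			/* protects sock_list modifications */
};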

193 struct vxlan_net *vn = net_generic(net, vxlan_net_id); in vs_head() local
195 return &vn->sock_list[hash_32(ntohs(port), PORT_HASH_BITS)]; in vs_head()
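Lines 193 and 195 are the socket hash lookup keyed by UDP port. A sketch of vs_head() reconstructed from those two lines; the function signature is an assumption.

/* Sketch based on lines 193/195: pick the sock_list hash chain for a
 * given UDP port in a given netns.
 */
static struct hlist_head *vs_head(struct net *net, __be16 port)
{
	struct vxlan_net *vn = net_generic(net, vxlan_net_id);

	return &vn->sock_list[hash_32(ntohs(port), PORT_HASH_BITS)];
}
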
1059 static bool vxlan_group_used(struct vxlan_net *vn, struct vxlan_dev *dev) in vxlan_group_used() argument
1081 list_for_each_entry(vxlan, &vn->vxlan_list, next) { in vxlan_group_used()
1110 struct vxlan_net *vn; in __vxlan_sock_release_prep() local
1117 vn = net_generic(sock_net(vs->sock->sk), vxlan_net_id); in __vxlan_sock_release_prep()
1118 spin_lock(&vn->sock_lock); in __vxlan_sock_release_prep()
1124 spin_unlock(&vn->sock_lock); in __vxlan_sock_release_prep()
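
Lines 1110-1124 show the usual pattern for touching sock_list: resolve the vxlan_net of the socket's namespace, then modify the hash chain under sock_lock. A hedged sketch of that removal step; the helper name and the hlist_del_rcu() call between the lock/unlock lines are assumptions about what the listing elides.

/* Hypothetical helper illustrating the lock/unhash/unlock step seen at
 * lines 1117-1124; refcount handling done by the real function is omitted.
 */
static void vxlan_sock_unhash(struct vxlan_sock *vs)
{
	struct vxlan_net *vn = net_generic(sock_net(vs->sock->sk), vxlan_net_id);

	spin_lock(&vn->sock_lock);
	hlist_del_rcu(&vs->hlist);	/* drop the socket from its per-port chain */
	spin_unlock(&vn->sock_lock);
}
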
2408 struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id); in vxlan_vs_del_dev() local
2410 spin_lock(&vn->sock_lock); in vxlan_vs_del_dev()
2415 spin_unlock(&vn->sock_lock); in vxlan_vs_del_dev()
2421 struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id); in vxlan_vs_add_dev() local
2425 spin_lock(&vn->sock_lock); in vxlan_vs_add_dev()
2427 spin_unlock(&vn->sock_lock); in vxlan_vs_add_dev()
2511 struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id); in vxlan_stop() local
2515 !vxlan_group_used(vn, vxlan)) in vxlan_stop()
2636 struct vxlan_net *vn = net_generic(net, vxlan_net_id); in vxlan_offload_rx_ports() local
2639 spin_lock(&vn->sock_lock); in vxlan_offload_rx_ports()
2641 hlist_for_each_entry_rcu(vs, &vn->sock_list[i], hlist) { in vxlan_offload_rx_ports()
2655 spin_unlock(&vn->sock_lock); in vxlan_offload_rx_ports()
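
Lines 2636-2655 walk every open VXLAN socket in the netns under sock_lock. A sketch of that walk; what is done with each socket (the udp_tunnel_push_rx_port()/udp_tunnel_drop_rx_port() calls here) is an assumption, only the locked hlist iteration is taken from the listing.

/* Sketch of the sock_list walk at lines 2639-2655: visit every socket
 * in the per-netns hash table while holding sock_lock.
 */
static void vxlan_offload_rx_ports_sketch(struct net *net,
					   struct net_device *dev, bool push)
{
	struct vxlan_net *vn = net_generic(net, vxlan_net_id);
	struct vxlan_sock *vs;
	unsigned int i;

	spin_lock(&vn->sock_lock);
	for (i = 0; i < PORT_HASH_SIZE; ++i) {
		hlist_for_each_entry_rcu(vs, &vn->sock_list[i], hlist) {
			if (push)
				udp_tunnel_push_rx_port(dev, vs->sock,
							UDP_TUNNEL_TYPE_VXLAN);
			else
				udp_tunnel_drop_rx_port(dev, vs->sock,
							UDP_TUNNEL_TYPE_VXLAN);
		}
	}
	spin_unlock(&vn->sock_lock);
}
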
2847 struct vxlan_net *vn = net_generic(net, vxlan_net_id); in vxlan_socket_create() local
2870 spin_lock(&vn->sock_lock); in vxlan_socket_create()
2876 spin_unlock(&vn->sock_lock); in vxlan_socket_create()
2894 struct vxlan_net *vn = net_generic(vxlan->net, vxlan_net_id); in __vxlan_sock_add() local
2899 spin_lock(&vn->sock_lock); in __vxlan_sock_add()
2903 spin_unlock(&vn->sock_lock); in __vxlan_sock_add()
2906 spin_unlock(&vn->sock_lock); in __vxlan_sock_add()
2955 struct vxlan_net *vn = net_generic(src_net, vxlan_net_id); in vxlan_config_validate() local
3088 list_for_each_entry(tmp, &vn->vxlan_list, next) { in vxlan_config_validate()
3190 struct vxlan_net *vn = net_generic(net, vxlan_net_id); in __vxlan_dev_create() local
3229 list_add(&vxlan->next, &vn->vxlan_list); in __vxlan_dev_create()
3707 static void vxlan_handle_lowerdev_unregister(struct vxlan_net *vn, in vxlan_handle_lowerdev_unregister() argument
3713 list_for_each_entry_safe(vxlan, next, &vn->vxlan_list, next) { in vxlan_handle_lowerdev_unregister()
3733 struct vxlan_net *vn = net_generic(dev_net(dev), vxlan_net_id); in vxlan_netdevice_event() local
3737 vxlan_handle_lowerdev_unregister(vn, dev); in vxlan_netdevice_event()
3754 struct vxlan_net *vn = net_generic(net, vxlan_net_id); in vxlan_init_net() local
3757 INIT_LIST_HEAD(&vn->vxlan_list); in vxlan_init_net()
3758 spin_lock_init(&vn->sock_lock); in vxlan_init_net()
3761 INIT_HLIST_HEAD(&vn->sock_list[h]); in vxlan_init_net()
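
Lines 3754-3761 set up the per-netns state: an empty device list, an initialized lock, and an empty socket hash table. A sketch of vxlan_init_net() built from those lines; the __net_init annotation, return value, and anything beyond these three steps are assumptions.

/* Sketch of the per-netns init at lines 3754-3761. */
static int __net_init vxlan_init_net(struct net *net)
{
	struct vxlan_net *vn = net_generic(net, vxlan_net_id);
	unsigned int h;

	INIT_LIST_HEAD(&vn->vxlan_list);
	spin_lock_init(&vn->sock_lock);

	for (h = 0; h < PORT_HASH_SIZE; ++h)
		INIT_HLIST_HEAD(&vn->sock_list[h]);

	return 0;
}
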
3768 struct vxlan_net *vn = net_generic(net, vxlan_net_id); in vxlan_destroy_tunnels() local
3777 list_for_each_entry_safe(vxlan, next, &vn->vxlan_list, next) { in vxlan_destroy_tunnels()
3788 WARN_ON_ONCE(!hlist_empty(&vn->sock_list[h])); in vxlan_destroy_tunnels()