Lines matching refs: tcp_hashinfo (net/ipv4/tcp_ipv4.c)
91 struct inet_hashinfo tcp_hashinfo; variable
92 EXPORT_SYMBOL(tcp_hashinfo);
477 sk = __inet_lookup_established(net, &tcp_hashinfo, iph->daddr, in tcp_v4_err()
739 sk1 = __inet_lookup_listener(net, &tcp_hashinfo, NULL, 0, in tcp_v4_send_reset()
1778 sk = __inet_lookup_established(dev_net(skb->dev), &tcp_hashinfo, in tcp_v4_early_demux()
1999 sk = __inet_lookup_skb(&tcp_hashinfo, skb, __tcp_hdrlen(th), th->source, in tcp_v4_rcv()
2164 &tcp_hashinfo, skb, in tcp_v4_rcv()
2318 for (; st->bucket <= tcp_hashinfo.lhash2_mask; st->bucket++) { in listening_get_first()
2323 ilb2 = &tcp_hashinfo.lhash2[st->bucket]; in listening_get_first()
2361 ilb2 = &tcp_hashinfo.lhash2[st->bucket]; in listening_get_next()
2385 return hlist_nulls_empty(&tcp_hashinfo.ehash[st->bucket].chain); in empty_bucket()
2397 for (; st->bucket <= tcp_hashinfo.ehash_mask; ++st->bucket) { in established_get_first()
2400 spinlock_t *lock = inet_ehash_lockp(&tcp_hashinfo, st->bucket); in established_get_first()
2407 sk_nulls_for_each(sk, node, &tcp_hashinfo.ehash[st->bucket].chain) { in established_get_first()
2433 spin_unlock_bh(inet_ehash_lockp(&tcp_hashinfo, st->bucket)); in established_get_next()
2479 if (st->bucket > tcp_hashinfo.lhash2_mask) in tcp_seek_last_pos()
2491 if (st->bucket > tcp_hashinfo.ehash_mask) in tcp_seek_last_pos()
2564 spin_unlock(&tcp_hashinfo.lhash2[st->bucket].lock); in tcp_seq_stop()
2568 spin_unlock_bh(inet_ehash_lockp(&tcp_hashinfo, st->bucket)); in tcp_seq_stop()
2783 spin_unlock(&tcp_hashinfo.lhash2[st->bucket].lock); in bpf_iter_tcp_listening_batch()
2810 spin_unlock_bh(inet_ehash_lockp(&tcp_hashinfo, st->bucket)); in bpf_iter_tcp_established_batch()
2832 st->bucket > tcp_hashinfo.lhash2_mask) { in bpf_iter_tcp_batch()
3093 .h.hashinfo = &tcp_hashinfo,
3162 cnt = tcp_hashinfo.ehash_mask + 1; in tcp_sk_init()
3164 net->ipv4.tcp_death_row.hashinfo = &tcp_hashinfo; in tcp_sk_init()
3229 inet_twsk_purge(&tcp_hashinfo, AF_INET); in tcp_sk_exit_batch()
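
For orientation, the call sites above all reach the same TCP socket hash tables through this one global. Below is a minimal sketch (not part of the file indexed above) of the established-socket lookup pattern behind references such as the tcp_v4_err() line at 477: resolving a socket from tcp_hashinfo by its 4-tuple. The function name demo_icmp_err_lookup() and the assumption that skb->data points at the embedded IPv4 header (as in an ICMP error payload) are illustrative only; __inet_lookup_established(), inet_iif() and tcp_hashinfo are the real kernel symbols.

#include <linux/ip.h>
#include <linux/tcp.h>
#include <net/route.h>
#include <net/inet_hashtables.h>
#include <net/tcp.h>

/* Sketch of an ICMP-error style lookup, patterned on tcp_v4_err(). */
static struct sock *demo_icmp_err_lookup(struct net *net, struct sk_buff *skb)
{
	/* An ICMP error payload starts with the original IPv4 header,
	 * followed by at least the first 8 bytes of the TCP header. */
	const struct iphdr *iph = (const struct iphdr *)skb->data;
	const struct tcphdr *th = (const struct tcphdr *)(skb->data +
							  (iph->ihl << 2));

	/* Established sockets are keyed on the full 4-tuple; note the
	 * reversed orientation here (daddr/dest are the local end).
	 * On success a reference is taken, so the caller must drop it
	 * with sock_put() when done. */
	return __inet_lookup_established(net, &tcp_hashinfo,
					 iph->daddr, th->dest,
					 iph->saddr, ntohs(th->source),
					 inet_iif(skb), 0);
}

The remaining references follow from the table's layout: tcp_hashinfo.ehash is a nulls hash with one spinlock shared by a group of buckets, which is why the /proc seq_file and BPF iterator code above walks buckets 0..ehash_mask under inet_ehash_lockp(), while listening sockets live in the separate lhash2 table walked up to lhash2_mask. Line 3164 additionally shows tcp_sk_init() publishing the same global table through net->ipv4.tcp_death_row.hashinfo for per-netns users.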