Lines matching refs: rx_sa (MACsec driver, drivers/net/macsec.c)

134 		struct macsec_rx_sa *rx_sa;  member
728 struct macsec_rx_sa *rx_sa = macsec_skb_cb(skb)->rx_sa; in macsec_post_decrypt() local
729 struct pcpu_rx_sc_stats *rxsc_stats = this_cpu_ptr(rx_sa->sc->stats); in macsec_post_decrypt()
733 spin_lock(&rx_sa->lock); in macsec_post_decrypt()
734 if (rx_sa->next_pn_halves.lower >= secy->replay_window) in macsec_post_decrypt()
735 lowest_pn = rx_sa->next_pn_halves.lower - secy->replay_window; in macsec_post_decrypt()
742 spin_unlock(&rx_sa->lock); in macsec_post_decrypt()
761 spin_unlock(&rx_sa->lock); in macsec_post_decrypt()
769 this_cpu_inc(rx_sa->stats->InPktsNotValid); in macsec_post_decrypt()
777 this_cpu_inc(rx_sa->stats->InPktsInvalid); in macsec_post_decrypt()
790 this_cpu_inc(rx_sa->stats->InPktsOK); in macsec_post_decrypt()
795 if (pn + 1 > rx_sa->next_pn_halves.lower) { in macsec_post_decrypt()
796 rx_sa->next_pn_halves.lower = pn + 1; in macsec_post_decrypt()
798 !pn_same_half(pn, rx_sa->next_pn_halves.lower)) { in macsec_post_decrypt()
799 rx_sa->next_pn_halves.upper++; in macsec_post_decrypt()
800 rx_sa->next_pn_halves.lower = pn + 1; in macsec_post_decrypt()
803 spin_unlock(&rx_sa->lock); in macsec_post_decrypt()
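
The macsec_post_decrypt() lines above implement the post-decryption replay check and the advance of the SA's expected packet number, treating the 64-bit XPN as two 32-bit halves. The standalone C sketch below models that arithmetic outside the kernel; the union layout, the helper names and the main() driver are illustrative stand-ins for the kernel's pn_t, pn_same_half() and the update at lines 733-803, and a little-endian layout is assumed.

    #include <stdint.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative stand-in for the kernel's pn_t: a 64-bit packet number
     * viewed as two 32-bit halves (little-endian layout assumed; the kernel
     * handles endianness with bitfield ifdefs). */
    union pn {
        uint64_t full64;
        struct { uint32_t lower; uint32_t upper; } halves;
    };

    /* Same test as the kernel helper: true when both PNs fall in the same
     * 2^31-wide half of the 32-bit space. */
    static bool pn_same_half(uint32_t pn1, uint32_t pn2)
    {
        return !((pn1 >> 31) ^ (pn2 >> 31));
    }

    /* Lowest PN still accepted, per the replay-window test at lines 734-735
     * (it stays 0 until next_pn has moved past the window). */
    static uint32_t lowest_acceptable_pn(uint32_t next_pn_lower, uint32_t replay_window)
    {
        return next_pn_lower >= replay_window ? next_pn_lower - replay_window : 0;
    }

    /* Advance next_pn past a validated PN, carrying into the upper half when
     * the lower 32 bits wrap, as in lines 795-800. */
    static void advance_next_pn(union pn *next_pn, uint32_t pn, bool xpn)
    {
        if (pn + 1 > next_pn->halves.lower) {
            next_pn->halves.lower = pn + 1;
        } else if (xpn && !pn_same_half(pn, next_pn->halves.lower)) {
            next_pn->halves.upper++;
            next_pn->halves.lower = pn + 1;
        }
    }

    int main(void)
    {
        union pn next = { .full64 = 0xfffffff0ULL };

        printf("lowest acceptable: %u\n",
               (unsigned)lowest_acceptable_pn(next.halves.lower, 64));
        advance_next_pn(&next, 5, true);   /* lower half has wrapped */
        printf("next_pn after wrap: %llu\n", (unsigned long long)next.full64);
        return 0;
    }

In the driver itself the check and the update run with rx_sa->lock held (lines 733 and 803), which the sketch leaves out.
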
843 struct macsec_rx_sa *rx_sa = macsec_skb_cb(skb)->rx_sa; in macsec_decrypt_done() local
844 struct macsec_rx_sc *rx_sc = rx_sa->sc; in macsec_decrypt_done()
872 macsec_rxsa_put(rx_sa); in macsec_decrypt_done()
879 struct macsec_rx_sa *rx_sa, in macsec_decrypt() argument
902 req = macsec_alloc_req(rx_sa->key.tfm, &iv, &sg, ret); in macsec_decrypt()
912 pn_t recovered_pn = rx_sa->next_pn_halves; in macsec_decrypt()
915 if (hdr_pn < rx_sa->next_pn_halves.lower && in macsec_decrypt()
916 !pn_same_half(hdr_pn, rx_sa->next_pn_halves.lower)) in macsec_decrypt()
919 macsec_fill_iv_xpn(iv, rx_sa->ssci, recovered_pn.full64, in macsec_decrypt()
920 rx_sa->key.salt); in macsec_decrypt()
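
macsec_decrypt() builds the AES-GCM IV from the SSCI, the key's salt and a full 64-bit packet number; with XPN the upper 32 bits are not carried in the SecTAG and have to be recovered from the SA's next_pn. A hedged sketch of that recovery follows; recover_xpn() is an illustrative name, and pn_same_half() is repeated from the sketch above so the snippet is self-contained.

    #include <stdint.h>
    #include <stdbool.h>

    static bool pn_same_half(uint32_t pn1, uint32_t pn2)
    {
        return !((pn1 >> 31) ^ (pn2 >> 31));
    }

    /* Rebuild the 64-bit XPN from the 32-bit PN in the SecTAG and the SA's
     * expected next_pn, bumping the upper word when the lower word has
     * wrapped, as done around lines 912-919 before macsec_fill_iv_xpn(). */
    static uint64_t recover_xpn(uint64_t next_pn, uint32_t hdr_pn)
    {
        uint32_t next_lower = (uint32_t)next_pn;
        uint32_t upper = (uint32_t)(next_pn >> 32);

        if (hdr_pn < next_lower && !pn_same_half(hdr_pn, next_lower))
            upper++;

        return ((uint64_t)upper << 32) | hdr_pn;
    }

The recovered 64-bit value, together with rx_sa->ssci and rx_sa->key.salt, is what the driver passes to macsec_fill_iv_xpn() at lines 919-920.
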
1092 struct macsec_rx_sa *rx_sa; in macsec_handle_frame() local
1176 rx_sa = macsec_rxsa_get(rx_sc->sa[macsec_skb_cb(skb)->assoc_num]); in macsec_handle_frame()
1177 if (!rx_sa) { in macsec_handle_frame()
1211 spin_lock(&rx_sa->lock); in macsec_handle_frame()
1212 late = rx_sa->next_pn_halves.lower >= secy->replay_window && in macsec_handle_frame()
1213 hdr_pn < (rx_sa->next_pn_halves.lower - secy->replay_window); in macsec_handle_frame()
1216 late = late && pn_same_half(rx_sa->next_pn_halves.lower, hdr_pn); in macsec_handle_frame()
1217 spin_unlock(&rx_sa->lock); in macsec_handle_frame()
1228 macsec_skb_cb(skb)->rx_sa = rx_sa; in macsec_handle_frame()
1233 skb = macsec_decrypt(skb, dev, rx_sa, sci, secy); in macsec_handle_frame()
1238 macsec_rxsa_put(rx_sa); in macsec_handle_frame()
1255 if (rx_sa) in macsec_handle_frame()
1256 macsec_rxsa_put(rx_sa); in macsec_handle_frame()
1272 macsec_rxsa_put(rx_sa); in macsec_handle_frame()
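
In macsec_handle_frame() the SA is taken with macsec_rxsa_get() (line 1176), which pins it with a reference; the exit paths drop it again via macsec_rxsa_put() (lines 1238, 1256, 1272), and the asynchronous decrypt completion drops it in macsec_decrypt_done() instead (line 872). Before decryption the PN from the SecTAG is screened against the replay window under rx_sa->lock (lines 1211-1217). The function below models only that screening; frame_is_late() and its parameter names are illustrative, not the kernel's.

    #include <stdint.h>
    #include <stdbool.h>

    static bool pn_same_half(uint32_t pn1, uint32_t pn2)
    {
        return !((pn1 >> 31) ^ (pn2 >> 31));
    }

    /* A frame is "late" when its PN falls below the replay window; with XPN
     * the verdict only stands if both PNs sit in the same 2^31 half, so a
     * wrapped lower word is not mistaken for an old frame (lines 1211-1216). */
    static bool frame_is_late(uint32_t next_pn_lower, uint32_t hdr_pn,
                              uint32_t replay_window, bool xpn)
    {
        bool late = next_pn_lower >= replay_window &&
                    hdr_pn < (next_pn_lower - replay_window);

        if (xpn)
            late = late && pn_same_half(next_pn_lower, hdr_pn);
        return late;
    }
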
1354 static int init_rx_sa(struct macsec_rx_sa *rx_sa, char *sak, int key_len, in init_rx_sa() argument
1357 rx_sa->stats = alloc_percpu(struct macsec_rx_sa_stats); in init_rx_sa()
1358 if (!rx_sa->stats) in init_rx_sa()
1361 rx_sa->key.tfm = macsec_alloc_tfm(sak, key_len, icv_len); in init_rx_sa()
1362 if (IS_ERR(rx_sa->key.tfm)) { in init_rx_sa()
1363 free_percpu(rx_sa->stats); in init_rx_sa()
1364 return PTR_ERR(rx_sa->key.tfm); in init_rx_sa()
1367 rx_sa->ssci = MACSEC_UNDEF_SSCI; in init_rx_sa()
1368 rx_sa->active = false; in init_rx_sa()
1369 rx_sa->next_pn = 1; in init_rx_sa()
1370 refcount_set(&rx_sa->refcnt, 1); in init_rx_sa()
1371 spin_lock_init(&rx_sa->lock); in init_rx_sa()
1376 static void clear_rx_sa(struct macsec_rx_sa *rx_sa) in clear_rx_sa() argument
1378 rx_sa->active = false; in clear_rx_sa()
1380 macsec_rxsa_put(rx_sa); in clear_rx_sa()
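
init_rx_sa() allocates the per-CPU stats first and the crypto transform second, freeing the stats again if the transform allocation fails, then leaves the SA inactive with next_pn = 1 and a single reference. A user-space model of that setup and unwind order; struct rx_sa_model and init_rx_sa_model() are illustrative stand-ins, with plain calloc() in place of alloc_percpu() and macsec_alloc_tfm().

    #include <stdlib.h>
    #include <errno.h>
    #include <stdint.h>
    #include <stdbool.h>

    /* Illustrative user-space stand-in for struct macsec_rx_sa. */
    struct rx_sa_model {
        void *stats;        /* stands in for the per-CPU stats block */
        void *tfm;          /* stands in for the AEAD transform */
        bool active;
        uint64_t next_pn;
        int refcnt;
    };

    static int init_rx_sa_model(struct rx_sa_model *sa, size_t stats_sz, size_t tfm_sz)
    {
        sa->stats = calloc(1, stats_sz);
        if (!sa->stats)
            return -ENOMEM;

        sa->tfm = calloc(1, tfm_sz);
        if (!sa->tfm) {
            /* Mirror of lines 1362-1364: undo the first allocation when the
             * second fails, then propagate the error. */
            free(sa->stats);
            return -ENOMEM;
        }

        sa->active = false;     /* a new RXSA starts inactive (line 1368) */
        sa->next_pn = 1;        /* and expects PN 1 first (line 1369) */
        sa->refcnt = 1;         /* single initial reference (line 1370) */
        return 0;
    }

clear_rx_sa() is the flip side: it marks the SA inactive and drops that initial reference through macsec_rxsa_put() (lines 1378-1380).
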
1605 struct macsec_rx_sa *rx_sa; in get_rxsa_from_nl() local
1618 rx_sa = rtnl_dereference(rx_sc->sa[*assoc_num]); in get_rxsa_from_nl()
1619 if (!rx_sa) in get_rxsa_from_nl()
1623 return rx_sa; in get_rxsa_from_nl()
1728 struct macsec_rx_sa *rx_sa; in macsec_add_rxsa() local
1787 rx_sa = rtnl_dereference(rx_sc->sa[assoc_num]); in macsec_add_rxsa()
1788 if (rx_sa) { in macsec_add_rxsa()
1793 rx_sa = kmalloc(sizeof(*rx_sa), GFP_KERNEL); in macsec_add_rxsa()
1794 if (!rx_sa) { in macsec_add_rxsa()
1799 err = init_rx_sa(rx_sa, nla_data(tb_sa[MACSEC_SA_ATTR_KEY]), in macsec_add_rxsa()
1802 kfree(rx_sa); in macsec_add_rxsa()
1808 spin_lock_bh(&rx_sa->lock); in macsec_add_rxsa()
1809 rx_sa->next_pn = nla_get_u64(tb_sa[MACSEC_SA_ATTR_PN]); in macsec_add_rxsa()
1810 spin_unlock_bh(&rx_sa->lock); in macsec_add_rxsa()
1814 rx_sa->active = !!nla_get_u8(tb_sa[MACSEC_SA_ATTR_ACTIVE]); in macsec_add_rxsa()
1816 rx_sa->sc = rx_sc; in macsec_add_rxsa()
1819 rx_sa->ssci = nla_get_ssci(tb_sa[MACSEC_SA_ATTR_SSCI]); in macsec_add_rxsa()
1820 nla_memcpy(rx_sa->key.salt.bytes, tb_sa[MACSEC_SA_ATTR_SALT], in macsec_add_rxsa()
1836 ctx.sa.rx_sa = rx_sa; in macsec_add_rxsa()
1847 nla_memcpy(rx_sa->key.id, tb_sa[MACSEC_SA_ATTR_KEYID], MACSEC_KEYID_LEN); in macsec_add_rxsa()
1848 rcu_assign_pointer(rx_sc->sa[assoc_num], rx_sa); in macsec_add_rxsa()
1855 macsec_rxsa_put(rx_sa); in macsec_add_rxsa()
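
macsec_add_rxsa() publishes the new SA to readers only after it is fully initialized: packet number, activity flag, the back-pointer to the RXSC, SSCI/salt and the key id are all written before rcu_assign_pointer() at line 1848. The C11 sketch below models that ordering with a release store on an atomic pointer; the slot, struct and function names are illustrative, and the RCU read side is left out.

    #include <stdatomic.h>
    #include <stdlib.h>
    #include <string.h>
    #include <stdint.h>
    #include <stdbool.h>

    struct sa_model {
        uint64_t next_pn;
        bool active;
        unsigned char key_id[16];
    };

    /* One RXSC slot, modeled as an atomic pointer. rcu_assign_pointer() in
     * the kernel implies at least release ordering, which is what lets
     * lockless readers observe a fully initialized SA. */
    static _Atomic(struct sa_model *) slot;

    static int add_rxsa_model(const unsigned char key_id[16], uint64_t pn, bool active)
    {
        struct sa_model *sa = calloc(1, sizeof(*sa));

        if (!sa)
            return -1;

        /* Fill in every field before publication, mirroring how
         * macsec_add_rxsa() completes lines 1799-1847 before line 1848. */
        sa->next_pn = pn;
        sa->active = active;
        memcpy(sa->key_id, key_id, sizeof(sa->key_id));

        atomic_store_explicit(&slot, sa, memory_order_release);
        return 0;
    }
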
2110 struct macsec_rx_sa *rx_sa; in macsec_del_rxsa() local
2126 rx_sa = get_rxsa_from_nl(genl_info_net(info), attrs, tb_rxsc, tb_sa, in macsec_del_rxsa()
2128 if (IS_ERR(rx_sa)) { in macsec_del_rxsa()
2130 return PTR_ERR(rx_sa); in macsec_del_rxsa()
2133 if (rx_sa->active) { in macsec_del_rxsa()
2150 ctx.sa.rx_sa = rx_sa; in macsec_del_rxsa()
2159 clear_rx_sa(rx_sa); in macsec_del_rxsa()
2417 struct macsec_rx_sa *rx_sa; in macsec_upd_rxsa() local
2440 rx_sa = get_rxsa_from_nl(genl_info_net(info), attrs, tb_rxsc, tb_sa, in macsec_upd_rxsa()
2442 if (IS_ERR(rx_sa)) { in macsec_upd_rxsa()
2444 return PTR_ERR(rx_sa); in macsec_upd_rxsa()
2458 spin_lock_bh(&rx_sa->lock); in macsec_upd_rxsa()
2459 prev_pn = rx_sa->next_pn_halves; in macsec_upd_rxsa()
2460 rx_sa->next_pn = nla_get_u64(tb_sa[MACSEC_SA_ATTR_PN]); in macsec_upd_rxsa()
2461 spin_unlock_bh(&rx_sa->lock); in macsec_upd_rxsa()
2464 was_active = rx_sa->active; in macsec_upd_rxsa()
2466 rx_sa->active = nla_get_u8(tb_sa[MACSEC_SA_ATTR_ACTIVE]); in macsec_upd_rxsa()
2480 ctx.sa.rx_sa = rx_sa; in macsec_upd_rxsa()
2493 spin_lock_bh(&rx_sa->lock); in macsec_upd_rxsa()
2494 rx_sa->next_pn_halves = prev_pn; in macsec_upd_rxsa()
2495 spin_unlock_bh(&rx_sa->lock); in macsec_upd_rxsa()
2497 rx_sa->active = was_active; in macsec_upd_rxsa()
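
macsec_upd_rxsa() snapshots the previous next_pn and activity state before applying the new values, so a failed offload update can restore both (lines 2459-2502). A minimal sketch of that snapshot-and-rollback flow; offload_upd() is a hypothetical stand-in for the driver's offload call and fails unconditionally here just to exercise the rollback path.

    #include <stdint.h>
    #include <stdbool.h>

    struct rxsa_model {
        uint64_t next_pn;
        bool active;
    };

    /* Hypothetical stand-in for the offload call; always fails so the
     * example takes the rollback path. */
    static int offload_upd(const struct rxsa_model *sa)
    {
        (void)sa;
        return -1;
    }

    /* Apply the update, then undo both changes if the offload fails,
     * mirroring lines 2458-2502. */
    static int upd_rxsa_model(struct rxsa_model *sa, uint64_t new_pn, bool new_active)
    {
        uint64_t prev_pn = sa->next_pn;
        bool was_active = sa->active;
        int err;

        sa->next_pn = new_pn;
        sa->active = new_active;

        err = offload_upd(sa);
        if (err) {
            sa->next_pn = prev_pn;
            sa->active = was_active;
        }
        return err;
    }
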
2715 struct macsec_rx_sa *rx_sa, in get_rx_sa_stats() argument
2729 ctx.sa.rx_sa = rx_sa; in get_rx_sa_stats()
2740 per_cpu_ptr(rx_sa->stats, cpu); in get_rx_sa_stats()
3191 struct macsec_rx_sa *rx_sa = rtnl_dereference(rx_sc->sa[i]); in dump_secy() local
3196 if (!rx_sa) in dump_secy()
3216 get_rx_sa_stats(dev, rx_sc, i, rx_sa, &rx_sa_stats); in dump_secy()
3227 pn = rx_sa->next_pn; in dump_secy()
3230 pn = rx_sa->next_pn_halves.lower; in dump_secy()
3236 nla_put(skb, MACSEC_SA_ATTR_KEYID, MACSEC_KEYID_LEN, rx_sa->key.id) || in dump_secy()
3237 (secy->xpn && nla_put_ssci(skb, MACSEC_SA_ATTR_SSCI, rx_sa->ssci)) || in dump_secy()
3238 nla_put_u8(skb, MACSEC_SA_ATTR_ACTIVE, rx_sa->active)) { in dump_secy()
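
When dumping, dump_secy() reports the full 64-bit next_pn for an XPN cipher suite and only the lower 32 bits otherwise (lines 3227-3230), alongside the key id, SSCI and activity flag. A tiny hedged helper showing that selection; reported_pn() is an illustrative name, not a kernel function.

    #include <stdint.h>
    #include <stdbool.h>

    /* PN value placed in the netlink dump: the whole 64-bit counter for XPN,
     * just the lower 32 bits for 32-bit PN cipher suites. */
    static uint64_t reported_pn(uint64_t next_pn, bool xpn)
    {
        return xpn ? next_pn : (uint32_t)next_pn;
    }
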