Lines matching refs: srx (drivers/infiniband/sw/siw/siw_qp_rx.c, SoftiWARP receive path)
29 static int siw_rx_umem(struct siw_rx_stream *srx, struct siw_umem *umem, in siw_rx_umem() argument
42 __func__, qp_id(rx_qp(srx)), in siw_rx_umem()
46 srx->skb_copied += copied; in siw_rx_umem()
47 srx->skb_new -= copied; in siw_rx_umem()
54 siw_dbg_qp(rx_qp(srx), "page %pK, bytes=%u\n", p, bytes); in siw_rx_umem()
57 rv = skb_copy_bits(srx->skb, srx->skb_offset, dest + pg_off, in siw_rx_umem()
62 srx->skb_copied += copied; in siw_rx_umem()
63 srx->skb_new -= copied; in siw_rx_umem()
66 qp_id(rx_qp(srx)), __func__, len, p, rv); in siw_rx_umem()
70 if (srx->mpa_crc_hd) { in siw_rx_umem()
71 if (rdma_is_kernel_res(&rx_qp(srx)->base_qp.res)) { in siw_rx_umem()
72 crypto_shash_update(srx->mpa_crc_hd, in siw_rx_umem()
87 siw_crc_skb(srx, bytes); in siw_rx_umem()
92 srx->skb_offset += bytes; in siw_rx_umem()
98 srx->skb_copied += copied; in siw_rx_umem()
99 srx->skb_new -= copied; in siw_rx_umem()
104 static int siw_rx_kva(struct siw_rx_stream *srx, void *kva, int len) in siw_rx_kva() argument
108 siw_dbg_qp(rx_qp(srx), "kva: 0x%pK, len: %u\n", kva, len); in siw_rx_kva()
110 rv = skb_copy_bits(srx->skb, srx->skb_offset, kva, len); in siw_rx_kva()
113 qp_id(rx_qp(srx)), __func__, len, kva, rv); in siw_rx_kva()
117 if (srx->mpa_crc_hd) in siw_rx_kva()
118 crypto_shash_update(srx->mpa_crc_hd, (u8 *)kva, len); in siw_rx_kva()
120 srx->skb_offset += len; in siw_rx_kva()
121 srx->skb_copied += len; in siw_rx_kva()
122 srx->skb_new -= len; in siw_rx_kva()
127 static int siw_rx_pbl(struct siw_rx_stream *srx, int *pbl_idx, in siw_rx_pbl() argument
142 if (siw_rx_kva(srx, (void *)(uintptr_t)buf_addr, bytes) == in siw_rx_pbl()
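Every copy path referenced above (siw_rx_kva(), siw_rx_umem(), and siw_rx_pbl(), which copies per PBL entry through siw_rx_kva()) ends with the same stream bookkeeping: advance the read offset into the current skb, grow the per-skb copied count, and shrink the count of unread bytes. A minimal sketch of that pattern, assuming the driver's struct siw_rx_stream definition (siw.h); the helper name is made up and does not exist in the driver:

    /*
     * Hypothetical helper, for illustration only: the bookkeeping each
     * copy routine in the listing performs after consuming 'len' bytes
     * from the socket buffer.  Field names follow the references above.
     */
    static void srx_consume(struct siw_rx_stream *srx, int len)
    {
    	srx->skb_offset += len;	/* next unread byte within the skb */
    	srx->skb_copied += len;	/* bytes consumed from this skb    */
    	srx->skb_new    -= len;	/* bytes still unread in this skb  */
    }

The FPDU-level counters (srx->fpdu_part_rcvd, srx->fpdu_part_rem) are updated separately by the callers, as the siw_proc_send(), siw_proc_write(), and siw_proc_rresp() references further down show.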
165 static int siw_rresp_check_ntoh(struct siw_rx_stream *srx, in siw_rresp_check_ntoh() argument
168 struct iwarp_rdma_rresp *rresp = &srx->hdr.rresp; in siw_rresp_check_ntoh()
176 srx->ddp_stag = wqe->sqe.sge[0].lkey; in siw_rresp_check_ntoh()
177 srx->ddp_to = wqe->sqe.sge[0].laddr; in siw_rresp_check_ntoh()
190 if (unlikely(srx->ddp_stag != sink_stag)) { in siw_rresp_check_ntoh()
192 qp_id(rx_qp(srx)), sink_stag, srx->ddp_stag); in siw_rresp_check_ntoh()
196 if (unlikely(srx->ddp_to != sink_to)) { in siw_rresp_check_ntoh()
198 qp_id(rx_qp(srx)), (unsigned long long)sink_to, in siw_rresp_check_ntoh()
199 (unsigned long long)srx->ddp_to); in siw_rresp_check_ntoh()
204 (wqe->processed + srx->fpdu_part_rem != wqe->bytes))) { in siw_rresp_check_ntoh()
206 qp_id(rx_qp(srx)), in siw_rresp_check_ntoh()
207 wqe->processed + srx->fpdu_part_rem, wqe->bytes); in siw_rresp_check_ntoh()
213 siw_init_terminate(rx_qp(srx), TERM_ERROR_LAYER_DDP, in siw_rresp_check_ntoh()
229 static int siw_write_check_ntoh(struct siw_rx_stream *srx, in siw_write_check_ntoh() argument
232 struct iwarp_rdma_write *write = &srx->hdr.rwrite; in siw_write_check_ntoh()
239 srx->ddp_stag = sink_stag; in siw_write_check_ntoh()
240 srx->ddp_to = sink_to; in siw_write_check_ntoh()
243 if (unlikely(srx->ddp_stag != sink_stag)) { in siw_write_check_ntoh()
245 qp_id(rx_qp(srx)), sink_stag, in siw_write_check_ntoh()
246 srx->ddp_stag); in siw_write_check_ntoh()
250 if (unlikely(srx->ddp_to != sink_to)) { in siw_write_check_ntoh()
252 qp_id(rx_qp(srx)), in siw_write_check_ntoh()
254 (unsigned long long)srx->ddp_to); in siw_write_check_ntoh()
261 siw_init_terminate(rx_qp(srx), TERM_ERROR_LAYER_DDP, in siw_write_check_ntoh()
277 static int siw_send_check_ntoh(struct siw_rx_stream *srx, in siw_send_check_ntoh() argument
280 struct iwarp_send_inv *send = &srx->hdr.send_inv; in siw_send_check_ntoh()
290 qp_id(rx_qp(srx)), ddp_qn); in siw_send_check_ntoh()
294 if (unlikely(ddp_msn != srx->ddp_msn[RDMAP_UNTAGGED_QN_SEND])) { in siw_send_check_ntoh()
296 qp_id(rx_qp(srx)), ddp_msn, in siw_send_check_ntoh()
297 srx->ddp_msn[RDMAP_UNTAGGED_QN_SEND]); in siw_send_check_ntoh()
303 qp_id(rx_qp(srx)), ddp_mo, wqe->processed); in siw_send_check_ntoh()
314 srx->inval_stag = be32_to_cpu(send->inval_stag); in siw_send_check_ntoh()
316 if (unlikely(wqe->bytes < wqe->processed + srx->fpdu_part_rem)) { in siw_send_check_ntoh()
317 siw_dbg_qp(rx_qp(srx), "receive space short: %d - %d < %d\n", in siw_send_check_ntoh()
318 wqe->bytes, wqe->processed, srx->fpdu_part_rem); in siw_send_check_ntoh()
325 siw_init_terminate(rx_qp(srx), TERM_ERROR_LAYER_DDP, in siw_send_check_ntoh()
423 struct siw_rx_stream *srx = &qp->rx_stream; in siw_proc_send() local
441 if (srx->state == SIW_GET_DATA_START) { in siw_proc_send()
442 rv = siw_send_check_ntoh(srx, frx); in siw_proc_send()
447 if (!srx->fpdu_part_rem) /* zero length SEND */ in siw_proc_send()
450 data_bytes = min(srx->fpdu_part_rem, srx->skb_new); in siw_proc_send()
489 rv = siw_rx_kva(srx, in siw_proc_send()
493 rv = siw_rx_umem(srx, mem_p->umem, in siw_proc_send()
496 rv = siw_rx_pbl(srx, &frx->pbl_idx, mem_p, in siw_proc_send()
517 srx->fpdu_part_rem -= rv; in siw_proc_send()
518 srx->fpdu_part_rcvd += rv; in siw_proc_send()
522 if (!srx->fpdu_part_rem) in siw_proc_send()
542 struct siw_rx_stream *srx = &qp->rx_stream; in siw_proc_write() local
547 if (srx->state == SIW_GET_DATA_START) { in siw_proc_write()
548 if (!srx->fpdu_part_rem) /* zero length WRITE */ in siw_proc_write()
551 rv = siw_write_check_ntoh(srx, frx); in siw_proc_write()
557 bytes = min(srx->fpdu_part_rem, srx->skb_new); in siw_proc_write()
562 rx_mem(frx) = siw_mem_id2obj(qp->sdev, srx->ddp_stag >> 8); in siw_proc_write()
566 srx->ddp_stag); in siw_proc_write()
583 if (unlikely(mem->stag != srx->ddp_stag)) { in siw_proc_write()
589 rv = siw_check_mem(qp->pd, mem, srx->ddp_to + srx->fpdu_part_rcvd, in siw_proc_write()
602 rv = siw_rx_kva(srx, in siw_proc_write()
603 (void *)(uintptr_t)(srx->ddp_to + srx->fpdu_part_rcvd), in siw_proc_write()
606 rv = siw_rx_umem(srx, mem->umem, in siw_proc_write()
607 srx->ddp_to + srx->fpdu_part_rcvd, bytes); in siw_proc_write()
609 rv = siw_rx_pbl(srx, &frx->pbl_idx, mem, in siw_proc_write()
610 srx->ddp_to + srx->fpdu_part_rcvd, bytes); in siw_proc_write()
618 srx->fpdu_part_rem -= rv; in siw_proc_write()
619 srx->fpdu_part_rcvd += rv; in siw_proc_write()
621 if (!srx->fpdu_part_rem) { in siw_proc_write()
622 srx->ddp_to += srx->fpdu_part_rcvd; in siw_proc_write()
633 struct siw_rx_stream *srx = &qp->rx_stream; in siw_proc_rreq() local
635 if (!srx->fpdu_part_rem) in siw_proc_rreq()
639 be16_to_cpu(srx->hdr.ctrl.mpa_len)); in siw_proc_rreq()
660 static int siw_init_rresp(struct siw_qp *qp, struct siw_rx_stream *srx) in siw_init_rresp() argument
665 uint64_t raddr = be64_to_cpu(srx->hdr.rreq.sink_to), in siw_init_rresp()
666 laddr = be64_to_cpu(srx->hdr.rreq.source_to); in siw_init_rresp()
667 uint32_t length = be32_to_cpu(srx->hdr.rreq.read_size), in siw_init_rresp()
668 lkey = be32_to_cpu(srx->hdr.rreq.source_stag), in siw_init_rresp()
669 rkey = be32_to_cpu(srx->hdr.rreq.sink_stag), in siw_init_rresp()
670 msn = be32_to_cpu(srx->hdr.rreq.ddp_msn); in siw_init_rresp()
675 if (unlikely(msn != srx->ddp_msn[RDMAP_UNTAGGED_QN_RDMA_READ])) { in siw_init_rresp()
788 struct siw_rx_stream *srx = &qp->rx_stream; in siw_proc_rresp() local
811 rv = siw_rresp_check_ntoh(srx, frx); in siw_proc_rresp()
824 if (!srx->fpdu_part_rem) /* zero length RRESPONSE */ in siw_proc_rresp()
851 bytes = min(srx->fpdu_part_rem, srx->skb_new); in siw_proc_rresp()
854 rv = siw_rx_kva(srx, in siw_proc_rresp()
858 rv = siw_rx_umem(srx, mem_p->umem, sge->laddr + wqe->processed, in siw_proc_rresp()
861 rv = siw_rx_pbl(srx, &frx->pbl_idx, mem_p, in siw_proc_rresp()
868 srx->fpdu_part_rem -= rv; in siw_proc_rresp()
869 srx->fpdu_part_rcvd += rv; in siw_proc_rresp()
872 if (!srx->fpdu_part_rem) { in siw_proc_rresp()
873 srx->ddp_to += srx->fpdu_part_rcvd; in siw_proc_rresp()
886 struct siw_rx_stream *srx = &qp->rx_stream; in siw_proc_terminate() local
887 struct sk_buff *skb = srx->skb; in siw_proc_terminate()
888 struct iwarp_terminate *term = &srx->hdr.terminate; in siw_proc_terminate()
916 if (srx->skb_new < sizeof(struct iwarp_ctrl_tagged)) in siw_proc_terminate()
921 skb_copy_bits(skb, srx->skb_offset, infop, to_copy); in siw_proc_terminate()
928 srx->skb_offset += to_copy; in siw_proc_terminate()
929 srx->skb_new -= to_copy; in siw_proc_terminate()
930 srx->skb_copied += to_copy; in siw_proc_terminate()
931 srx->fpdu_part_rcvd += to_copy; in siw_proc_terminate()
932 srx->fpdu_part_rem -= to_copy; in siw_proc_terminate()
937 if (to_copy + MPA_CRC_SIZE > srx->skb_new) in siw_proc_terminate()
940 skb_copy_bits(skb, srx->skb_offset, infop, to_copy); in siw_proc_terminate()
952 srx->skb_new -= to_copy; in siw_proc_terminate()
953 srx->skb_offset += to_copy; in siw_proc_terminate()
954 srx->skb_copied += to_copy; in siw_proc_terminate()
955 srx->fpdu_part_rcvd += to_copy; in siw_proc_terminate()
956 srx->fpdu_part_rem -= to_copy; in siw_proc_terminate()
961 static int siw_get_trailer(struct siw_qp *qp, struct siw_rx_stream *srx) in siw_get_trailer() argument
963 struct sk_buff *skb = srx->skb; in siw_get_trailer()
964 int avail = min(srx->skb_new, srx->fpdu_part_rem); in siw_get_trailer()
965 u8 *tbuf = (u8 *)&srx->trailer.crc - srx->pad; in siw_get_trailer()
969 srx->fpdu_part_rem, srx->skb_new, srx->pad); in siw_get_trailer()
971 skb_copy_bits(skb, srx->skb_offset, tbuf, avail); in siw_get_trailer()
973 srx->skb_new -= avail; in siw_get_trailer()
974 srx->skb_offset += avail; in siw_get_trailer()
975 srx->skb_copied += avail; in siw_get_trailer()
976 srx->fpdu_part_rem -= avail; in siw_get_trailer()
978 if (srx->fpdu_part_rem) in siw_get_trailer()
981 if (!srx->mpa_crc_hd) in siw_get_trailer()
984 if (srx->pad) in siw_get_trailer()
985 crypto_shash_update(srx->mpa_crc_hd, tbuf, srx->pad); in siw_get_trailer()
990 crypto_shash_final(srx->mpa_crc_hd, (u8 *)&crc_own); in siw_get_trailer()
991 crc_in = (__force __wsum)srx->trailer.crc; in siw_get_trailer()
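The siw_get_trailer() references show how pad and CRC are collected: the copy destination is placed srx->pad bytes before srx->trailer.crc, so each skb_copy_bits() fills both the remaining pad bytes and the CRC field, and the pad bytes are fed to the CRC engine before crypto_shash_final(). A rough, hypothetical view of that layout (illustration only; the real definition lives in the driver's headers):

    /*
     * Illustration only, not the driver's structure.  With pad == 2,
     * tbuf points at pad[2], so copying (pad + MPA_CRC_SIZE) bytes
     * fills pad[2], pad[3] and all of crc in one pass.
     */
    struct trailer_view {
    	u8	pad[4];		/* wire carries 0..3 of these            */
    	__be32	crc;		/* MPA CRC over length, ULPDU and pad    */
    };

    /* tbuf = (u8 *)&trailer.crc - pad;  then copy pad + MPA_CRC_SIZE bytes */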
1007 static int siw_get_hdr(struct siw_rx_stream *srx) in siw_get_hdr() argument
1009 struct sk_buff *skb = srx->skb; in siw_get_hdr()
1010 struct siw_qp *qp = rx_qp(srx); in siw_get_hdr()
1011 struct iwarp_ctrl *c_hdr = &srx->hdr.ctrl; in siw_get_hdr()
1016 if (srx->fpdu_part_rcvd < MIN_DDP_HDR) { in siw_get_hdr()
1020 bytes = min_t(int, srx->skb_new, in siw_get_hdr()
1021 MIN_DDP_HDR - srx->fpdu_part_rcvd); in siw_get_hdr()
1023 skb_copy_bits(skb, srx->skb_offset, in siw_get_hdr()
1024 (char *)c_hdr + srx->fpdu_part_rcvd, bytes); in siw_get_hdr()
1026 srx->fpdu_part_rcvd += bytes; in siw_get_hdr()
1028 srx->skb_new -= bytes; in siw_get_hdr()
1029 srx->skb_offset += bytes; in siw_get_hdr()
1030 srx->skb_copied += bytes; in siw_get_hdr()
1032 if (srx->fpdu_part_rcvd < MIN_DDP_HDR) in siw_get_hdr()
1049 siw_init_terminate(rx_qp(srx), TERM_ERROR_LAYER_DDP, in siw_get_hdr()
1057 siw_init_terminate(rx_qp(srx), TERM_ERROR_LAYER_RDMAP, in siw_get_hdr()
1068 siw_init_terminate(rx_qp(srx), TERM_ERROR_LAYER_RDMAP, in siw_get_hdr()
1073 siw_dbg_qp(rx_qp(srx), "new header, opcode %u\n", opcode); in siw_get_hdr()
1089 bytes = min_t(int, hdrlen - MIN_DDP_HDR, srx->skb_new); in siw_get_hdr()
1091 skb_copy_bits(skb, srx->skb_offset, in siw_get_hdr()
1092 (char *)c_hdr + srx->fpdu_part_rcvd, bytes); in siw_get_hdr()
1094 srx->fpdu_part_rcvd += bytes; in siw_get_hdr()
1096 srx->skb_new -= bytes; in siw_get_hdr()
1097 srx->skb_offset += bytes; in siw_get_hdr()
1098 srx->skb_copied += bytes; in siw_get_hdr()
1100 if (srx->fpdu_part_rcvd < hdrlen) in siw_get_hdr()
1116 if (srx->mpa_crc_hd) { in siw_get_hdr()
1120 crypto_shash_init(srx->mpa_crc_hd); in siw_get_hdr()
1121 crypto_shash_update(srx->mpa_crc_hd, (u8 *)c_hdr, in siw_get_hdr()
1122 srx->fpdu_part_rcvd); in siw_get_hdr()
1221 struct siw_rx_stream *srx = &qp->rx_stream; in siw_rdmap_complete() local
1224 u8 opcode = __rdmap_get_opcode(&srx->hdr.ctrl); in siw_rdmap_complete()
1238 srx->ddp_msn[RDMAP_UNTAGGED_QN_SEND]++; in siw_rdmap_complete()
1248 rv = siw_invalidate_stag(qp->pd, srx->inval_stag); in siw_rdmap_complete()
1260 rv ? 0 : srx->inval_stag, in siw_rdmap_complete()
1274 if ((srx->state == SIW_GET_HDR && in siw_rdmap_complete()
1317 rv = siw_init_rresp(qp, srx); in siw_rdmap_complete()
1318 srx->ddp_msn[RDMAP_UNTAGGED_QN_RDMA_READ]++; in siw_rdmap_complete()
1360 struct siw_rx_stream *srx = &qp->rx_stream; in siw_tcp_rx_data() local
1363 srx->skb = skb; in siw_tcp_rx_data()
1364 srx->skb_new = skb->len - off; in siw_tcp_rx_data()
1365 srx->skb_offset = off; in siw_tcp_rx_data()
1366 srx->skb_copied = 0; in siw_tcp_rx_data()
1368 siw_dbg_qp(qp, "new data, len %d\n", srx->skb_new); in siw_tcp_rx_data()
1370 while (srx->skb_new) { in siw_tcp_rx_data()
1373 if (unlikely(srx->rx_suspend)) { in siw_tcp_rx_data()
1375 srx->skb_copied += srx->skb_new; in siw_tcp_rx_data()
1378 switch (srx->state) { in siw_tcp_rx_data()
1380 rv = siw_get_hdr(srx); in siw_tcp_rx_data()
1382 srx->fpdu_part_rem = in siw_tcp_rx_data()
1383 be16_to_cpu(srx->hdr.ctrl.mpa_len) - in siw_tcp_rx_data()
1384 srx->fpdu_part_rcvd + MPA_HDR_SIZE; in siw_tcp_rx_data()
1386 if (srx->fpdu_part_rem) in siw_tcp_rx_data()
1387 srx->pad = -srx->fpdu_part_rem & 0x3; in siw_tcp_rx_data()
1389 srx->pad = 0; in siw_tcp_rx_data()
1391 srx->state = SIW_GET_DATA_START; in siw_tcp_rx_data()
1392 srx->fpdu_part_rcvd = 0; in siw_tcp_rx_data()
1414 be16_to_cpu(srx->hdr.ctrl.mpa_len) in siw_tcp_rx_data()
1417 srx->fpdu_part_rem = (-mpa_len & 0x3) in siw_tcp_rx_data()
1419 srx->fpdu_part_rcvd = 0; in siw_tcp_rx_data()
1420 srx->state = SIW_GET_TRAILER; in siw_tcp_rx_data()
1425 srx->state = SIW_GET_DATA_MORE; in siw_tcp_rx_data()
1433 rv = siw_get_trailer(qp, srx); in siw_tcp_rx_data()
1439 srx->state = SIW_GET_HDR; in siw_tcp_rx_data()
1440 srx->fpdu_part_rcvd = 0; in siw_tcp_rx_data()
1442 if (!(srx->hdr.ctrl.ddp_rdmap_ctrl & in siw_tcp_rx_data()
1458 if ((srx->state > SIW_GET_HDR || in siw_tcp_rx_data()
1463 srx->state); in siw_tcp_rx_data()
1471 srx->state, srx->fpdu_part_rem); in siw_tcp_rx_data()
1475 return srx->skb_copied; in siw_tcp_rx_data()
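The siw_tcp_rx_data() references drive the per-FPDU state machine: SIW_GET_HDR, then SIW_GET_DATA_START (and SIW_GET_DATA_MORE if the payload spans skbs), then SIW_GET_TRAILER, and back to SIW_GET_HDR. The pad expressions at lines 1387 and 1417 round the remaining payload up to a 4-byte boundary ahead of the CRC trailer. A standalone illustration of that arithmetic, userspace code and not taken from the driver:

    /*
     * Standalone illustration of the pad expression "-rem & 0x3" used in
     * siw_tcp_rx_data(): it yields the 0..3 bytes needed to round 'rem'
     * up to the next 4-byte boundary.
     */
    #include <stdio.h>

    static unsigned int mpa_pad(unsigned int rem)
    {
    	return -rem & 0x3;
    }

    int main(void)
    {
    	unsigned int rem;

    	for (rem = 0; rem < 8; rem++)	/* e.g. rem=7 -> pad=1, 7+1=8 */
    		printf("rem=%u pad=%u\n", rem, mpa_pad(rem));
    	return 0;
    }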