Lines Matching refs:rxm
134 struct strp_msg *rxm = strp_msg(skb); in padding_length() local
144 if (back > rxm->full_len - prot->prepend_size) in padding_length()
147 rxm->offset + rxm->full_len - back, in padding_length()
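
The padding_length() hits above come from TLS 1.3 handling: the inner plaintext is the application data, then one content-type byte, then any number of zero padding bytes, so the function scans backwards from the end of the record (bounded by rxm->offset/rxm->full_len) until it finds the first non-zero byte. A minimal stand-alone model of that scan, assuming a flat buffer with the authentication tag already removed (the kernel walks the skb with skb_copy_bits() and also steps over the still-attached tag); the function name and signature here are illustrative, not the kernel's:

    #include <stddef.h>

    /* Model of the TLS 1.3 trailer scan: count trailing zero padding and
     * report the real content type (the first non-zero byte from the end).
     * Returns the number of padding bytes to strip, or -1 if malformed. */
    static int tls13_padding_length(const unsigned char *rec, size_t len,
                                    unsigned char *content_type)
    {
        size_t pad = 0;

        while (pad < len && rec[len - 1 - pad] == 0)
            pad++;                        /* skip a zero padding byte */

        if (pad == len)
            return -1;                    /* all zeros: malformed record */

        *content_type = rec[len - 1 - pad];
        return (int)pad;
    }
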
187 struct strp_msg *rxm = strp_msg(skb); in tls_decrypt_done() local
195 rxm->full_len -= pad; in tls_decrypt_done()
196 rxm->offset += prot->prepend_size; in tls_decrypt_done()
197 rxm->full_len -= prot->overhead_size; in tls_decrypt_done()
1418 struct strp_msg *rxm = strp_msg(skb); in decrypt_internal() local
1425 const int data_len = rxm->full_len - prot->overhead_size + in decrypt_internal()
1434 n_sgin = skb_nsg(skb, rxm->offset + prot->prepend_size, in decrypt_internal()
1435 rxm->full_len - prot->prepend_size); in decrypt_internal()
1477 err = skb_copy_bits(skb, rxm->offset + TLS_HEADER_SIZE, in decrypt_internal()
1493 tls_make_aad(aad, rxm->full_len - prot->overhead_size + in decrypt_internal()
1502 rxm->offset + prot->prepend_size, in decrypt_internal()
1503 rxm->full_len - prot->prepend_size); in decrypt_internal()
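
Every buffer decrypt_internal() needs is derived from the same two fields: the data handed to the AEAD starts prepend_size bytes into the record (rxm->offset + prot->prepend_size) and runs for full_len - prepend_size bytes, the explicit IV is copied from just after the 5-byte header (rxm->offset + TLS_HEADER_SIZE), and the AAD is built from the expected plaintext length derived from full_len minus the protocol overhead. A hedged sketch of carving those regions out of the record metadata; the struct and field names are illustrative, not the kernel's:

    #include <stddef.h>

    #define TLS_HEADER_SIZE 5           /* type + version + length */

    struct rec_regions {
        size_t iv_off;                  /* explicit IV / nonce             */
        size_t cipher_off;              /* start of data given to the AEAD */
        size_t cipher_len;              /* header excluded, tag included   */
        size_t plain_len;               /* expected plaintext length (AAD) */
    };

    /* Carve a TLS record, described only by (offset, full_len) inside a
     * larger stream, into the regions the decrypt step needs. */
    static struct rec_regions carve_record(size_t offset, size_t full_len,
                                           size_t prepend_size,
                                           size_t overhead_size)
    {
        struct rec_regions r;

        r.iv_off     = offset + TLS_HEADER_SIZE;
        r.cipher_off = offset + prepend_size;
        r.cipher_len = full_len - prepend_size;
        r.plain_len  = full_len - overhead_size;
        return r;
    }
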
1554 struct strp_msg *rxm = strp_msg(skb); in decrypt_skb_update() local
1559 err = tls_device_decrypted(sk, tls_ctx, skb, rxm); in decrypt_skb_update()
1585 rxm->full_len -= pad; in decrypt_skb_update()
1586 rxm->offset += prot->prepend_size; in decrypt_skb_update()
1587 rxm->full_len -= prot->overhead_size; in decrypt_skb_update()
1614 struct strp_msg *rxm = strp_msg(skb); in tls_sw_advance_skb() local
1616 if (len < rxm->full_len) { in tls_sw_advance_skb()
1617 rxm->offset += len; in tls_sw_advance_skb()
1618 rxm->full_len -= len; in tls_sw_advance_skb()
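
tls_sw_advance_skb() implements partial consumption: if the reader took fewer bytes than the record still holds, the window simply slides forward (offset += len, full_len -= len) and the skb stays queued; only a fully consumed record is unlinked and freed. A tiny model of that rule, with illustrative names:

    #include <stdbool.h>
    #include <stddef.h>

    struct rec_meta { size_t offset, full_len; };

    /* Consume `len` bytes from the front of a record.
     * Returns true if the record is exhausted and can be freed. */
    static bool advance_record(struct rec_meta *rxm, size_t len)
    {
        if (len < rxm->full_len) {
            rxm->offset   += len;   /* slide the window forward     */
            rxm->full_len -= len;   /* shrink what is left to read  */
            return false;           /* keep the record queued       */
        }
        return true;                /* fully consumed: unlink/free  */
    }
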
1658 struct strp_msg *rxm = strp_msg(skb); in process_rx_list() local
1665 if (skip < rxm->full_len) in process_rx_list()
1668 skip = skip - rxm->full_len; in process_rx_list()
1674 struct strp_msg *rxm = strp_msg(skb); in process_rx_list() local
1675 int chunk = min_t(unsigned int, rxm->full_len - skip, len); in process_rx_list()
1698 if (!zc || (rxm->full_len - skip) > len) { in process_rx_list()
1699 int err = skb_copy_datagram_msg(skb, rxm->offset + skip, in process_rx_list()
1710 rxm->offset = rxm->offset + chunk; in process_rx_list()
1711 rxm->full_len = rxm->full_len - chunk; in process_rx_list()
1714 if (rxm->full_len - skip) in process_rx_list()
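
process_rx_list() drains records that were already decrypted on an earlier call: it first walks past whole records that fall inside the `skip` region, then copies min(full_len - skip, len) from each remaining record, adjusting offset/full_len as data is handed to the caller. A simplified model of that skip-then-copy walk over a plain array instead of a socket queue; it treats skipped and copied bytes alike as consumed and leaves out the kernel's peek and zero-copy modes:

    #include <stddef.h>
    #include <string.h>

    struct rec_meta { size_t offset, full_len; const unsigned char *data; };

    /* Copy up to `len` bytes from a list of decrypted records into `out`,
     * skipping the first `skip` bytes. Returns the number of bytes copied. */
    static size_t drain_rx_list(struct rec_meta *recs, size_t nrecs,
                                size_t skip, unsigned char *out, size_t len)
    {
        size_t copied = 0, i = 0;

        /* Walk past records that lie entirely within the skip region. */
        while (i < nrecs && skip >= recs[i].full_len)
            skip -= recs[i++].full_len;

        for (; i < nrecs && copied < len; i++, skip = 0) {
            struct rec_meta *rxm = &recs[i];
            size_t chunk = rxm->full_len - skip;

            if (chunk > len - copied)
                chunk = len - copied;
            memcpy(out + copied, rxm->data + rxm->offset + skip, chunk);
            copied += chunk;

            /* Account for what was just delivered from this record. */
            rxm->offset   += skip + chunk;
            rxm->full_len -= skip + chunk;
        }
        return copied;
    }
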
1753 struct strp_msg *rxm; in tls_sw_recvmsg() local
1821 rxm = strp_msg(skb); in tls_sw_recvmsg()
1823 to_decrypt = rxm->full_len - prot->overhead_size; in tls_sw_recvmsg()
1885 rxm->offset = rxm->offset + rxm->full_len; in tls_sw_recvmsg()
1886 rxm->full_len = 0; in tls_sw_recvmsg()
1895 if (rxm->full_len > len) { in tls_sw_recvmsg()
1899 chunk = rxm->full_len; in tls_sw_recvmsg()
1902 err = skb_copy_datagram_msg(skb, rxm->offset, in tls_sw_recvmsg()
1908 rxm->offset = rxm->offset + chunk; in tls_sw_recvmsg()
1909 rxm->full_len = rxm->full_len - chunk; in tls_sw_recvmsg()
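
tls_sw_recvmsg() is the recvmsg() backend once the "tls" ULP is attached and receive keys are installed: for each queued record it decrypts full_len - overhead_size bytes, copies out a chunk no larger than what the caller asked for, and leaves offset/full_len pointing at whatever was not consumed. A minimal, hedged user-space sketch of the setup that routes recv() through this path; error handling is trimmed and the crypto material is obviously placeholder (it must come from a real TLS handshake):

    #include <linux/tls.h>
    #include <netinet/tcp.h>
    #include <sys/socket.h>

    #ifndef TCP_ULP
    #define TCP_ULP 31          /* from the kernel UAPI tcp.h */
    #endif
    #ifndef SOL_TLS
    #define SOL_TLS 282         /* from the kernel's socket.h */
    #endif

    /* Attach kernel TLS to an established, already-handshaked TCP socket
     * and read one chunk of application data via tls_sw_recvmsg(). */
    static ssize_t ktls_read_example(int sock)
    {
        struct tls12_crypto_info_aes_gcm_128 crypto = {
            .info.version     = TLS_1_2_VERSION,
            .info.cipher_type = TLS_CIPHER_AES_GCM_128,
            /* .iv, .key, .salt, .rec_seq must come from the handshake */
        };
        char buf[4096];

        setsockopt(sock, SOL_TCP, TCP_ULP, "tls", sizeof("tls"));
        setsockopt(sock, SOL_TLS, TLS_RX, &crypto, sizeof(crypto));

        /* This recv() lands in tls_sw_recvmsg(): records are decrypted and
         * copied out; a partially read record keeps its remainder queued
         * via the rxm->offset/full_len bookkeeping shown above. */
        return recv(sock, buf, sizeof(buf), 0);
    }
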
1993 struct strp_msg *rxm = NULL; in tls_sw_splice_read() local
2022 rxm = strp_msg(skb); in tls_sw_splice_read()
2024 chunk = min_t(unsigned int, rxm->full_len, len); in tls_sw_splice_read()
2025 copied = skb_splice_bits(skb, sk, rxm->offset, pipe, chunk, flags); in tls_sw_splice_read()
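
tls_sw_splice_read() is the splice() backend: after decryption it pushes min(rxm->full_len, len) bytes straight from the skb at rxm->offset into the pipe with skb_splice_bits(), so the plaintext never detours through a user buffer. A hedged user-space sketch of what drives it, assuming a kTLS socket set up as in the previous example:

    #define _GNU_SOURCE
    #include <fcntl.h>
    #include <unistd.h>

    /* Move up to `len` bytes of decrypted TLS payload from a kTLS socket
     * to `out_fd` without copying through user space:
     * socket -> pipe (tls_sw_splice_read) -> destination fd. */
    static ssize_t ktls_splice_example(int tls_sock, int out_fd, size_t len)
    {
        int pipefd[2];
        ssize_t n;

        if (pipe(pipefd) < 0)
            return -1;

        n = splice(tls_sock, NULL, pipefd[1], NULL, len, 0);
        if (n > 0)
            n = splice(pipefd[0], NULL, out_fd, NULL, (size_t)n, 0);

        close(pipefd[0]);
        close(pipefd[1]);
        return n;
    }
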
2060 struct strp_msg *rxm = strp_msg(skb); in tls_read_size() local
2066 if (rxm->offset + prot->prepend_size > skb->len) in tls_read_size()
2076 ret = skb_copy_bits(skb, rxm->offset, header, prot->prepend_size); in tls_read_size()
2107 TCP_SKB_CB(skb)->seq + rxm->offset); in tls_read_size()
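
tls_read_size() is the strparser callback that decides how long the next record is before anything is decrypted: it waits until at least prepend_size bytes past rxm->offset are present in the skb, copies the record header out with skb_copy_bits(), and on a bad header reports the TCP sequence number of the offending byte (TCP_SKB_CB(skb)->seq + rxm->offset). The wire format being parsed is the standard 5-byte TLS record header; a small stand-alone parser for it, assuming a flat buffer and an illustrative length cap:

    #include <stddef.h>

    #define TLS_HEADER_SIZE  5
    #define TLS_MAX_PAYLOAD  (16384 + 2048)  /* generous cap, illustration only */

    /* Parse a TLS record header: type (1) | version (2) | length (2, big endian).
     * Returns the full on-wire record size (header + payload), 0 if more data
     * is needed, or -1 if the header looks malformed. */
    static long tls_record_size(const unsigned char *buf, size_t avail)
    {
        size_t payload;

        if (avail < TLS_HEADER_SIZE)
            return 0;                    /* wait for a complete header */

        payload = ((size_t)buf[3] << 8) | buf[4];
        if (payload > TLS_MAX_PAYLOAD)
            return -1;                   /* implausible length: abort the stream */

        return (long)(TLS_HEADER_SIZE + payload);
    }
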