
Searched refs:end_seq (Results 1 – 22 of 22) sorted by relevance
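
Note: nearly every hit below compares end_seq against another sequence number through the kernel's wraparound-safe helpers before()/after() from include/net/tcp.h, and the tcp.h hit further down documents end_seq as "SEQ + FIN + SYN + datalen". As orientation, here is a minimal, self-contained sketch of that idiom; before()/after() mirror their kernel definitions, while the demo values in main() are purely illustrative.

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

/* Wraparound-safe sequence comparison, mirroring before()/after() in
 * include/net/tcp.h: the signed 32-bit difference stays correct when
 * sequence numbers wrap past 2^32. */
static bool before(uint32_t seq1, uint32_t seq2)
{
	return (int32_t)(seq1 - seq2) < 0;
}
#define after(seq2, seq1)	before(seq1, seq2)

int main(void)
{
	/* end_seq covers the payload plus one unit each for SYN and FIN,
	 * as in the tcp_v4_fill_cb()/tcp_v6_fill_cb() hits below. */
	uint32_t seq = 0xfffffff0u;		/* starts just before the wrap */
	uint32_t syn = 0, fin = 0, datalen = 100;
	uint32_t end_seq = seq + syn + fin + datalen;	/* wraps to 0x54 */

	printf("after(end_seq, seq)  = %d\n", after(end_seq, seq));	/* 1 */
	printf("before(seq, end_seq) = %d\n", before(seq, end_seq));	/* 1 */
	return 0;
}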

/linux/net/ipv4/
tcp_input.c
1305 !before(end_seq, TCP_SKB_CB(skb)->end_seq); in tcp_match_skb_to_sack()
1578 !before(end_seq, TCP_SKB_CB(skb)->end_seq); in tcp_shift_skb_data()
1914 u32 end_seq = sp[i].end_seq; in tcp_sacktag_write_queue() local
1941 if (!after(end_seq, cache->end_seq)) in tcp_sacktag_write_queue()
4407 if (after(end_seq, sp->end_seq)) in tcp_sack_extend()
4408 sp->end_seq = end_seq; in tcp_sack_extend()
4430 tp->duplicate_sack[0].end_seq = end_seq; in tcp_dsack_set()
4466 u32 end_seq = TCP_SKB_CB(skb)->end_seq; in tcp_send_dupack() local
4577 sp->end_seq = end_seq; in tcp_sack_new_ofo_skb()
4785 end_seq = TCP_SKB_CB(skb)->end_seq; in tcp_data_queue_ofo()
[all …]
tcp_recovery.c
82 tp->rack.end_seq, scb->end_seq)) in tcp_rack_detect_loss()
122 void tcp_rack_advance(struct tcp_sock *tp, u8 sacked, u32 end_seq, in tcp_rack_advance() argument
144 end_seq, tp->rack.end_seq)) { in tcp_rack_advance()
146 tp->rack.end_seq = end_seq; in tcp_rack_advance()
tcp_minisocks.c
26 static bool tcp_in_window(u32 seq, u32 end_seq, u32 s_win, u32 e_win) in tcp_in_window() argument
30 if (after(end_seq, s_win) && before(seq, e_win)) in tcp_in_window()
32 return seq == e_win && seq == end_seq; in tcp_in_window()
110 !tcp_in_window(TCP_SKB_CB(skb)->seq, TCP_SKB_CB(skb)->end_seq, in tcp_timewait_state_process()
124 !after(TCP_SKB_CB(skb)->end_seq, tcptw->tw_rcv_nxt) || in tcp_timewait_state_process()
125 TCP_SKB_CB(skb)->end_seq == TCP_SKB_CB(skb)->seq) { in tcp_timewait_state_process()
134 TCP_SKB_CB(skb)->end_seq != tcptw->tw_rcv_nxt + 1) in tcp_timewait_state_process()
139 tcptw->tw_rcv_nxt = TCP_SKB_CB(skb)->end_seq; in tcp_timewait_state_process()
168 (TCP_SKB_CB(skb)->seq == TCP_SKB_CB(skb)->end_seq || th->rst))) { in tcp_timewait_state_process()
704 if (paws_reject || !tcp_in_window(TCP_SKB_CB(skb)->seq, TCP_SKB_CB(skb)->end_seq, in tcp_check_req()
[all …]
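
The tcp_minisocks.c hits above expose the heart of tcp_in_window(): during TIME_WAIT and request-socket processing, a segment [seq, end_seq) is acceptable if it overlaps the receive window [s_win, e_win), with a special case for a zero-length segment sitting exactly at the right edge. The sketch below is reconstructed only from the two hit lines shown (the real function may contain further checks) and reuses the before()/after() helpers sketched near the top of this listing.

typedef uint32_t u32;	/* stand-in for the kernel typedef */

/* Reconstructed from the tcp_in_window() hits at lines 26/30/32 above. */
static bool tcp_in_window(u32 seq, u32 end_seq, u32 s_win, u32 e_win)
{
	/* Overlap test: the segment ends after the window's left edge and
	 * starts before its right edge. */
	if (after(end_seq, s_win) && before(seq, e_win))
		return true;

	/* A zero-length segment (seq == end_seq) is still in-window when it
	 * sits exactly at the right edge. */
	return seq == e_win && seq == end_seq;
}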
tcp_output.c
403 TCP_SKB_CB(skb)->end_seq = seq; in tcp_init_nondata_skb()
672 *ptr++ = htonl(sp[this_sack].end_seq); in tcp_options_write()
1384 if (after(tcb->end_seq, tp->snd_nxt) || tcb->seq == tcb->end_seq) in __tcp_transmit_skb()
1580 TCP_SKB_CB(buff)->end_seq = TCP_SKB_CB(skb)->end_seq; in tcp_fragment()
1935 tp->snd_sml = TCP_SKB_CB(skb)->end_seq; in tcp_minshall_update()
2094 u32 end_seq = TCP_SKB_CB(skb)->end_seq; in tcp_snd_wnd_test() local
2099 return !after(end_seq, tcp_wnd_end(tp)); in tcp_snd_wnd_test()
2134 TCP_SKB_CB(buff)->end_seq = TCP_SKB_CB(skb)->end_seq; in tso_fragment()
3046 TCP_SKB_CB(skb)->end_seq = TCP_SKB_CB(next_skb)->end_seq; in tcp_collapse_retrans()
3384 TCP_SKB_CB(tskb)->end_seq++; in tcp_send_fin()
[all …]
tcp_illinois.c
49 u32 end_seq; /* right edge of current RTT */ member
62 ca->end_seq = tp->snd_nxt; in rtt_reset()
265 if (after(ack, ca->end_seq)) in tcp_illinois_cong_avoid()
tcp_cubic.c
102 u32 end_seq; /* end_seq of the round */ member
124 ca->end_seq = tp->snd_nxt; in bictcp_hystart_reset()
392 if (after(tp->snd_una, ca->end_seq)) in hystart_update()
tcp_fastopen.c
173 if (TCP_SKB_CB(skb)->end_seq == tp->rcv_nxt) in tcp_fastopen_add_skb()
196 tp->rcv_nxt = TCP_SKB_CB(skb)->end_seq; in tcp_fastopen_add_skb()
349 bool syn_data = TCP_SKB_CB(skb)->end_seq != TCP_SKB_CB(skb)->seq + 1; in tcp_try_fastopen()
tcp.c
657 tcb->seq = tcb->end_seq = tp->write_seq; in tcp_skb_entail()
931 if (skb && TCP_SKB_CB(skb)->seq == TCP_SKB_CB(skb)->end_seq) { in tcp_remove_empty_skb()
993 TCP_SKB_CB(skb)->end_seq += copy; in tcp_build_frag()
1376 TCP_SKB_CB(skb)->end_seq += copy; in tcp_sendmsg_locked()
1534 WARN(skb && !before(tp->copied_seq, TCP_SKB_CB(skb)->end_seq), in tcp_cleanup_rbuf()
1536 tp->copied_seq, TCP_SKB_CB(skb)->end_seq, tp->rcv_nxt); in tcp_cleanup_rbuf()
2736 u32 len = TCP_SKB_CB(skb)->end_seq - TCP_SKB_CB(skb)->seq; in __tcp_close()
3027 tp->duplicate_sack[0].end_seq = 0; in tcp_disconnect()
tcp_ipv4.c
1848 if (TCP_SKB_CB(tail)->end_seq != TCP_SKB_CB(skb)->seq || in tcp_add_backlog()
1874 TCP_SKB_CB(tail)->end_seq = TCP_SKB_CB(skb)->end_seq; in tcp_add_backlog()
1951 TCP_SKB_CB(skb)->end_seq = (TCP_SKB_CB(skb)->seq + th->syn + th->fin + in tcp_v4_fill_cb()
/linux/net/netfilter/
nf_conntrack_seqadj.c
94 if (after(ntohl(sack->end_seq) - seq->offset_before, in nf_ct_sack_block_adjust()
96 new_end_seq = htonl(ntohl(sack->end_seq) - in nf_ct_sack_block_adjust()
99 new_end_seq = htonl(ntohl(sack->end_seq) - in nf_ct_sack_block_adjust()
104 ntohl(sack->end_seq), ntohl(new_end_seq)); in nf_ct_sack_block_adjust()
109 sack->end_seq, new_end_seq, false); in nf_ct_sack_block_adjust()
111 sack->end_seq = new_end_seq; in nf_ct_sack_block_adjust()
/linux/net/mptcp/
protocol.c
40 u64 end_seq; member
147 MPTCP_SKB_CB(to)->end_seq = MPTCP_SKB_CB(from)->end_seq; in mptcp_try_coalesce()
209 u64 seq, end_seq, max_seq; in mptcp_data_queue_ofo() local
213 end_seq = MPTCP_SKB_CB(skb)->end_seq; in mptcp_data_queue_ofo()
218 if (after64(end_seq, max_seq)) { in mptcp_data_queue_ofo()
264 if (!after64(end_seq, MPTCP_SKB_CB(skb1)->end_seq)) { in mptcp_data_queue_ofo()
301 if (before64(end_seq, MPTCP_SKB_CB(skb1)->end_seq)) in mptcp_data_queue_ofo()
712 u64 end_seq; in __mptcp_ofo_queue() local
731 end_seq = MPTCP_SKB_CB(skb)->end_seq; in __mptcp_ofo_queue()
743 msk->ack_seq = end_seq; in __mptcp_ofo_queue()
[all …]
options.c
404 subflow->snd_isn = TCP_SKB_CB(skb)->end_seq; in mptcp_syn_options()
902 TCP_SKB_CB(skb)->end_seq == TCP_SKB_CB(skb)->seq && in check_fully_established()
1152 if (TCP_SKB_CB(skb)->seq == TCP_SKB_CB(skb)->end_seq) { in mptcp_incoming_options()
subflow.c
954 TCP_SKB_CB(skb)->end_seq, in get_mapping_status()
1083 if (!before(tcp_sk(ssk)->copied_seq, TCP_SKB_CB(skb)->end_seq)) in mptcp_subflow_discard_data()
/linux/net/tls/
tls_device.c
162 if (info && !before(acked_seq, info->end_seq)) in tls_icsk_clean_acked()
166 if (before(acked_seq, info->end_seq)) in tls_icsk_clean_acked()
280 record->end_seq = tp->write_seq + record->len; in tls_push_record()
603 before(seq, info->end_seq - info->len)) { in tls_get_record()
626 last->end_seq)) in tls_get_record()
635 if (before(seq, info->end_seq)) { in tls_get_record()
637 after(info->end_seq, in tls_get_record()
638 context->retransmit_hint->end_seq)) { in tls_get_record()
1100 start_marker_record->end_seq = tcp_sk(sk)->write_seq; in tls_set_device_offload()
/linux/include/linux/
tcp.h
69 __be32 end_seq; member
74 u32 end_seq; member
216 u32 end_seq; /* Ending TCP sequence of the skb */ member
/linux/tools/testing/selftests/bpf/progs/
bpf_cubic.c
89 __u32 end_seq; /* end_seq of the round */ member
167 ca->end_seq = tp->snd_nxt; in bictcp_hystart_reset()
390 if (hystart && after(ack, ca->end_seq)) in BPF_STRUCT_OPS()
/linux/include/net/
tls.h
166 u32 end_seq; member
402 return rec->end_seq - rec->len; in tls_record_start_seq()
tcp.h
844 __u32 end_seq; /* SEQ + FIN + SYN + datalen */ member
2093 extern void tcp_rack_advance(struct tcp_sock *tp, u8 sacked, u32 end_seq,
/linux/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/
chcr_ktls.c
1744 tx_info->prev_seq = record->end_seq; in chcr_end_part_handler()
1995 tls_end_offset = record->end_seq - tcp_seq; in chcr_ktls_xmit()
1998 tcp_seq, record->end_seq, tx_info->prev_seq, data_len); in chcr_ktls_xmit()
2005 tx_max = record->end_seq - in chcr_ktls_xmit()
2050 tcp_seq = record->end_seq; in chcr_ktls_xmit()
/linux/drivers/infiniband/hw/irdma/
puda.c
1161 u32 marker_seq, end_seq, blk_start; in irdma_ieq_get_fpdu_len() local
1185 end_seq = rcv_seq + total_len; in irdma_ieq_get_fpdu_len()
1186 while ((int)(marker_seq - end_seq) < 0) { in irdma_ieq_get_fpdu_len()
1188 end_seq += marker_len; in irdma_ieq_get_fpdu_len()
/linux/net/ipv6/
tcp_ipv6.c
1578 if (TCP_SKB_CB(opt_skb)->end_seq == tp->rcv_nxt && in tcp_v6_do_rcv()
1615 TCP_SKB_CB(skb)->end_seq = (TCP_SKB_CB(skb)->seq + th->syn + th->fin + in tcp_v6_fill_cb()
/linux/net/sched/
sch_cake.c
1024 u32 end_a = get_unaligned_be32(&sack_a->end_seq); in cake_tcph_sack_compare()
1036 u32 end_b = get_unaligned_be32(&sack_tmp->end_seq); in cake_tcph_sack_compare()

Completed in 96 milliseconds