// SPDX-License-Identifier: GPL-2.0-or-later
/* Management of Tx window, Tx resend, ACKs and out-of-sequence reception
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/module.h>
#include <linux/circ_buf.h>
#include <linux/net.h>
#include <linux/skbuff.h>
#include <linux/slab.h>
#include <linux/udp.h>
#include <net/sock.h>
#include <net/af_rxrpc.h>
#include "ar-internal.h"

/*
 * Propose a PING ACK be sent.
 */
static void rxrpc_propose_ping(struct rxrpc_call *call,
			       bool immediate, bool background)
{
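	/* For an immediate ping, only the background case needs to queue the
	 * call's work item; a foreground caller is expected to transmit the
	 * ping itself.  A deferred ping is scheduled rxrpc_idle_ack_delay
	 * from now, and the call timer is brought forward only if that is
	 * sooner than any ping already pending.
	 */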
	if (immediate) {
		if (background &&
		    !test_and_set_bit(RXRPC_CALL_EV_PING, &call->events))
			rxrpc_queue_call(call);
	} else {
		unsigned long now = jiffies;
		unsigned long ping_at = now + rxrpc_idle_ack_delay;

		if (time_before(ping_at, call->ping_at)) {
			WRITE_ONCE(call->ping_at, ping_at);
			rxrpc_reduce_call_timer(call, ping_at, now,
						rxrpc_timer_set_for_ping);
		}
	}
}

/*
 * propose an ACK be sent
 */
static void __rxrpc_propose_ACK(struct rxrpc_call *call, u8 ack_reason,
				u32 serial, bool immediate, bool background,
				enum rxrpc_propose_ack_trace why)
{
	enum rxrpc_propose_ack_outcome outcome = rxrpc_propose_ack_use;
	unsigned long expiry = rxrpc_soft_ack_delay;
	s8 prior = rxrpc_ack_priority[ack_reason];

	/* Pings are handled specially because we don't want to accidentally
	 * lose a ping response by subsuming it into a ping.
	 */
	if (ack_reason == RXRPC_ACK_PING) {
		rxrpc_propose_ping(call, immediate, background);
		goto trace;
	}

	/* Update DELAY, IDLE, REQUESTED and PING_RESPONSE ACK serial
	 * numbers, but we don't alter the timeout.
	 */
	_debug("prior %u %u vs %u %u",
	       ack_reason, prior,
	       call->ackr_reason, rxrpc_ack_priority[call->ackr_reason]);
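	/* If the proposed ACK has the same reason as the one already pending,
	 * just refresh the serial number (for updateable reasons); a higher
	 * priority reason displaces the pending one, whilst a lower priority
	 * proposal is subsumed into it.
	 */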
	if (ack_reason == call->ackr_reason) {
		if (RXRPC_ACK_UPDATEABLE & (1 << ack_reason)) {
			outcome = rxrpc_propose_ack_update;
			call->ackr_serial = serial;
		}
		if (!immediate)
			goto trace;
	} else if (prior > rxrpc_ack_priority[call->ackr_reason]) {
		call->ackr_reason = ack_reason;
		call->ackr_serial = serial;
	} else {
		outcome = rxrpc_propose_ack_subsume;
	}

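	/* Pick the shortest delay applicable to this ACK reason; reasons
	 * other than REQUESTED, DELAY and IDLE are always sent immediately.
	 * An ACK requested by the very first packet (serial 1) is not forced
	 * to be immediate.
	 */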
	switch (ack_reason) {
	case RXRPC_ACK_REQUESTED:
		if (rxrpc_requested_ack_delay < expiry)
			expiry = rxrpc_requested_ack_delay;
		if (serial == 1)
			immediate = false;
		break;

	case RXRPC_ACK_DELAY:
		if (rxrpc_soft_ack_delay < expiry)
			expiry = rxrpc_soft_ack_delay;
		break;

	case RXRPC_ACK_IDLE:
		if (rxrpc_idle_ack_delay < expiry)
			expiry = rxrpc_idle_ack_delay;
		break;

	default:
		immediate = true;
		break;
	}

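	/* Either the ACK is already scheduled, or it must be sent now
	 * (queueing the call's work item if this is a background proposal),
	 * or it is deferred to roughly:
	 *
	 *	now + (srtt known ? srtt : expiry) + tx_backoff
	 *
	 * where srtt_us is kept scaled by 8, so >> 3 yields the estimated
	 * RTT.  The call timer is only ever brought forward, never pushed
	 * back.
	 */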
	if (test_bit(RXRPC_CALL_EV_ACK, &call->events)) {
		_debug("already scheduled");
	} else if (immediate || expiry == 0) {
		_debug("immediate ACK %lx", call->events);
		if (!test_and_set_bit(RXRPC_CALL_EV_ACK, &call->events) &&
		    background)
			rxrpc_queue_call(call);
	} else {
		unsigned long now = jiffies, ack_at;

		if (call->peer->srtt_us != 0)
			ack_at = usecs_to_jiffies(call->peer->srtt_us >> 3);
		else
			ack_at = expiry;

		ack_at += READ_ONCE(call->tx_backoff);
		ack_at += now;
		if (time_before(ack_at, call->ack_at)) {
			WRITE_ONCE(call->ack_at, ack_at);
			rxrpc_reduce_call_timer(call, ack_at, now,
						rxrpc_timer_set_for_ack);
		}
	}

trace:
	trace_rxrpc_propose_ack(call, why, ack_reason, serial, immediate,
				background, outcome);
}

/*
 * propose an ACK be sent, locking the call structure
 */
void rxrpc_propose_ACK(struct rxrpc_call *call, u8 ack_reason,
		       u32 serial, bool immediate, bool background,
		       enum rxrpc_propose_ack_trace why)
{
	spin_lock_bh(&call->lock);
	__rxrpc_propose_ACK(call, ack_reason, serial,
			    immediate, background, why);
	spin_unlock_bh(&call->lock);
}

/*
 * Handle congestion being detected by the retransmit timeout.
 */
static void rxrpc_congestion_timeout(struct rxrpc_call *call)
{
	set_bit(RXRPC_CALL_RETRANS_TIMEOUT, &call->flags);
}

/*
 * Perform retransmission of NAK'd and unack'd packets.
 */
static void rxrpc_resend(struct rxrpc_call *call, unsigned long now_j)
{
	struct sk_buff *skb;
	unsigned long resend_at;
	rxrpc_seq_t cursor, seq, top;
	ktime_t now, max_age, oldest, ack_ts;
	int ix;
	u8 annotation, anno_type, retrans = 0, unacked = 0;

	_enter("{%d,%d}", call->tx_hard_ack, call->tx_top);

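	/* Unacked packets transmitted more than one RTO ago are treated as
	 * lost; NAK'd packets are retransmitted regardless of their age.
	 */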
	now = ktime_get_real();
	max_age = ktime_sub_us(now, jiffies_to_usecs(call->peer->rto_j));

	spin_lock_bh(&call->lock);

	cursor = call->tx_hard_ack;
	top = call->tx_top;
	ASSERT(before_eq(cursor, top));
	if (cursor == top)
		goto out_unlock;

	/* Scan the packet list without dropping the lock and decide which of
	 * the packets in the Tx buffer we're going to resend and what the new
	 * resend timeout will be.
	 */
	trace_rxrpc_resend(call, (cursor + 1) & RXRPC_RXTX_BUFF_MASK);
	oldest = now;
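	/* Each annotation records the slot's state: soft-ACK'd packets are
	 * skipped, unacked packets are aged against max_age, and anything
	 * else (NAK'd or previously marked) is queued for retransmission.
	 */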
	for (seq = cursor + 1; before_eq(seq, top); seq++) {
		ix = seq & RXRPC_RXTX_BUFF_MASK;
		annotation = call->rxtx_annotations[ix];
		anno_type = annotation & RXRPC_TX_ANNO_MASK;
		annotation &= ~RXRPC_TX_ANNO_MASK;
		if (anno_type == RXRPC_TX_ANNO_ACK)
			continue;

		skb = call->rxtx_buffer[ix];
		rxrpc_see_skb(skb, rxrpc_skb_seen);

		if (anno_type == RXRPC_TX_ANNO_UNACK) {
			if (ktime_after(skb->tstamp, max_age)) {
				if (ktime_before(skb->tstamp, oldest))
					oldest = skb->tstamp;
				continue;
			}
			if (!(annotation & RXRPC_TX_ANNO_RESENT))
				unacked++;
		}

		/* Okay, we need to retransmit a packet. */
		call->rxtx_annotations[ix] = RXRPC_TX_ANNO_RETRANS | annotation;
		retrans++;
		trace_rxrpc_retransmit(call, seq, annotation | anno_type,
				       ktime_to_ns(ktime_sub(skb->tstamp, max_age)));
	}

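	/* The resend timer is wound to the age of the oldest packet still
	 * awaiting a response plus the current (possibly backed-off) RTO,
	 * measured from now.
	 */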
	resend_at = nsecs_to_jiffies(ktime_to_ns(ktime_sub(now, oldest)));
	resend_at += jiffies + rxrpc_get_rto_backoff(call->peer, retrans);
	WRITE_ONCE(call->resend_at, resend_at);

	if (unacked)
		rxrpc_congestion_timeout(call);

	/* If there was nothing that needed retransmission then it's likely
	 * that an ACK got lost somewhere.  Send a ping to find out instead of
	 * retransmitting data.
	 */
	if (!retrans) {
		rxrpc_reduce_call_timer(call, resend_at, now_j,
					rxrpc_timer_set_for_resend);
		spin_unlock_bh(&call->lock);
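		/* Skip the ping if an ACK was received within the last
		 * smoothed RTT (srtt_us is kept scaled by 8, so >> 3 yields
		 * microseconds).
		 */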
		ack_ts = ktime_sub(now, call->acks_latest_ts);
		if (ktime_to_us(ack_ts) < (call->peer->srtt_us >> 3))
			goto out;
		rxrpc_propose_ACK(call, RXRPC_ACK_PING, 0, true, false,
				  rxrpc_propose_ack_ping_for_lost_ack);
		rxrpc_send_ack_packet(call, true, NULL);
		goto out;
	}

	/* Now go through the Tx window and perform the retransmissions.  We
	 * have to drop the lock for each send.  If an ACK comes in whilst the
	 * lock is dropped, it may clear some of the retransmission markers for
	 * packets that it soft-ACKs.
	 */
	for (seq = cursor + 1; before_eq(seq, top); seq++) {
		ix = seq & RXRPC_RXTX_BUFF_MASK;
		annotation = call->rxtx_annotations[ix];
		anno_type = annotation & RXRPC_TX_ANNO_MASK;
		if (anno_type != RXRPC_TX_ANNO_RETRANS)
			continue;

		/* We need to reset the retransmission state, but we need to do
		 * so before we drop the lock as a new ACK/NAK may come in and
		 * confuse things
		 */
		annotation &= ~RXRPC_TX_ANNO_MASK;
		annotation |= RXRPC_TX_ANNO_UNACK | RXRPC_TX_ANNO_RESENT;
		call->rxtx_annotations[ix] = annotation;

		skb = call->rxtx_buffer[ix];
		if (!skb)
			continue;

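		/* A ref is taken on the skb so that it survives the unlocked
		 * transmission and is dropped again afterwards; if the send
		 * fails, we give up and leave the resend timer set above to
		 * bring us back here.
		 */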
		rxrpc_get_skb(skb, rxrpc_skb_got);
		spin_unlock_bh(&call->lock);

		if (rxrpc_send_data_packet(call, skb, true) < 0) {
			rxrpc_free_skb(skb, rxrpc_skb_freed);
			return;
		}

		if (rxrpc_is_client_call(call))
			rxrpc_expose_client_call(call);

		rxrpc_free_skb(skb, rxrpc_skb_freed);
		spin_lock_bh(&call->lock);
		if (after(call->tx_hard_ack, seq))
			seq = call->tx_hard_ack;
	}

out_unlock:
	spin_unlock_bh(&call->lock);
out:
	_leave("");
}

/*
 * Handle retransmission and deferred ACK/abort generation.
 */
void rxrpc_process_call(struct work_struct *work)
{
	struct rxrpc_call *call =
		container_of(work, struct rxrpc_call, processor);
	rxrpc_serial_t *send_ack;
	unsigned long now, next, t;
	unsigned int iterations = 0;

	rxrpc_see_call(call);

	//printk("\n--------------------\n");
	_enter("{%d,%s,%lx}",
	       call->debug_id, rxrpc_call_states[call->state], call->events);

recheck_state:
	/* Limit the number of times we do this before returning to the manager */
	iterations++;
	if (iterations > 5)
		goto requeue;

	if (test_and_clear_bit(RXRPC_CALL_EV_ABORT, &call->events)) {
		rxrpc_send_abort_packet(call);
		goto recheck_state;
	}

	if (call->state == RXRPC_CALL_COMPLETE) {
		rxrpc_delete_call_timer(call);
		goto out_put;
	}

	/* Work out if any timeouts tripped */
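	/* Timers that have tripped set an event bit for processing below; the
	 * rearmable ones are first pushed a long way into the future with
	 * cmpxchg() so that they don't immediately refire.
	 */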
	now = jiffies;
	t = READ_ONCE(call->expect_rx_by);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_normal, now);
		set_bit(RXRPC_CALL_EV_EXPIRED, &call->events);
	}

	t = READ_ONCE(call->expect_req_by);
	if (call->state == RXRPC_CALL_SERVER_RECV_REQUEST &&
	    time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_idle, now);
		set_bit(RXRPC_CALL_EV_EXPIRED, &call->events);
	}

	t = READ_ONCE(call->expect_term_by);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_hard, now);
		set_bit(RXRPC_CALL_EV_EXPIRED, &call->events);
	}

	t = READ_ONCE(call->ack_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_ack, now);
		cmpxchg(&call->ack_at, t, now + MAX_JIFFY_OFFSET);
		set_bit(RXRPC_CALL_EV_ACK, &call->events);
	}

	t = READ_ONCE(call->ack_lost_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_lost_ack, now);
		cmpxchg(&call->ack_lost_at, t, now + MAX_JIFFY_OFFSET);
		set_bit(RXRPC_CALL_EV_ACK_LOST, &call->events);
	}

	t = READ_ONCE(call->keepalive_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_keepalive, now);
		cmpxchg(&call->keepalive_at, t, now + MAX_JIFFY_OFFSET);
		rxrpc_propose_ACK(call, RXRPC_ACK_PING, 0, true, true,
				  rxrpc_propose_ack_ping_for_keepalive);
		set_bit(RXRPC_CALL_EV_PING, &call->events);
	}

	t = READ_ONCE(call->ping_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_ping, now);
		cmpxchg(&call->ping_at, t, now + MAX_JIFFY_OFFSET);
		set_bit(RXRPC_CALL_EV_PING, &call->events);
	}

	t = READ_ONCE(call->resend_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_resend, now);
		cmpxchg(&call->resend_at, t, now + MAX_JIFFY_OFFSET);
		set_bit(RXRPC_CALL_EV_RESEND, &call->events);
	}

	/* Process events */
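	/* If the call has expired but the connection has seen more recent
	 * traffic than this call has, the peer is presumably still there, so
	 * the call is reset with ECONNRESET rather than simply timed out.
	 */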
	if (test_and_clear_bit(RXRPC_CALL_EV_EXPIRED, &call->events)) {
		if (test_bit(RXRPC_CALL_RX_HEARD, &call->flags) &&
		    (int)call->conn->hi_serial - (int)call->rx_serial > 0) {
			trace_rxrpc_call_reset(call);
			rxrpc_abort_call("EXP", call, 0, RX_CALL_DEAD, -ECONNRESET);
		} else {
			rxrpc_abort_call("EXP", call, 0, RX_CALL_TIMEOUT, -ETIME);
		}
		set_bit(RXRPC_CALL_EV_ABORT, &call->events);
		goto recheck_state;
	}

	send_ack = NULL;
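	/* If a previously transmitted ACK seems to have been lost, ping the
	 * peer and note the ping's serial number so that the response can be
	 * matched to it.
	 */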
	if (test_and_clear_bit(RXRPC_CALL_EV_ACK_LOST, &call->events)) {
		call->acks_lost_top = call->tx_top;
		rxrpc_propose_ACK(call, RXRPC_ACK_PING, 0, true, false,
				  rxrpc_propose_ack_ping_for_lost_ack);
		send_ack = &call->acks_lost_ping;
	}

	if (test_and_clear_bit(RXRPC_CALL_EV_ACK, &call->events) ||
	    send_ack) {
		if (call->ackr_reason) {
			rxrpc_send_ack_packet(call, false, send_ack);
			goto recheck_state;
		}
	}

	if (test_and_clear_bit(RXRPC_CALL_EV_PING, &call->events)) {
		rxrpc_send_ack_packet(call, true, NULL);
		goto recheck_state;
	}

	if (test_and_clear_bit(RXRPC_CALL_EV_RESEND, &call->events) &&
	    call->state != RXRPC_CALL_CLIENT_RECV_REPLY) {
		rxrpc_resend(call, now);
		goto recheck_state;
	}

	/* Make sure the timer is restarted */
	next = call->expect_rx_by;

#define set(T) { t = READ_ONCE(T); if (time_before(t, next)) next = t; }

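	/* Wind the timer to the earliest of the outstanding expiry times; if
	 * that moment has already passed, go round the event loop again
	 * instead.
	 */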
	set(call->expect_req_by);
	set(call->expect_term_by);
	set(call->ack_at);
	set(call->ack_lost_at);
	set(call->resend_at);
	set(call->keepalive_at);
	set(call->ping_at);

	now = jiffies;
	if (time_after_eq(now, next))
		goto recheck_state;

	rxrpc_reduce_call_timer(call, next, now, rxrpc_timer_restart);

	/* other events may have been raised since we started checking */
	if (call->events && call->state < RXRPC_CALL_COMPLETE)
		goto requeue;

out_put:
	rxrpc_put_call(call, rxrpc_call_put);
out:
	_leave("");
	return;

requeue:
	__rxrpc_queue_call(call);
	goto out;
}