/* Management of call timers, ACK proposal and retransmission events for
 * AF_RXRPC calls.
 *
 * NOTE(review): the original file header (licence/copyright lines) was lost
 * in extraction — restore from the upstream source before committing.
 */
0008 #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
0009
0010 #include <linux/module.h>
0011 #include <linux/circ_buf.h>
0012 #include <linux/net.h>
0013 #include <linux/skbuff.h>
0014 #include <linux/slab.h>
0015 #include <linux/udp.h>
0016 #include <net/sock.h>
0017 #include <net/af_rxrpc.h>
0018 #include "ar-internal.h"
0019
0020
0021
0022
0023 static void rxrpc_propose_ping(struct rxrpc_call *call,
0024 bool immediate, bool background)
0025 {
0026 if (immediate) {
0027 if (background &&
0028 !test_and_set_bit(RXRPC_CALL_EV_PING, &call->events))
0029 rxrpc_queue_call(call);
0030 } else {
0031 unsigned long now = jiffies;
0032 unsigned long ping_at = now + rxrpc_idle_ack_delay;
0033
0034 if (time_before(ping_at, call->ping_at)) {
0035 WRITE_ONCE(call->ping_at, ping_at);
0036 rxrpc_reduce_call_timer(call, ping_at, now,
0037 rxrpc_timer_set_for_ping);
0038 }
0039 }
0040 }
0041
0042
0043
0044
/*
 * Propose that an ACK with the given reason be transmitted for this call.
 * The caller must hold call->lock (see rxrpc_propose_ACK()).
 *
 * The ACK may be sent immediately (the call is queued to the background
 * processor if @background is set) or deferred by arming the call timer with
 * a reason-dependent delay.  @serial is the serial number of the packet
 * being acknowledged, and @why is recorded for tracing.
 */
static void __rxrpc_propose_ACK(struct rxrpc_call *call, u8 ack_reason,
				u32 serial, bool immediate, bool background,
				enum rxrpc_propose_ack_trace why)
{
	enum rxrpc_propose_ack_outcome outcome = rxrpc_propose_ack_use;
	unsigned long expiry = rxrpc_soft_ack_delay;
	s8 prior = rxrpc_ack_priority[ack_reason];

	/* Pings bypass the pending-ACK priority machinery below entirely. */
	if (ack_reason == RXRPC_ACK_PING) {
		rxrpc_propose_ping(call, immediate, background);
		goto trace;
	}

	/* Merge the new proposal with any ACK already pending: an equal
	 * reason just refreshes the serial (if that reason is updateable);
	 * a higher-priority reason replaces the pending one; a lower-priority
	 * one is subsumed by it (but may still force scheduling below).
	 */
	_debug("prior %u %u vs %u %u",
	       ack_reason, prior,
	       call->ackr_reason, rxrpc_ack_priority[call->ackr_reason]);
	if (ack_reason == call->ackr_reason) {
		if (RXRPC_ACK_UPDATEABLE & (1 << ack_reason)) {
			outcome = rxrpc_propose_ack_update;
			call->ackr_serial = serial;
		}
		if (!immediate)
			goto trace;
	} else if (prior > rxrpc_ack_priority[call->ackr_reason]) {
		call->ackr_reason = ack_reason;
		call->ackr_serial = serial;
	} else {
		outcome = rxrpc_propose_ack_subsume;
	}

	/* Work out how long the ACK may be deferred, and whether it must
	 * instead go out immediately (any reason not listed here).
	 */
	switch (ack_reason) {
	case RXRPC_ACK_REQUESTED:
		if (rxrpc_requested_ack_delay < expiry)
			expiry = rxrpc_requested_ack_delay;
		/* serial 1 is the first packet of the call — don't ACK it
		 * immediately. */
		if (serial == 1)
			immediate = false;
		break;

	case RXRPC_ACK_DELAY:
		if (rxrpc_soft_ack_delay < expiry)
			expiry = rxrpc_soft_ack_delay;
		break;

	case RXRPC_ACK_IDLE:
		if (rxrpc_idle_ack_delay < expiry)
			expiry = rxrpc_idle_ack_delay;
		break;

	default:
		immediate = true;
		break;
	}

	if (test_bit(RXRPC_CALL_EV_ACK, &call->events)) {
		/* An ACK transmission is already scheduled. */
		_debug("already scheduled");
	} else if (immediate || expiry == 0) {
		/* Flag the event; only queue the call if we set the bit and
		 * background processing was requested. */
		_debug("immediate ACK %lx", call->events);
		if (!test_and_set_bit(RXRPC_CALL_EV_ACK, &call->events) &&
		    background)
			rxrpc_queue_call(call);
	} else {
		unsigned long now = jiffies, ack_at;

		/* Base the delay on the smoothed RTT if one is available
		 * (srtt_us appears to be stored <<3 — NOTE(review): confirm
		 * against struct rxrpc_peer), else on the reason-based
		 * expiry chosen above.
		 */
		if (call->peer->srtt_us != 0)
			ack_at = usecs_to_jiffies(call->peer->srtt_us >> 3);
		else
			ack_at = expiry;

		ack_at += READ_ONCE(call->tx_backoff);
		ack_at += now;
		/* Only ever bring the ACK deadline forward. */
		if (time_before(ack_at, call->ack_at)) {
			WRITE_ONCE(call->ack_at, ack_at);
			rxrpc_reduce_call_timer(call, ack_at, now,
						rxrpc_timer_set_for_ack);
		}
	}

trace:
	trace_rxrpc_propose_ack(call, why, ack_reason, serial, immediate,
				background, outcome);
}
0132
0133
0134
0135
/*
 * Propose that an ACK be sent, locking the call state around
 * __rxrpc_propose_ACK().
 */
void rxrpc_propose_ACK(struct rxrpc_call *call, u8 ack_reason,
		       u32 serial, bool immediate, bool background,
		       enum rxrpc_propose_ack_trace why)
{
	spin_lock_bh(&call->lock);
	__rxrpc_propose_ACK(call, ack_reason, serial,
			    immediate, background, why);
	spin_unlock_bh(&call->lock);
}
0145
0146
0147
0148
/*
 * Record that a retransmission timeout occurred on the call.
 * NOTE(review): the flag is presumably consumed by the congestion-management
 * code elsewhere — confirm against the RXRPC_CALL_RETRANS_TIMEOUT users.
 */
static void rxrpc_congestion_timeout(struct rxrpc_call *call)
{
	set_bit(RXRPC_CALL_RETRANS_TIMEOUT, &call->flags);
}
0153
0154
0155
0156
/*
 * Perform retransmission of NAK'd and unack'd packets in the Tx window.
 * @now_j is the current time in jiffies, used when rearming the call timer.
 */
static void rxrpc_resend(struct rxrpc_call *call, unsigned long now_j)
{
	struct sk_buff *skb;
	unsigned long resend_at;
	rxrpc_seq_t cursor, seq, top;
	ktime_t now, max_age, oldest, ack_ts;
	int ix;
	u8 annotation, anno_type, retrans = 0, unacked = 0;

	_enter("{%d,%d}", call->tx_hard_ack, call->tx_top);

	/* Packets transmitted before max_age (one RTO ago) are treated as
	 * lost and queued for retransmission. */
	now = ktime_get_real();
	max_age = ktime_sub_us(now, jiffies_to_usecs(call->peer->rto_j));

	spin_lock_bh(&call->lock);

	cursor = call->tx_hard_ack;
	top = call->tx_top;
	ASSERT(before_eq(cursor, top));
	if (cursor == top)
		goto out_unlock;	/* nothing outstanding to resend */

	/* Scan the Tx annotations without dropping the lock, marking which
	 * packets need resending, counting not-previously-resent unacked
	 * packets, and tracking the oldest packet still inside its RTO so
	 * the resend timer can be rearmed appropriately.
	 */
	trace_rxrpc_resend(call, (cursor + 1) & RXRPC_RXTX_BUFF_MASK);
	oldest = now;
	for (seq = cursor + 1; before_eq(seq, top); seq++) {
		ix = seq & RXRPC_RXTX_BUFF_MASK;
		annotation = call->rxtx_annotations[ix];
		anno_type = annotation & RXRPC_TX_ANNO_MASK;
		annotation &= ~RXRPC_TX_ANNO_MASK;
		if (anno_type == RXRPC_TX_ANNO_ACK)
			continue;	/* already soft-ACK'd */

		skb = call->rxtx_buffer[ix];
		rxrpc_see_skb(skb, rxrpc_skb_seen);

		if (anno_type == RXRPC_TX_ANNO_UNACK) {
			/* Still within its RTO: just note the oldest such
			 * packet and move on. */
			if (ktime_after(skb->tstamp, max_age)) {
				if (ktime_before(skb->tstamp, oldest))
					oldest = skb->tstamp;
				continue;
			}
			if (!(annotation & RXRPC_TX_ANNO_RESENT))
				unacked++;
		}

		/* Packet needs retransmission (NAK'd, or unacked and timed
		 * out); preserve the non-type annotation bits. */
		call->rxtx_annotations[ix] = RXRPC_TX_ANNO_RETRANS | annotation;
		retrans++;
		trace_rxrpc_retransmit(call, seq, annotation | anno_type,
				       ktime_to_ns(ktime_sub(skb->tstamp, max_age)));
	}

	/* Rearm the resend deadline relative to the oldest packet still
	 * awaiting its RTO, plus the peer's backed-off RTO. */
	resend_at = nsecs_to_jiffies(ktime_to_ns(ktime_sub(now, oldest)));
	resend_at += jiffies + rxrpc_get_rto_backoff(call->peer, retrans);
	WRITE_ONCE(call->resend_at, resend_at);

	if (unacked)
		rxrpc_congestion_timeout(call);

	/* If nothing actually needed retransmission, it's possible that an
	 * ACK got lost somewhere — probe with a ping ACK rather than
	 * retransmitting data, unless an ACK arrived within the last
	 * smoothed-RTT interval.
	 */
	if (!retrans) {
		rxrpc_reduce_call_timer(call, resend_at, now_j,
					rxrpc_timer_set_for_resend);
		spin_unlock_bh(&call->lock);
		ack_ts = ktime_sub(now, call->acks_latest_ts);
		if (ktime_to_us(ack_ts) < (call->peer->srtt_us >> 3))
			goto out;
		rxrpc_propose_ACK(call, RXRPC_ACK_PING, 0, true, false,
				  rxrpc_propose_ack_ping_for_lost_ack);
		rxrpc_send_ack_packet(call, true, NULL);
		goto out;
	}

	/* Now perform the retransmissions.  The call lock is dropped around
	 * each transmission, so the hard-ACK point may advance meanwhile;
	 * the skb is pinned with an extra ref while unlocked.
	 */
	for (seq = cursor + 1; before_eq(seq, top); seq++) {
		ix = seq & RXRPC_RXTX_BUFF_MASK;
		annotation = call->rxtx_annotations[ix];
		anno_type = annotation & RXRPC_TX_ANNO_MASK;
		if (anno_type != RXRPC_TX_ANNO_RETRANS)
			continue;

		/* Mark the packet as resent-but-unacked before transmitting
		 * so a concurrent ACK is not lost against stale state.
		 */
		annotation &= ~RXRPC_TX_ANNO_MASK;
		annotation |= RXRPC_TX_ANNO_UNACK | RXRPC_TX_ANNO_RESENT;
		call->rxtx_annotations[ix] = annotation;

		skb = call->rxtx_buffer[ix];
		if (!skb)
			continue;	/* slot drained while we were unlocked */

		rxrpc_get_skb(skb, rxrpc_skb_got);
		spin_unlock_bh(&call->lock);

		if (rxrpc_send_data_packet(call, skb, true) < 0) {
			/* Transmission failed: drop our ref and bail without
			 * retaking the lock. */
			rxrpc_free_skb(skb, rxrpc_skb_freed);
			return;
		}

		if (rxrpc_is_client_call(call))
			rxrpc_expose_client_call(call);

		rxrpc_free_skb(skb, rxrpc_skb_freed);
		spin_lock_bh(&call->lock);
		/* Skip over anything that got hard-ACK'd while unlocked. */
		if (after(call->tx_hard_ack, seq))
			seq = call->tx_hard_ack;
	}

out_unlock:
	spin_unlock_bh(&call->lock);
out:
	_leave("");
}
0283
0284
0285
0286
/*
 * Handle the call's work item: check each of the call's timer deadlines,
 * convert any that have expired into event bits, then process the pending
 * events (abort, ACK, ping, resend) and rearm the call timer for the
 * nearest remaining deadline.
 */
void rxrpc_process_call(struct work_struct *work)
{
	struct rxrpc_call *call =
		container_of(work, struct rxrpc_call, processor);
	rxrpc_serial_t *send_ack;
	unsigned long now, next, t;
	unsigned int iterations = 0;

	rxrpc_see_call(call);

	_enter("{%d,%s,%lx}",
	       call->debug_id, rxrpc_call_states[call->state], call->events);

recheck_state:
	/* Bound the number of processing passes so one call can't hog the
	 * worker — after 5 passes, requeue and start afresh. */
	iterations++;
	if (iterations > 5)
		goto requeue;

	if (test_and_clear_bit(RXRPC_CALL_EV_ABORT, &call->events)) {
		rxrpc_send_abort_packet(call);
		goto recheck_state;
	}

	if (call->state == RXRPC_CALL_COMPLETE) {
		rxrpc_delete_call_timer(call);
		goto out_put;
	}

	/* Check the lifetime deadlines; an expiry just sets the EXPIRED
	 * event bit, handled below. */
	now = jiffies;
	t = READ_ONCE(call->expect_rx_by);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_normal, now);
		set_bit(RXRPC_CALL_EV_EXPIRED, &call->events);
	}

	t = READ_ONCE(call->expect_req_by);
	if (call->state == RXRPC_CALL_SERVER_RECV_REQUEST &&
	    time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_idle, now);
		set_bit(RXRPC_CALL_EV_EXPIRED, &call->events);
	}

	t = READ_ONCE(call->expect_term_by);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_hard, now);
		set_bit(RXRPC_CALL_EV_EXPIRED, &call->events);
	}

	/* For the one-shot deadlines below, cmpxchg pushes the stored
	 * deadline far into the future so each expiry fires only once,
	 * then the corresponding event bit is set. */
	t = READ_ONCE(call->ack_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_ack, now);
		cmpxchg(&call->ack_at, t, now + MAX_JIFFY_OFFSET);
		set_bit(RXRPC_CALL_EV_ACK, &call->events);
	}

	t = READ_ONCE(call->ack_lost_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_lost_ack, now);
		cmpxchg(&call->ack_lost_at, t, now + MAX_JIFFY_OFFSET);
		set_bit(RXRPC_CALL_EV_ACK_LOST, &call->events);
	}

	t = READ_ONCE(call->keepalive_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_keepalive, now);
		cmpxchg(&call->keepalive_at, t, now + MAX_JIFFY_OFFSET);
		rxrpc_propose_ACK(call, RXRPC_ACK_PING, 0, true, true,
				  rxrpc_propose_ack_ping_for_keepalive);
		set_bit(RXRPC_CALL_EV_PING, &call->events);
	}

	t = READ_ONCE(call->ping_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_ping, now);
		cmpxchg(&call->ping_at, t, now + MAX_JIFFY_OFFSET);
		set_bit(RXRPC_CALL_EV_PING, &call->events);
	}

	t = READ_ONCE(call->resend_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_resend, now);
		cmpxchg(&call->resend_at, t, now + MAX_JIFFY_OFFSET);
		set_bit(RXRPC_CALL_EV_RESEND, &call->events);
	}

	/* Process events: an expired call is aborted — as a reset if the
	 * peer has been heard from on a later serial (the call itself went
	 * dead), otherwise as a plain timeout. */
	if (test_and_clear_bit(RXRPC_CALL_EV_EXPIRED, &call->events)) {
		if (test_bit(RXRPC_CALL_RX_HEARD, &call->flags) &&
		    (int)call->conn->hi_serial - (int)call->rx_serial > 0) {
			trace_rxrpc_call_reset(call);
			rxrpc_abort_call("EXP", call, 0, RX_CALL_DEAD, -ECONNRESET);
		} else {
			rxrpc_abort_call("EXP", call, 0, RX_CALL_TIMEOUT, -ETIME);
		}
		set_bit(RXRPC_CALL_EV_ABORT, &call->events);
		goto recheck_state;
	}

	/* A lost-ACK event turns into a ping whose response serial is
	 * recorded in acks_lost_ping. */
	send_ack = NULL;
	if (test_and_clear_bit(RXRPC_CALL_EV_ACK_LOST, &call->events)) {
		call->acks_lost_top = call->tx_top;
		rxrpc_propose_ACK(call, RXRPC_ACK_PING, 0, true, false,
				  rxrpc_propose_ack_ping_for_lost_ack);
		send_ack = &call->acks_lost_ping;
	}

	if (test_and_clear_bit(RXRPC_CALL_EV_ACK, &call->events) ||
	    send_ack) {
		/* Only transmit if an ACK reason is actually pending. */
		if (call->ackr_reason) {
			rxrpc_send_ack_packet(call, false, send_ack);
			goto recheck_state;
		}
	}

	if (test_and_clear_bit(RXRPC_CALL_EV_PING, &call->events)) {
		rxrpc_send_ack_packet(call, true, NULL);
		goto recheck_state;
	}

	/* Resend is skipped while the client is receiving the reply — the
	 * Tx phase is over at that point. */
	if (test_and_clear_bit(RXRPC_CALL_EV_RESEND, &call->events) &&
	    call->state != RXRPC_CALL_CLIENT_RECV_REPLY) {
		rxrpc_resend(call, now);
		goto recheck_state;
	}

	/* Work out the nearest of all the deadlines and rearm the timer. */
	next = call->expect_rx_by;

#define set(T) { t = READ_ONCE(T); if (time_before(t, next)) next = t; }

	set(call->expect_req_by);
	set(call->expect_term_by);
	set(call->ack_at);
	set(call->ack_lost_at);
	set(call->resend_at);
	set(call->keepalive_at);
	set(call->ping_at);

	now = jiffies;
	if (time_after_eq(now, next))
		goto recheck_state;	/* a deadline passed while processing */

	rxrpc_reduce_call_timer(call, next, now, rxrpc_timer_restart);

	/* Any events left over get another processing pass. */
	if (call->events && call->state < RXRPC_CALL_COMPLETE)
		goto requeue;

out_put:
	rxrpc_put_call(call, rxrpc_call_put);
out:
	_leave("");
	return;

requeue:
	__rxrpc_queue_call(call);
	goto out;
}