use crate::{Jitter, RetryDecision, RetryPolicy};
use std::{
    cmp::{self, min},
    time::{Duration, SystemTime},
};

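/// Exponential backoff retry policy with jitter.
///
/// The wait time before a retry grows exponentially with the number of past retries
/// and is clamped to the `[min_retry_interval, max_retry_interval]` range.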
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub struct ExponentialBackoff {
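    /// Maximum number of allowed retries. `None` means the retry count is unbounded.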
    pub max_n_retries: Option<u32>,
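    /// Minimum waiting time between two retry attempts (before jitter is applied).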
    pub min_retry_interval: Duration,
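    /// Maximum waiting time between two retry attempts.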
    pub max_retry_interval: Duration,
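    /// How jitter is applied to the computed wait time.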
    pub jitter: Jitter,
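    /// Base of the exponential used to grow the wait time between attempts.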
    pub base: u32,
}

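/// An exponential backoff policy that also stops retrying once a maximum total
/// retry duration has elapsed since the first attempt.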
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ExponentialBackoffTimed {
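    /// No further retries are scheduled once this much time has elapsed since the first attempt.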
    max_total_retry_duration: Duration,

    backoff: ExponentialBackoff,
}

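/// Builder for [`ExponentialBackoff`] and [`ExponentialBackoffTimed`].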
pub struct ExponentialBackoffBuilder {
    min_retry_interval: Duration,
    max_retry_interval: Duration,
    jitter: Jitter,
    base: u32,
}

impl ExponentialBackoff {
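    /// Returns a builder with default settings: 1 second minimum interval,
    /// 30 minutes maximum interval, full jitter, and base 2.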
    pub fn builder() -> ExponentialBackoffBuilder {
        <_>::default()
    }

    fn too_many_attempts(&self, n_past_retries: u32) -> bool {
        self.max_n_retries
            .is_some_and(|max_n| max_n <= n_past_retries)
    }
}

impl RetryPolicy for ExponentialBackoff {
    fn should_retry(&self, _request_start_time: SystemTime, n_past_retries: u32) -> RetryDecision {
        if self.too_many_attempts(n_past_retries) {
            RetryDecision::DoNotRetry
        } else {
            let unjittered_wait_for = min(
                self.max_retry_interval,
                self.min_retry_interval * calculate_exponential(self.base, n_past_retries),
            );

            let jittered_wait_for = self.jitter.apply(
                unjittered_wait_for,
                self.min_retry_interval,
                &mut rand::rng(),
            );

            let execute_after =
                SystemTime::now() + clip(jittered_wait_for, self.max_retry_interval);
            RetryDecision::Retry { execute_after }
        }
    }
}

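// Never wait longer than `max_duration`.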
fn clip(duration: Duration, max_duration: Duration) -> Duration {
    cmp::min(duration, max_duration)
}

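// Compute `base^n_past_retries`, saturating at `u32::MAX` on overflow.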
fn calculate_exponential(base: u32, n_past_retries: u32) -> u32 {
    base.checked_pow(n_past_retries).unwrap_or(u32::MAX)
}

impl ExponentialBackoffTimed {
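    /// Maximum number of retries this policy allows, if a limit was configured.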
    pub fn max_retries(&self) -> Option<u32> {
        self.backoff.max_n_retries
    }

    fn trying_for_too_long(&self, started_at: SystemTime) -> bool {
        self.max_total_retry_duration <= Self::elapsed(started_at)
    }

    fn elapsed(started_at: SystemTime) -> Duration {
        SystemTime::now()
            .duration_since(started_at)
            .unwrap_or_default()
    }
}

impl Default for ExponentialBackoffBuilder {
    fn default() -> Self {
        Self {
            min_retry_interval: Duration::from_secs(1),
            max_retry_interval: Duration::from_secs(30 * 60),
            jitter: Jitter::Full,
            base: 2,
        }
    }
}

impl RetryPolicy for ExponentialBackoffTimed {
    fn should_retry(&self, request_start_time: SystemTime, n_past_retries: u32) -> RetryDecision {
        if self.trying_for_too_long(request_start_time) {
            return RetryDecision::DoNotRetry;
        }
        self.backoff
            .should_retry(request_start_time, n_past_retries)
    }
}

impl ExponentialBackoffBuilder {
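    /// Set the minimum and maximum waiting time between two retry attempts.
    ///
    /// # Panics
    ///
    /// Panics if `min_retry_interval` is greater than `max_retry_interval`.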
    pub fn retry_bounds(
        mut self,
        min_retry_interval: Duration,
        max_retry_interval: Duration,
    ) -> Self {
        assert!(
            min_retry_interval <= max_retry_interval,
            "The maximum retry interval must be greater than or equal to the minimum retry interval."
        );
        self.min_retry_interval = min_retry_interval;
        self.max_retry_interval = max_retry_interval;
        self
    }

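    /// Set the jitter strategy applied to the computed wait time.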
    pub fn jitter(mut self, jitter: Jitter) -> Self {
        self.jitter = jitter;
        self
    }

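    /// Set the base of the exponential used to grow the wait time between attempts.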
    pub fn base(mut self, base: u32) -> Self {
        self.base = base;
        self
    }

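    /// Build an [`ExponentialBackoff`] that stops retrying after `n` retries.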
    pub fn build_with_max_retries(self, n: u32) -> ExponentialBackoff {
        ExponentialBackoff {
            min_retry_interval: self.min_retry_interval,
            max_retry_interval: self.max_retry_interval,
            max_n_retries: Some(n),
            jitter: self.jitter,
            base: self.base,
        }
    }

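    /// Build an [`ExponentialBackoffTimed`] that stops retrying once `total_duration`
    /// has elapsed since the first attempt, with no limit on the number of retries.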
    pub fn build_with_total_retry_duration(
        self,
        total_duration: Duration,
    ) -> ExponentialBackoffTimed {
        ExponentialBackoffTimed {
            max_total_retry_duration: total_duration,
            backoff: ExponentialBackoff {
                min_retry_interval: self.min_retry_interval,
                max_retry_interval: self.max_retry_interval,
                max_n_retries: None,
                jitter: self.jitter,
                base: self.base,
            },
        }
    }

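    /// Build an [`ExponentialBackoffTimed`] that stops retrying once `total_duration`
    /// has elapsed since the first attempt, and that additionally caps the number of
    /// retries at however many un-jittered delays fit within `total_duration`.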
    pub fn build_with_total_retry_duration_and_limit_retries(
        self,
        total_duration: Duration,
    ) -> ExponentialBackoffTimed {
        let mut max_n_retries = None;

        let delays = (0u32..).map(|n| {
            min(
                self.max_retry_interval,
                self.min_retry_interval * calculate_exponential(self.base, n),
            )
        });

        let mut total = Duration::from_secs(0);
        for (n, delay) in delays.enumerate() {
            total += delay;
            if total >= total_duration {
                max_n_retries = Some(n as _);
                break;
            }
        }

        ExponentialBackoffTimed {
            max_total_retry_duration: total_duration,
            backoff: ExponentialBackoff {
                min_retry_interval: self.min_retry_interval,
                max_retry_interval: self.max_retry_interval,
                max_n_retries,
                jitter: self.jitter,
                base: self.base,
            },
        }
    }

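    /// Build an [`ExponentialBackoffTimed`] that stops retrying once `total_duration`
    /// has elapsed since the first attempt or after `max_n_retries` retries, whichever
    /// comes first.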
    pub fn build_with_total_retry_duration_and_max_retries(
        self,
        total_duration: Duration,
        max_n_retries: u32,
    ) -> ExponentialBackoffTimed {
        ExponentialBackoffTimed {
            max_total_retry_duration: total_duration,
            backoff: ExponentialBackoff {
                min_retry_interval: self.min_retry_interval,
                max_retry_interval: self.max_retry_interval,
                max_n_retries: Some(max_n_retries),
                jitter: self.jitter,
                base: self.base,
            },
        }
    }
}

#[cfg(test)]
mod tests {
    use std::convert::TryFrom as _;

    use rand::distr::{Distribution, Uniform};

    use super::*;

    fn get_retry_policy() -> ExponentialBackoff {
        ExponentialBackoff {
            max_n_retries: Some(6),
            min_retry_interval: Duration::from_secs(1),
            max_retry_interval: Duration::from_secs(5 * 60),
            jitter: Jitter::Full,
            base: 2,
        }
    }

    #[test]
    fn if_n_past_retries_is_below_maximum_it_decides_to_retry() {
        let policy = get_retry_policy();
        let n_past_retries = Uniform::try_from(0..policy.max_n_retries.unwrap())
            .unwrap()
            .sample(&mut rand::rng());
        assert!(n_past_retries < policy.max_n_retries.unwrap());

        let decision = policy.should_retry(SystemTime::now(), n_past_retries);

        assert!(matches!(decision, RetryDecision::Retry { .. }));
    }

    #[test]
    fn if_n_past_retries_is_above_maximum_it_decides_to_mark_as_failed() {
        let policy = get_retry_policy();
        let n_past_retries = Uniform::try_from(policy.max_n_retries.unwrap()..=u32::MAX)
            .unwrap()
            .sample(&mut rand::rng());
        assert!(n_past_retries >= policy.max_n_retries.unwrap());

        let decision = policy.should_retry(SystemTime::now(), n_past_retries);

        assert!(matches!(decision, RetryDecision::DoNotRetry));
    }

    #[test]
    fn maximum_retry_interval_is_never_exceeded() {
        let policy = get_retry_policy();
        let max_interval = policy.max_retry_interval;

        let decision = policy.should_retry(SystemTime::now(), policy.max_n_retries.unwrap() - 1);

        match decision {
            RetryDecision::Retry { execute_after } => {
                assert!(execute_after.duration_since(SystemTime::now()).unwrap() <= max_interval)
            }
            RetryDecision::DoNotRetry => panic!("Expected Retry decision."),
        }
    }

    #[test]
    fn overflow_backoff_exponent_does_not_cause_a_panic() {
        let policy = ExponentialBackoff {
            max_n_retries: Some(u32::MAX),
            ..get_retry_policy()
        };
        let max_interval = policy.max_retry_interval;
        let n_failed_attempts = u32::MAX - 1;

        let decision = policy.should_retry(SystemTime::now(), n_failed_attempts);

        match decision {
            RetryDecision::Retry { execute_after } => {
                assert!(execute_after.duration_since(SystemTime::now()).unwrap() <= max_interval)
            }
            RetryDecision::DoNotRetry => panic!("Expected Retry decision."),
        }
    }

    #[test]
    #[should_panic]
    fn builder_invalid_retry_bounds() {
        ExponentialBackoff::builder().retry_bounds(Duration::from_secs(3), Duration::from_secs(2));
    }

    #[test]
    fn does_not_retry_after_total_retry_duration() {
        let backoff = ExponentialBackoff::builder()
            .build_with_total_retry_duration(Duration::from_secs(24 * 60 * 60));

        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(23 * 60 * 60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 0);

            match decision {
                RetryDecision::Retry { .. } => {}
                _ => panic!("should retry"),
            }
        }
        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(25 * 60 * 60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 0);

            match decision {
                RetryDecision::DoNotRetry => {}
                _ => panic!("should not retry"),
            }
        }
    }

    #[test]
    fn does_not_retry_before_total_retry_duration_if_max_retries_exceeded() {
        let backoff = ExponentialBackoff::builder()
            .retry_bounds(Duration::from_secs(1), Duration::from_secs(6 * 60 * 60))
            .build_with_total_retry_duration_and_limit_retries(Duration::from_secs(24 * 60 * 60));

        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(23 * 60 * 60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 0);

            match decision {
                RetryDecision::Retry { .. } => {}
                _ => panic!("should retry"),
            }
        }
        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(23 * 60 * 60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 17);

            match decision {
                RetryDecision::DoNotRetry => {}
                _ => panic!("should not retry"),
            }
        }
        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(25 * 60 * 60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 0);

            match decision {
                RetryDecision::DoNotRetry => {}
                _ => panic!("should not retry"),
            }
        }
    }

    #[test]
    fn different_exponential_base_produce_different_max_retries_for_the_same_duration() {
        let backoff_base_2 = ExponentialBackoff::builder()
            .retry_bounds(Duration::from_secs(1), Duration::from_secs(60 * 60))
            .base(2)
            .build_with_total_retry_duration_and_limit_retries(Duration::from_secs(60 * 60));

        let backoff_base_3 = ExponentialBackoff::builder()
            .retry_bounds(Duration::from_secs(1), Duration::from_secs(60 * 60))
            .base(3)
            .build_with_total_retry_duration_and_limit_retries(Duration::from_secs(60 * 60));

        let backoff_base_4 = ExponentialBackoff::builder()
            .retry_bounds(Duration::from_secs(1), Duration::from_secs(60 * 60))
            .base(4)
            .build_with_total_retry_duration_and_limit_retries(Duration::from_secs(60 * 60));

        assert_eq!(backoff_base_2.max_retries().unwrap(), 11);
        assert_eq!(backoff_base_3.max_retries().unwrap(), 8);
        assert_eq!(backoff_base_4.max_retries().unwrap(), 6);
    }

    #[test]
    fn total_retry_duration_and_max_retries() {
        let backoff = ExponentialBackoff::builder()
            .retry_bounds(Duration::from_secs(1), Duration::from_secs(30))
            .build_with_total_retry_duration_and_max_retries(Duration::from_secs(120), 5);

        assert_eq!(backoff.max_retries(), Some(5));

        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 3);

            match decision {
                RetryDecision::Retry { .. } => {}
                _ => panic!("Should retry when within both retry count and duration limits"),
            }
        }

        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 5);

            match decision {
                RetryDecision::DoNotRetry => {}
                _ => panic!("Should not retry when max retries exceeded"),
            }
        }

        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(150))
                .unwrap();

            let decision = backoff.should_retry(started_at, 3);

            match decision {
                RetryDecision::DoNotRetry => {}
                _ => panic!("Should not retry when time duration exceeded"),
            }
        }
    }
}
}