retry_policies/policies/exponential_backoff.rs

use crate::{Jitter, RetryDecision, RetryPolicy};
use std::{
    cmp::{self, min},
    time::{Duration, SystemTime},
};

/// Exponential backoff with optional jitter.
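///
/// The un-jittered wait before retry `n` (zero-based) is
/// `min_retry_interval * base^n`, capped at `max_retry_interval`;
/// the configured jitter is then applied on top.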
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub struct ExponentialBackoff {
    /// Maximum number of allowed retry attempts.
    pub max_n_retries: Option<u32>,
    /// Minimum waiting time between two retry attempts (it can end up being lower due to jitter).
    pub min_retry_interval: Duration,
    /// Maximum waiting time between two retry attempts.
    pub max_retry_interval: Duration,
    /// How we apply jitter to the calculated backoff intervals.
    pub jitter: Jitter,
    /// Base of the exponential.
    pub base: u32,
}

/// Exponential backoff with a maximum retry duration.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ExponentialBackoffTimed {
    /// Maximum duration the retries can continue for, after which retries will stop.
    max_total_retry_duration: Duration,

    backoff: ExponentialBackoff,
}

/// Builds an exponential backoff policy.
///
/// # Example
///
/// ```rust
/// use retry_policies::{RetryDecision, RetryPolicy, Jitter};
/// use retry_policies::policies::ExponentialBackoff;
/// use std::time::Duration;
///
/// let backoff = ExponentialBackoff::builder()
///     .retry_bounds(Duration::from_secs(1), Duration::from_secs(60))
///     .jitter(Jitter::Bounded)
///     .base(2)
///     .build_with_total_retry_duration(Duration::from_secs(24 * 60 * 60));
/// ```
pub struct ExponentialBackoffBuilder {
    min_retry_interval: Duration,
    max_retry_interval: Duration,
    jitter: Jitter,
    base: u32,
}

impl ExponentialBackoff {
    /// Returns a builder.
    ///
    /// # Example
    /// ```
    /// use retry_policies::policies::ExponentialBackoff;
    /// use std::time::Duration;
    ///
    /// let backoff = ExponentialBackoff::builder()
    ///     .build_with_max_retries(5);
    ///
    /// assert_eq!(backoff.min_retry_interval, Duration::from_secs(1));
    /// assert_eq!(backoff.max_retry_interval, Duration::from_secs(30 * 60));
    /// assert_eq!(backoff.max_n_retries, Some(5));
    /// assert_eq!(backoff.base, 2);
    /// ```
    pub fn builder() -> ExponentialBackoffBuilder {
        <_>::default()
    }

    fn too_many_attempts(&self, n_past_retries: u32) -> bool {
        self.max_n_retries
            .is_some_and(|max_n| max_n <= n_past_retries)
    }
}

impl RetryPolicy for ExponentialBackoff {
    fn should_retry(&self, _request_start_time: SystemTime, n_past_retries: u32) -> RetryDecision {
        if self.too_many_attempts(n_past_retries) {
            RetryDecision::DoNotRetry
        } else {
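            // Grow the wait exponentially: min_retry_interval * base^n_past_retries,
            // capped at max_retry_interval; the configured jitter then adjusts the wait.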
            let unjittered_wait_for = min(
                self.max_retry_interval,
                self.min_retry_interval * calculate_exponential(self.base, n_past_retries),
            );

            let jittered_wait_for = self.jitter.apply(
                unjittered_wait_for,
                self.min_retry_interval,
                &mut rand::rng(),
            );

            let execute_after =
                SystemTime::now() + clip(jittered_wait_for, self.max_retry_interval);
            RetryDecision::Retry { execute_after }
        }
    }
}

/// Clip to the maximum allowed retry interval.
fn clip(duration: Duration, max_duration: Duration) -> Duration {
    cmp::min(duration, max_duration)
}

/// Calculate `base^n_past_retries`, saturating at `u32::MAX` on overflow.
fn calculate_exponential(base: u32, n_past_retries: u32) -> u32 {
    base.checked_pow(n_past_retries).unwrap_or(u32::MAX)
}

impl ExponentialBackoffTimed {
    /// Maximum number of allowed retry attempts.
    pub fn max_retries(&self) -> Option<u32> {
        self.backoff.max_n_retries
    }

    fn trying_for_too_long(&self, started_at: SystemTime) -> bool {
        self.max_total_retry_duration <= Self::elapsed(started_at)
    }

    fn elapsed(started_at: SystemTime) -> Duration {
        SystemTime::now()
            .duration_since(started_at)
            // If `started_at` is in the future then return a zero duration.
            .unwrap_or_default()
    }
}

impl Default for ExponentialBackoffBuilder {
    fn default() -> Self {
        Self {
            min_retry_interval: Duration::from_secs(1),
            max_retry_interval: Duration::from_secs(30 * 60),
            jitter: Jitter::Full,
            base: 2,
        }
    }
}

impl RetryPolicy for ExponentialBackoffTimed {
    fn should_retry(&self, request_start_time: SystemTime, n_past_retries: u32) -> RetryDecision {
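        // Once the total retry budget has elapsed, stop regardless of how many
        // attempts have been made; otherwise defer to the inner backoff policy.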
        if self.trying_for_too_long(request_start_time) {
            return RetryDecision::DoNotRetry;
        }
        self.backoff
            .should_retry(request_start_time, n_past_retries)
    }
}

impl ExponentialBackoffBuilder {
    /// Add min & max retry interval bounds. _Default [1s, 30m]_.
    ///
    /// See [`ExponentialBackoff::min_retry_interval`], [`ExponentialBackoff::max_retry_interval`].
    ///
    /// Panics if `min_retry_interval` > `max_retry_interval`.
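    ///
    /// # Example
    ///
    /// ```rust
    /// use retry_policies::policies::ExponentialBackoff;
    /// use std::time::Duration;
    ///
    /// // Any bounds are accepted as long as min <= max.
    /// let backoff = ExponentialBackoff::builder()
    ///     .retry_bounds(Duration::from_secs(2), Duration::from_secs(2 * 60))
    ///     .build_with_max_retries(3);
    ///
    /// assert_eq!(backoff.min_retry_interval, Duration::from_secs(2));
    /// assert_eq!(backoff.max_retry_interval, Duration::from_secs(2 * 60));
    /// ```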
    pub fn retry_bounds(
        mut self,
        min_retry_interval: Duration,
        max_retry_interval: Duration,
    ) -> Self {
        assert!(
            min_retry_interval <= max_retry_interval,
            "The maximum interval between retries must be greater than or equal to the minimum retry interval."
        );
        self.min_retry_interval = min_retry_interval;
        self.max_retry_interval = max_retry_interval;
        self
    }

    /// Set what type of jitter to apply.
    pub fn jitter(mut self, jitter: Jitter) -> Self {
        self.jitter = jitter;
        self
    }

    /// Set what base to use for the exponential.
    pub fn base(mut self, base: u32) -> Self {
        self.base = base;
        self
    }

    /// Builds an [`ExponentialBackoff`] with the given maximum retries.
    ///
    /// See [`ExponentialBackoff::max_n_retries`].
    pub fn build_with_max_retries(self, n: u32) -> ExponentialBackoff {
        ExponentialBackoff {
            min_retry_interval: self.min_retry_interval,
            max_retry_interval: self.max_retry_interval,
            max_n_retries: Some(n),
            jitter: self.jitter,
            base: self.base,
        }
    }

    /// Builds an [`ExponentialBackoff`] with the given maximum total duration for which retries will
    /// continue to be performed.
    ///
    /// # Example
    ///
    /// ```rust
    /// use retry_policies::{RetryDecision, RetryPolicy};
    /// use retry_policies::policies::ExponentialBackoff;
    /// use std::time::{Duration, SystemTime};
    ///
    /// let backoff = ExponentialBackoff::builder()
    ///     .build_with_total_retry_duration(Duration::from_secs(24 * 60 * 60));
    ///
    /// let started_at = SystemTime::now()
    ///     .checked_sub(Duration::from_secs(25 * 60 * 60))
    ///     .unwrap();
    ///
    /// let decision = backoff.should_retry(started_at, 0);
    /// assert!(matches!(decision, RetryDecision::DoNotRetry));
    /// ```
    pub fn build_with_total_retry_duration(
        self,
        total_duration: Duration,
    ) -> ExponentialBackoffTimed {
        ExponentialBackoffTimed {
            max_total_retry_duration: total_duration,
            backoff: ExponentialBackoff {
                min_retry_interval: self.min_retry_interval,
                max_retry_interval: self.max_retry_interval,
                max_n_retries: None,
                jitter: self.jitter,
                base: self.base,
            },
        }
    }

    /// Builds an [`ExponentialBackoffTimed`] with the given maximum total duration and limits the
    /// number of retries to a calculated maximum.
    ///
    /// This calculated maximum is based on how many attempts would be made without jitter applied.
    ///
    /// For example, with a total duration of 24 hours, retry bounds [1s, 6h] and 2 as the base of
    /// the exponential, we would calculate 17 max retries: the un-jittered waits (1s, 2s, 4s, ...,
    /// capped at 6h) only add up to more than 24 hours at the 18th attempt, which would therefore
    /// start after the total duration has already elapsed.
    ///
    /// If jitter ends up scheduling the 17th retry after the total duration has elapsed, the
    /// [`RetryPolicy::should_retry`] implementation of [`ExponentialBackoffTimed`] returns
    /// [`RetryDecision::DoNotRetry`] anyway: no retry is allowed after the total duration.
    ///
    /// Likewise, if one of the 17 allowed retries ends up being attempted after the total duration
    /// for some other reason (e.g. previous attempts taking a long time),
    /// [`RetryDecision::DoNotRetry`] is returned.
    ///
    /// In short, whichever limit is hit first is enforced: max retries or total duration.
    ///
    /// # Example
    ///
    /// ```rust
    /// use retry_policies::{RetryDecision, RetryPolicy};
    /// use retry_policies::policies::ExponentialBackoff;
    /// use std::time::{Duration, SystemTime};
    ///
    /// let exponential_backoff_timed = ExponentialBackoff::builder()
    ///     .retry_bounds(Duration::from_secs(1), Duration::from_secs(6 * 60 * 60))
    ///     .build_with_total_retry_duration_and_limit_retries(Duration::from_secs(24 * 60 * 60));
    ///
    /// assert_eq!(exponential_backoff_timed.max_retries(), Some(17));
    ///
    /// // Past the total retry duration: no retry.
    /// let started_at = SystemTime::now()
    ///     .checked_sub(Duration::from_secs(25 * 60 * 60))
    ///     .unwrap();
    ///
    /// let decision = exponential_backoff_timed.should_retry(started_at, 0);
    /// assert!(matches!(decision, RetryDecision::DoNotRetry));
    ///
    /// // Past the calculated retry limit: no retry, even within the total duration.
    /// let started_at = SystemTime::now()
    ///     .checked_sub(Duration::from_secs(60 * 60))
    ///     .unwrap();
    ///
    /// let decision = exponential_backoff_timed.should_retry(started_at, 18);
    /// assert!(matches!(decision, RetryDecision::DoNotRetry));
    /// ```
    pub fn build_with_total_retry_duration_and_limit_retries(
        self,
        total_duration: Duration,
    ) -> ExponentialBackoffTimed {
        let mut max_n_retries = None;

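        // Sum the un-jittered waits (min_retry_interval * base^n, capped at
        // max_retry_interval) until they reach `total_duration`; the attempt that
        // crosses the threshold fixes the maximum number of retries.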
        let delays = (0u32..).map(|n| {
            min(
                self.max_retry_interval,
                self.min_retry_interval * calculate_exponential(self.base, n),
            )
        });

        let mut total = Duration::from_secs(0);
        for (n, delay) in delays.enumerate() {
            total += delay;
            if total >= total_duration {
                max_n_retries = Some(n as _);
                break;
            }
        }

        ExponentialBackoffTimed {
            max_total_retry_duration: total_duration,
            backoff: ExponentialBackoff {
                min_retry_interval: self.min_retry_interval,
                max_retry_interval: self.max_retry_interval,
                max_n_retries,
                jitter: self.jitter,
                base: self.base,
            },
        }
    }

    /// Builds an [`ExponentialBackoffTimed`] with the given maximum total duration and maximum retries.
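    ///
    /// Whichever limit is reached first stops further retries.
    ///
    /// # Example
    ///
    /// ```rust
    /// use retry_policies::{RetryDecision, RetryPolicy};
    /// use retry_policies::policies::ExponentialBackoff;
    /// use std::time::{Duration, SystemTime};
    ///
    /// let backoff = ExponentialBackoff::builder()
    ///     .build_with_total_retry_duration_and_max_retries(Duration::from_secs(60 * 60), 10);
    ///
    /// assert_eq!(backoff.max_retries(), Some(10));
    ///
    /// // The retry count limit applies even while the total duration has not elapsed.
    /// let decision = backoff.should_retry(SystemTime::now(), 10);
    /// assert!(matches!(decision, RetryDecision::DoNotRetry));
    /// ```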
    pub fn build_with_total_retry_duration_and_max_retries(
        self,
        total_duration: Duration,
        max_n_retries: u32,
    ) -> ExponentialBackoffTimed {
        ExponentialBackoffTimed {
            max_total_retry_duration: total_duration,
            backoff: ExponentialBackoff {
                min_retry_interval: self.min_retry_interval,
                max_retry_interval: self.max_retry_interval,
                max_n_retries: Some(max_n_retries),
                jitter: self.jitter,
                base: self.base,
            },
        }
    }
}

#[cfg(test)]
mod tests {
    use std::convert::TryFrom as _;

    use rand::distr::{Distribution, Uniform};

    use super::*;

    fn get_retry_policy() -> ExponentialBackoff {
        ExponentialBackoff {
            max_n_retries: Some(6),
            min_retry_interval: Duration::from_secs(1),
            max_retry_interval: Duration::from_secs(5 * 60),
            jitter: Jitter::Full,
            base: 2,
        }
    }

    #[test]
    fn if_n_past_retries_is_below_maximum_it_decides_to_retry() {
        // Arrange
        let policy = get_retry_policy();
        let n_past_retries = Uniform::try_from(0..policy.max_n_retries.unwrap())
            .unwrap()
            .sample(&mut rand::rng());
        assert!(n_past_retries < policy.max_n_retries.unwrap());

        // Act
        let decision = policy.should_retry(SystemTime::now(), n_past_retries);

        // Assert
        assert!(matches!(decision, RetryDecision::Retry { .. }));
    }

    #[test]
    fn if_n_past_retries_is_above_maximum_it_decides_to_mark_as_failed() {
        // Arrange
        let policy = get_retry_policy();
        let n_past_retries = Uniform::try_from(policy.max_n_retries.unwrap()..=u32::MAX)
            .unwrap()
            .sample(&mut rand::rng());
        assert!(n_past_retries >= policy.max_n_retries.unwrap());

        // Act
        let decision = policy.should_retry(SystemTime::now(), n_past_retries);

        // Assert
        assert!(matches!(decision, RetryDecision::DoNotRetry));
    }

    #[test]
    fn maximum_retry_interval_is_never_exceeded() {
        // Arrange
        let policy = get_retry_policy();
        let max_interval = policy.max_retry_interval;

        // Act
        let decision = policy.should_retry(SystemTime::now(), policy.max_n_retries.unwrap() - 1);

        // Assert
        match decision {
            RetryDecision::Retry { execute_after } => {
                assert!(execute_after.duration_since(SystemTime::now()).unwrap() <= max_interval)
            }
            RetryDecision::DoNotRetry => panic!("Expected Retry decision."),
        }
    }

    #[test]
    fn overflow_backoff_exponent_does_not_cause_a_panic() {
        let policy = ExponentialBackoff {
            max_n_retries: Some(u32::MAX),
            ..get_retry_policy()
        };
        let max_interval = policy.max_retry_interval;
        let n_failed_attempts = u32::MAX - 1;

        // Act
        let decision = policy.should_retry(SystemTime::now(), n_failed_attempts);

        // Assert
        match decision {
            RetryDecision::Retry { execute_after } => {
                assert!(execute_after.duration_since(SystemTime::now()).unwrap() <= max_interval)
            }
            RetryDecision::DoNotRetry => panic!("Expected Retry decision."),
        }
    }

    #[test]
    #[should_panic]
    fn builder_invalid_retry_bounds() {
        // bounds are the wrong way round or invalid
        ExponentialBackoff::builder().retry_bounds(Duration::from_secs(3), Duration::from_secs(2));
    }

    #[test]
    fn does_not_retry_after_total_retry_duration() {
        let backoff = ExponentialBackoff::builder()
            .build_with_total_retry_duration(Duration::from_secs(24 * 60 * 60));

        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(23 * 60 * 60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 0);

            match decision {
                RetryDecision::Retry { .. } => {}
                _ => panic!("should retry"),
            }
        }
        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(25 * 60 * 60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 0);

            match decision {
                RetryDecision::DoNotRetry => {}
                _ => panic!("should not retry"),
            }
        }
    }

    #[test]
    fn does_not_retry_before_total_retry_duration_if_max_retries_exceeded() {
        let backoff = ExponentialBackoff::builder()
            // This configuration should allow 17 max retries inside a 24h total duration
            .retry_bounds(Duration::from_secs(1), Duration::from_secs(6 * 60 * 60))
            .build_with_total_retry_duration_and_limit_retries(Duration::from_secs(24 * 60 * 60));

        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(23 * 60 * 60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 0);

            match decision {
                RetryDecision::Retry { .. } => {}
                _ => panic!("should retry"),
            }
        }
        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(23 * 60 * 60))
                .unwrap();

            // Zero based, so this is the 18th retry
            let decision = backoff.should_retry(started_at, 17);

            match decision {
                RetryDecision::DoNotRetry => {}
                _ => panic!("should not retry"),
            }
        }
        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(25 * 60 * 60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 0);

            match decision {
                RetryDecision::DoNotRetry => {}
                _ => panic!("should not retry"),
            }
        }
    }

    #[test]
    fn different_exponential_base_produce_different_max_retries_for_the_same_duration() {
        let backoff_base_2 = ExponentialBackoff::builder()
            .retry_bounds(Duration::from_secs(1), Duration::from_secs(60 * 60))
            .base(2)
            .build_with_total_retry_duration_and_limit_retries(Duration::from_secs(60 * 60));

        let backoff_base_3 = ExponentialBackoff::builder()
            .retry_bounds(Duration::from_secs(1), Duration::from_secs(60 * 60))
            .base(3)
            .build_with_total_retry_duration_and_limit_retries(Duration::from_secs(60 * 60));

        let backoff_base_4 = ExponentialBackoff::builder()
            .retry_bounds(Duration::from_secs(1), Duration::from_secs(60 * 60))
            .base(4)
            .build_with_total_retry_duration_and_limit_retries(Duration::from_secs(60 * 60));

        assert_eq!(backoff_base_2.max_retries().unwrap(), 11);
        assert_eq!(backoff_base_3.max_retries().unwrap(), 8);
        assert_eq!(backoff_base_4.max_retries().unwrap(), 6);
    }

    #[test]
    fn total_retry_duration_and_max_retries() {
        // Create a backoff policy with specific duration and retry limits
        let backoff = ExponentialBackoff::builder()
            .retry_bounds(Duration::from_secs(1), Duration::from_secs(30))
            .build_with_total_retry_duration_and_max_retries(Duration::from_secs(120), 5);

        // Verify that max retries was set correctly
        assert_eq!(backoff.max_retries(), Some(5));

        // Retry within both limits
        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 3);

            match decision {
                RetryDecision::Retry { .. } => {}
                _ => panic!("Should retry when within both retry count and duration limits"),
            }
        }

        // Retry exceeding max retries
        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(60))
                .unwrap();

            let decision = backoff.should_retry(started_at, 5);

            match decision {
                RetryDecision::DoNotRetry => {}
                _ => panic!("Should not retry when max retries exceeded"),
            }
        }

        // Retry exceeding the total duration
        {
            let started_at = SystemTime::now()
                .checked_sub(Duration::from_secs(150))
                .unwrap();

            let decision = backoff.should_retry(started_at, 3);

            match decision {
                RetryDecision::DoNotRetry => {}
                _ => panic!("Should not retry when time duration exceeded"),
            }
        }
    }
}