console_subscriber/callsites.rs
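//! A concurrent set of `tracing` callsites.
//!
//! This module provides [`Callsites`], a fixed-capacity, mostly lock-free
//! collection used to record which callsites have been seen and to answer
//! membership queries cheaply.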
use crate::sync::RwLock;
use std::{
    collections::HashSet,
    fmt, ptr,
    sync::atomic::{AtomicPtr, AtomicUsize, Ordering},
};
use tracing_core::{callsite, Metadata};

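/// A set of callsites, tracked by the addresses of their `'static` metadata.
///
/// Up to `MAX_CALLSITES` callsites are stored in a fixed-size array of atomic
/// pointers, so lookups on the hot path are plain atomic loads and pointer
/// comparisons. `len` counts every insertion; once it exceeds the array's
/// capacity, additional callsites spill into a lock-guarded `HashSet` of
/// callsite identifiers.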
pub(crate) struct Callsites<const MAX_CALLSITES: usize> {
    ptrs: [AtomicPtr<Metadata<'static>>; MAX_CALLSITES],
    len: AtomicUsize,
    spill: RwLock<HashSet<callsite::Identifier>>,
}

impl<const MAX_CALLSITES: usize> Callsites<MAX_CALLSITES> {
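    /// Adds `callsite` to the set, unless it is already present.
    ///
    /// The slot index is claimed with a single `fetch_add`; if it lands past
    /// the end of the array, the callsite's identifier is stored in the
    /// spill set instead.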
    #[track_caller]
    pub(crate) fn insert(&self, callsite: &'static Metadata<'static>) {
        // The callsite may already have been inserted, if the callsite cache
        // was invalidated and is being rebuilt. In that case, don't insert it
        // again.
        if self.contains(callsite) {
            return;
        }

        let idx = self.len.fetch_add(1, Ordering::AcqRel);
        if idx < MAX_CALLSITES {
            // If there's still room in the callsites array, stick the address
            // in there.
            self.ptrs[idx]
                .compare_exchange(
                    ptr::null_mut(),
                    callsite as *const _ as *mut _,
                    Ordering::AcqRel,
                    Ordering::Acquire,
                )
                .expect("a callsite would have been clobbered by `insert` (this is a bug)");
        } else {
            // Otherwise, we've filled the callsite array (sad!). Spill over
            // into a hash set.
            self.spill.write().insert(callsite.callsite());
        }
    }

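    /// Returns `true` if this set contains `callsite`.
    ///
    /// The pointer array is scanned with atomic loads, re-scanning any
    /// entries that were pushed concurrently, and the lock-guarded spill set
    /// is only consulted once the array has overflowed.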
    pub(crate) fn contains(&self, callsite: &'static Metadata<'static>) -> bool {
        let mut start = 0;
        let mut len = self.len.load(Ordering::Acquire);
        loop {
            // `len` counts every insertion, including those that spilled into
            // the hash set, so clamp both bounds to the array's capacity
            // before slicing; otherwise, the range would be out of bounds
            // once the array has overflowed.
            for cs in &self.ptrs[start.min(MAX_CALLSITES)..len.min(MAX_CALLSITES)] {
                if ptr::eq(cs.load(Ordering::Acquire), callsite) {
                    return true;
                }
            }

            // Did the length change while we were iterating over the callsite
            // array?
            let new_len = self.len.load(Ordering::Acquire);
            if new_len > len {
                // If so, check again to see if the callsite is contained in
                // any callsites that were pushed since the last time we
                // loaded `self.len`.
                start = len;
                len = new_len;
                continue;
            }

            // If `len` never exceeded the array's capacity, nothing has
            // spilled, and we have checked everything.
            if len <= MAX_CALLSITES {
                return false;
            }

            // Otherwise, we may have spilled over to the slower fallback hash
            // set. Check that.
            return self.check_spill(callsite);
        }
    }

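    /// Slow path for `contains`: checks the fallback spill set, taking the
    /// read lock. `#[cold]` hints that this is rarely reached, keeping it
    /// out of the hot lookup path.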
    #[cold]
    fn check_spill(&self, callsite: &'static Metadata<'static>) -> bool {
        self.spill.read().contains(&callsite.callsite())
    }
}

impl<const MAX_CALLSITES: usize> Default for Callsites<MAX_CALLSITES> {
    fn default() -> Self {
        // It's necessary to use a `const` value here to initialize the array,
        // because `AtomicPtr` is not `Copy`.
        //
        // Clippy does not like when `const` values have interior mutability. See:
        // https://rust-lang.github.io/rust-clippy/master/index.html#declare_interior_mutable_const
        //
        // The lint exists because a `const` value is always copied when it's
        // used, so mutations to it will not be reflected in the `const`
        // itself. In some cases, this is a footgun (when you meant to use a
        // `static` item instead). However, in this case, that is *precisely*
        // what we want: the `const` value is being used as an initializer for
        // the array, and it is *supposed* to be copied. Clippy's docs
        // recommend ignoring the lint when a `const` is used as a legacy
        // initializer for a static item; this is a very similar case.
        #[allow(clippy::declare_interior_mutable_const)]
        const NULLPTR: AtomicPtr<Metadata<'static>> = AtomicPtr::new(ptr::null_mut());
        Self {
            ptrs: [NULLPTR; MAX_CALLSITES],
            len: AtomicUsize::new(0),
            spill: Default::default(),
        }
    }
}

impl<const MAX_CALLSITES: usize> fmt::Debug for Callsites<MAX_CALLSITES> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // As in `contains`, `len` may exceed the array's capacity once
        // callsites have spilled into the hash set, so clamp it before
        // slicing.
        let len = self.len.load(Ordering::Acquire);
        f.debug_struct("Callsites")
            .field("ptrs", &&self.ptrs[..len.min(MAX_CALLSITES)])
            .field("len", &len)
            .field("max_callsites", &MAX_CALLSITES)
            .field("spill", &self.spill)
            .finish()
    }
}
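
// A minimal smoke-test sketch for `insert` and `contains`. It fabricates a
// static callsite by hand, mirroring how `tracing-core`'s documentation
// constructs test metadata; the `TestCallsite`/`TEST_META` names are local
// to this sketch, not part of any existing API.
#[cfg(test)]
mod tests {
    use super::*;
    use tracing_core::{field::FieldSet, metadata::Kind, subscriber::Interest, Level};

    struct TestCallsite;
    static TEST_CALLSITE: TestCallsite = TestCallsite;
    static TEST_META: Metadata<'static> = Metadata::new(
        "test_callsite",
        "callsites::tests",
        Level::INFO,
        None,
        None,
        None,
        FieldSet::new(&[], callsite::Identifier(&TEST_CALLSITE)),
        Kind::EVENT,
    );

    impl callsite::Callsite for TestCallsite {
        fn set_interest(&self, _: Interest) {}
        fn metadata(&self) -> &Metadata<'_> {
            &TEST_META
        }
    }

    #[test]
    fn insert_and_contains() {
        let callsites = Callsites::<2>::default();
        assert!(!callsites.contains(&TEST_META));

        callsites.insert(&TEST_META);
        assert!(callsites.contains(&TEST_META));

        // Re-inserting the same callsite (as happens when the callsite cache
        // is invalidated and rebuilt) must be a no-op, not a duplicate entry.
        callsites.insert(&TEST_META);
        assert!(callsites.contains(&TEST_META));
    }
}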