use std::convert::TryInto;
use std::io;

/// Read a u16 in little endian format from the beginning of the given slice.
/// This panics if the slice has length less than 2.
pub fn read_u16_le(slice: &[u8]) -> u16 {
    u16::from_le_bytes(slice[..2].try_into().unwrap())
}

/// Read a u24 (returned as a u32 with the most significant 8 bits always set
/// to 0) in little endian format from the beginning of the given slice. This
/// panics if the slice has length less than 3.
pub fn read_u24_le(slice: &[u8]) -> u32 {
    slice[0] as u32 | (slice[1] as u32) << 8 | (slice[2] as u32) << 16
}
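
// For example, read_u24_le(&[0x01, 0x02, 0x03]) evaluates to
// 0x01 | 0x02 << 8 | 0x03 << 16 = 0x0003_0201: the least significant byte
// comes first in the slice.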

/// Read a u32 in little endian format from the beginning of the given slice.
/// This panics if the slice has length less than 4.
pub fn read_u32_le(slice: &[u8]) -> u32 {
    u32::from_le_bytes(slice[..4].try_into().unwrap())
}

/// Like read_u32_le, but from an io::Read implementation. If io::Read does
/// not yield at least 4 bytes, then this returns an unexpected EOF error.
pub fn io_read_u32_le<R: io::Read>(mut rdr: R) -> io::Result<u32> {
    let mut buf = [0; 4];
    rdr.read_exact(&mut buf)?;
    Ok(u32::from_le_bytes(buf))
}
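
// Since &[u8] implements io::Read, a caller can pass a byte slice directly,
// e.g. io_read_u32_le(&buf[..]), which decodes the first 4 bytes of `buf` as
// a little endian u32 or fails with an unexpected EOF error if fewer than 4
// bytes are available.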

/// Write a u16 in little endian format to the beginning of the given slice.
/// This panics if the slice has length less than 2.
pub fn write_u16_le(n: u16, slice: &mut [u8]) {
    assert!(slice.len() >= 2);
    let bytes = n.to_le_bytes();
    slice[0] = bytes[0];
    slice[1] = bytes[1];
}

/// Write a u24 (given as a u32 where the most significant 8 bits are ignored)
/// in little endian format to the beginning of the given slice. This panics
/// if the slice has length less than 3.
pub fn write_u24_le(n: u32, slice: &mut [u8]) {
    slice[0] = n as u8;
    slice[1] = (n >> 8) as u8;
    slice[2] = (n >> 16) as u8;
}

/// Write a u32 in little endian format to the beginning of the given slice.
/// This panics if the slice has length less than 4.
pub fn write_u32_le(n: u32, slice: &mut [u8]) {
    assert!(slice.len() >= 4);
    let bytes = n.to_le_bytes();
    slice[0] = bytes[0];
    slice[1] = bytes[1];
    slice[2] = bytes[2];
    slice[3] = bytes[3];
}

/// Write `n` as a varint to the beginning of the given slice, returning the
/// number of bytes written. This panics if the slice is too small to hold the
/// encoding.
///
/// https://developers.google.com/protocol-buffers/docs/encoding#varints
pub fn write_varu64(data: &mut [u8], mut n: u64) -> usize {
    let mut i = 0;
    while n >= 0b1000_0000 {
        data[i] = (n as u8) | 0b1000_0000;
        n >>= 7;
        i += 1;
    }
    data[i] = n as u8;
    i + 1
}
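
// Worked example: 300 is 0b1_0010_1100, so write_varu64 emits the low 7 bits
// with the continuation bit set (0b1010_1100) followed by the remaining bits
// (0b0000_0010), and returns 2.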

/// Read a varint from the beginning of the given slice, returning the decoded
/// value and the number of bytes read. Returns `(0, 0)` if the slice ends
/// before the varint does, or if decoding would shift past the width of a u64.
///
/// https://developers.google.com/protocol-buffers/docs/encoding#varints
pub fn read_varu64(data: &[u8]) -> (u64, usize) {
    let mut n: u64 = 0;
    let mut shift: u32 = 0;
    for (i, &b) in data.iter().enumerate() {
        if b < 0b1000_0000 {
            return match (b as u64).checked_shl(shift) {
                None => (0, 0),
                Some(b) => (n | b, i + 1),
            };
        }
        match ((b as u64) & 0b0111_1111).checked_shl(shift) {
            None => return (0, 0),
            Some(b) => n |= b,
        }
        shift += 7;
    }
    (0, 0)
}
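
// Decoding reverses the example above: read_varu64(&[0b1010_1100, 0b0000_0010])
// returns (300, 2), while a truncated varint, or one that shifts past the
// width of a u64, comes back as (0, 0).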

/// Does an unaligned load of a little endian encoded u32.
///
/// This is unsafe because `data` must point to some memory of size at least 4.
pub unsafe fn loadu_u32_le(data: *const u8) -> u32 {
    // A native endian load followed by to_le() is a no-op on little endian
    // targets and a byte swap on big endian targets, which in both cases
    // yields the little endian interpretation of the pointed-to bytes.
    loadu_u32_ne(data).to_le()
}

/// Does an unaligned load of a native endian encoded u32.
///
/// This is unsafe because `data` must point to some memory of size at least 4.
pub unsafe fn loadu_u32_ne(data: *const u8) -> u32 {
    (data as *const u32).read_unaligned()
}

/// Does an unaligned load of a little endian encoded u64.
///
/// This is unsafe because `data` must point to some memory of size at least 8.
pub unsafe fn loadu_u64_le(data: *const u8) -> u64 {
    // Same reasoning as loadu_u32_le above.
    loadu_u64_ne(data).to_le()
}

/// Does an unaligned load of a native endian encoded u64.
///
/// This is unsafe because `data` must point to some memory of size at least 8.
pub unsafe fn loadu_u64_ne(data: *const u8) -> u64 {
    (data as *const u64).read_unaligned()
}