use crate::{
    arithmetic::inout::{AliasingSlices2, AliasingSlices3},
    bb, c,
    error::{self, LenMismatchError},
    polyfill::{sliceutil, usize_from_u32, ArrayFlatMap},
};
use core::{iter, num::NonZeroUsize};

#[cfg(any(test, feature = "alloc"))]
use crate::bits;

#[cfg(feature = "alloc")]
use core::num::Wrapping;

pub type Limb = bb::Word;
pub type LeakyLimb = bb::LeakyWord;
pub const LIMB_BITS: usize = usize_from_u32(Limb::BITS);
pub const LIMB_BYTES: usize = (LIMB_BITS + 7) / 8;

pub type LimbMask = bb::BoolMask;

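/// Constant-time comparison of `a` and `b`; the result mask is all ones when
/// the contents are equal. Fails if the slices have different lengths.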
#[inline]
pub fn limbs_equal_limbs_consttime(a: &[Limb], b: &[Limb]) -> Result<LimbMask, LenMismatchError> {
    if a.len() != b.len() {
        return Err(LenMismatchError::new(a.len()));
    }
    let all = a.iter().zip(b).fold(0, |running, (a, b)| running | (a ^ b));
    Ok(limb_is_zero(all))
}

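/// Constant-time test of whether `a < b`, where both slices hold little-endian
/// limbs and must have the same (non-zero) length.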
#[inline]
fn limbs_less_than_limbs(a: &[Limb], b: &[Limb]) -> Result<LimbMask, LenMismatchError> {
    prefixed_extern! {
        fn LIMBS_less_than(a: *const Limb, b: *const Limb, num_limbs: c::NonZero_size_t)
            -> LimbMask;
    }
    let len = NonZeroUsize::new(b.len()).ok_or_else(|| LenMismatchError::new(a.len()))?;
    if a.len() != len.get() {
        return Err(LenMismatchError::new(a.len()));
    }
    Ok(unsafe { LIMBS_less_than(a.as_ptr(), b.as_ptr(), len) })
}

#[inline]
pub(crate) fn verify_limbs_less_than_limbs_leak_bit(
    a: &[Limb],
    b: &[Limb],
) -> Result<(), error::Unspecified> {
    let r = limbs_less_than_limbs(a, b).map_err(error::erase::<LenMismatchError>)?;
    if r.leak() {
        Ok(())
    } else {
        Err(error::Unspecified)
    }
}

#[inline]
pub fn limbs_less_than_limbs_vartime(a: &[Limb], b: &[Limb]) -> Result<bool, LenMismatchError> {
    let r = limbs_less_than_limbs(a, b)?;
    Ok(r.leak())
}

#[inline]
fn limb_is_zero(limb: Limb) -> LimbMask {
    prefixed_extern! {
        fn LIMB_is_zero(limb: Limb) -> LimbMask;
    }
    unsafe { LIMB_is_zero(limb) }
}

#[inline]
pub fn limbs_are_zero(limbs: &[Limb]) -> LimbMask {
    limb_is_zero(limbs.iter().fold(0, |a, b| a | b))
}

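/// Fails if the value is even (or empty), leaking only the low bit.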
#[cfg(any(test, feature = "alloc"))]
#[inline]
pub fn limbs_reject_even_leak_bit(limbs: &[Limb]) -> Result<(), error::Unspecified> {
    let bottom = *limbs.first().ok_or(error::Unspecified)?;
    if limb_is_zero(bottom & 1).leak() {
        return Err(error::Unspecified);
    }
    Ok(())
}

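/// Fails unless the value is exactly 1, leaking only that single bit of
/// information.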
#[cfg(any(test, feature = "alloc"))]
#[inline]
pub fn verify_limbs_equal_1_leak_bit(a: &[Limb]) -> Result<(), error::Unspecified> {
    if let [bottom, ref rest @ ..] = *a {
        let equal = limb_is_zero(bottom ^ 1) & limbs_are_zero(rest);
        if equal.leak() {
            return Ok(());
        }
    }
    Err(error::Unspecified)
}

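/// Returns the minimal number of bits needed to represent the value; an
/// all-zero (or empty) input yields a zero bit length.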
#[cfg(any(test, feature = "alloc"))]
pub fn limbs_minimal_bits(a: &[Limb]) -> bits::BitLength {
    for num_limbs in (1..=a.len()).rev() {
        let high_limb = a[num_limbs - 1];

        // Find the most significant set bit of the most significant nonzero
        // limb by scanning down from the top bit.
        for high_limb_num_bits in (1..=LIMB_BITS).rev() {
            let shifted = unsafe { LIMB_shr(high_limb, high_limb_num_bits - 1) };
            if shifted != 0 {
                return bits::BitLength::from_bits(
                    ((num_limbs - 1) * LIMB_BITS) + high_limb_num_bits,
                );
            }
        }
    }

    // All limbs are zero.
    bits::BitLength::from_bits(0)
}

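/// Conditionally subtracts `m` from `r` once, so that an input `r < 2*m`
/// ends up fully reduced mod `m`.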
#[inline]
pub fn limbs_reduce_once(r: &mut [Limb], m: &[Limb]) -> Result<(), LenMismatchError> {
    prefixed_extern! {
        fn LIMBS_reduce_once(r: *mut Limb, m: *const Limb, num_limbs: c::NonZero_size_t);
    }
    let num_limbs = NonZeroUsize::new(r.len()).ok_or_else(|| LenMismatchError::new(m.len()))?;
    // Reject mismatched lengths before the FFI call reads out of bounds.
    if m.len() != num_limbs.get() {
        return Err(LenMismatchError::new(m.len()));
    }
    let r = r.as_mut_ptr(); // Non-dangling because `num_limbs` is non-zero.
    let m = m.as_ptr(); // Non-dangling because `num_limbs` is non-zero.
    unsafe { LIMBS_reduce_once(r, m, num_limbs) };
    Ok(())
}

#[derive(Clone, Copy, PartialEq)]
pub enum AllowZero {
    No,
    Yes,
}

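/// Parses a big-endian value into `result` (zero-padded at the high end) and
/// verifies that it is less than `max_exclusive`, and nonzero unless
/// `allow_zero` is `AllowZero::Yes`.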
pub fn parse_big_endian_in_range_and_pad_consttime(
    input: untrusted::Input,
    allow_zero: AllowZero,
    max_exclusive: &[Limb],
    result: &mut [Limb],
) -> Result<(), error::Unspecified> {
    parse_big_endian_and_pad_consttime(input, result)?;
    verify_limbs_less_than_limbs_leak_bit(result, max_exclusive)?;
    if allow_zero != AllowZero::Yes {
        if limbs_are_zero(result).leak() {
            return Err(error::Unspecified);
        }
    }
    Ok(())
}

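/// Parses big-endian bytes into little-endian limbs, zero-padding the unused
/// high limbs of `result`. Fails if the input is empty or too long to fit.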
pub fn parse_big_endian_and_pad_consttime(
    input: untrusted::Input,
    result: &mut [Limb],
) -> Result<(), error::Unspecified> {
    if input.is_empty() {
        return Err(error::Unspecified);
    }
    let input_limbs = input.as_slice_less_safe().rchunks(LIMB_BYTES).map(|chunk| {
        let mut padded = [0; LIMB_BYTES];
        sliceutil::overwrite_at_start(&mut padded[(LIMB_BYTES - chunk.len())..], chunk);
        Limb::from_be_bytes(padded)
    });
    if input_limbs.len() > result.len() {
        return Err(error::Unspecified);
    }

    result
        .iter_mut()
        .zip(input_limbs.chain(iter::repeat(0)))
        .for_each(|(r, i)| *r = i);

    Ok(())
}

pub fn big_endian_from_limbs(limbs: &[Limb], out: &mut [u8]) {
    let be_bytes = unstripped_be_bytes(limbs);
    assert_eq!(out.len(), be_bytes.len());
    out.iter_mut().zip(be_bytes).for_each(|(o, i)| {
        *o = i;
    });
}

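/// Returns an iterator over the big-endian byte encoding of `limbs`, including
/// any leading zero bytes.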
pub fn unstripped_be_bytes(limbs: &[Limb]) -> impl ExactSizeIterator<Item = u8> + Clone + '_ {
    // The `unwrap()` is OK because the total length in bytes fits in `usize`;
    // it is exactly the byte length of the `limbs` slice itself.
    ArrayFlatMap::new(limbs.iter().rev().copied(), Limb::to_be_bytes).unwrap()
}

pub type Window = bb::Word;

pub type LeakyWindow = bb::LeakyWord;

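/// Folds over the value as a sequence of 5-bit windows, from the most
/// significant window down to the least significant. `init` receives the
/// leading (possibly partial) window; `fold` receives each following window.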
#[cfg(feature = "alloc")]
pub fn fold_5_bit_windows<R, I: FnOnce(Window) -> R, F: Fn(R, Window) -> R>(
    limbs: &[Limb],
    init: I,
    fold: F,
) -> R {
    #[derive(Clone, Copy)]
    #[repr(transparent)]
    struct BitIndex(Wrapping<c::size_t>);

    const WINDOW_BITS: Wrapping<c::size_t> = Wrapping(5);

    prefixed_extern! {
        fn LIMBS_window5_split_window(
            lower_limb: Limb,
            higher_limb: Limb,
            index_within_word: BitIndex,
        ) -> Window;
        fn LIMBS_window5_unsplit_window(limb: Limb, index_within_word: BitIndex) -> Window;
    }

    let num_limbs = limbs.len();
    let mut window_low_bit = {
        let num_whole_windows = (num_limbs * LIMB_BITS) / 5;
        let mut leading_bits = (num_limbs * LIMB_BITS) - (num_whole_windows * 5);
        if leading_bits == 0 {
            leading_bits = WINDOW_BITS.0;
        }
        BitIndex(Wrapping(LIMB_BITS - leading_bits))
    };

    // The leading (possibly partial) window sits at the top of the most
    // significant limb; there is no limb above it, so the higher limb is zero.
    let initial_value = {
        let leading_partial_window =
            unsafe { LIMBS_window5_split_window(*limbs.last().unwrap(), 0, window_low_bit) };
        window_low_bit.0 -= WINDOW_BITS;
        init(leading_partial_window)
    };

    // Process the limbs from most significant to least significant so that a
    // window straddling a limb boundary sees the previously-visited (higher)
    // limb.
    let mut low_limb = Limb::from(0 as LeakyWindow);
    limbs
        .iter()
        .rev()
        .fold(initial_value, |mut acc, current_limb| {
            let higher_limb = low_limb;
            low_limb = *current_limb;

            if window_low_bit.0 > Wrapping(LIMB_BITS) - WINDOW_BITS {
                let window =
                    unsafe { LIMBS_window5_split_window(low_limb, higher_limb, window_low_bit) };
                window_low_bit.0 -= WINDOW_BITS;
                acc = fold(acc, window);
            };

            while window_low_bit.0 < Wrapping(LIMB_BITS) {
                let window = unsafe { LIMBS_window5_unsplit_window(low_limb, window_low_bit) };
                window_low_bit.0 -= WINDOW_BITS;
                acc = fold(acc, window);
            }
            window_low_bit.0 += Wrapping(LIMB_BITS);

            acc
        })
}

#[inline]
pub(crate) fn limbs_add_assign_mod(
    a: &mut [Limb],
    b: &[Limb],
    m: &[Limb],
) -> Result<(), LenMismatchError> {
    prefixed_extern! {
        fn LIMBS_add_mod(
            r: *mut Limb,
            a: *const Limb,
            b: *const Limb,
            m: *const Limb,
            num_limbs: c::NonZero_size_t,
        );
    }
    let num_limbs = NonZeroUsize::new(m.len()).ok_or_else(|| LenMismatchError::new(m.len()))?;
    (a, b).with_non_dangling_non_null_pointers_rab(num_limbs, |r, a, b| {
        let m = m.as_ptr();
        unsafe { LIMBS_add_mod(r, a, b, m, num_limbs) }
    })
}

pub(crate) fn limbs_double_mod(r: &mut [Limb], m: &[Limb]) -> Result<(), LenMismatchError> {
    prefixed_extern! {
        fn LIMBS_shl_mod(
            r: *mut Limb,
            a: *const Limb,
            m: *const Limb,
            num_limbs: c::NonZero_size_t);
    }
    let num_limbs = NonZeroUsize::new(m.len()).ok_or_else(|| LenMismatchError::new(m.len()))?;
    r.with_non_dangling_non_null_pointers_ra(num_limbs, |r, a| {
        let m = m.as_ptr();
        unsafe {
            LIMBS_shl_mod(r, a, m, num_limbs);
        }
    })
}

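/// Computes the two's complement negation of the odd value `a` into `r`.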
pub(crate) fn limbs_negative_odd(r: &mut [Limb], a: &[Limb]) {
    debug_assert_eq!(r.len(), a.len());
    // Two's complement step 1: flip all the bits.
    r.iter_mut().zip(a.iter()).for_each(|(r, &a)| {
        *r = !a;
    });
    // Two's complement step 2: add 1. Since `a` is odd, `!a` ends in a zero
    // bit, so adding 1 never carries; it just sets the low bit.
    r[0] |= 1;
}

#[cfg(any(test, feature = "alloc"))]
prefixed_extern! {
    fn LIMB_shr(a: Limb, shift: c::size_t) -> Limb;
}

#[allow(clippy::useless_conversion)]
#[cfg(test)]
mod tests {
    use super::*;
    use alloc::vec::Vec;
    use cfg_if::cfg_if;

    const MAX: LeakyLimb = LeakyLimb::MAX;

    fn leak_in_test(a: LimbMask) -> bool {
        a.leak()
    }

    #[test]
    fn test_limbs_are_even() {
        static EVENS: &[&[LeakyLimb]] = &[
            &[],
            &[0],
            &[2],
            &[0, 0],
            &[2, 0],
            &[0, 1],
            &[0, 2],
            &[0, 3],
            &[0, 0, 0, 0, MAX],
        ];
        for even in EVENS {
            let even = &Vec::from_iter(even.iter().copied().map(Limb::from));
            assert!(matches!(
                limbs_reject_even_leak_bit(even),
                Err(error::Unspecified)
            ));
        }
        static ODDS: &[&[LeakyLimb]] = &[
            &[1],
            &[3],
            &[1, 0],
            &[3, 0],
            &[1, 1],
            &[1, 2],
            &[1, 3],
            &[1, 0, 0, 0, MAX],
        ];
        for odd in ODDS {
            let odd = &Vec::from_iter(odd.iter().copied().map(Limb::from));
            assert!(matches!(limbs_reject_even_leak_bit(odd), Ok(())));
        }
    }

    static ZEROES: &[&[LeakyLimb]] = &[
        &[],
        &[0],
        &[0, 0],
        &[0, 0, 0],
        &[0, 0, 0, 0],
        &[0, 0, 0, 0, 0],
        &[0, 0, 0, 0, 0, 0, 0],
        &[0, 0, 0, 0, 0, 0, 0, 0],
        &[0, 0, 0, 0, 0, 0, 0, 0, 0],
    ];

    static NONZEROES: &[&[LeakyLimb]] = &[
        &[1],
        &[0, 1],
        &[1, 1],
        &[1, 0, 0, 0],
        &[0, 1, 0, 0],
        &[0, 0, 1, 0],
        &[0, 0, 0, 1],
    ];

    #[test]
    fn test_limbs_are_zero() {
        for zero in ZEROES {
            let zero = &Vec::from_iter(zero.iter().copied().map(Limb::from));
            assert!(leak_in_test(limbs_are_zero(zero)));
        }
        for nonzero in NONZEROES {
            let nonzero = &Vec::from_iter(nonzero.iter().copied().map(Limb::from));
            assert!(!leak_in_test(limbs_are_zero(nonzero)));
        }
    }

    #[test]
    fn test_limbs_equal_limb() {
        static EQUAL: &[&[LeakyLimb]] = &[&[1], &[1, 0], &[1, 0, 0], &[1, 0, 0, 0, 0, 0, 0]];
        for a in EQUAL {
            let a = &Vec::from_iter(a.iter().copied().map(Limb::from));
            assert!(matches!(verify_limbs_equal_1_leak_bit(a), Ok(())));
        }

        static UNEQUAL: &[&[LeakyLimb]] = &[
            &[0],
            &[2],
            &[3],
            &[MAX],
            &[0, 1],
            &[1, 1],
            &[0, 0, 0, 0, 0, 0, 0, 1],
            &[0, 0, 0, 0, 1, 0, 0, 0],
            &[0, 0, 0, 0, 1, 0, 0, 1],
            &[MAX, 1],
        ];
        for a in UNEQUAL {
            let a = &Vec::from_iter(a.iter().copied().map(Limb::from));
            assert!(matches!(
                verify_limbs_equal_1_leak_bit(a),
                Err(error::Unspecified)
            ));
        }
    }

    #[test]
    fn test_parse_big_endian_and_pad_consttime() {
        const LIMBS: usize = 4;

        {
            // Empty input is rejected.
            let inp = untrusted::Input::from(&[]);
            let mut result = [0; LIMBS].map(From::<LeakyLimb>::from);
            assert!(parse_big_endian_and_pad_consttime(inp, &mut result).is_err());
        }

        {
            // Input longer than the output buffer is rejected.
            let inp = [1, 2, 3, 4, 5, 6, 7, 8, 9];
            let inp = untrusted::Input::from(&inp);
            let mut result = [0; 8 / LIMB_BYTES].map(From::<LeakyLimb>::from);
            assert!(parse_big_endian_and_pad_consttime(inp, &mut result[..]).is_err());
        }

        {
            // A short input is zero-padded into the higher limbs.
            let inp = [0xfe];
            let inp = untrusted::Input::from(&inp);
            let mut result = [0; LIMBS].map(From::<LeakyLimb>::from);
            assert_eq!(
                Ok(()),
                parse_big_endian_and_pad_consttime(inp, &mut result[..])
            );
            assert_eq!(&[0xfe, 0, 0, 0], &result);
        }

        {
            let inp = [0xbe, 0xef, 0xf0, 0x0d];
            let inp = untrusted::Input::from(&inp);
            let mut result = [0; LIMBS].map(From::<LeakyLimb>::from);
            assert_eq!(Ok(()), parse_big_endian_and_pad_consttime(inp, &mut result));
            assert_eq!(&[0xbeeff00d, 0, 0, 0], &result);
        }

        cfg_if! {
            if #[cfg(target_pointer_width = "64")] {
                static TEST_CASES: &[(&[u8], &[Limb])] = &[
                    (&[1], &[1, 0]),
                    (&[1, 2], &[0x102, 0]),
                    (&[1, 2, 3], &[0x10203, 0]),
                    (&[1, 2, 3, 4], &[0x102_0304, 0]),
                    (&[1, 2, 3, 4, 5], &[0x1_0203_0405, 0]),
                    (&[1, 2, 3, 4, 5, 6], &[0x102_0304_0506, 0]),
                    (&[1, 2, 3, 4, 5, 6, 7], &[0x1_0203_0405_0607, 0]),
                    (&[1, 2, 3, 4, 5, 6, 7, 8], &[0x102_0304_0506_0708, 0]),
                    (&[1, 2, 3, 4, 5, 6, 7, 8, 9], &[0x0203_0405_0607_0809, 0x1]),
                    (&[1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa], &[0x0304_0506_0708_090a, 0x102]),
                    (&[1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb], &[0x0405_0607_0809_0a0b, 0x1_0203]),
                ];
                for (be_bytes, limbs) in TEST_CASES {
                    let mut buf = [0; 2];
                    parse_big_endian_and_pad_consttime(untrusted::Input::from(be_bytes), &mut buf)
                        .unwrap();
                    assert_eq!(limbs, &buf, "({be_bytes:x?}, {limbs:x?})");
                }
            } else if #[cfg(target_pointer_width = "32")] {
                static TEST_CASES: &[(&[u8], &[Limb])] = &[
                    (&[1], &[1, 0, 0]),
                    (&[1, 2], &[0x102, 0, 0]),
                    (&[1, 2, 3], &[0x10203, 0, 0]),
                    (&[1, 2, 3, 4], &[0x102_0304, 0, 0]),
                    (&[1, 2, 3, 4, 5], &[0x0203_0405, 0x1, 0]),
                    (&[1, 2, 3, 4, 5, 6], &[0x0304_0506, 0x102, 0]),
                    (&[1, 2, 3, 4, 5, 6, 7], &[0x0405_0607, 0x1_0203, 0]),
                    (&[1, 2, 3, 4, 5, 6, 7, 8], &[0x0506_0708, 0x102_0304, 0]),
                    (&[1, 2, 3, 4, 5, 6, 7, 8, 9], &[0x0607_0809, 0x0203_0405, 0x1]),
                    (&[1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa], &[0x0708_090a, 0x0304_0506, 0x102]),
                    (&[1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb], &[0x0809_0a0b, 0x0405_0607, 0x1_0203]),
                ];
                for (be_bytes, limbs) in TEST_CASES {
                    let mut buf = [0; 3];
                    parse_big_endian_and_pad_consttime(untrusted::Input::from(be_bytes), &mut buf)
                        .unwrap();
                    assert_eq!(limbs, &buf, "({be_bytes:x?}, {limbs:x?})");
                }
            } else {
                panic!("Unsupported target_pointer_width");
            }
        }
    }

    #[test]
    fn test_big_endian_from_limbs_same_length() {
        #[cfg(target_pointer_width = "32")]
        let limbs = [
            0xbccddeef, 0x89900aab, 0x45566778, 0x01122334, 0xddeeff00, 0x99aabbcc, 0x55667788,
            0x11223344,
        ];

        #[cfg(target_pointer_width = "64")]
        let limbs = [
            0x8990_0aab_bccd_deef,
            0x0112_2334_4556_6778,
            0x99aa_bbcc_ddee_ff00,
            0x1122_3344_5566_7788,
        ];

        let limbs = limbs.map(From::<LeakyLimb>::from);

        let expected = [
            0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee,
            0xff, 0x00, 0x01, 0x12, 0x23, 0x34, 0x45, 0x56, 0x67, 0x78, 0x89, 0x90, 0x0a, 0xab,
            0xbc, 0xcd, 0xde, 0xef,
        ];

        let mut out = [0xabu8; 32];
        big_endian_from_limbs(&limbs[..], &mut out);
        assert_eq!(&out[..], &expected[..]);
    }

    #[should_panic]
    #[test]
    fn test_big_endian_from_limbs_fewer_limbs() {
        #[cfg(target_pointer_width = "32")]
        let limbs = [
            0xbccddeef, 0x89900aab, 0x45566778, 0x01122334, 0xddeeff00, 0x99aabbcc,
        ];

        #[cfg(target_pointer_width = "64")]
        let limbs = [
            0x8990_0aab_bccd_deef,
            0x0112_2334_4556_6778,
            0x99aa_bbcc_ddee_ff00,
        ];

        let limbs = limbs.map(From::<LeakyLimb>::from);

        let mut out = [0xabu8; 32];

        big_endian_from_limbs(&limbs[..], &mut out);
    }

    #[test]
    fn test_limbs_minimal_bits() {
        const ALL_ONES: LeakyLimb = LeakyLimb::MAX;
        static CASES: &[(&[LeakyLimb], usize)] = &[
            (&[], 0),
            (&[0], 0),
            (&[ALL_ONES], LIMB_BITS),
            (&[ALL_ONES, 0], LIMB_BITS),
            (&[ALL_ONES, 1], LIMB_BITS + 1),
            (&[0, 0], 0),
            (&[1, 0], 1),
            (&[0, 1], LIMB_BITS + 1),
            (&[0, ALL_ONES], 2 * LIMB_BITS),
            (&[ALL_ONES, ALL_ONES], 2 * LIMB_BITS),
            (&[ALL_ONES, ALL_ONES >> 1], 2 * LIMB_BITS - 1),
            (&[ALL_ONES, 0b100_0000], LIMB_BITS + 7),
            (&[ALL_ONES, 0b101_0000], LIMB_BITS + 7),
            (&[ALL_ONES, ALL_ONES >> 1], LIMB_BITS + (LIMB_BITS) - 1),
        ];
        for (limbs, bits) in CASES {
            let limbs = &Vec::from_iter(limbs.iter().copied().map(Limb::from));
            assert_eq!(limbs_minimal_bits(limbs).as_bits(), *bits);
        }
    }
662}