//! Bounded floating-point types: finite, positive, non-zero positive and
//! normalized (`0.0..=1.0`) wrappers around `f32` and `f64`.

#![no_std]
#![deny(missing_docs)]
#![deny(missing_copy_implementations)]
#![deny(missing_debug_implementations)]

#[cfg(feature = "serde")]
mod serde;
macro_rules! impl_display {
    ($t:ident) => {
        impl core::fmt::Display for $t {
            #[inline]
            fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
                write!(f, "{}", self.get())
            }
        }
    };
}

#[cfg(feature = "approx-eq")]
pub use float_cmp::{ApproxEq, ApproxEqUlps, Ulps};

#[cfg(feature = "approx-eq")]
macro_rules! impl_approx_32 {
    ($t:ident) => {
        impl float_cmp::ApproxEq for $t {
            type Margin = float_cmp::F32Margin;

            #[inline]
            fn approx_eq<M: Into<Self::Margin>>(self, other: Self, margin: M) -> bool {
                self.0.approx_eq(other.0, margin)
            }
        }

        impl float_cmp::ApproxEqUlps for $t {
            type Flt = f32;

            #[inline]
            fn approx_eq_ulps(&self, other: &Self, ulps: i32) -> bool {
                self.0.approx_eq_ulps(&other.0, ulps)
            }
        }
    };
}

#[cfg(not(feature = "approx-eq"))]
macro_rules! impl_approx_32 {
    ($t:ident) => {};
}

#[cfg(feature = "approx-eq")]
macro_rules! impl_approx_64 {
    ($t:ident) => {
        impl float_cmp::ApproxEq for $t {
            type Margin = float_cmp::F64Margin;

            #[inline]
            fn approx_eq<M: Into<Self::Margin>>(self, other: Self, margin: M) -> bool {
                self.0.approx_eq(other.0, margin)
            }
        }

        impl float_cmp::ApproxEqUlps for $t {
            type Flt = f64;

            #[inline]
            fn approx_eq_ulps(&self, other: &Self, ulps: i64) -> bool {
                self.0.approx_eq_ulps(&other.0, ulps)
            }
        }
    };
}

#[cfg(not(feature = "approx-eq"))]
macro_rules! impl_approx_64 {
    ($t:ident) => {};
}

/// An immutable, finite `f32`.
///
/// Unlike `f32`, implements `Eq`, `Ord` and `Hash`.
#[derive(Copy, Clone, Default, Debug)]
#[repr(transparent)]
pub struct FiniteF32(f32);

impl FiniteF32 {
    /// Creates a finite `f32`.
    ///
    /// Returns `None` for NaN and infinity.
    #[inline]
    pub fn new(n: f32) -> Option<Self> {
        if n.is_finite() {
            Some(FiniteF32(n))
        } else {
            None
        }
    }

    /// Creates a finite `f32` without checking the value.
    ///
    /// # Safety
    ///
    /// `n` must be finite.
    #[inline]
    pub const unsafe fn new_unchecked(n: f32) -> Self {
        FiniteF32(n)
    }

    /// Returns the value as a primitive type.
    #[inline]
    pub const fn get(&self) -> f32 {
        self.0
    }
}

impl Eq for FiniteF32 {}

impl PartialEq for FiniteF32 {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}

impl Ord for FiniteF32 {
    #[inline]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        if self.0 < other.0 {
            core::cmp::Ordering::Less
        } else if self.0 > other.0 {
            core::cmp::Ordering::Greater
        } else {
            core::cmp::Ordering::Equal
        }
    }
}

impl PartialOrd for FiniteF32 {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl core::hash::Hash for FiniteF32 {
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.0.to_bits().hash(state);
    }
}

impl PartialEq<f32> for FiniteF32 {
    #[inline]
    fn eq(&self, other: &f32) -> bool {
        self.get() == *other
    }
}

impl_display!(FiniteF32);
impl_approx_32!(FiniteF32);

/// An immutable, finite `f64`.
///
/// Unlike `f64`, implements `Eq`, `Ord` and `Hash`.
#[derive(Copy, Clone, Default, Debug)]
#[repr(transparent)]
pub struct FiniteF64(f64);

impl FiniteF64 {
    /// Creates a finite `f64`.
    ///
    /// Returns `None` for NaN and infinity.
    #[inline]
    pub fn new(n: f64) -> Option<Self> {
        if n.is_finite() {
            Some(FiniteF64(n))
        } else {
            None
        }
    }

    /// Creates a finite `f64` without checking the value.
    ///
    /// # Safety
    ///
    /// `n` must be finite.
    #[inline]
    pub const unsafe fn new_unchecked(n: f64) -> Self {
        FiniteF64(n)
    }

    /// Returns the value as a primitive type.
    #[inline]
    pub const fn get(&self) -> f64 {
        self.0
    }
}

impl Eq for FiniteF64 {}

impl PartialEq for FiniteF64 {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}

impl Ord for FiniteF64 {
    #[inline]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        if self.0 < other.0 {
            core::cmp::Ordering::Less
        } else if self.0 > other.0 {
            core::cmp::Ordering::Greater
        } else {
            core::cmp::Ordering::Equal
        }
    }
}

impl PartialOrd for FiniteF64 {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl core::hash::Hash for FiniteF64 {
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.0.to_bits().hash(state);
    }
}

impl PartialEq<f64> for FiniteF64 {
    #[inline]
    fn eq(&self, other: &f64) -> bool {
        self.get() == *other
    }
}

impl_display!(FiniteF64);
impl_approx_64!(FiniteF64);

/// An immutable, finite `f32` that is known to be >= 0.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Default, Debug)]
#[repr(transparent)]
pub struct PositiveF32(FiniteF32);

impl PositiveF32 {
    /// A `PositiveF32` of zero.
    pub const ZERO: Self = PositiveF32(FiniteF32(0.0));

    /// Creates a finite, positive `f32`.
    ///
    /// Returns `None` for negative values, NaN and infinity.
    #[inline]
    pub fn new(n: f32) -> Option<Self> {
        if n.is_finite() && n >= 0.0 {
            Some(PositiveF32(FiniteF32(n)))
        } else {
            None
        }
    }

    /// Creates a finite, positive `f32` without checking the value.
    ///
    /// # Safety
    ///
    /// `n` must be finite and >= 0.
    #[inline]
    pub const unsafe fn new_unchecked(n: f32) -> Self {
        PositiveF32(FiniteF32(n))
    }

    /// Returns the value as a primitive type.
    #[inline]
    pub const fn get(&self) -> f32 {
        self.0.get()
    }

    /// Returns the value as a `FiniteF32`.
    #[inline]
    pub const fn get_finite(&self) -> FiniteF32 {
        self.0
    }
}

impl PartialEq<f32> for PositiveF32 {
    #[inline]
    fn eq(&self, other: &f32) -> bool {
        self.get() == *other
    }
}

impl_display!(PositiveF32);
impl_approx_32!(PositiveF32);

/// An immutable, finite `f64` that is known to be >= 0.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Default, Debug)]
#[repr(transparent)]
pub struct PositiveF64(FiniteF64);

impl PositiveF64 {
    /// A `PositiveF64` of zero.
    pub const ZERO: Self = PositiveF64(FiniteF64(0.0));

    /// Creates a finite, positive `f64`.
    ///
    /// Returns `None` for negative values, NaN and infinity.
    #[inline]
    pub fn new(n: f64) -> Option<Self> {
        if n.is_finite() && n >= 0.0 {
            Some(PositiveF64(FiniteF64(n)))
        } else {
            None
        }
    }

    /// Creates a finite, positive `f64` without checking the value.
    ///
    /// # Safety
    ///
    /// `n` must be finite and >= 0.
    #[inline]
    pub const unsafe fn new_unchecked(n: f64) -> Self {
        PositiveF64(FiniteF64(n))
    }

    /// Returns the value as a primitive type.
    #[inline]
    pub const fn get(&self) -> f64 {
        self.0.get()
    }

    /// Returns the value as a `FiniteF64`.
    #[inline]
    pub const fn get_finite(&self) -> FiniteF64 {
        self.0
    }
}

impl PartialEq<f64> for PositiveF64 {
    #[inline]
    fn eq(&self, other: &f64) -> bool {
        self.get() == *other
    }
}

impl_display!(PositiveF64);
impl_approx_64!(PositiveF64);

/// An immutable, finite `f32` that is known to be > 0.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
#[repr(transparent)]
pub struct NonZeroPositiveF32(FiniteF32);

impl NonZeroPositiveF32 {
    /// Creates a finite, non-zero, positive `f32`.
    ///
    /// Returns `None` for zero, negative values, NaN and infinity.
    #[inline]
    pub fn new(n: f32) -> Option<Self> {
        if n.is_finite() && n > 0.0 {
            Some(NonZeroPositiveF32(FiniteF32(n)))
        } else {
            None
        }
    }

    /// Creates a finite, non-zero, positive `f32` without checking the value.
    ///
    /// # Safety
    ///
    /// `n` must be finite and > 0.
    #[inline]
    pub const unsafe fn new_unchecked(n: f32) -> Self {
        NonZeroPositiveF32(FiniteF32(n))
    }

    /// Returns the value as a primitive type.
    #[inline]
    pub const fn get(&self) -> f32 {
        self.0.get()
    }

    /// Returns the value as a `FiniteF32`.
    #[inline]
    pub const fn get_finite(&self) -> FiniteF32 {
        self.0
    }
}

impl PartialEq<f32> for NonZeroPositiveF32 {
    #[inline]
    fn eq(&self, other: &f32) -> bool {
        self.get() == *other
    }
}

impl_display!(NonZeroPositiveF32);
impl_approx_32!(NonZeroPositiveF32);

/// An immutable, finite `f64` that is known to be > 0.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
#[repr(transparent)]
pub struct NonZeroPositiveF64(FiniteF64);

impl NonZeroPositiveF64 {
    /// Creates a finite, non-zero, positive `f64`.
    ///
    /// Returns `None` for zero, negative values, NaN and infinity.
    #[inline]
    pub fn new(n: f64) -> Option<Self> {
        if n.is_finite() && n > 0.0 {
            Some(NonZeroPositiveF64(FiniteF64(n)))
        } else {
            None
        }
    }

    /// Creates a finite, non-zero, positive `f64` without checking the value.
    ///
    /// # Safety
    ///
    /// `n` must be finite and > 0.
    #[inline]
    pub const unsafe fn new_unchecked(n: f64) -> Self {
        NonZeroPositiveF64(FiniteF64(n))
    }

    /// Returns the value as a primitive type.
    #[inline]
    pub const fn get(&self) -> f64 {
        self.0.get()
    }

    /// Returns the value as a `FiniteF64`.
    #[inline]
    pub const fn get_finite(&self) -> FiniteF64 {
        self.0
    }
}

impl PartialEq<f64> for NonZeroPositiveF64 {
    #[inline]
    fn eq(&self, other: &f64) -> bool {
        self.get() == *other
    }
}

impl_display!(NonZeroPositiveF64);
impl_approx_64!(NonZeroPositiveF64);

/// An immutable, finite `f32` in the `0.0..=1.0` range.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
#[repr(transparent)]
pub struct NormalizedF32(FiniteF32);

impl NormalizedF32 {
    /// A `NormalizedF32` of zero.
    pub const ZERO: Self = NormalizedF32(FiniteF32(0.0));
    /// A `NormalizedF32` of one.
    pub const ONE: Self = NormalizedF32(FiniteF32(1.0));

    /// Creates a `NormalizedF32` if the given value is in the `0.0..=1.0` range.
    #[inline]
    pub fn new(n: f32) -> Option<Self> {
        if n.is_finite() && n >= 0.0 && n <= 1.0 {
            Some(NormalizedF32(FiniteF32(n)))
        } else {
            None
        }
    }

    /// Creates a `NormalizedF32` without checking the value.
    ///
    /// # Safety
    ///
    /// `n` must be in the `0.0..=1.0` range.
    #[inline]
    pub const unsafe fn new_unchecked(n: f32) -> Self {
        NormalizedF32(FiniteF32(n))
    }

    /// Creates a `NormalizedF32`, clamping the given value to the `0.0..=1.0` range.
    ///
    /// Non-finite values (NaN and infinities) become zero.
    #[inline]
    pub fn new_clamped(n: f32) -> Self {
        if n.is_finite() {
            NormalizedF32(FiniteF32(clamp_f32(0.0, n, 1.0)))
        } else {
            Self::ZERO
        }
    }

    /// Creates a `NormalizedF32` from a `u8`, mapping `0..=255` to `0.0..=1.0`.
    #[inline]
    pub fn new_u8(n: u8) -> Self {
        NormalizedF32(FiniteF32(f32::from(n) / 255.0))
    }

    /// Creates a `NormalizedF32` from a `u16`, mapping `0..=65535` to `0.0..=1.0`.
    #[inline]
    pub fn new_u16(n: u16) -> Self {
        NormalizedF32(FiniteF32(f32::from(n) / 65535.0))
    }

    /// Returns the value as a primitive type.
    #[inline]
    pub const fn get(self) -> f32 {
        self.0.get()
    }

    /// Returns the value as a `FiniteF32`.
    #[inline]
    pub const fn get_finite(&self) -> FiniteF32 {
        self.0
    }

    /// Returns the value scaled to `0..=255`, rounding to the nearest integer.
    #[inline]
    pub fn to_u8(&self) -> u8 {
        ((self.0).0 * 255.0 + 0.5) as u8
    }

    /// Returns the value scaled to `0..=65535`, rounding to the nearest integer.
    #[inline]
    pub fn to_u16(&self) -> u16 {
        ((self.0).0 * 65535.0 + 0.5) as u16
    }
}

impl core::ops::Mul<NormalizedF32> for NormalizedF32 {
    type Output = Self;

    #[inline]
    fn mul(self, rhs: Self) -> Self::Output {
        Self::new_clamped((self.0).0 * (rhs.0).0)
    }
}

impl PartialEq<f32> for NormalizedF32 {
    #[inline]
    fn eq(&self, other: &f32) -> bool {
        self.get() == *other
    }
}

impl_display!(NormalizedF32);
impl_approx_32!(NormalizedF32);

/// An immutable, finite `f64` in the `0.0..=1.0` range.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
#[repr(transparent)]
pub struct NormalizedF64(FiniteF64);

impl NormalizedF64 {
    /// A `NormalizedF64` of zero.
    pub const ZERO: Self = NormalizedF64(FiniteF64(0.0));
    /// A `NormalizedF64` of one.
    pub const ONE: Self = NormalizedF64(FiniteF64(1.0));

    /// Creates a `NormalizedF64` if the given value is in the `0.0..=1.0` range.
    #[inline]
    pub fn new(n: f64) -> Option<Self> {
        // Mirror the f32 variant: reject non-finite values explicitly,
        // even though NaN and infinities already fail the range checks.
        if n.is_finite() && n >= 0.0 && n <= 1.0 {
            Some(NormalizedF64(FiniteF64(n)))
        } else {
            None
        }
    }

    /// Creates a `NormalizedF64` without checking the value.
    ///
    /// # Safety
    ///
    /// `n` must be in the `0.0..=1.0` range.
    #[inline]
    pub const unsafe fn new_unchecked(n: f64) -> Self {
        NormalizedF64(FiniteF64(n))
    }

    /// Creates a `NormalizedF64`, clamping the given value to the `0.0..=1.0` range.
    ///
    /// Non-finite values (NaN and infinities) become zero.
    #[inline]
    pub fn new_clamped(n: f64) -> Self {
        if n.is_finite() {
            NormalizedF64(FiniteF64(clamp_f64(0.0, n, 1.0)))
        } else {
            Self::ZERO
        }
    }

    /// Creates a `NormalizedF64` from a `u8`, mapping `0..=255` to `0.0..=1.0`.
    #[inline]
    pub fn new_u8(n: u8) -> Self {
        NormalizedF64(FiniteF64(f64::from(n) / 255.0))
    }

    /// Creates a `NormalizedF64` from a `u16`, mapping `0..=65535` to `0.0..=1.0`.
    #[inline]
    pub fn new_u16(n: u16) -> Self {
        NormalizedF64(FiniteF64(f64::from(n) / 65535.0))
    }

    /// Returns the value as a primitive type.
    #[inline]
    pub const fn get(self) -> f64 {
        self.0.get()
    }

    /// Returns the value as a `FiniteF64`.
    #[inline]
    pub const fn get_finite(&self) -> FiniteF64 {
        self.0
    }

    /// Returns the value scaled to `0..=255`, rounding to the nearest integer.
    #[inline]
    pub fn to_u8(&self) -> u8 {
        ((self.0).0 * 255.0 + 0.5) as u8
    }

    /// Returns the value scaled to `0..=65535`, rounding to the nearest integer.
    #[inline]
    pub fn to_u16(&self) -> u16 {
        ((self.0).0 * 65535.0 + 0.5) as u16
    }
}

impl core::ops::Mul<NormalizedF64> for NormalizedF64 {
    type Output = Self;

    #[inline]
    fn mul(self, rhs: Self) -> Self::Output {
        Self::new_clamped((self.0).0 * (rhs.0).0)
    }
}

impl PartialEq<f64> for NormalizedF64 {
    #[inline]
    fn eq(&self, other: &f64) -> bool {
        self.get() == *other
    }
}

impl_display!(NormalizedF64);
impl_approx_64!(NormalizedF64);

#[inline]
fn clamp_f32(min: f32, val: f32, max: f32) -> f32 {
    max.min(val).max(min)
}

#[inline]
fn clamp_f64(min: f64, val: f64, max: f64) -> f64 {
    max.min(val).max(min)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn finite_f32() {
        assert_eq!(FiniteF32::new(0.0).map(|n| n.get()), Some(0.0));
        assert_eq!(FiniteF32::new(core::f32::NAN), None);
        assert_eq!(FiniteF32::new(core::f32::INFINITY), None);
        assert_eq!(FiniteF32::new(core::f32::NEG_INFINITY), None);
    }

    #[test]
    fn positive_f32() {
        assert_eq!(NonZeroPositiveF32::new(-1.0).map(|n| n.get()), None);
        assert_eq!(NonZeroPositiveF32::new(0.0).map(|n| n.get()), None);
        assert_eq!(NonZeroPositiveF32::new(1.0).map(|n| n.get()), Some(1.0));
        assert_eq!(
            NonZeroPositiveF32::new(core::f32::EPSILON).map(|n| n.get()),
            Some(core::f32::EPSILON)
        );
        assert_eq!(
            NonZeroPositiveF32::new(-core::f32::EPSILON).map(|n| n.get()),
            None
        );
        assert_eq!(NonZeroPositiveF32::new(core::f32::NAN), None);
        assert_eq!(NonZeroPositiveF32::new(core::f32::INFINITY), None);
        assert_eq!(NonZeroPositiveF32::new(core::f32::NEG_INFINITY), None);
    }

    #[test]
    fn positive_f64() {
        assert_eq!(NonZeroPositiveF64::new(-1.0).map(|n| n.get()), None);
        assert_eq!(NonZeroPositiveF64::new(0.0).map(|n| n.get()), None);
        assert_eq!(NonZeroPositiveF64::new(1.0).map(|n| n.get()), Some(1.0));
        assert_eq!(
            NonZeroPositiveF64::new(core::f64::EPSILON).map(|n| n.get()),
            Some(core::f64::EPSILON)
        );
        assert_eq!(
            NonZeroPositiveF64::new(-core::f64::EPSILON).map(|n| n.get()),
            None
        );
        assert_eq!(NonZeroPositiveF64::new(core::f64::NAN), None);
        assert_eq!(NonZeroPositiveF64::new(core::f64::INFINITY), None);
        assert_eq!(NonZeroPositiveF64::new(core::f64::NEG_INFINITY), None);
    }

    #[test]
    fn norm_f32() {
        assert_eq!(NormalizedF32::new(-0.5), None);
        assert_eq!(
            NormalizedF32::new(-core::f32::EPSILON).map(|n| n.get()),
            None
        );
        assert_eq!(NormalizedF32::new(0.0).map(|n| n.get()), Some(0.0));
        assert_eq!(NormalizedF32::new(0.5).map(|n| n.get()), Some(0.5));
        assert_eq!(NormalizedF32::new(1.0).map(|n| n.get()), Some(1.0));
        assert_eq!(NormalizedF32::new(1.5), None);
        assert_eq!(NormalizedF32::new(core::f32::NAN), None);
        assert_eq!(NormalizedF32::new(core::f32::INFINITY), None);
        assert_eq!(NormalizedF32::new(core::f32::NEG_INFINITY), None);
    }

    #[test]
    fn clamped_norm_f32() {
        assert_eq!(NormalizedF32::new_clamped(-0.5).get(), 0.0);
        assert_eq!(NormalizedF32::new_clamped(0.5).get(), 0.5);
        assert_eq!(NormalizedF32::new_clamped(1.5).get(), 1.0);
        assert_eq!(NormalizedF32::new_clamped(core::f32::NAN).get(), 0.0);
        assert_eq!(NormalizedF32::new_clamped(core::f32::INFINITY).get(), 0.0);
        assert_eq!(
            NormalizedF32::new_clamped(core::f32::NEG_INFINITY).get(),
            0.0
        );
    }

    #[test]
    fn norm_f64() {
        assert_eq!(NormalizedF64::new(-0.5), None);
        assert_eq!(
            NormalizedF64::new(-core::f64::EPSILON).map(|n| n.get()),
            None
        );
        assert_eq!(NormalizedF64::new(0.0).map(|n| n.get()), Some(0.0));
        assert_eq!(NormalizedF64::new(0.5).map(|n| n.get()), Some(0.5));
        assert_eq!(NormalizedF64::new(1.0).map(|n| n.get()), Some(1.0));
        assert_eq!(NormalizedF64::new(1.5), None);
        assert_eq!(NormalizedF64::new(core::f64::NAN), None);
        assert_eq!(NormalizedF64::new(core::f64::INFINITY), None);
        assert_eq!(NormalizedF64::new(core::f64::NEG_INFINITY), None);
    }

    #[test]
    fn clamped_norm_f64() {
        assert_eq!(NormalizedF64::new_clamped(-0.5).get(), 0.0);
        assert_eq!(NormalizedF64::new_clamped(0.5).get(), 0.5);
        assert_eq!(NormalizedF64::new_clamped(1.5).get(), 1.0);
        assert_eq!(NormalizedF64::new_clamped(core::f64::NAN).get(), 0.0);
        assert_eq!(NormalizedF64::new_clamped(core::f64::INFINITY).get(), 0.0);
        assert_eq!(
            NormalizedF64::new_clamped(core::f64::NEG_INFINITY).get(),
            0.0
        );
    }
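
    // Sanity-check the u8/u16 conversions and the `Mul` impl of `NormalizedF32`;
    // the expected values follow directly from the conversion formulas above.
    #[test]
    fn norm_f32_conversions() {
        assert_eq!(NormalizedF32::new_u8(0).get(), 0.0);
        assert_eq!(NormalizedF32::new_u8(255).get(), 1.0);
        assert_eq!(NormalizedF32::new_u8(255).to_u8(), 255);
        assert_eq!(NormalizedF32::new_u16(65535).to_u16(), 65535);
        // 1.0 * 0.5 is exactly representable, so strict equality is safe here.
        assert_eq!((NormalizedF32::ONE * NormalizedF32::new_clamped(0.5)).get(), 0.5);
    }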
}