// cooked_waker/lib.rs

#![no_std]

//! cooked_waker provides safe traits for working with
//! [`std::task::Waker`][Waker] and creating those wakers out of regular, safe
//! Rust structs. It cooks `RawWaker` and `RawWakerVTable`, making them safe
//! for consumption.
//!
//! It provides the [`Wake`] and [`WakeRef`] traits, which correspond to the
//! [`wake`][Waker::wake] and [`wake_by_ref`][Waker::wake_by_ref] methods
//! on [`std::task::Waker`][Waker], and it provides implementations of these
//! traits for the common reference & pointer types (`Arc`, `Rc`, `&'static`,
//! etc).
//!
//! Additionally, it provides [`IntoWaker`], which allows converting any
//! `Wake + Clone` type into a [`Waker`]. This trait is automatically derived
//! for any `Wake + Clone + Send + Sync + 'static` type.
//!
//! # Basic example
//!
//! ```
//! use cooked_waker::{Wake, WakeRef, IntoWaker, ViaRawPointer};
//! use std::sync::atomic::{AtomicUsize, Ordering};
//! use std::task::Waker;
//!
//! static wake_ref_count: AtomicUsize = AtomicUsize::new(0);
//! static wake_value_count: AtomicUsize = AtomicUsize::new(0);
//! static drop_count: AtomicUsize = AtomicUsize::new(0);
//!
//! // A simple Waker struct that atomically increments the relevant static
//! // counters.
//! #[derive(Debug, Clone)]
//! struct StaticWaker;
//!
//! impl WakeRef for StaticWaker {
//!     fn wake_by_ref(&self) {
//!         wake_ref_count.fetch_add(1, Ordering::SeqCst);
//!     }
//! }
//!
//! impl Wake for StaticWaker {
//!     fn wake(self) {
//!         wake_value_count.fetch_add(1, Ordering::SeqCst);
//!     }
//! }
//!
//! impl Drop for StaticWaker {
//!     fn drop(&mut self) {
//!         drop_count.fetch_add(1, Ordering::SeqCst);
//!     }
//! }
//!
//! // Usually in practice you'll be using an Arc or Box, which already
//! // implement this, so there will be no need to implement it yourself.
//! unsafe impl ViaRawPointer for StaticWaker {
//!     type Target = ();
//!
//!     fn into_raw(self) -> *mut () {
//!         // Need to forget self because we're being converted into a pointer,
//!         // so destructors should not run.
//!         std::mem::forget(self);
//!         std::ptr::null_mut()
//!     }
//!
//!     unsafe fn from_raw(_ptr: *mut ()) -> Self {
//!         StaticWaker
//!     }
//! }
//!
//! assert_eq!(drop_count.load(Ordering::SeqCst), 0);
//!
//! let waker = StaticWaker;
//! {
//!     let waker1: Waker = waker.into_waker();
//!
//!     waker1.wake_by_ref();
//!     assert_eq!(wake_ref_count.load(Ordering::SeqCst), 1);
//!
//!     let waker2: Waker = waker1.clone();
//!     waker2.wake_by_ref();
//!     assert_eq!(wake_ref_count.load(Ordering::SeqCst), 2);
//!
//!     waker1.wake();
//!     assert_eq!(wake_value_count.load(Ordering::SeqCst), 1);
//!     assert_eq!(drop_count.load(Ordering::SeqCst), 1);
//! }
//! assert_eq!(drop_count.load(Ordering::SeqCst), 2);
//! ```
//!
//! # Arc example
//!
//! ```
//! use cooked_waker::{Wake, WakeRef, IntoWaker};
//! use std::sync::atomic::{AtomicUsize, Ordering};
//! use std::sync::Arc;
//! use std::task::Waker;
//!
//! // A simple struct that counts the number of times it is awoken. Can't
//! // be awoken by value (because that would discard the counter), so we
//! // must instead wrap it in an Arc.
//! #[derive(Debug, Default)]
//! struct Counter {
//!     // We use atomic usize because we need Send + Sync and also interior
//!     // mutability
//!     count: AtomicUsize,
//! }
//!
//! impl Counter {
//!     fn get(&self) -> usize {
//!         self.count.load(Ordering::SeqCst)
//!     }
//! }
//!
//! impl WakeRef for Counter {
//!     fn wake_by_ref(&self) {
//!         let _prev = self.count.fetch_add(1, Ordering::SeqCst);
//!     }
//! }
//!
//! let counter_handle = Arc::new(Counter::default());
//!
//! // Create an std::task::Waker
//! let waker: Waker = counter_handle.clone().into_waker();
//!
//! waker.wake_by_ref();
//! waker.wake_by_ref();
//!
//! let waker2 = waker.clone();
//! waker2.wake_by_ref();
//!
//! // Because IntoWaker wraps the pointer directly, without any additional
//! // boxing, we can use will_wake
//! assert!(waker.will_wake(&waker2));
//!
//! // This calls Counter::wake_by_ref because the Arc doesn't have exclusive
//! // ownership of the underlying Counter
//! waker2.wake();
//!
//! assert_eq!(counter_handle.get(), 4);
//! ```

extern crate alloc;

use alloc::boxed::Box;
use alloc::rc;
use alloc::sync as arc;
use core::{
    mem::ManuallyDrop,
    ptr,
    task::{RawWaker, RawWakerVTable, Waker},
};

/// Trait for types that can be converted into raw pointers and back again.
///
/// # Safety
///
/// - Implementors must ensure that, for a given object, the pointer remains
///   fixed as long as no mutable operations are performed (that is, after
///   calling `from_raw()` followed by `into_raw()`, with no mutable
///   operations in between, the returned pointer must have the same value).
/// - Implementors must also not panic when the interface is used correctly.
///   The Waker constructed by IntoWaker can cause a double drop if either of
///   these functions panics.
///
/// In the future, we hope to have a similar trait added to the standard
/// library; see https://github.com/rust-lang/rust/issues/75846 for details.
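///
/// # Example
///
/// A minimal sketch for a hypothetical `MyHandle` newtype over `Box`; the
/// pointer-stability requirement holds because `Box::into_raw` and
/// `Box::from_raw` round-trip the same allocation. (In practice the blanket
/// impls below already cover `Box`, `Arc`, and `Rc`.)
///
/// ```
/// use cooked_waker::ViaRawPointer;
///
/// struct MyHandle(Box<u32>);
///
/// unsafe impl ViaRawPointer for MyHandle {
///     type Target = u32;
///
///     fn into_raw(self) -> *mut u32 {
///         Box::into_raw(self.0)
///     }
///
///     unsafe fn from_raw(ptr: *mut u32) -> Self {
///         MyHandle(Box::from_raw(ptr))
///     }
/// }
///
/// let handle = MyHandle(Box::new(10));
/// let ptr = handle.into_raw();
///
/// // Round-tripping through from_raw/into_raw must yield the same pointer.
/// let handle = unsafe { MyHandle::from_raw(ptr) };
/// assert_eq!(handle.into_raw(), ptr);
///
/// // Reclaim the allocation so the example doesn't leak.
/// drop(unsafe { MyHandle::from_raw(ptr) });
/// ```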
pub unsafe trait ViaRawPointer {
    type Target: ?Sized;

    /// Convert this object into a raw pointer.
    fn into_raw(self) -> *mut Self::Target;

    /// Convert a raw pointer back into this object.
    ///
    /// # Safety
    ///
    /// This method must ONLY be called on a pointer that was received via
    /// `Self::into_raw`, and that pointer must not be used afterwards.
    unsafe fn from_raw(ptr: *mut Self::Target) -> Self;
}

/// Wakers that can wake by reference. This trait is used to enable a [`Wake`]
/// implementation for types that don't have exclusive ownership of an
/// underlying handle, like `Arc<T>` and `&T`.
///
/// This trait is implemented for most container and reference types, like
/// `&T where T: WakeRef`, `Box<T: WakeRef>`, and `Arc<T: WakeRef>`.
pub trait WakeRef {
    /// Wake up the task by reference. In general [`Wake::wake`] should be
    /// preferred, if available, as it's probably more efficient.
    ///
    /// A [`Waker`] created by [`IntoWaker`] will call this method through
    /// [`Waker::wake_by_ref`].
    fn wake_by_ref(&self);
}

/// Wakers that can wake by value. This is the primary means of waking a task.
///
/// This trait is implemented for most container types, like `Box<T: Wake>`
/// and `Option<T: Wake>`. It is also implemented for shared pointer types like
/// `Arc<T>` and `&T`, but those implementations call `T::wake_by_ref`, because
/// they don't have ownership of the underlying `T`.
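///
/// # Example
///
/// A minimal sketch (the `CountingWaker` type is purely illustrative): an
/// empty `Wake` impl is enough, since the default `wake` delegates to
/// [`WakeRef::wake_by_ref`].
///
/// ```
/// use cooked_waker::{Wake, WakeRef};
/// use std::sync::atomic::{AtomicUsize, Ordering};
///
/// static WAKE_COUNT: AtomicUsize = AtomicUsize::new(0);
///
/// #[derive(Debug, Clone)]
/// struct CountingWaker;
///
/// impl WakeRef for CountingWaker {
///     fn wake_by_ref(&self) {
///         WAKE_COUNT.fetch_add(1, Ordering::SeqCst);
///     }
/// }
///
/// // An empty impl: the default `wake` simply calls `wake_by_ref`.
/// impl Wake for CountingWaker {}
///
/// CountingWaker.wake_by_ref();
/// CountingWaker.wake();
/// assert_eq!(WAKE_COUNT.load(Ordering::SeqCst), 2);
/// ```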
pub trait Wake: WakeRef + Sized {
    /// Wake up the task by value. By default, this simply calls
    /// [`WakeRef::wake_by_ref`].
    ///
    /// A [`Waker`] created by [`IntoWaker`] will call this method through
    /// [`Waker::wake`].
    #[inline]
    fn wake(self) {
        self.wake_by_ref()
    }
}

/// Objects that can be converted into a [`Waker`]. This trait is
/// automatically implemented for types that fulfill the waker interface.
/// Such types must be:
/// - [`Clone`]
/// - `Send + Sync`
/// - `'static`
/// - [`Wake`]
/// - [`ViaRawPointer`]
///
/// The implementation of this trait sets up a [`RawWakerVTable`] for the type,
/// and arranges a conversion into a [`Waker`] through the [`ViaRawPointer`]
/// trait, which should be implemented for types that can be converted to and from
/// pointers. This trait is implemented for all the standard library pointer
/// types (such as `Arc` and `Box`), and you can implement it on your own types
/// if you want to use them for wakers.
///
/// It should never be necessary to implement this trait manually.
///
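/// # Example
///
/// A minimal sketch of using the blanket impl in a generic context; the
/// `make_waker` helper and `Nop` type are purely illustrative.
///
/// ```
/// use cooked_waker::{IntoWaker, WakeRef};
/// use std::sync::Arc;
/// use std::task::Waker;
///
/// #[derive(Debug)]
/// struct Nop;
///
/// impl WakeRef for Nop {
///     fn wake_by_ref(&self) {}
/// }
///
/// // `Arc<Nop>` satisfies the blanket impl's bounds, so it gets `IntoWaker`
/// // for free.
/// fn make_waker<W: IntoWaker>(w: W) -> Waker {
///     w.into_waker()
/// }
///
/// let waker: Waker = make_waker(Arc::new(Nop));
/// waker.wake_by_ref();
/// waker.wake();
/// ```
///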
/// [`RawWakerVTable`]: core::task::RawWakerVTable
/// [`Waker`]: core::task::Waker
/// [`Clone`]: core::clone::Clone
pub trait IntoWaker {
    /// The RawWakerVTable for this type. This should never be used directly;
    /// it is entirely handled by `into_waker`. It is present as an associated
    /// const because that's the only way for it to work in generic contexts.
    #[doc(hidden)]
    const VTABLE: &'static RawWakerVTable;

    /// Convert this object into a `Waker`.
    #[must_use]
    fn into_waker(self) -> Waker;
}

impl<T> IntoWaker for T
where
    T: Wake + Clone + Send + Sync + 'static + ViaRawPointer,
    T::Target: Sized,
{
    const VTABLE: &'static RawWakerVTable = &RawWakerVTable::new(
        // clone
        |raw| {
            let raw = raw as *mut T::Target;

            let waker = ManuallyDrop::<T>::new(unsafe { ViaRawPointer::from_raw(raw) });
            let cloned: T = (*waker).clone();

            // We can't store the pointer returned by `into_raw` back into the
            // raw waker, so we simply assert that it has remained the same.
            // This is guaranteed by the ViaRawPointer safety contract, so we
            // only check it in debug builds.
            debug_assert_eq!(ManuallyDrop::into_inner(waker).into_raw(), raw);

            let cloned_raw = cloned.into_raw();
            let cloned_raw = cloned_raw as *const ();
            RawWaker::new(cloned_raw, T::VTABLE)
        },
        // wake by value
        |raw| {
            let raw = raw as *mut T::Target;
            let waker: T = unsafe { ViaRawPointer::from_raw(raw) };
            waker.wake();
        },
        // wake by ref
        |raw| {
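            // Reconstruct the value without taking ownership: ManuallyDrop
            // ensures it isn't dropped here, since the RawWaker still owns it.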
            let raw = raw as *mut T::Target;
            let waker = ManuallyDrop::<T>::new(unsafe { ViaRawPointer::from_raw(raw) });
            waker.wake_by_ref();

            debug_assert_eq!(ManuallyDrop::into_inner(waker).into_raw(), raw);
        },
        // Drop
        |raw| {
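            // Reconstruct the owned value and let it fall out of scope so its
            // destructor runs normally.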
            let raw = raw as *mut T::Target;
            let _waker: T = unsafe { ViaRawPointer::from_raw(raw) };
        },
    );

    fn into_waker(self) -> Waker {
        let raw = self.into_raw();
        let raw = raw as *const ();
        let raw_waker = RawWaker::new(raw, T::VTABLE);
        unsafe { Waker::from_raw(raw_waker) }
    }
}

// Waker implementations for std types. Feel free to open PRs for additional
// stdlib types here.

// We'd prefer to implement WakeRef for any T: Deref where T::Target: WakeRef,
// but that results in type coherence issues with non-deref stdlib types.

impl<T: WakeRef + ?Sized> WakeRef for &T {
    #[inline]
    fn wake_by_ref(&self) {
        T::wake_by_ref(*self)
    }
}

impl<T: WakeRef + ?Sized> Wake for &T {}

unsafe impl<T: ?Sized> ViaRawPointer for Box<T> {
    type Target = T;

    fn into_raw(self) -> *mut T {
        Box::into_raw(self)
    }

    unsafe fn from_raw(ptr: *mut T) -> Self {
        Box::from_raw(ptr)
    }
}

impl<T: WakeRef + ?Sized> WakeRef for Box<T> {
    #[inline]
    fn wake_by_ref(&self) {
        T::wake_by_ref(self.as_ref())
    }
}

impl<T: Wake> Wake for Box<T> {
    #[inline]
    fn wake(self) {
        T::wake(*self)
    }
}

unsafe impl<T: ?Sized> ViaRawPointer for arc::Arc<T> {
    type Target = T;

    fn into_raw(self) -> *mut T {
        arc::Arc::into_raw(self) as *mut T
    }

    unsafe fn from_raw(ptr: *mut T) -> Self {
        arc::Arc::from_raw(ptr as *const T)
    }
}

impl<T: WakeRef + ?Sized> WakeRef for arc::Arc<T> {
    #[inline]
    fn wake_by_ref(&self) {
        T::wake_by_ref(self.as_ref())
    }
}

impl<T: WakeRef + ?Sized> Wake for arc::Arc<T> {}

unsafe impl<T> ViaRawPointer for arc::Weak<T> {
    type Target = T;

    fn into_raw(self) -> *mut T {
        arc::Weak::into_raw(self) as *mut T
    }

    unsafe fn from_raw(ptr: *mut T) -> Self {
        arc::Weak::from_raw(ptr as *const T)
    }
}

impl<T: WakeRef + ?Sized> WakeRef for arc::Weak<T> {
    #[inline]
    fn wake_by_ref(&self) {
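        // `upgrade` yields an Option<Arc<T>>; if all strong references are
        // gone this is None, and waking a None is a no-op via the Option impl.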
        self.upgrade().wake()
    }
}

impl<T: WakeRef + ?Sized> Wake for arc::Weak<T> {}

impl<T: WakeRef + ?Sized> WakeRef for rc::Rc<T> {
    #[inline]
    fn wake_by_ref(&self) {
        T::wake_by_ref(self.as_ref())
    }
}

unsafe impl<T: ?Sized> ViaRawPointer for rc::Rc<T> {
    type Target = T;

    fn into_raw(self) -> *mut T {
        rc::Rc::into_raw(self) as *mut T
    }

    unsafe fn from_raw(ptr: *mut T) -> Self {
        rc::Rc::from_raw(ptr as *const T)
    }
}

impl<T: WakeRef + ?Sized> Wake for rc::Rc<T> {
    #[inline]
    fn wake(self) {
        T::wake_by_ref(self.as_ref())
    }
}

unsafe impl<T> ViaRawPointer for rc::Weak<T> {
    type Target = T;

    fn into_raw(self) -> *mut T {
        rc::Weak::into_raw(self) as *mut T
    }

    unsafe fn from_raw(ptr: *mut T) -> Self {
        rc::Weak::from_raw(ptr as *const T)
    }
}

impl<T: WakeRef + ?Sized> WakeRef for rc::Weak<T> {
    #[inline]
    fn wake_by_ref(&self) {
        self.upgrade().wake()
    }
}

impl<T: WakeRef + ?Sized> Wake for rc::Weak<T> {}

unsafe impl<T: ViaRawPointer> ViaRawPointer for Option<T>
where
    T::Target: Sized,
{
    type Target = T::Target;

    fn into_raw(self) -> *mut Self::Target {
        match self {
            Some(value) => match value.into_raw() {
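                // A null pointer from the inner value would be
                // indistinguishable from None when converting back, so drop
                // the reconstructed value and encode None instead.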
                ptr if ptr.is_null() => {
                    let _ = unsafe { T::from_raw(ptr) };
                    ptr::null_mut()
                }
                ptr => ptr,
            },
            None => ptr::null_mut(),
        }
    }

    unsafe fn from_raw(ptr: *mut Self::Target) -> Self {
        match ptr.is_null() {
            false => Some(T::from_raw(ptr)),
            true => None,
        }
    }
}

impl<T: WakeRef> WakeRef for Option<T> {
    #[inline]
    fn wake_by_ref(&self) {
        if let Some(waker) = self {
            waker.wake_by_ref()
        }
    }
}

impl<T: Wake> Wake for Option<T> {
    #[inline]
    fn wake(self) {
        if let Some(waker) = self {
            waker.wake()
        }
    }
}

impl WakeRef for Waker {
    #[inline]
    fn wake_by_ref(&self) {
        Waker::wake_by_ref(self)
    }
}

impl Wake for Waker {
    #[inline]
    fn wake(self) {
        Waker::wake(self)
    }
}

#[cfg(test)]
mod test {
    extern crate std;

    use super::*;
    use std::panic;
    use std::sync::atomic::{AtomicUsize, Ordering};
    use std::task::Waker;

    static PANIC_WAKE_REF_COUNT: AtomicUsize = AtomicUsize::new(0);
    static PANIC_WAKE_VALUE_COUNT: AtomicUsize = AtomicUsize::new(0);
    static PANIC_DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

    #[derive(Debug, Clone)]
    struct PanicWaker;

    impl WakeRef for PanicWaker {
        fn wake_by_ref(&self) {
            PANIC_WAKE_REF_COUNT.fetch_add(1, Ordering::SeqCst);
            panic!();
        }
    }

    impl Wake for PanicWaker {
        fn wake(self) {
            PANIC_WAKE_VALUE_COUNT.fetch_add(1, Ordering::SeqCst);
        }
    }

    impl Drop for PanicWaker {
        fn drop(&mut self) {
            PANIC_DROP_COUNT.fetch_add(1, Ordering::SeqCst);
        }
    }

    unsafe impl ViaRawPointer for PanicWaker {
        type Target = ();

        fn into_raw(self) -> *mut () {
            std::mem::forget(self);
            std::ptr::null_mut()
        }

        unsafe fn from_raw(_ptr: *mut ()) -> Self {
            PanicWaker
        }
    }

    // Test that the Waker vtable behaves correctly even if wake_by_ref() panics.
    #[test]
    fn panic_wake() {
        assert_eq!(PANIC_DROP_COUNT.load(Ordering::SeqCst), 0);

        let waker = PanicWaker;
        {
            let waker1: Waker = waker.into_waker();

            let waker2: Waker = waker1.clone();

            let result = panic::catch_unwind(|| {
                waker2.wake_by_ref();
            });
            assert!(result.is_err());
            assert_eq!(PANIC_WAKE_REF_COUNT.load(Ordering::SeqCst), 1);
            assert_eq!(PANIC_DROP_COUNT.load(Ordering::SeqCst), 0);

            let result = panic::catch_unwind(|| {
                waker1.wake_by_ref();
            });
            assert!(result.is_err());
            assert_eq!(PANIC_WAKE_REF_COUNT.load(Ordering::SeqCst), 2);
            assert_eq!(PANIC_DROP_COUNT.load(Ordering::SeqCst), 0);

            let result = panic::catch_unwind(|| {
                waker1.wake();
            });
            assert!(result.is_ok());
            assert_eq!(PANIC_WAKE_VALUE_COUNT.load(Ordering::SeqCst), 1);
            assert_eq!(PANIC_DROP_COUNT.load(Ordering::SeqCst), 1);
        }
        assert_eq!(PANIC_DROP_COUNT.load(Ordering::SeqCst), 2);
    }
}