Vendor things

This commit is contained in:
John Doty 2024-03-08 11:03:01 -08:00
parent 5deceec006
commit 977e3c17e5
19434 changed files with 10682014 additions and 0 deletions

View file

@ -0,0 +1,67 @@
use crate::rt;
use crate::thread;
use std::sync::Mutex;
use std::task::Waker;
/// Mock implementation of `tokio::sync::AtomicWaker`.
///
/// The real `AtomicWaker` is lock-free; this mock replaces the atomics with a
/// plain `Mutex<Option<Waker>>` plus an `rt::Mutex` so the model checker can
/// observe the acquire/release ordering of concurrent accesses.
#[derive(Debug)]
pub struct AtomicWaker {
    /// Slot holding the most recently registered waker; `None` when no task
    /// is registered.
    waker: Mutex<Option<Waker>>,
    /// Model-checker lock guarding access to `waker`.
    /// NOTE(review): the semantics of `rt::Mutex` (and the `false` it is
    /// constructed with in `new`) live in `crate::rt` — confirm there.
    object: rt::Mutex,
}
impl AtomicWaker {
    /// Create a new instance of `AtomicWaker` with no task registered.
    pub fn new() -> AtomicWaker {
        AtomicWaker {
            waker: Mutex::new(None),
            // NOTE(review): the `false` argument's meaning is defined by
            // `crate::rt::Mutex` — confirm there.
            object: rt::Mutex::new(false),
        }
    }

    /// Registers the current task to be notified on calls to `wake`.
    ///
    /// If the model lock cannot be acquired immediately, the waker is woken
    /// right away (so the task re-polls and retries registration) instead of
    /// blocking — this models the real `AtomicWaker`'s lock-free contention
    /// path.
    #[track_caller]
    pub fn register(&self, waker: Waker) {
        // Removed leftover `dbg!(...)` wrappers: `dbg!` returns its argument
        // but prints to stderr on every call, which is debug residue.
        if !self.object.try_acquire_lock(location!()) {
            waker.wake();
            // yield the task and try again... this is a spin lock.
            thread::yield_now();
            return;
        }

        *self.waker.lock().unwrap() = Some(waker);
        self.object.release_lock();
    }

    /// Registers the current task to be woken without consuming the value.
    pub fn register_by_ref(&self, waker: &Waker) {
        self.register(waker.clone());
    }

    /// Notifies the task that last called `register`.
    ///
    /// No-op when no waker is currently registered.
    pub fn wake(&self) {
        if let Some(waker) = self.take_waker() {
            waker.wake();
        }
    }

    /// Attempts to take the `Waker` value out of the `AtomicWaker` with the
    /// intention that the caller will wake the task later.
    ///
    /// Returns `None` if no task is registered. Unlike `register`, this path
    /// blocks on the model lock rather than spinning.
    #[track_caller]
    pub fn take_waker(&self) -> Option<Waker> {
        self.object.acquire_lock(location!());
        let ret = self.waker.lock().unwrap().take();
        self.object.release_lock();
        ret
    }
}
impl Default for AtomicWaker {
fn default() -> Self {
AtomicWaker::new()
}
}

View file

@ -0,0 +1,78 @@
//! Future related synchronization primitives.
mod atomic_waker;
pub use self::atomic_waker::AtomicWaker;
use crate::rt;
use crate::sync::Arc;
use pin_utils::pin_mut;
use std::future::Future;
use std::mem;
use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker};
/// Block the current thread, driving `f` to completion.
///
/// Repeatedly polls `f`; whenever the poll returns `Pending`, the thread
/// parks on a mock `rt::Notify` until some clone of the waker fires.
#[track_caller]
pub fn block_on<F>(f: F) -> F::Output
where
    F: Future,
{
    pin_mut!(f);

    // NOTE(review): the `(false, true)` flags configure the mock Notify —
    // confirm their meaning against `crate::rt::Notify`.
    let notify = Arc::new(rt::Notify::new(false, true));

    // Build a waker whose data pointer aims at the Arc's payload WITHOUT
    // taking a strong reference: `ManuallyDrop` suppresses the waker's own
    // drop, so the refcount we never incremented is never decremented.
    // `notify` outlives the loop below, keeping the raw pointer valid for the
    // waker's whole lifetime. Clones of this waker (via `clone_arc_raw`) DO
    // bump the refcount and own their reference.
    // NOTE(review): this assumes `&*notify` equals the pointer
    // `Arc::into_raw` would yield — true for std's Arc; confirm for the mock
    // `crate::sync::Arc`.
    let waker = unsafe {
        mem::ManuallyDrop::new(Waker::from_raw(RawWaker::new(
            &*notify as *const _ as *const (),
            waker_vtable(),
        )))
    };
    let mut cx = Context::from_waker(&waker);

    loop {
        match f.as_mut().poll(&mut cx) {
            Poll::Ready(val) => return val,
            Poll::Pending => {}
        }

        // Park this thread until the waker's `notify` is called.
        notify.wait(location!());
    }
}
/// Vtable wiring a `RawWaker` to the `Arc<rt::Notify>` shims below.
pub(super) fn waker_vtable() -> &'static RawWakerVTable {
    // One shared static table; `RawWakerVTable::new` is const, so this is
    // built at compile time.
    static VTABLE: RawWakerVTable = RawWakerVTable::new(
        clone_arc_raw,
        wake_arc_raw,
        wake_by_ref_arc_raw,
        drop_arc_raw,
    );
    &VTABLE
}
/// Bump the strong count of the `Arc<rt::Notify>` that `data` points into,
/// without taking ownership of any reference.
///
/// # Safety
/// `data` must point at the payload of a live `Arc<rt::Notify>` (as stored in
/// the raw waker), and its strong count must stay ≥ 1 for the duration.
unsafe fn increase_refcount(data: *const ()) {
    // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
    let arc = mem::ManuallyDrop::new(Arc::<rt::Notify>::from_raw(data as *const _));
    // Now increase refcount, but don't drop new refcount either
    let _arc_clone: mem::ManuallyDrop<_> = arc.clone();
}
/// `RawWaker` clone shim: take one extra strong reference and hand back a
/// new `RawWaker` sharing the same data pointer and vtable.
///
/// # Safety
/// Same contract as [`increase_refcount`]: `data` must point at a live
/// `Arc<rt::Notify>` payload.
unsafe fn clone_arc_raw(data: *const ()) -> RawWaker {
    increase_refcount(data);
    RawWaker::new(data, waker_vtable())
}
/// `RawWaker` wake-by-value shim: reconstruct the `Arc`, fire `notify`, then
/// let the `Arc` drop — consuming the strong reference this waker owned.
///
/// # Safety
/// `data` must carry ownership of one strong reference to the
/// `Arc<rt::Notify>` (i.e. originate from `clone_arc_raw` or an equivalent
/// refcount transfer).
unsafe fn wake_arc_raw(data: *const ()) {
    let notify: Arc<rt::Notify> = Arc::from_raw(data as *const _);
    notify.notify(location!());
}
/// `RawWaker` wake-by-reference shim: fire `notify` without consuming the
/// waker's strong reference.
///
/// # Safety
/// `data` must point at the payload of a live `Arc<rt::Notify>` that remains
/// alive for the duration of the call.
unsafe fn wake_by_ref_arc_raw(data: *const ()) {
    // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
    let arc = mem::ManuallyDrop::new(Arc::<rt::Notify>::from_raw(data as *const _));
    arc.notify(location!());
}
/// `RawWaker` drop shim: release the strong reference owned by the waker.
///
/// # Safety
/// `data` must carry ownership of one strong reference to the
/// `Arc<rt::Notify>`; it must not be used after this call.
unsafe fn drop_arc_raw(data: *const ()) {
    drop(Arc::<rt::Notify>::from_raw(data as *const _))
}