summary | refs | log | tree | commit | diff | stats
path: root/tokio/src/loom/std
diff options
context:
space:
mode:
author	Carl Lerche <me@carllerche.com>	2019-11-04 22:22:40 -0800
committer	GitHub <noreply@github.com>	2019-11-04 22:22:40 -0800
commit	a6253ed05a1e0d14bc64915f5937c29092df9497 (patch)
tree	15c96ca9b66e5bca424769e8c715f732feaa1b1c /tokio/src/loom/std
parent	94f9b04b066cfc3da5c3ee2c961c21a9496135dd (diff)
chore: unify all mocked `loom` files (#1732)
When the crates were merged, each component kept its own `loom` file containing mocked types it needed. This patch unifies them all in one location.
Diffstat (limited to 'tokio/src/loom/std')
-rw-r--r--	tokio/src/loom/std/alloc.rs	18
-rw-r--r--	tokio/src/loom/std/atomic_u32.rs	44
-rw-r--r--	tokio/src/loom/std/atomic_usize.rs	51
-rw-r--r--	tokio/src/loom/std/causal_cell.rs	49
-rw-r--r--	tokio/src/loom/std/mod.rs	64
5 files changed, 226 insertions(+), 0 deletions(-)
diff --git a/tokio/src/loom/std/alloc.rs b/tokio/src/loom/std/alloc.rs
new file mode 100644
index 00000000..25b199b1
--- /dev/null
+++ b/tokio/src/loom/std/alloc.rs
@@ -0,0 +1,18 @@
+#[derive(Debug)]
+pub(crate) struct Track<T> {
+ value: T,
+}
+
+impl<T> Track<T> {
+ pub(crate) fn new(value: T) -> Track<T> {
+ Track { value }
+ }
+
+ pub(crate) fn get_mut(&mut self) -> &mut T {
+ &mut self.value
+ }
+
+ pub(crate) fn into_inner(self) -> T {
+ self.value
+ }
+}
diff --git a/tokio/src/loom/std/atomic_u32.rs b/tokio/src/loom/std/atomic_u32.rs
new file mode 100644
index 00000000..0128ab2b
--- /dev/null
+++ b/tokio/src/loom/std/atomic_u32.rs
@@ -0,0 +1,44 @@
+use std::cell::UnsafeCell;
+use std::fmt;
+use std::ops::Deref;
+
+/// `AtomicU32` providing an additional `load_unsync` function.
+pub(crate) struct AtomicU32 {
+ inner: UnsafeCell<std::sync::atomic::AtomicU32>,
+}
+
+unsafe impl Send for AtomicU32 {}
+unsafe impl Sync for AtomicU32 {}
+
+impl AtomicU32 {
+ pub(crate) fn new(val: u32) -> AtomicU32 {
+ let inner = UnsafeCell::new(std::sync::atomic::AtomicU32::new(val));
+ AtomicU32 { inner }
+ }
+
+ /// Perform an unsynchronized load.
+ ///
+ /// # Safety
+ ///
+ /// All mutations must have happened before the unsynchronized load.
+ /// Additionally, there must be no concurrent mutations.
+ pub(crate) unsafe fn unsync_load(&self) -> u32 {
+ *(*self.inner.get()).get_mut()
+ }
+}
+
+impl Deref for AtomicU32 {
+ type Target = std::sync::atomic::AtomicU32;
+
+ fn deref(&self) -> &Self::Target {
+ // safety: it is always safe to access `&self` fns on the inner value as
+ // we never perform unsafe mutations.
+ unsafe { &*self.inner.get() }
+ }
+}
+
+impl fmt::Debug for AtomicU32 {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.deref().fmt(fmt)
+ }
+}
diff --git a/tokio/src/loom/std/atomic_usize.rs b/tokio/src/loom/std/atomic_usize.rs
new file mode 100644
index 00000000..d255d087
--- /dev/null
+++ b/tokio/src/loom/std/atomic_usize.rs
@@ -0,0 +1,51 @@
+use std::cell::UnsafeCell;
+use std::fmt;
+use std::ops;
+
+/// `AtomicUsize` providing an additional `load_unsync` function.
+pub(crate) struct AtomicUsize {
+ inner: UnsafeCell<std::sync::atomic::AtomicUsize>,
+}
+
+unsafe impl Send for AtomicUsize {}
+unsafe impl Sync for AtomicUsize {}
+
+impl AtomicUsize {
+ pub(crate) fn new(val: usize) -> AtomicUsize {
+ let inner = UnsafeCell::new(std::sync::atomic::AtomicUsize::new(val));
+ AtomicUsize { inner }
+ }
+
+ /// Perform an unsynchronized load.
+ ///
+ /// # Safety
+ ///
+ /// All mutations must have happened before the unsynchronized load.
+ /// Additionally, there must be no concurrent mutations.
+ pub(crate) unsafe fn unsync_load(&self) -> usize {
+ *(*self.inner.get()).get_mut()
+ }
+}
+
+impl ops::Deref for AtomicUsize {
+ type Target = std::sync::atomic::AtomicUsize;
+
+ fn deref(&self) -> &Self::Target {
+ // safety: it is always safe to access `&self` fns on the inner value as
+ // we never perform unsafe mutations.
+ unsafe { &*self.inner.get() }
+ }
+}
+
+impl ops::DerefMut for AtomicUsize {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ // safety: we hold `&mut self`
+ unsafe { &mut *self.inner.get() }
+ }
+}
+
+impl fmt::Debug for AtomicUsize {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ (**self).fmt(fmt)
+ }
+}
diff --git a/tokio/src/loom/std/causal_cell.rs b/tokio/src/loom/std/causal_cell.rs
new file mode 100644
index 00000000..c4917e5f
--- /dev/null
+++ b/tokio/src/loom/std/causal_cell.rs
@@ -0,0 +1,49 @@
+use std::cell::UnsafeCell;
+
+#[derive(Debug)]
+pub(crate) struct CausalCell<T>(UnsafeCell<T>);
+
+#[derive(Default)]
+pub(crate) struct CausalCheck(());
+
+impl<T> CausalCell<T> {
+ pub(crate) fn new(data: T) -> CausalCell<T> {
+ CausalCell(UnsafeCell::new(data))
+ }
+
+ pub(crate) fn with<F, R>(&self, f: F) -> R
+ where
+ F: FnOnce(*const T) -> R,
+ {
+ f(self.0.get())
+ }
+
+ pub(crate) fn with_unchecked<F, R>(&self, f: F) -> R
+ where
+ F: FnOnce(*const T) -> R,
+ {
+ f(self.0.get())
+ }
+
+ pub(crate) fn check(&self) {}
+
+ pub(crate) fn with_deferred<F, R>(&self, f: F) -> (R, CausalCheck)
+ where
+ F: FnOnce(*const T) -> R,
+ {
+ (f(self.0.get()), CausalCheck::default())
+ }
+
+ pub(crate) fn with_mut<F, R>(&self, f: F) -> R
+ where
+ F: FnOnce(*mut T) -> R,
+ {
+ f(self.0.get())
+ }
+}
+
+impl CausalCheck {
+ pub(crate) fn check(self) {}
+
+ pub(crate) fn join(&mut self, _other: CausalCheck) {}
+}
diff --git a/tokio/src/loom/std/mod.rs b/tokio/src/loom/std/mod.rs
new file mode 100644
index 00000000..5bbf1531
--- /dev/null
+++ b/tokio/src/loom/std/mod.rs
@@ -0,0 +1,64 @@
+// rt-full implies rt-current-thread
+
+#![cfg_attr(not(feature = "rt-full"), allow(unused_imports, dead_code))]
+
+mod atomic_u32;
+mod atomic_usize;
+mod causal_cell;
+
+pub(crate) mod alloc;
+
+pub(crate) mod cell {
+ pub(crate) use super::causal_cell::{CausalCell, CausalCheck};
+}
+
+pub(crate) mod future {
+ pub(crate) use crate::sync::AtomicWaker;
+}
+
+pub(crate) mod rand {
+ use std::collections::hash_map::RandomState;
+ use std::hash::{BuildHasher, Hash, Hasher};
+ use std::sync::atomic::AtomicU32;
+ use std::sync::atomic::Ordering::Relaxed;
+
+ static COUNTER: AtomicU32 = AtomicU32::new(1);
+
+ pub(crate) fn seed() -> u64 {
+ let rand_state = RandomState::new();
+
+ let mut hasher = rand_state.build_hasher();
+
+ // Hash some unique-ish data to generate some new state
+ COUNTER.fetch_add(1, Relaxed).hash(&mut hasher);
+
+ // Get the seed
+ hasher.finish()
+ }
+}
+
+pub(crate) mod sync {
+ pub(crate) use std::sync::*;
+
+ pub(crate) mod atomic {
+ pub(crate) use crate::loom::std::atomic_u32::AtomicU32;
+ pub(crate) use crate::loom::std::atomic_usize::AtomicUsize;
+
+ pub(crate) use std::sync::atomic::spin_loop_hint;
+ pub(crate) use std::sync::atomic::{fence, AtomicPtr};
+ }
+}
+
+pub(crate) mod sys {
+ #[cfg(feature = "rt-full")]
+ pub(crate) fn num_cpus() -> usize {
+ usize::max(1, num_cpus::get_physical())
+ }
+
+ #[cfg(not(feature = "rt-full"))]
+ pub(crate) fn num_cpus() -> usize {
+ 1
+ }
+}
+
+pub(crate) use std::thread;