Skip to content

Commit

Permalink
Merge pull request #149 from yoshuawuyts/streamset
Browse files Browse the repository at this point in the history
Add the `StreamGroup` type
  • Loading branch information
yoshuawuyts authored Aug 12, 2023
2 parents 9271cac + fd4eeb7 commit 7e1402a
Show file tree
Hide file tree
Showing 27 changed files with 785 additions and 167 deletions.
7 changes: 6 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,16 @@ repository = "https://github.com/yoshuawuyts/futures-concurrency"
documentation = "https://docs.rs/futures-concurrency"
description = "Structured concurrency operations for async Rust"
readme = "README.md"
edition = "2018"
edition = "2021"
keywords = []
categories = []
authors = [
"Yoshua Wuyts <[email protected]>"
]

[profile.bench]
debug = true

[lib]
bench = false

Expand All @@ -28,6 +31,8 @@ harness = false
bitvec = { version = "1.0.1", default-features = false, features = ["alloc"] }
futures-core = "0.3"
pin-project = "1.0.8"
slab = "0.4.8"
smallvec = "1.11.0"

[dev-dependencies]
futures = "0.3.25"
Expand Down
67 changes: 65 additions & 2 deletions benches/bench.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,70 @@

mod utils;

criterion::criterion_main!(merge::merge_benches, join::join_benches, race::race_benches);
// #[global_allocator]
// static ALLOC: dhat::Alloc = dhat::Alloc;

/// Benchmark entry point.
///
/// NOTE: `criterion::criterion_main!` expands to its own `fn main` — here that
/// expansion lands *inside* this outer `main`, producing a nested, shadowing
/// inner `main`. The trailing `main()` call below therefore invokes the
/// criterion-generated inner function, not this outer one, so this is not
/// infinite recursion. This shape keeps the outer `main` available as a place
/// to install the (currently commented-out) dhat heap profiler.
fn main() {
    // let _profiler = dhat::Profiler::new_heap();
    criterion::criterion_main!(
        merge::merge_benches,
        join::join_benches,
        race::race_benches,
        stream_group::stream_group_benches
    );
    // Calls the `fn main` generated by `criterion_main!` just above.
    main()
}

mod stream_group {
    use criterion::async_executor::FuturesExecutor;
    use criterion::{black_box, criterion_group, BatchSize, BenchmarkId, Criterion};
    use futures::stream::SelectAll;
    use futures_concurrency::stream::StreamGroup;
    use futures_lite::prelude::*;

    use crate::utils::{make_select_all, make_stream_group};

    criterion_group! {
        name = stream_group_benches;
        // This can be any expression that returns a `Criterion` object.
        config = Criterion::default();
        targets = stream_set_bench
    }

    /// Drains a `StreamGroup` (and, for comparison, a `futures::stream::SelectAll`)
    /// of `size` countdown streams, asserting that exactly `size` items come out.
    fn stream_set_bench(c: &mut Criterion) {
        let mut bench_group = c.benchmark_group("stream_group");
        for size in [10, 100, 1000].iter() {
            // Benchmark the StreamGroup implementation from this crate.
            bench_group.bench_with_input(BenchmarkId::new("StreamGroup", size), size, |b, size| {
                let setup = || make_stream_group(*size);
                let routine = |mut streams: StreamGroup<_>| async move {
                    let mut seen = 0;
                    black_box({
                        while streams.next().await.is_some() {
                            seen += 1;
                        }
                        assert_eq!(seen, *size);
                    });
                };
                b.to_async(FuturesExecutor)
                    .iter_batched(setup, routine, BatchSize::SmallInput)
            });
            // Benchmark the equivalent type from the `futures` crate as a baseline.
            bench_group.bench_with_input(BenchmarkId::new("SelectAll", size), size, |b, size| {
                let setup = || make_select_all(*size);
                let routine = |mut streams: SelectAll<_>| async move {
                    let mut seen = 0;
                    black_box({
                        while streams.next().await.is_some() {
                            seen += 1;
                        }
                        assert_eq!(seen, *size);
                    });
                };
                b.to_async(FuturesExecutor)
                    .iter_batched(setup, routine, BatchSize::SmallInput)
            });
        }
        bench_group.finish();
    }
}

mod merge {
use criterion::async_executor::FuturesExecutor;
Expand All @@ -17,7 +80,7 @@ mod merge {
merge_benches,
vec_merge_bench,
array_merge_bench,
tuple_merge_bench
tuple_merge_bench,
);

fn vec_merge_bench(c: &mut Criterion) {
Expand Down
19 changes: 19 additions & 0 deletions benches/utils/countdown_streams.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use futures_concurrency::stream::StreamGroup;
use futures_core::Stream;

use std::cell::{Cell, RefCell};
Expand All @@ -19,6 +20,24 @@ pub fn streams_vec(len: usize) -> Vec<CountdownStream> {
streams
}

/// Builds a `StreamGroup` holding `len` countdown streams that all share a
/// single waker heap and completion counter.
#[allow(unused)]
pub fn make_stream_group(len: usize) -> StreamGroup<CountdownStream> {
    let shared_wakers = Rc::new(RefCell::new(BinaryHeap::new()));
    let done_count = Rc::new(Cell::new(0));
    (0..len)
        .map(|idx| CountdownStream::new(idx, len, Rc::clone(&shared_wakers), Rc::clone(&done_count)))
        .collect()
}

/// Builds a `futures::stream::SelectAll` holding `len` countdown streams that
/// all share a single waker heap and completion counter (baseline for the
/// `StreamGroup` benchmarks).
#[allow(unused)]
pub fn make_select_all(len: usize) -> futures::stream::SelectAll<CountdownStream> {
    let shared_wakers = Rc::new(RefCell::new(BinaryHeap::new()));
    let done_count = Rc::new(Cell::new(0));
    (0..len)
        .map(|idx| CountdownStream::new(idx, len, Rc::clone(&shared_wakers), Rc::clone(&done_count)))
        .collect()
}

pub fn streams_array<const N: usize>() -> [CountdownStream; N] {
let wakers = Rc::new(RefCell::new(BinaryHeap::new()));
let completed = Rc::new(Cell::new(0));
Expand Down
6 changes: 3 additions & 3 deletions src/future/join/array.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ where
pending: N,
items: OutputArray::uninit(),
wakers: WakerArray::new(),
state: PollArray::new(),
state: PollArray::new_pending(),
futures: FutureArray::new(futures),
}
}
Expand Down Expand Up @@ -138,7 +138,7 @@ where
state.is_ready(),
"Future should have reached a `Ready` state"
);
state.set_consumed();
state.set_none();
}

// SAFETY: we've checked with the state that all of our outputs have been
Expand Down Expand Up @@ -202,6 +202,6 @@ mod test {
let waker = Arc::new(DummyWaker()).into();
let mut cx = Context::from_waker(&waker);
let _ = fut.as_mut().poll(&mut cx);
assert_eq!(format!("{:?}", fut), "[Consumed, Consumed]");
assert_eq!(format!("{:?}", fut), "[None, None]");
}
}
4 changes: 2 additions & 2 deletions src/future/join/tuple.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ macro_rules! drop_initialized_values {
// SAFETY: we've just filtered down to *only* the initialized values.
// We can assume they're initialized, and this is where we drop them.
unsafe { $output.assume_init_drop() };
$states[$state_idx].set_consumed();
$states[$state_idx].set_none();
}
drop_initialized_values!(@drop $($rem_outs,)* | $states, $($rem_idx,)*);
};
Expand Down Expand Up @@ -267,7 +267,7 @@ macro_rules! impl_join_tuple {
let ($($F,)+): ($($F,)+) = self;
$StructName {
futures: $mod_name::Futures {$($F: ManuallyDrop::new($F.into_future()),)+},
state: PollArray::new(),
state: PollArray::new_pending(),
outputs: ($(MaybeUninit::<$F::Output>::uninit(),)+),
wakers: WakerArray::new(),
completed: 0,
Expand Down
6 changes: 3 additions & 3 deletions src/future/join/vec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ where
pending: len,
items: OutputVec::uninit(len),
wakers: WakerVec::new(len),
state: PollVec::new(len),
state: PollVec::new_pending(len),
futures: FutureVec::new(futures),
}
}
Expand Down Expand Up @@ -132,7 +132,7 @@ where
state.is_ready(),
"Future should have reached a `Ready` state"
);
state.set_consumed();
state.set_none();
});

// SAFETY: we've checked with the state that all of our outputs have been
Expand Down Expand Up @@ -196,6 +196,6 @@ mod test {
let waker = Arc::new(DummyWaker()).into();
let mut cx = Context::from_waker(&waker);
let _ = fut.as_mut().poll(&mut cx);
assert_eq!(format!("{:?}", fut), "[Consumed, Consumed]");
assert_eq!(format!("{:?}", fut), "[None, None]");
}
}
4 changes: 2 additions & 2 deletions src/future/race_ok/tuple/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ macro_rules! impl_race_ok_tuple {
done: false,
indexer: utils::Indexer::new($StructName),
errors: array::from_fn(|_| MaybeUninit::uninit()),
errors_states: PollArray::new(),
errors_states: PollArray::new_pending(),
$($F: $F.into_future()),*
}
}
Expand Down Expand Up @@ -154,7 +154,7 @@ macro_rules! impl_race_ok_tuple {
.for_each(|(st, err)| {
// SAFETY: we've filtered down to only the `ready`/initialized data
unsafe { err.assume_init_drop() };
st.set_consumed();
st.set_none();
});
}
}
Expand Down
4 changes: 2 additions & 2 deletions src/future/try_join/array.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ where
pending: N,
items: OutputArray::uninit(),
wakers: WakerArray::new(),
state: PollArray::new(),
state: PollArray::new_pending(),
futures: FutureArray::new(futures),
}
}
Expand Down Expand Up @@ -147,7 +147,7 @@ where
state.is_ready(),
"Future should have reached a `Ready` state"
);
state.set_consumed();
state.set_none();
}

// SAFETY: we've checked with the state that all of our outputs have been
Expand Down
4 changes: 2 additions & 2 deletions src/future/try_join/tuple.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ macro_rules! drop_initialized_values {
// SAFETY: we've just filtered down to *only* the initialized values.
// We can assume they're initialized, and this is where we drop them.
unsafe { $output.assume_init_drop() };
$states[$state_idx].set_consumed();
$states[$state_idx].set_none();
}
drop_initialized_values!(@drop $($rem_outs,)* | $states, $($rem_idx,)*);
};
Expand Down Expand Up @@ -289,7 +289,7 @@ macro_rules! impl_try_join_tuple {
futures: $mod_name::Futures {$(
$F: ManuallyDrop::new($F.into_future()),
)+},
state: PollArray::new(),
state: PollArray::new_pending(),
outputs: ($(MaybeUninit::<$T>::uninit(),)+),
wakers: WakerArray::new(),
completed: 0,
Expand Down
4 changes: 2 additions & 2 deletions src/future/try_join/vec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ where
pending: len,
items: OutputVec::uninit(len),
wakers: WakerVec::new(len),
state: PollVec::new(len),
state: PollVec::new_pending(len),
futures: FutureVec::new(futures),
}
}
Expand Down Expand Up @@ -148,7 +148,7 @@ where
state.is_ready(),
"Future should have reached a `Ready` state"
);
state.set_consumed();
state.set_none();
}

// SAFETY: we've checked with the state that all of our outputs have been
Expand Down
6 changes: 3 additions & 3 deletions src/stream/merge/array.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ where
streams,
indexer: Indexer::new(N),
wakers: WakerArray::new(),
state: PollArray::new(),
state: PollArray::new_pending(),
complete: 0,
done: false,
}
Expand Down Expand Up @@ -72,7 +72,7 @@ where
if !readiness.any_ready() {
// Nothing is ready yet
return Poll::Pending;
} else if !readiness.clear_ready(index) || this.state[index].is_consumed() {
} else if !readiness.clear_ready(index) || this.state[index].is_none() {
continue;
}

Expand All @@ -91,7 +91,7 @@ where
}
Poll::Ready(None) => {
*this.complete += 1;
this.state[index].set_consumed();
this.state[index].set_none();
if *this.complete == this.streams.len() {
return Poll::Ready(None);
}
Expand Down
6 changes: 3 additions & 3 deletions src/stream/merge/tuple.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ macro_rules! poll_stream {
}
Poll::Ready(None) => {
*$this.completed += 1;
$this.state[$stream_idx].set_consumed();
$this.state[$stream_idx].set_none();
if *$this.completed == $len_streams {
return Poll::Ready(None);
}
Expand Down Expand Up @@ -132,7 +132,7 @@ macro_rules! impl_merge_tuple {
if !readiness.any_ready() {
// Nothing is ready yet
return Poll::Pending;
} else if !readiness.clear_ready(index) || this.state[index].is_consumed() {
} else if !readiness.clear_ready(index) || this.state[index].is_none() {
continue;
}

Expand Down Expand Up @@ -175,7 +175,7 @@ macro_rules! impl_merge_tuple {
streams: $mod_name::Streams { $($F: $F.into_stream()),+ },
indexer: utils::Indexer::new(utils::tuple_len!($($F,)*)),
wakers: WakerArray::new(),
state: PollArray::new(),
state: PollArray::new_pending(),
completed: 0,
}
}
Expand Down
6 changes: 3 additions & 3 deletions src/stream/merge/vec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ where
let len = streams.len();
Self {
wakers: WakerVec::new(len),
state: PollVec::new(len),
state: PollVec::new_pending(len),
indexer: Indexer::new(len),
streams,
complete: 0,
Expand Down Expand Up @@ -73,7 +73,7 @@ where
if !readiness.any_ready() {
// Nothing is ready yet
return Poll::Pending;
} else if !readiness.clear_ready(index) || this.state[index].is_consumed() {
} else if !readiness.clear_ready(index) || this.state[index].is_none() {
continue;
}

Expand All @@ -92,7 +92,7 @@ where
}
Poll::Ready(None) => {
*this.complete += 1;
this.state[index].set_consumed();
this.state[index].set_none();
if *this.complete == this.streams.len() {
return Poll::Ready(None);
}
Expand Down
5 changes: 5 additions & 0 deletions src/stream/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,13 @@ pub use chain::Chain;
pub use into_stream::IntoStream;
pub use merge::Merge;
pub use stream_ext::StreamExt;
#[doc(inline)]
pub use stream_group::StreamGroup;
pub use zip::Zip;

/// A growable group of streams which act as a single unit.
pub mod stream_group;

pub(crate) mod chain;
mod into_stream;
pub(crate) mod merge;
Expand Down
Loading

0 comments on commit 7e1402a

Please sign in to comment.