Skip to content

Commit

Permalink
fix: do not panic on unexpected end of archive
Browse files Browse the repository at this point in the history
When cloning from an archive that ends unexpectedly, we now fail with an error instead of panicking.
The panic previously occurred when the caller continued to read from the chunk stream after the first error.
Now the stream is closed after the first error.
  • Loading branch information
oll3 committed Apr 6, 2024
1 parent ba10c05 commit bc8fed9
Show file tree
Hide file tree
Showing 4 changed files with 89 additions and 17 deletions.
79 changes: 62 additions & 17 deletions bitar/src/archive.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
use blake2::{Blake2b512, Digest};
use futures_util::{stream::Stream, StreamExt};
use std::{convert::TryInto, fmt};
use std::{
convert::TryInto,
fmt,
task::{ready, Poll},
};

use crate::{
archive_reader::ArchiveReader, chunk_dictionary as dict, chunker,
Expand Down Expand Up @@ -279,29 +283,34 @@ impl<R> Archive<R> {
.map(|cd| ChunkOffset::new(cd.archive_offset, cd.archive_size))
.collect();
let archive_chunk_compression = self.chunk_compression().map(|c| c.algorithm);
self.reader
let stream = self
.reader
.read_chunks(read_at)
.enumerate()
.map(move |(index, result)| {
let source_size = descriptors[index].source_size as usize;
match result {
Ok(chunk) => Ok(CompressedArchiveChunk {
chunk: CompressedChunk {
compression: if source_size == chunk.len() {
// When chunk size matches the source chunk size chunk has not been compressed
// since compressing it probably made it bigger.
None
} else {
archive_chunk_compression
Ok(chunk) => {
let descriptor = descriptors[index];
let source_size: usize = descriptor.source_size.try_into().unwrap();
Ok(CompressedArchiveChunk {
chunk: CompressedChunk {
compression: if source_size == chunk.len() {
// When chunk size matches the source chunk size chunk has not been compressed
// since compressing it probably made it bigger.
None
} else {
archive_chunk_compression
},
data: chunk,
source_size,
},
data: chunk,
source_size,
},
expected_hash: descriptors[index].checksum.clone(),
}),
expected_hash: descriptor.checksum.clone(),
})
}
Err(err) => Err(err),
}
})
});
StreamUntilFirstError::new(stream)
}
}

Expand Down Expand Up @@ -360,3 +369,39 @@ fn compression_from_dictionary<R>(
Err(_err) => Err(ArchiveError::invalid_archive("unknown compression")),
}
}

/// The first error returned by the underlying stream will be emitted.
/// Any following read from the stream will result in end of stream (None).
struct StreamUntilFirstError<S> {
    // The wrapped inner stream that items are read from.
    stream: S,
    // Set to true once an `Err` item has been emitted; all later polls return `None`.
    end: bool,
}

impl<S> StreamUntilFirstError<S> {
fn new(stream: S) -> Self {
Self { stream, end: false }
}
}

// Delegates to the inner stream until the first `Err` item is observed,
// then reports end-of-stream (`None`) on every subsequent poll.
impl<S, T, E> Stream for StreamUntilFirstError<S>
where
    S: Stream<Item = Result<T, E>> + Unpin,
{
    type Item = S::Item;

    fn poll_next(
        mut self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> Poll<Option<Self::Item>> {
        // Once an error has been emitted the stream is permanently terminated.
        if self.end {
            return Poll::Ready(None);
        }
        // `ready!` propagates `Poll::Pending` from the inner stream unchanged.
        Poll::Ready(match ready!(self.stream.poll_next_unpin(cx)) {
            Some(Err(r)) => {
                // Remember the error so the next poll ends the stream.
                self.end = true;
                Some(Err(r))
            }
            // `Ok` items and the natural end of stream pass through untouched.
            other => other,
        })
    }
}
26 changes: 26 additions & 0 deletions bitar/tests/clone_archives_default.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
mod common;

use std::io::ErrorKind;

use bitar::{archive_reader::IoReader, Archive};
use futures_util::stream::StreamExt;
use tokio::fs::File;
Expand Down Expand Up @@ -76,3 +78,27 @@ async fn clone_local_v0_7_1_corrupt_chunk() {
}
panic!("no hashsum mismatch error?!");
}

#[tokio::test]
async fn clone_local_v0_11_0_brotli_expect_unexpected_end() {
    // The archive file is truncated, but its header is intact, so init succeeds.
    let file = File::open(ARCHIVE_0_11_0_BROTLI_TRUNCATED).await.unwrap();
    let mut archive = Archive::try_init(IoReader::new(file)).await.unwrap();
    let mut chunk_stream = archive.chunk_stream(&archive.build_source_index());

    // The first chunk is fully present and must decompress and verify cleanly.
    let first = chunk_stream.next().await.unwrap().unwrap();
    let _chunk1 = first.decompress().unwrap().verify().unwrap();

    // The truncation surfaces as a single UnexpectedEof error...
    let first_err = chunk_stream.next().await.unwrap().unwrap_err();
    assert_eq!(first_err.kind(), ErrorKind::UnexpectedEof);

    // ...after which the stream stays terminated on every further poll.
    for _ in 0..3 {
        assert!(chunk_stream.next().await.is_none());
    }
}
1 change: 1 addition & 0 deletions bitar/tests/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ pub static ARCHIVE_0_1_1_ZSTD: &str = "tests/resources/zero-0_1_1-zstd.cba";
pub static ARCHIVE_0_7_1_CORRUPT_HEADER: &str = "tests/resources/rand-0_7_1-corrupt-header.cba";
pub static ARCHIVE_0_7_1_CORRUPT_CHUNK: &str = "tests/resources/rand-0_7_1-corrupt-chunk.cba";
pub static ARCHIVE_0_7_1_BROTLI: &str = "tests/resources/zero-0_7_1-brotli.cba";
pub static ARCHIVE_0_11_0_BROTLI_TRUNCATED: &str = "tests/resources/rand-0_11_0-brotli-trunc.cba";

pub async fn clone_local_expect_checksum(path: &str, b2sum: &[u8]) {
clone_expect_checksum(
Expand Down
Binary file not shown.

0 comments on commit bc8fed9

Please sign in to comment.