
Add async tests
zacps committed Oct 25, 2020
1 parent bcb3e54 commit 0db5741
Showing 5 changed files with 244 additions and 19 deletions.
1 change: 1 addition & 0 deletions Cargo.toml
@@ -30,6 +30,7 @@ async-compression = { version = "0.3.5", features = ["futures-io", "deflate", "b
bencher = "0.1"
rand = "0.7"
walkdir = "2"
futures-await-test = "0.3.0"

[features]
deflate = ["flate2/rust_backend"]
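
The `futures-await-test` dev-dependency added above provides the `#[async_test]` attribute used throughout the new tests. A minimal sketch of how it is applied (the test name and body here are purely illustrative):

use futures_await_test::async_test;

// `#[async_test]` stands in for `#[test]` on async functions: the macro drives
// the returned future to completion so the usual assertion macros work inside.
#[async_test]
async fn attribute_smoke_test() {
    let value = async { 21 * 2 }.await;
    assert_eq!(value, 42);
}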
127 changes: 125 additions & 2 deletions src/read.rs
@@ -889,8 +889,7 @@ impl<R: AsyncRead + AsyncSeek> AsyncZipArchive<R> {
match make_reader_async(data.compression_method, data.crc32, limit_reader, password).await {
Ok(Ok(reader)) => Ok(Ok(AsyncZipFile {
reader,
// TODO: Avoid clone?
data: Cow::Owned(data.clone()),
data: Cow::Borrowed(data),
})),
Err(e) => Err(e),
Ok(Err(e)) => Ok(Err(e)),
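
The change above from `Cow::Owned(data.clone())` to `Cow::Borrowed(data)` resolves the old `// TODO: Avoid clone?` note: when the entry metadata is looked up in the archive it can simply be borrowed, while the streaming path below still needs `Cow::Owned`. A minimal sketch of that pattern, with simplified types that are not the crate's actual definitions:

use std::borrow::Cow;

#[derive(Clone)]
struct EntryData { name: String }

// A file handle either borrows metadata held by the archive, or owns it when
// the data was parsed on the fly from a stream.
struct FileHandle<'a> { data: Cow<'a, EntryData> }

fn from_archive(entry: &EntryData) -> FileHandle<'_> {
    FileHandle { data: Cow::Borrowed(entry) } // no clone required
}

fn from_stream(entry: EntryData) -> FileHandle<'static> {
    FileHandle { data: Cow::Owned(entry) } // ownership moves into the handle
}

fn main() {
    let entry = EntryData { name: "mimetype".into() };
    let handle = from_archive(&entry);
    assert_eq!(handle.data.name, "mimetype"); // Cow derefs to EntryData
    let owned = from_stream(entry);
    assert_eq!(owned.data.name, "mimetype");
}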
@@ -1485,6 +1484,91 @@ pub fn read_zipfile_from_stream<'a, R: io::Read>(
}))
}

/// Asynchronous counterpart of [read_zipfile_from_stream].
pub async fn read_zipfile_from_stream_async<'a, R: AsyncRead>(
mut reader: Pin<&'a mut R>,
) -> ZipResult<Option<AsyncZipFile<'_>>> {
let mut r = reader.compat_mut();
let signature = r.read_u32_le().await?;

match signature {
spec::LOCAL_FILE_HEADER_SIGNATURE => (),
spec::CENTRAL_DIRECTORY_HEADER_SIGNATURE => return Ok(None),
_ => return Err(ZipError::InvalidArchive("Invalid local file header")),
}

let version_made_by = r.read_u16_le().await?;
let flags = r.read_u16_le().await?;
let encrypted = flags & 1 == 1;
let is_utf8 = flags & (1 << 11) != 0;
let using_data_descriptor = flags & (1 << 3) != 0;
#[allow(deprecated)]
let compression_method = CompressionMethod::from_u16(r.read_u16_le().await?);
let last_mod_time = r.read_u16_le().await?;
let last_mod_date = r.read_u16_le().await?;
let crc32 = r.read_u32_le().await?;
let compressed_size = r.read_u32_le().await?;
let uncompressed_size = r.read_u32_le().await?;
let file_name_length = r.read_u16_le().await? as usize;
let extra_field_length = r.read_u16_le().await? as usize;

let mut file_name_raw = vec![0; file_name_length];
reader.read_exact(&mut file_name_raw).await?;
let mut extra_field = vec![0; extra_field_length];
reader.read_exact(&mut extra_field).await?;

let file_name = match is_utf8 {
true => String::from_utf8_lossy(&*file_name_raw).into_owned(),
false => file_name_raw.clone().from_cp437(),
};

let mut result = ZipFileData {
system: System::from_u8((version_made_by >> 8) as u8),
version_made_by: version_made_by as u8,
encrypted,
compression_method,
last_modified_time: DateTime::from_msdos(last_mod_date, last_mod_time),
crc32,
compressed_size: compressed_size as u64,
uncompressed_size: uncompressed_size as u64,
file_name,
file_name_raw,
file_comment: String::new(), // file comment is only available in the central directory
// header_start and data start are not available, but also don't matter, since seeking is
// not available.
header_start: 0,
data_start: 0,
central_header_start: 0,
// The external_attributes field is only available in the central directory.
// We set this to zero, which should be valid as the docs state 'If input came
// from standard input, this field is set to zero.'
external_attributes: 0,
};

match parse_extra_field(&mut result, &extra_field) {
Ok(..) | Err(ZipError::Io(..)) => {}
Err(e) => return Err(e),
}

if encrypted {
return unsupported_zip_error("Encrypted files are not supported");
}
if using_data_descriptor {
return unsupported_zip_error("The file length is not available in the local header");
}

let limit_reader = (reader as Pin<&'a mut dyn AsyncRead>).take(result.compressed_size as u64);

let result_crc32 = result.crc32;
let result_compression_method = result.compression_method;
Ok(Some(AsyncZipFile {
data: Cow::Owned(result),
reader: make_reader_async(result_compression_method, result_crc32, limit_reader, None)
.await?
.unwrap(),
}))
}

#[cfg(test)]
mod test {
#[test]
@@ -1610,3 +1694,42 @@ mod test {
}
}
}

#[cfg(all(test, feature = "async"))]
mod async_tests {
use futures::io::Cursor;
use futures_await_test::async_test;
use std::pin::Pin;

#[async_test]
async fn async_contents() {
use super::AsyncZipArchive;

let mut v = Vec::new();
v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip"));
let cursor = Cursor::new(v);
let mut reader = AsyncZipArchive::new(cursor).await.unwrap();
let reader = Pin::new(&mut reader);
assert!(reader.comment() == b"");
assert_eq!(reader.by_index(0).await.unwrap().central_header_start(), 77);
}

#[ignore = "Async drop implementation is currently broken"]
#[async_test]
async fn zip_read_streaming_async() {
use super::read_zipfile_from_stream_async;

let mut v = Vec::new();
v.extend_from_slice(include_bytes!("../tests/data/mimetype.zip"));
let mut reader = Cursor::new(v);
loop {
match read_zipfile_from_stream_async(Pin::new(&mut reader))
.await
.unwrap()
{
None => break,
_ => (),
}
}
}
}
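
For reference, the intended call pattern for the new `read_zipfile_from_stream_async` mirrors its synchronous counterpart: call it repeatedly on the same pinned reader until it returns `Ok(None)` at the start of the central directory. A hedged sketch, assuming the function is reachable as `zip::read::read_zipfile_from_stream_async` and that `AsyncZipFile::name` behaves like the sync version; per-entry processing is elided, and, as the ignored test notes, the async drop path is still broken, so this shows the intended shape rather than a working guarantee:

use std::pin::Pin;
use futures::io::Cursor;
use zip::read::read_zipfile_from_stream_async;

async fn list_entries(bytes: Vec<u8>) -> zip::result::ZipResult<Vec<String>> {
    let mut reader = Cursor::new(bytes);
    let mut names = Vec::new();
    // Each call parses one local file header; `None` marks the central directory.
    while let Some(file) = read_zipfile_from_stream_async(Pin::new(&mut reader)).await? {
        names.push(file.name().to_string());
        // Dropping `file` at the end of each iteration is what the ignored
        // streaming test above exercises.
    }
    Ok(names)
}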
4 changes: 3 additions & 1 deletion src/spec.rs
@@ -138,7 +138,9 @@ impl CentralDirectoryEnd {
let mut pos = file_length - HEADER_SIZE;
while pos >= search_upper_bound {
reader.seek(io::SeekFrom::Start(pos as u64)).await?;
if reader.compat_mut().read_u32_le().await? == CENTRAL_DIRECTORY_END_SIGNATURE {
if reader.compat_mut().read_u32_le().await?
== CENTRAL_DIRECTORY_END_SIGNATURE
{
reader
.seek(io::SeekFrom::Current(
BYTES_BETWEEN_MAGIC_AND_COMMENT_SIZE as i64,
38 changes: 38 additions & 0 deletions tests/end_to_end.rs
@@ -2,6 +2,11 @@ use std::collections::HashSet;
use std::io::prelude::*;
use std::io::Cursor;
use std::iter::FromIterator;
use std::pin::Pin;

use futures::AsyncReadExt;
use futures_await_test::async_test;

use zip::write::FileOptions;

// This test asserts that after creating a zip file, then reading its contents back out,
@@ -17,6 +22,21 @@ fn end_to_end() {
assert!(file_contents.as_bytes() == LOREM_IPSUM);
}

#[async_test]
async fn async_end_to_end() {
let mut file = Cursor::new(Vec::new());

write_to_zip_file(&mut file).expect("file written");

let position = file.position();
let mut file = futures::io::Cursor::new(file.into_inner());
file.set_position(position);

let file_contents: String = read_zip_file_async(&mut file).await.unwrap();

assert!(file_contents.as_bytes() == LOREM_IPSUM);
}

fn write_to_zip_file(file: &mut Cursor<Vec<u8>>) -> zip::result::ZipResult<()> {
let mut zip = zip::ZipWriter::new(file);

@@ -50,6 +70,24 @@ fn read_zip_file(zip_file: &mut Cursor<Vec<u8>>) -> zip::result::ZipResult<Strin
Ok(contents)
}

async fn read_zip_file_async(
zip_file: &mut futures::io::Cursor<Vec<u8>>,
) -> zip::result::ZipResult<String> {
let mut archive = zip::AsyncZipArchive::new(zip_file).await.unwrap();
let archive = Pin::new(&mut archive);

let expected_file_names = ["test/", "test/☃.txt", "test/lorem_ipsum.txt"];
let expected_file_names = HashSet::from_iter(expected_file_names.iter().map(|&v| v));
let file_names = archive.file_names().collect::<HashSet<_>>();
assert_eq!(file_names, expected_file_names);

let mut file = archive.by_name("test/lorem_ipsum.txt").await?;

let mut contents = String::new();
file.read_to_string(&mut contents).await.unwrap();
Ok(contents)
}

const LOREM_IPSUM : &'static [u8] = b"Lorem ipsum dolor sit amet, consectetur adipiscing elit. In tellus elit, tristique vitae mattis egestas, ultricies vitae risus. Quisque sit amet quam ut urna aliquet
molestie. Proin blandit ornare dui, a tempor nisl accumsan in. Praesent a consequat felis. Morbi metus diam, auctor in auctor vel, feugiat id odio. Curabitur ex ex,
dictum quis auctor quis, suscipit id lorem. Aliquam vestibulum dolor nec enim vehicula, porta tristique augue tincidunt. Vivamus ut gravida est. Sed pellentesque, dolor
93 changes: 77 additions & 16 deletions tests/zip_crypto.rs
@@ -19,25 +19,30 @@

use std::io::Cursor;
use std::io::Read;
use std::pin::Pin;

use futures::AsyncReadExt;
use futures_await_test::async_test;

const ZIP_FILE_BYTES: &'static [u8; 197] = &[
0x50, 0x4b, 0x03, 0x04, 0x14, 0x00, 0x01, 0x00, 0x00, 0x00, 0x54, 0xbd, 0xb5, 0x50, 0x2f, 0x20,
0x79, 0x55, 0x2f, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x74, 0x65,
0x73, 0x74, 0x2e, 0x74, 0x78, 0x74, 0xca, 0x2d, 0x1d, 0x27, 0x19, 0x19, 0x63, 0x43, 0x77, 0x9a,
0x71, 0x76, 0xc9, 0xec, 0xd1, 0x6f, 0xd9, 0xf5, 0x22, 0x67, 0xb3, 0x8f, 0x52, 0xb5, 0x41, 0xbc,
0x5c, 0x36, 0xf2, 0x1d, 0x84, 0xc3, 0xc0, 0x28, 0x3b, 0xfd, 0xe1, 0x70, 0xc2, 0xcc, 0x0c, 0x11,
0x0c, 0xc5, 0x95, 0x2f, 0xa4, 0x50, 0x4b, 0x01, 0x02, 0x3f, 0x00, 0x14, 0x00, 0x01, 0x00, 0x00,
0x00, 0x54, 0xbd, 0xb5, 0x50, 0x2f, 0x20, 0x79, 0x55, 0x2f, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00,
0x00, 0x08, 0x00, 0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x74, 0x78, 0x74, 0x0a, 0x00, 0x20, 0x00, 0x00,
0x00, 0x00, 0x00, 0x01, 0x00, 0x18, 0x00, 0x31, 0xb2, 0x3b, 0xbf, 0xb8, 0x2f, 0xd6, 0x01, 0x31,
0xb2, 0x3b, 0xbf, 0xb8, 0x2f, 0xd6, 0x01, 0xa8, 0xc4, 0x45, 0xbd, 0xb8, 0x2f, 0xd6, 0x01, 0x50,
0x4b, 0x05, 0x06, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x5a, 0x00, 0x00, 0x00, 0x55,
0x00, 0x00, 0x00, 0x00, 0x00,
];

#[test]
fn encrypted_file() {
let zip_file_bytes = &mut Cursor::new(vec![
0x50, 0x4b, 0x03, 0x04, 0x14, 0x00, 0x01, 0x00, 0x00, 0x00, 0x54, 0xbd, 0xb5, 0x50, 0x2f,
0x20, 0x79, 0x55, 0x2f, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,
0x74, 0x65, 0x73, 0x74, 0x2e, 0x74, 0x78, 0x74, 0xca, 0x2d, 0x1d, 0x27, 0x19, 0x19, 0x63,
0x43, 0x77, 0x9a, 0x71, 0x76, 0xc9, 0xec, 0xd1, 0x6f, 0xd9, 0xf5, 0x22, 0x67, 0xb3, 0x8f,
0x52, 0xb5, 0x41, 0xbc, 0x5c, 0x36, 0xf2, 0x1d, 0x84, 0xc3, 0xc0, 0x28, 0x3b, 0xfd, 0xe1,
0x70, 0xc2, 0xcc, 0x0c, 0x11, 0x0c, 0xc5, 0x95, 0x2f, 0xa4, 0x50, 0x4b, 0x01, 0x02, 0x3f,
0x00, 0x14, 0x00, 0x01, 0x00, 0x00, 0x00, 0x54, 0xbd, 0xb5, 0x50, 0x2f, 0x20, 0x79, 0x55,
0x2f, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00, 0x08, 0x00, 0x24, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x74, 0x65, 0x73, 0x74,
0x2e, 0x74, 0x78, 0x74, 0x0a, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x18,
0x00, 0x31, 0xb2, 0x3b, 0xbf, 0xb8, 0x2f, 0xd6, 0x01, 0x31, 0xb2, 0x3b, 0xbf, 0xb8, 0x2f,
0xd6, 0x01, 0xa8, 0xc4, 0x45, 0xbd, 0xb8, 0x2f, 0xd6, 0x01, 0x50, 0x4b, 0x05, 0x06, 0x00,
0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x5a, 0x00, 0x00, 0x00, 0x55, 0x00, 0x00, 0x00,
0x00, 0x00,
]);
let zip_file_bytes = &mut Cursor::new(ZIP_FILE_BYTES);

let mut archive = zip::ZipArchive::new(zip_file_bytes).unwrap();

@@ -84,3 +89,59 @@ fn encrypted_file() {
assert_eq!(data, "abcdefghijklmnopqrstuvwxyz123456789".as_bytes());
}
}

#[async_test]
async fn encrypted_file_async() {
let zip_file_bytes = &mut futures::io::Cursor::new(ZIP_FILE_BYTES);

let mut archive = zip::AsyncZipArchive::new(zip_file_bytes).await.unwrap();
let mut archive = Pin::new(&mut archive);

assert_eq!(archive.len(), 1); //Only one file inside archive: `test.txt`

{
// No password
let file = archive.as_mut().by_index(0).await;
match file {
Err(zip::result::ZipError::UnsupportedArchive("Password required to decrypt file")) => {
()
}
Err(_) => panic!(
"Expected PasswordRequired error when opening encrypted file without password"
),
Ok(_) => panic!("Error: Successfully opened encrypted file without password?!"),
}
}

{
// Wrong password
let file = archive
.as_mut()
.by_index_decrypt(0, b"wrong password")
.await;
match file {
Ok(Err(zip::result::InvalidPassword)) => (),
Err(_) => panic!(
"Expected InvalidPassword error when opening encrypted file with wrong password"
),
Ok(Ok(_)) => panic!("Error: Successfully opened encrypted file with wrong password?!"),
}
}

{
// Correct password, read contents
let mut file = archive
.as_mut()
.by_index_decrypt(0, "test".as_bytes())
.await
.unwrap()
.unwrap();
#[allow(deprecated)]
let file_name = file.name();
assert_eq!(file_name, "test.txt");

let mut data = Vec::new();
file.read_to_end(&mut data).await.unwrap();
assert_eq!(data, "abcdefghijklmnopqrstuvwxyz123456789".as_bytes());
}
}
