Bump async_zip from 0.0.13 to 0.0.15 (#1022)

* Bump async_zip from 0.0.13 to 0.0.15

Bumps [async_zip](https://github.com/Majored/rs-async-zip) from 0.0.13 to 0.0.15.
- [Release notes](https://github.com/Majored/rs-async-zip/releases)
- [Commits](https://github.com/Majored/rs-async-zip/commits)

---
updated-dependencies:
- dependency-name: async_zip
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

* Fix build error

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

---------

Signed-off-by: dependabot[bot] <support@github.com>
Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jiahao XU <Jiahao_XU@outlook.com>
dependabot[bot] authored on 2023-05-01 21:23:43 +10:00, committed by GitHub
parent 52e2368e6c
commit 763ddd0427
4 changed files with 48 additions and 20 deletions

Cargo.lock (generated)

@@ -133,9 +133,9 @@ dependencies = [
 [[package]]
 name = "async_zip"
-version = "0.0.13"
+version = "0.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "79eaa2b44cfdce59cfff6cb013c96900635085fe7c28fbcbe926c9e5ad0ddfbc"
+checksum = "795310de3218cde15219fc98c1cf7d8fe9db4865aab27fcf1d535d6cb61c6b54"
 dependencies = [
  "async-compression",
  "crc32fast",


@@ -12,7 +12,7 @@ license = "GPL-3.0"
 [dependencies]
 async-trait = "0.1.68"
 async-compression = { version = "0.3.15", features = ["gzip", "zstd", "xz", "bzip2", "tokio"] }
-async_zip = { version = "0.0.13", features = ["deflate", "bzip2", "lzma", "zstd", "xz", "tokio"] }
+async_zip = { version = "0.0.15", features = ["deflate", "bzip2", "lzma", "zstd", "xz", "tokio"] }
 binstalk-types = { version = "0.4.0", path = "../binstalk-types" }
 bytes = "1.4.0"
 bzip2 = "0.4.4"


@@ -6,7 +6,7 @@ use std::{
     path::{Component, Path, PathBuf},
 };

-use async_zip::tokio::read::stream::ZipFileReader;
+use async_zip::base::read::stream::ZipFileReader;
 use bytes::{Bytes, BytesMut};
 use futures_lite::stream::Stream;
 use tokio::sync::mpsc;
@@ -50,12 +50,18 @@ where
     debug!("Decompressing from zip archive to `{}`", path.display());

     let reader = StreamReader::new(stream);
-    let mut zip = ZipFileReader::new(reader);
+    let mut zip = ZipFileReader::with_tokio(reader);
     let mut buf = BytesMut::with_capacity(4 * 4096);
     let mut extracted_files = ExtractedFiles::new();

-    while let Some(mut zip_reader) = zip.next_entry().await.map_err(ZipError::from_inner)? {
-        extract_zip_entry(&mut zip_reader, path, &mut buf, &mut extracted_files).await?;
+    while let Some(mut zip_reader) = zip.next_with_entry().await.map_err(ZipError::from_inner)? {
+        extract_zip_entry(
+            zip_reader.reader_mut(),
+            path,
+            &mut buf,
+            &mut extracted_files,
+        )
+        .await?;

         // extract_zip_entry would read the zip_reader until the file reaches eof,
         // unless extract_zip itself is cancelled or an error is raised.


@@ -1,9 +1,13 @@
 use std::{
+    borrow::Cow,
     io::Write,
     path::{Component, Path, PathBuf},
 };

-use async_zip::{base::read::stream::Reading, tokio::read::stream::ZipFileReader};
+use async_zip::{
+    base::{read::WithEntry, read::ZipEntryReader},
+    ZipString,
+};
 use bytes::{Bytes, BytesMut};
 use futures_lite::future::try_zip as try_join;
 use futures_util::io::Take;
@@ -37,7 +41,7 @@ impl ZipError {
 }

 pub(super) async fn extract_zip_entry<R>(
-    zip_reader: &mut ZipFileReader<Reading<'_, Take<Compat<R>>>>,
+    zip_reader: &mut ZipEntryReader<'_, Take<Compat<R>>, WithEntry<'_>>,
     path: &Path,
     buf: &mut BytesMut,
     extracted_files: &mut ExtractedFiles,
@@ -47,8 +51,7 @@ where
 {
     // Sanitize filename
     let raw_filename = zip_reader.entry().filename();
-    let filename = check_filename_and_normalize(raw_filename)
-        .ok_or_else(|| ZipError(ZipErrorInner::InvalidFilePath(raw_filename.into())))?;
+    let (filename, is_dir) = check_filename_and_normalize(raw_filename)?;

     // Calculates the outpath
     let outpath = path.join(&filename);
@@ -57,8 +60,6 @@ where
     #[cfg_attr(not(unix), allow(unused_mut))]
     let mut perms = None;

-    let is_dir = raw_filename.ends_with('/');
-
     #[cfg(unix)]
     {
         use std::{fs::Permissions, os::unix::fs::PermissionsExt};
@@ -115,7 +116,17 @@ where
         Ok(())
     });

-    let read_task = copy_file_to_mpsc(zip_reader.reader().compat(), tx, buf);
+    let read_task = async move {
+        // Read everything into `tx`
+        copy_file_to_mpsc(zip_reader.compat(), tx, buf).await?;
+        // Check crc32 checksum.
+        //
+        // NOTE that since everything is already read into the channel,
+        // this function should not read any byte into the `Vec` and
+        // should return `0`.
+        assert_eq!(zip_reader.read_to_end_checked(&mut Vec::new()).await?, 0);
+        Ok(())
+    };

     try_join(
         async move { write_task.await.map_err(From::from) },
@@ -183,29 +194,40 @@ where
 /// to path-based exploits.
 ///
 /// This function is adapted from `zip::ZipFile::enclosed_name`.
-fn check_filename_and_normalize(filename: &str) -> Option<PathBuf> {
+fn check_filename_and_normalize(filename: &ZipString) -> Result<(PathBuf, bool), DownloadError> {
+    let filename = filename
+        .as_str()
+        .map(Cow::Borrowed)
+        .unwrap_or_else(|_| String::from_utf8_lossy(filename.as_bytes()));
+
+    let bail = |filename: Cow<'_, str>| {
+        Err(ZipError(ZipErrorInner::InvalidFilePath(
+            filename.into_owned().into(),
+        )))
+    };
+
     if filename.contains('\0') {
-        return None;
+        return bail(filename)?;
     }

     let mut path = PathBuf::new();

     // The following loop is adapted from
     // `normalize_path::NormalizePath::normalize`.
-    for component in Path::new(filename).components() {
+    for component in Path::new(&*filename).components() {
         match component {
-            Component::Prefix(_) | Component::RootDir => return None,
+            Component::Prefix(_) | Component::RootDir => return bail(filename)?,
             Component::CurDir => (),
             Component::ParentDir => {
                 if !path.pop() {
                     // `PathBuf::pop` returns false if there is no parent,
                     // which means the path is invalid.
-                    return None;
+                    return bail(filename)?;
                 }
             }
             Component::Normal(c) => path.push(c),
         }
     }

-    Some(path)
+    Ok((path, filename.ends_with('/')))
 }
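
For context, the migration above amounts to three changes in async_zip's streaming API between 0.0.13 and 0.0.15: the tokio entry point moved to async_zip::base::read::stream::ZipFileReader::with_tokio, next_entry became next_with_entry and now hands back a reader paired with the entry metadata (the WithEntry state), and filenames are ZipString values that must be decoded explicitly. The snippet below is a minimal, untested sketch of that read pattern, restricted to the calls visible in the diff; the helper name print_first_entry and the AsyncBufRead + Unpin bound are assumptions, not part of this commit (the PR feeds with_tokio a tokio_util::io::StreamReader, which satisfies that bound), and it only handles the first entry rather than looping over the whole archive as extract_zip does.

use async_zip::base::read::stream::ZipFileReader;
use tokio::io::AsyncBufRead;

// Hypothetical helper: open a zip stream, print the first entry's name and
// drain it, verifying its crc32 on the way out.
async fn print_first_entry<R>(reader: R) -> Result<(), Box<dyn std::error::Error>>
where
    // Assumed bound, mirroring the `StreamReader` the PR passes in.
    R: AsyncBufRead + Unpin,
{
    // 0.0.15 entry point used by the PR (was `ZipFileReader::new` in 0.0.13).
    let mut zip = ZipFileReader::with_tokio(reader);

    // `next_with_entry` replaces 0.0.13's `next_entry`; it yields a reader
    // paired with the entry's metadata.
    let Some(mut zip_reader) = zip.next_with_entry().await? else {
        return Ok(()); // empty archive
    };

    // Entry metadata is reached through the inner `ZipEntryReader`.
    let entry_reader = zip_reader.reader_mut();

    // Filenames are `ZipString` in 0.0.15; `as_str()` fails on non-UTF-8 names.
    if let Ok(name) = entry_reader.entry().filename().as_str() {
        println!("first entry: {name}");
    }

    // Drain the entry; `read_to_end_checked` also verifies the crc32, which is
    // why the PR calls it after `copy_file_to_mpsc` has consumed the file.
    entry_reader.read_to_end_checked(&mut Vec::new()).await?;

    Ok(())
}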