mirror of https://github.com/cargo-bins/cargo-binstall.git (synced 2025-04-24 22:30:03 +00:00)

Refactor: Extract new crate binstalk-{signal, downloader} (#518)

* Refactor: Extract new crate binstalk-downloader
* Re-export `PkgFmt` from `binstalk_manifests`
* Update release-pr.yml
* Update dependabot

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

This commit is contained in: parent 3841762a5b, commit 89fa5b1769
21 changed files with 456 additions and 260 deletions
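Note (not part of the commit): a minimal sketch of how the relocated download API is meant to be driven after this refactor, using only the signatures introduced in the diffs below (remote::Client::new, download::Download::new, and_extract, CancellationFuture). The URL, rate-limit values and destination path are placeholders.

use std::{num::NonZeroU64, time::Duration};

use binstalk_downloader::{
    download::{Download, DownloadError, PkgFmt},
    remote::{Client, Url},
};

async fn fetch_example() -> Result<(), DownloadError> {
    // Client::new(min_tls, per, num_request): at most `num_request` requests
    // per `per` duration; placeholder values here.
    let client = Client::new(None, Duration::from_millis(10), NonZeroU64::new(1).unwrap())?;

    let url = Url::parse("https://example.com/pkg.tgz").expect("static URL is valid");

    // Passing `None` disables cancellation; binstalk itself passes
    // `Some(Box::pin(wait_on_cancellation_signal()))`, see the fetcher diffs below.
    Download::new(client, url)
        .and_extract(PkgFmt::Tgz, "/tmp/pkg", None)
        .await?;

    Ok(())
}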
.github/dependabot.yml (vendored), 4 changes

@@ -24,6 +24,10 @@ updates:
     directory: "/crates/binstalk"
     schedule:
       interval: "daily"
+  - package-ecosystem: "cargo"
+    directory: "/crates/binstalk-downloader"
+    schedule:
+      interval: "daily"
   - package-ecosystem: "cargo"
     directory: "/crates/detect-wasi"
     schedule:
.github/workflows/release-pr.yml (vendored), 1 change

@@ -10,6 +10,7 @@ on:
           - bin
           - binstalk
           - binstalk-manifests
+          - binstalk-downloader
           - detect-targets
           - detect-wasi
           - fs-lock
Cargo.lock (generated), 33 changes

@@ -109,29 +109,21 @@ name = "binstalk"
 version = "0.4.1"
 dependencies = [
  "async-trait",
+ "binstalk-downloader",
  "binstalk-manifests",
- "binstall-tar",
- "bytes",
- "bzip2",
  "cargo_toml",
  "compact_str",
  "crates_io_api",
  "detect-targets",
- "digest",
  "env_logger",
- "flate2",
  "futures-util",
- "generic-array",
  "home",
- "httpdate",
  "itertools",
  "jobslot",
  "log",
  "miette",
  "normalize-path",
  "once_cell",
- "reqwest",
- "scopeguard",
  "semver",
  "serde",
  "strum",
@@ -139,6 +131,29 @@ dependencies = [
  "thiserror",
  "tinytemplate",
  "tokio",
+ "url",
+ "xz2",
+]
+
+[[package]]
+name = "binstalk-downloader"
+version = "0.1.0"
+dependencies = [
+ "binstalk-manifests",
+ "binstall-tar",
+ "bytes",
+ "bzip2",
+ "digest",
+ "flate2",
+ "futures-util",
+ "generic-array",
+ "httpdate",
+ "log",
+ "reqwest",
+ "scopeguard",
+ "tempfile",
+ "thiserror",
+ "tokio",
  "tower",
  "trust-dns-resolver",
  "url",
@@ -3,6 +3,7 @@ members = [
     "crates/bin",
     "crates/binstalk",
     "crates/binstalk-manifests",
+    "crates/binstalk-downloader",
     "crates/detect-wasi",
     "crates/fs-lock",
     "crates/normalize-path",
@@ -35,7 +35,8 @@ pub async fn install_crates(mut args: Args, jobserver_client: LazyJobserverClien
         args.min_tls_version.map(|v| v.into()),
         Duration::from_millis(rate_limit.duration.get()),
         rate_limit.request_count,
-    )?;
+    )
+    .map_err(BinstallError::from)?;

     // Build crates.io api client
     let crates_io_api_client = crates_io_api::AsyncClient::with_http_client(
|
70
crates/binstalk-downloader/Cargo.toml
Normal file
70
crates/binstalk-downloader/Cargo.toml
Normal file
|
@ -0,0 +1,70 @@
|
||||||
|
[package]
|
||||||
|
name = "binstalk-downloader"
|
||||||
|
description = "The binstall toolkit for downloading and extracting file"
|
||||||
|
repository = "https://github.com/cargo-bins/cargo-binstall"
|
||||||
|
documentation = "https://docs.rs/binstalk-downloader"
|
||||||
|
version = "0.1.0"
|
||||||
|
rust-version = "1.61.0"
|
||||||
|
authors = ["ryan <ryan@kurte.nz>"]
|
||||||
|
edition = "2021"
|
||||||
|
license = "GPL-3.0"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
binstalk-manifests = { version = "0.1.0", path = "../binstalk-manifests" }
|
||||||
|
bytes = "1.2.1"
|
||||||
|
bzip2 = "0.4.3"
|
||||||
|
digest = "0.10.5"
|
||||||
|
flate2 = { version = "1.0.24", default-features = false }
|
||||||
|
futures-util = { version = "0.3.25", default-features = false, features = ["std"] }
|
||||||
|
generic-array = "0.14.6"
|
||||||
|
httpdate = "1.0.2"
|
||||||
|
log = { version = "0.4.17", features = ["std"] }
|
||||||
|
reqwest = { version = "0.11.12", features = ["stream", "gzip", "brotli", "deflate"], default-features = false }
|
||||||
|
scopeguard = "1.1.0"
|
||||||
|
# Use a fork here since we need PAX support, but the upstream
|
||||||
|
# does not hav the PR merged yet.
|
||||||
|
#
|
||||||
|
#tar = "0.4.38"
|
||||||
|
tar = { package = "binstall-tar", version = "0.4.39" }
|
||||||
|
tempfile = "3.3.0"
|
||||||
|
thiserror = "1.0.37"
|
||||||
|
tokio = { version = "1.21.2", features = ["macros", "rt-multi-thread", "sync", "time"], default-features = false }
|
||||||
|
tower = { version = "0.4.13", features = ["limit", "util"] }
|
||||||
|
trust-dns-resolver = { version = "0.21.2", optional = true, default-features = false, features = ["dnssec-ring"] }
|
||||||
|
url = "2.3.1"
|
||||||
|
|
||||||
|
xz2 = "0.1.7"
|
||||||
|
|
||||||
|
# Disable all features of zip except for features of compression algorithms:
|
||||||
|
# Disabled features include:
|
||||||
|
# - aes-crypto: Enables decryption of files which were encrypted with AES, absolutely zero use for
|
||||||
|
# this crate.
|
||||||
|
# - time: Enables features using the [time](https://github.com/time-rs/time) crate,
|
||||||
|
# which is not used by this crate.
|
||||||
|
zip = { version = "0.6.3", default-features = false, features = ["deflate", "bzip2", "zstd"] }
|
||||||
|
|
||||||
|
# zstd is also depended by zip.
|
||||||
|
# Since zip 0.6.3 depends on zstd 0.11, we also have to use 0.11 here,
|
||||||
|
# otherwise there will be a link conflict.
|
||||||
|
zstd = { version = "0.11.2", default-features = false }
|
||||||
|
|
||||||
|
[features]
|
||||||
|
default = ["static", "rustls"]
|
||||||
|
|
||||||
|
static = ["bzip2/static", "xz2/static"]
|
||||||
|
pkg-config = ["zstd/pkg-config"]
|
||||||
|
|
||||||
|
zlib-ng = ["flate2/zlib-ng"]
|
||||||
|
|
||||||
|
rustls = [
|
||||||
|
"reqwest/rustls-tls",
|
||||||
|
|
||||||
|
# Enable the following features only if trust-dns-resolver is enabled.
|
||||||
|
"trust-dns-resolver?/dns-over-rustls",
|
||||||
|
# trust-dns-resolver currently supports https with rustls
|
||||||
|
"trust-dns-resolver?/dns-over-https-rustls",
|
||||||
|
]
|
||||||
|
native-tls = ["reqwest/native-tls", "trust-dns-resolver?/dns-over-native-tls"]
|
||||||
|
|
||||||
|
# Enable trust-dns-resolver so that features on it will also be enabled.
|
||||||
|
trust-dns = ["trust-dns-resolver", "reqwest/trust-dns"]
|
crates/binstalk-downloader/src/download.rs (new file), 170 lines

@@ -0,0 +1,170 @@
+use std::{fmt::Debug, future::Future, io, marker::PhantomData, path::Path, pin::Pin};
+
+use binstalk_manifests::cargo_toml_binstall::{PkgFmtDecomposed, TarBasedFmt};
+use digest::{Digest, FixedOutput, HashMarker, Output, OutputSizeUser, Update};
+use log::debug;
+use thiserror::Error as ThisError;
+
+pub use binstalk_manifests::cargo_toml_binstall::PkgFmt;
+pub use tar::Entries;
+pub use zip::result::ZipError;
+
+use crate::remote::{Client, Error as RemoteError, Url};
+
+mod async_extracter;
+pub use async_extracter::TarEntriesVisitor;
+use async_extracter::*;
+
+mod extracter;
+mod stream_readable;
+
+pub type CancellationFuture = Option<Pin<Box<dyn Future<Output = Result<(), io::Error>> + Send>>>;
+
+#[derive(Debug, ThisError)]
+pub enum DownloadError {
+    #[error(transparent)]
+    Unzip(#[from] ZipError),
+
+    #[error(transparent)]
+    Remote(#[from] RemoteError),
+
+    /// A generic I/O error.
+    ///
+    /// - Code: `binstall::io`
+    /// - Exit: 74
+    #[error(transparent)]
+    Io(io::Error),
+
+    #[error("installation cancelled by user")]
+    UserAbort,
+}
+
+impl From<io::Error> for DownloadError {
+    fn from(err: io::Error) -> Self {
+        if err.get_ref().is_some() {
+            let kind = err.kind();
+
+            let inner = err
+                .into_inner()
+                .expect("err.get_ref() returns Some, so err.into_inner() should also return Some");
+
+            inner
+                .downcast()
+                .map(|b| *b)
+                .unwrap_or_else(|err| DownloadError::Io(io::Error::new(kind, err)))
+        } else {
+            DownloadError::Io(err)
+        }
+    }
+}
+
+impl From<DownloadError> for io::Error {
+    fn from(e: DownloadError) -> io::Error {
+        match e {
+            DownloadError::Io(io_error) => io_error,
+            e => io::Error::new(io::ErrorKind::Other, e),
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct Download<D: Digest = NoDigest> {
+    client: Client,
+    url: Url,
+    _digest: PhantomData<D>,
+    _checksum: Vec<u8>,
+}
+
+impl Download {
+    pub fn new(client: Client, url: Url) -> Self {
+        Self {
+            client,
+            url,
+            _digest: PhantomData::default(),
+            _checksum: Vec::new(),
+        }
+    }
+
+    /// Download a file from the provided URL and process them in memory.
+    ///
+    /// This does not support verifying a checksum due to the partial extraction
+    /// and will ignore one if specified.
+    ///
+    /// `cancellation_future` can be used to cancel the extraction and return
+    /// [`DownloadError::UserAbort`] error.
+    pub async fn and_visit_tar<V: TarEntriesVisitor + Debug + Send + 'static>(
+        self,
+        fmt: TarBasedFmt,
+        visitor: V,
+        cancellation_future: CancellationFuture,
+    ) -> Result<V::Target, DownloadError> {
+        let stream = self.client.get_stream(self.url).await?;
+
+        debug!("Downloading and extracting then in-memory processing");
+
+        let ret =
+            extract_tar_based_stream_and_visit(stream, fmt, visitor, cancellation_future).await?;
+
+        debug!("Download, extraction and in-memory procession OK");
+
+        Ok(ret)
+    }
+
+    /// Download a file from the provided URL and extract it to the provided path.
+    ///
+    /// `cancellation_future` can be used to cancel the extraction and return
+    /// [`DownloadError::UserAbort`] error.
+    pub async fn and_extract(
+        self,
+        fmt: PkgFmt,
+        path: impl AsRef<Path>,
+        cancellation_future: CancellationFuture,
+    ) -> Result<(), DownloadError> {
+        let stream = self.client.get_stream(self.url).await?;
+
+        let path = path.as_ref();
+        debug!("Downloading and extracting to: '{}'", path.display());
+
+        match fmt.decompose() {
+            PkgFmtDecomposed::Tar(fmt) => {
+                extract_tar_based_stream(stream, path, fmt, cancellation_future).await?
+            }
+            PkgFmtDecomposed::Bin => extract_bin(stream, path, cancellation_future).await?,
+            PkgFmtDecomposed::Zip => extract_zip(stream, path, cancellation_future).await?,
+        }
+
+        debug!("Download OK, extracted to: '{}'", path.display());
+
+        Ok(())
+    }
+}
+
+impl<D: Digest> Download<D> {
+    pub fn new_with_checksum(client: Client, url: Url, checksum: Vec<u8>) -> Self {
+        Self {
+            client,
+            url,
+            _digest: PhantomData::default(),
+            _checksum: checksum,
+        }
+    }
+
+    // TODO: implement checking the sum, may involve bringing (parts of) and_extract() back in here
+}
+
+#[derive(Clone, Copy, Debug, Default)]
+pub struct NoDigest;
+
+impl FixedOutput for NoDigest {
+    fn finalize_into(self, _out: &mut Output<Self>) {}
+}
+
+impl OutputSizeUser for NoDigest {
+    type OutputSize = generic_array::typenum::U0;
+}
+
+impl Update for NoDigest {
+    fn update(&mut self, _data: &[u8]) {}
+}
+
+impl HashMarker for NoDigest {}
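Note (not part of the commit): `and_visit_tar` drives a caller-supplied `TarEntriesVisitor` over the archive entries in memory; `ManifestVisitor` further down in this diff is the real user. A hedged sketch of a hypothetical visitor that just counts entries:

use std::io::Read;

use binstalk_downloader::download::{DownloadError, Entries, TarEntriesVisitor};

// Hypothetical example type, not part of the commit.
#[derive(Debug, Default)]
struct CountingVisitor {
    entries_seen: usize,
}

impl TarEntriesVisitor for CountingVisitor {
    type Target = usize;

    fn visit<R: Read>(&mut self, entries: Entries<'_, R>) -> Result<(), DownloadError> {
        for res in entries {
            // Each tar entry is an io::Result; `?` converts the io::Error
            // into DownloadError via the From impl defined above.
            let _entry = res?;
            self.entries_seen += 1;
        }
        Ok(())
    }

    fn finish(self) -> Result<Self::Target, DownloadError> {
        Ok(self.entries_seen)
    }
}

It would be passed as, for example, `download.and_visit_tar(TarBasedFmt::Tgz, CountingVisitor::default(), None).await?`.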
@@ -13,15 +13,20 @@ use tar::Entries
 use tempfile::tempfile;
 use tokio::task::block_in_place;

-use super::{extracter::*, stream_readable::StreamReadable};
-use crate::{errors::BinstallError, manifests::cargo_toml_binstall::TarBasedFmt};
+use super::{
+    extracter::*, stream_readable::StreamReadable, CancellationFuture, DownloadError, TarBasedFmt,
+};

-pub async fn extract_bin<S, E>(stream: S, path: &Path) -> Result<(), BinstallError>
+pub async fn extract_bin<S, E>(
+    stream: S,
+    path: &Path,
+    cancellation_future: CancellationFuture,
+) -> Result<(), DownloadError>
 where
     S: Stream<Item = Result<Bytes, E>> + Unpin + 'static,
-    BinstallError: From<E>,
+    DownloadError: From<E>,
 {
-    let mut reader = StreamReadable::new(stream).await;
+    let mut reader = StreamReadable::new(stream, cancellation_future).await;
     block_in_place(move || {
         fs::create_dir_all(path.parent().unwrap())?;

@@ -43,12 +48,16 @@ where
     })
 }

-pub async fn extract_zip<S, E>(stream: S, path: &Path) -> Result<(), BinstallError>
+pub async fn extract_zip<S, E>(
+    stream: S,
+    path: &Path,
+    cancellation_future: CancellationFuture,
+) -> Result<(), DownloadError>
 where
     S: Stream<Item = Result<Bytes, E>> + Unpin + 'static,
-    BinstallError: From<E>,
+    DownloadError: From<E>,
 {
-    let mut reader = StreamReadable::new(stream).await;
+    let mut reader = StreamReadable::new(stream, cancellation_future).await;
     block_in_place(move || {
         fs::create_dir_all(path.parent().unwrap())?;

@@ -67,12 +76,13 @@ pub async fn extract_tar_based_stream<S, E>(
     stream: S,
     path: &Path,
     fmt: TarBasedFmt,
-) -> Result<(), BinstallError>
+    cancellation_future: CancellationFuture,
+) -> Result<(), DownloadError>
 where
     S: Stream<Item = Result<Bytes, E>> + Unpin + 'static,
-    BinstallError: From<E>,
+    DownloadError: From<E>,
 {
-    let reader = StreamReadable::new(stream).await;
+    let reader = StreamReadable::new(stream, cancellation_future).await;
     block_in_place(move || {
         fs::create_dir_all(path.parent().unwrap())?;

@@ -89,21 +99,22 @@ where
 pub trait TarEntriesVisitor {
     type Target;

-    fn visit<R: Read>(&mut self, entries: Entries<'_, R>) -> Result<(), BinstallError>;
-    fn finish(self) -> Result<Self::Target, BinstallError>;
+    fn visit<R: Read>(&mut self, entries: Entries<'_, R>) -> Result<(), DownloadError>;
+    fn finish(self) -> Result<Self::Target, DownloadError>;
 }

 pub async fn extract_tar_based_stream_and_visit<S, V, E>(
     stream: S,
     fmt: TarBasedFmt,
     mut visitor: V,
-) -> Result<V::Target, BinstallError>
+    cancellation_future: CancellationFuture,
+) -> Result<V::Target, DownloadError>
 where
     S: Stream<Item = Result<Bytes, E>> + Unpin + 'static,
     V: TarEntriesVisitor + Debug + Send + 'static,
-    BinstallError: From<E>,
+    DownloadError: From<E>,
 {
-    let reader = StreamReadable::new(stream).await;
+    let reader = StreamReadable::new(stream, cancellation_future).await;
     block_in_place(move || {
         debug!("Extracting from {fmt} archive to process it in memory");

@@ -12,7 +12,7 @@ use xz2::bufread::XzDecoder
 use zip::read::ZipArchive;
 use zstd::stream::Decoder as ZstdDecoder;

-use crate::{errors::BinstallError, manifests::cargo_toml_binstall::TarBasedFmt};
+use super::{DownloadError, TarBasedFmt};

 pub fn create_tar_decoder(
     dat: impl BufRead + 'static,

@@ -36,7 +36,7 @@ pub fn create_tar_decoder(
     Ok(Archive::new(r))
 }

-pub fn unzip(dat: File, dst: &Path) -> Result<(), BinstallError> {
+pub fn unzip(dat: File, dst: &Path) -> Result<(), DownloadError> {
     debug!("Decompressing from zip archive to `{dst:?}`");

     let mut zip = ZipArchive::new(dat)?;

@@ -1,15 +1,13 @@
 use std::{
     cmp::min,
-    future::Future,
     io::{self, BufRead, Read, Write},
-    pin::Pin,
 };

 use bytes::{Buf, Bytes};
 use futures_util::stream::{Stream, StreamExt};
 use tokio::runtime::Handle;

-use crate::{errors::BinstallError, helpers::signal::wait_on_cancellation_signal};
+use super::{CancellationFuture, DownloadError};

 /// This wraps an AsyncIterator as a `Read`able.
 /// It must be used in non-async context only,

@@ -20,16 +18,16 @@ pub struct StreamReadable<S> {
     stream: S,
     handle: Handle,
     bytes: Bytes,
-    cancellation_future: Pin<Box<dyn Future<Output = Result<(), io::Error>> + Send>>,
+    cancellation_future: CancellationFuture,
 }

 impl<S> StreamReadable<S> {
-    pub(super) async fn new(stream: S) -> Self {
+    pub(super) async fn new(stream: S, cancellation_future: CancellationFuture) -> Self {
         Self {
             stream,
             handle: Handle::current(),
             bytes: Bytes::new(),
-            cancellation_future: Box::pin(wait_on_cancellation_signal()),
+            cancellation_future,
         }
     }
 }

@@ -37,7 +35,7 @@ impl<S> StreamReadable<S> {
 impl<S, E> StreamReadable<S>
 where
     S: Stream<Item = Result<Bytes, E>> + Unpin,
-    BinstallError: From<E>,
+    DownloadError: From<E>,
 {
     /// Copies from `self` to `writer`.
     ///

@@ -69,7 +67,7 @@ where
 impl<S, E> Read for StreamReadable<S>
 where
     S: Stream<Item = Result<Bytes, E>> + Unpin,
-    BinstallError: From<E>,
+    DownloadError: From<E>,
 {
     fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
         if buf.is_empty() {

@@ -96,14 +94,14 @@ where
 async fn next_stream<S, E>(stream: &mut S) -> io::Result<Option<Bytes>>
 where
     S: Stream<Item = Result<Bytes, E>> + Unpin,
-    BinstallError: From<E>,
+    DownloadError: From<E>,
 {
     loop {
         let option = stream
             .next()
             .await
             .transpose()
-            .map_err(BinstallError::from)?;
+            .map_err(DownloadError::from)?;

         match option {
             Some(bytes) if bytes.is_empty() => continue,

@@ -115,18 +113,22 @@ where
 impl<S, E> BufRead for StreamReadable<S>
 where
     S: Stream<Item = Result<Bytes, E>> + Unpin,
-    BinstallError: From<E>,
+    DownloadError: From<E>,
 {
     fn fill_buf(&mut self) -> io::Result<&[u8]> {
         let bytes = &mut self.bytes;

         if !bytes.has_remaining() {
             let option = self.handle.block_on(async {
-                tokio::select! {
-                    res = next_stream(&mut self.stream) => res,
-                    res = self.cancellation_future.as_mut() => {
-                        Err(res.err().unwrap_or_else(|| io::Error::from(BinstallError::UserAbort)))
-                    },
+                if let Some(cancellation_future) = self.cancellation_future.as_mut() {
+                    tokio::select! {
+                        res = next_stream(&mut self.stream) => res,
+                        res = cancellation_future => {
+                            Err(res.err().unwrap_or_else(|| io::Error::from(DownloadError::UserAbort)))
+                        },
+                    }
+                } else {
+                    next_stream(&mut self.stream).await
                 }
             })?;

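Note (not part of the commit): `StreamReadable` now receives the cancellation future from its caller instead of hard-wiring `wait_on_cancellation_signal`, and treats `None` as never cancelling. A sketch of how a caller outside binstalk could build a `CancellationFuture`; it assumes tokio's `signal` feature is enabled.

use binstalk_downloader::download::CancellationFuture;

// `tokio::signal::ctrl_c()` already has output `io::Result<()>`, which is
// exactly what `CancellationFuture` expects, so boxing and pinning it is enough.
fn ctrl_c_cancellation() -> CancellationFuture {
    Some(Box::pin(tokio::signal::ctrl_c()))
}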
crates/binstalk-downloader/src/lib.rs (new file), 2 lines

@@ -0,0 +1,2 @@
+pub mod download;
+pub mod remote;
@@ -6,24 +6,41 @@ use std::{
 };

 use bytes::Bytes;
-use futures_util::stream::Stream;
+use futures_util::stream::{Stream, StreamExt};
 use httpdate::parse_http_date;
 use log::{debug, info};
 use reqwest::{
     header::{HeaderMap, RETRY_AFTER},
     Request, Response, StatusCode,
 };
+use thiserror::Error as ThisError;
 use tokio::{sync::Mutex, time::sleep};
 use tower::{limit::rate::RateLimit, Service, ServiceBuilder, ServiceExt};

-use crate::errors::BinstallError;
+pub use reqwest::{tls, Error as ReqwestError, Method};

-pub use reqwest::{tls, Method};
 pub use url::Url;

 const MAX_RETRY_DURATION: Duration = Duration::from_secs(120);
 const MAX_RETRY_COUNT: u8 = 3;

+#[derive(Debug, ThisError)]
+pub enum Error {
+    #[error(transparent)]
+    Reqwest(#[from] reqwest::Error),
+
+    #[error(transparent)]
+    Http(HttpError),
+}
+
+#[derive(Debug, ThisError)]
+#[error("could not {method} {url}: {err}")]
+pub struct HttpError {
+    method: reqwest::Method,
+    url: url::Url,
+    #[source]
+    err: reqwest::Error,
+}
+
 #[derive(Clone, Debug)]
 pub struct Client {
     client: reqwest::Client,

@@ -32,11 +49,13 @@ pub struct Client {

 impl Client {
     /// * `per` - must not be 0.
+    /// * `num_request` - maximum number of requests to be processed for
+    ///   each `per` duration.
     pub fn new(
         min_tls: Option<tls::Version>,
         per: Duration,
         num_request: NonZeroU64,
-    ) -> Result<Self, BinstallError> {
+    ) -> Result<Self, Error> {
         const USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"));

         let mut builder = reqwest::ClientBuilder::new()

@@ -61,6 +80,7 @@ impl Client {
         })
     }

+    /// Return inner reqwest client.
     pub fn get_inner(&self) -> &reqwest::Client {
         &self.client
     }

@@ -69,7 +89,7 @@ impl Client {
         &self,
         method: &Method,
         url: &Url,
-    ) -> Result<Response, reqwest::Error> {
+    ) -> Result<Response, ReqwestError> {
         let mut count = 0;

         loop {

@@ -108,7 +128,7 @@ impl Client {
         method: Method,
         url: Url,
         error_for_status: bool,
-    ) -> Result<Response, BinstallError> {
+    ) -> Result<Response, Error> {
         self.send_request_inner(&method, &url)
             .await
             .and_then(|response| {

@@ -118,10 +138,11 @@ impl Client {
                     Ok(response)
                 }
             })
-            .map_err(|err| BinstallError::Http { method, url, err })
+            .map_err(|err| Error::Http(HttpError { method, url, err }))
     }

-    pub async fn remote_exists(&self, url: Url, method: Method) -> Result<bool, BinstallError> {
+    /// Check if remote exists using `method`.
+    pub async fn remote_exists(&self, url: Url, method: Method) -> Result<bool, Error> {
         Ok(self
             .send_request(method, url, false)
             .await?

@@ -129,7 +150,8 @@ impl Client {
             .is_success())
     }

-    pub async fn get_redirected_final_url(&self, url: Url) -> Result<Url, BinstallError> {
+    /// Attempt to get final redirected url.
+    pub async fn get_redirected_final_url(&self, url: Url) -> Result<Url, Error> {
         Ok(self
             .send_request(Method::HEAD, url, true)
             .await?

@@ -137,15 +159,17 @@ impl Client {
             .clone())
     }

-    pub(crate) async fn create_request(
+    /// Create `GET` request to `url` and return a stream of the response data.
+    /// On status code other than 200, it will return an error.
+    pub async fn get_stream(
         &self,
         url: Url,
-    ) -> Result<impl Stream<Item = reqwest::Result<Bytes>>, BinstallError> {
+    ) -> Result<impl Stream<Item = Result<Bytes, Error>>, Error> {
         debug!("Downloading from: '{url}'");

         self.send_request(Method::GET, url, true)
             .await
-            .map(Response::bytes_stream)
+            .map(|response| response.bytes_stream().map(|res| res.map_err(Error::from)))
     }
 }
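Note (not part of the commit): the extracted `remote::Client` wraps reqwest with retry and rate limiting, and after this commit its fallible methods return the crate-local `Error` instead of `BinstallError`. A sketch exercising the methods whose signatures changed above; the rate-limit values are placeholders.

use std::{num::NonZeroU64, time::Duration};

use binstalk_downloader::remote::{Client, Error, Method, Url};
use futures_util::StreamExt;

async fn probe(url: Url) -> Result<u64, Error> {
    // Placeholder rate limit: at most 5 requests per second, default TLS version.
    let client = Client::new(None, Duration::from_secs(1), NonZeroU64::new(5).unwrap())?;

    if !client.remote_exists(url.clone(), Method::HEAD).await? {
        return Ok(0);
    }

    let final_url = client.get_redirected_final_url(url).await?;

    // `get_stream` now yields `Result<Bytes, Error>` chunks.
    let mut stream = Box::pin(client.get_stream(final_url).await?);
    let mut total = 0;
    while let Some(chunk) = stream.next().await {
        total += chunk?.len() as u64;
    }

    Ok(total)
}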
@@ -11,79 +11,43 @@ license = "GPL-3.0"

 [dependencies]
 async-trait = "0.1.58"
+binstalk-downloader = { version = "0.1.0", path = "../binstalk-downloader" }
 binstalk-manifests = { version = "0.1.0", path = "../binstalk-manifests" }
-bytes = "1.2.1"
-bzip2 = "0.4.3"
 cargo_toml = "0.13.0"
 compact_str = { version = "0.6.0", features = ["serde"] }
 crates_io_api = { version = "0.8.1", default-features = false }
 detect-targets = { version = "0.1.2", path = "../detect-targets" }
-digest = "0.10.5"
-flate2 = { version = "1.0.24", default-features = false }
 futures-util = { version = "0.3.25", default-features = false, features = ["std"] }
-generic-array = "0.14.6"
 home = "0.5.4"
-httpdate = "1.0.2"
 itertools = "0.10.5"
 jobslot = { version = "0.2.6", features = ["tokio"] }
 log = { version = "0.4.17", features = ["std"] }
 miette = "5.4.1"
 normalize-path = { version = "0.2.0", path = "../normalize-path" }
 once_cell = "1.16.0"
-reqwest = { version = "0.11.12", features = ["stream", "gzip", "brotli", "deflate"], default-features = false }
-scopeguard = "1.1.0"
 semver = { version = "1.0.14", features = ["serde"] }
 serde = { version = "1.0.147", features = ["derive"] }
 strum = "0.24.1"
-# Use a fork here since we need PAX support, but the upstream
-# does not hav the PR merged yet.
-#
-#tar = "0.4.38"
-tar = { package = "binstall-tar", version = "0.4.39" }
 tempfile = "3.3.0"
 thiserror = "1.0.37"
 tinytemplate = "1.2.1"
-# parking_lot - for OnceCell::const_new
-tokio = { version = "1.21.2", features = ["macros", "rt", "process", "sync", "signal", "time", "parking_lot"], default-features = false }
-tower = { version = "0.4.13", features = ["limit", "util"] }
-trust-dns-resolver = { version = "0.21.2", optional = true, default-features = false, features = ["dnssec-ring"] }
+# parking_lot for `tokio::sync::OnceCell::const_new`
+tokio = { version = "1.21.2", features = ["rt", "process", "sync", "signal", "parking_lot"], default-features = false }
 url = { version = "2.3.1", features = ["serde"] }
 xz2 = "0.1.7"
-
-# Disable all features of zip except for features of compression algorithms:
-# Disabled features include:
-# - aes-crypto: Enables decryption of files which were encrypted with AES, absolutely zero use for
-#   this crate.
-# - time: Enables features using the [time](https://github.com/time-rs/time) crate,
-#   which is not used by this crate.
-zip = { version = "0.6.3", default-features = false, features = ["deflate", "bzip2", "zstd"] }
-
-# zstd is also depended by zip.
-# Since zip 0.6.3 depends on zstd 0.11, we also have to use 0.11 here,
-# otherwise there will be a link conflict.
-zstd = { version = "0.11.2", default-features = false }

 [dev-dependencies]
 env_logger = "0.9.3"

 [features]
 default = ["static", "rustls"]

-static = ["bzip2/static", "xz2/static"]
-pkg-config = ["zstd/pkg-config"]
+static = ["binstalk-downloader/static"]
+pkg-config = ["binstalk-downloader/pkg-config"]

-zlib-ng = ["flate2/zlib-ng"]
+zlib-ng = ["binstalk-downloader/zlib-ng"]

-rustls = [
-    "crates_io_api/rustls",
-    "reqwest/rustls-tls",
+rustls = ["crates_io_api/rustls", "binstalk-downloader/rustls"]
+native-tls = ["binstalk-downloader/native-tls"]

-    # Enable the following features only if trust-dns-resolver is enabled.
-    "trust-dns-resolver?/dns-over-rustls",
-    # trust-dns-resolver currently supports https with rustls
-    "trust-dns-resolver?/dns-over-https-rustls",
-]
-native-tls = ["reqwest/native-tls", "trust-dns-resolver?/dns-over-native-tls"]
-
-# Enable trust-dns-resolver so that features on it will also be enabled.
-trust-dns = ["trust-dns-resolver", "reqwest/trust-dns"]
+trust-dns = ["binstalk-downloader/trust-dns"]
|
||||||
helpers::{
|
helpers::{
|
||||||
download::Download,
|
download::Download,
|
||||||
remote::{Client, Url},
|
remote::{Client, Url},
|
||||||
|
signal::wait_on_cancellation_signal,
|
||||||
},
|
},
|
||||||
manifests::cargo_toml_binstall::{Meta, TarBasedFmt},
|
manifests::cargo_toml_binstall::{Meta, TarBasedFmt},
|
||||||
};
|
};
|
||||||
|
@ -53,7 +54,11 @@ pub async fn fetch_crate_cratesio(
|
||||||
|
|
||||||
let manifest_dir_path: PathBuf = format!("{name}-{version_name}").into();
|
let manifest_dir_path: PathBuf = format!("{name}-{version_name}").into();
|
||||||
|
|
||||||
Download::new(client, Url::parse(&crate_url)?)
|
Ok(Download::new(client, Url::parse(&crate_url)?)
|
||||||
.and_visit_tar(TarBasedFmt::Tgz, ManifestVisitor::new(manifest_dir_path))
|
.and_visit_tar(
|
||||||
.await
|
TarBasedFmt::Tgz,
|
||||||
|
ManifestVisitor::new(manifest_dir_path),
|
||||||
|
Some(Box::pin(wait_on_cancellation_signal())),
|
||||||
|
)
|
||||||
|
.await?)
|
||||||
}
|
}
|
||||||
|
|
|
@@ -1,16 +1,16 @@
 use std::{
-    io::Read,
+    io::{self, Read},
     path::{Path, PathBuf},
 };

 use cargo_toml::Manifest;
 use log::debug;
 use normalize_path::NormalizePath;
-use tar::Entries;

 use super::vfs::Vfs;
 use crate::{
-    errors::BinstallError, helpers::download::TarEntriesVisitor,
+    errors::BinstallError,
+    helpers::download::{DownloadError, Entries, TarEntriesVisitor},
     manifests::cargo_toml_binstall::Meta,
 };

@@ -37,7 +37,7 @@ impl ManifestVisitor {
 impl TarEntriesVisitor for ManifestVisitor {
     type Target = Manifest<Meta>;

-    fn visit<R: Read>(&mut self, entries: Entries<'_, R>) -> Result<(), BinstallError> {
+    fn visit<R: Read>(&mut self, entries: Entries<'_, R>) -> Result<(), DownloadError> {
         for res in entries {
             let mut entry = res?;
             let path = entry.path()?;

@@ -71,16 +71,20 @@ impl TarEntriesVisitor for ManifestVisitor {
     }

     /// Load binstall metadata using the extracted information stored in memory.
-    fn finish(self) -> Result<Self::Target, BinstallError> {
-        debug!("Loading manifest directly from extracted file");
-
-        // Load and parse manifest
-        let mut manifest = Manifest::from_slice_with_metadata(&self.cargo_toml_content)?;
-
-        // Checks vfs for binary output names
-        manifest.complete_from_abstract_filesystem(&self.vfs)?;
-
-        // Return metadata
-        Ok(manifest)
+    fn finish(self) -> Result<Self::Target, DownloadError> {
+        Ok(load_manifest(&self.cargo_toml_content, &self.vfs).map_err(io::Error::from)?)
     }
 }
+
+fn load_manifest(slice: &[u8], vfs: &Vfs) -> Result<Manifest<Meta>, BinstallError> {
+    debug!("Loading manifest directly from extracted file");
+
+    // Load and parse manifest
+    let mut manifest = Manifest::from_slice_with_metadata(slice)?;
+
+    // Checks vfs for binary output names
+    manifest.complete_from_abstract_filesystem(vfs)?;
+
+    // Return metadata
+    Ok(manifest)
+}
@@ -4,6 +4,10 @@ use std::{
     process::{ExitCode, ExitStatus, Termination},
 };

+use binstalk_downloader::{
+    download::{DownloadError, ZipError},
+    remote::{Error as RemoteError, HttpError, ReqwestError},
+};
 use compact_str::CompactString;
 use miette::{Diagnostic, Report};
 use thiserror::Error;

@@ -47,7 +51,7 @@ pub enum BinstallError {
     /// - Exit: 66
     #[error(transparent)]
     #[diagnostic(severity(error), code(binstall::unzip))]
-    Unzip(#[from] zip::result::ZipError),
+    Unzip(#[from] ZipError),

     /// A rendering error in a template.
     ///

@@ -65,7 +69,7 @@ pub enum BinstallError {
     /// - Exit: 68
     #[error(transparent)]
     #[diagnostic(severity(error), code(binstall::reqwest))]
-    Reqwest(#[from] reqwest::Error),
+    Reqwest(#[from] ReqwestError),

     /// An HTTP request failed.
     ///

@@ -74,14 +78,9 @@ pub enum BinstallError {
     ///
     /// - Code: `binstall::http`
     /// - Exit: 69
-    #[error("could not {method} {url}")]
+    #[error(transparent)]
     #[diagnostic(severity(error), code(binstall::http))]
-    Http {
-        method: reqwest::Method,
-        url: url::Url,
-        #[source]
-        err: reqwest::Error,
-    },
+    Http(#[from] HttpError),

     /// A subprocess failed.
     ///

@@ -418,3 +417,27 @@ impl From<BinstallError> for io::Error {
         }
     }
 }
+
+impl From<RemoteError> for BinstallError {
+    fn from(e: RemoteError) -> Self {
+        use RemoteError::*;
+
+        match e {
+            Reqwest(reqwest_error) => reqwest_error.into(),
+            Http(http_error) => http_error.into(),
+        }
+    }
+}
+
+impl From<DownloadError> for BinstallError {
+    fn from(e: DownloadError) -> Self {
+        use DownloadError::*;
+
+        match e {
+            Unzip(zip_error) => zip_error.into(),
+            Remote(remote_error) => remote_error.into(),
+            Io(io_error) => io_error.into(),
+            UserAbort => BinstallError::UserAbort,
+        }
+    }
+}
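Note (not part of the commit): the two `From` impls added above are what keep `?` working across the crate boundary; binstalk code can bubble a `DownloadError` or `RemoteError` straight into `BinstallError`. A hedged sketch mirroring the fetcher changes below (it assumes the `errors` and `helpers` modules are public, as the paths in this diff suggest):

use std::path::Path;

use binstalk::{
    errors::BinstallError,
    helpers::download::{Download, PkgFmt},
};

// Hypothetical helper: the `?` on `and_extract` converts DownloadError
// into BinstallError through the From impl introduced in this commit.
async fn fetch_to(download: Download, dst: &Path) -> Result<(), BinstallError> {
    Ok(download.and_extract(PkgFmt::Tgz, dst, None).await?)
}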
@@ -14,6 +14,7 @@ use crate::{
     helpers::{
         download::Download,
         remote::{Client, Method},
+        signal::wait_on_cancellation_signal,
         tasks::AutoAbortJoinHandle,
     },
     manifests::cargo_toml_binstall::{PkgFmt, PkgMeta},

@@ -146,9 +147,9 @@ impl super::Fetcher for GhCrateMeta {
     async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError> {
         let (url, pkg_fmt) = self.resolution.get().unwrap(); // find() is called first
         debug!("Downloading package from: '{url}' dst:{dst:?} fmt:{pkg_fmt:?}");
-        Download::new(self.client.clone(), url.clone())
-            .and_extract(*pkg_fmt, dst)
-            .await
+        Ok(Download::new(self.client.clone(), url.clone())
+            .and_extract(*pkg_fmt, dst, Some(Box::pin(wait_on_cancellation_signal())))
+            .await?)
     }

     fn pkg_fmt(&self) -> PkgFmt {
@@ -10,6 +10,7 @@ use crate::{
     helpers::{
         download::Download,
         remote::{Client, Method},
+        signal::wait_on_cancellation_signal,
     },
     manifests::cargo_toml_binstall::{PkgFmt, PkgMeta},
 };

@@ -44,17 +45,22 @@ impl super::Fetcher for QuickInstall {
         let url = self.package_url();
         self.report();
         debug!("Checking for package at: '{url}'");
-        self.client
+        Ok(self
+            .client
             .remote_exists(Url::parse(&url)?, Method::HEAD)
-            .await
+            .await?)
     }

     async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError> {
         let url = self.package_url();
         debug!("Downloading package from: '{url}'");
-        Download::new(self.client.clone(), Url::parse(&url)?)
-            .and_extract(self.pkg_fmt(), dst)
-            .await
+        Ok(Download::new(self.client.clone(), Url::parse(&url)?)
+            .and_extract(
+                self.pkg_fmt(),
+                dst,
+                Some(Box::pin(wait_on_cancellation_signal())),
+            )
+            .await?)
     }

     fn pkg_fmt(&self) -> PkgFmt {
@@ -1,5 +1,5 @@
-pub mod download;
 pub mod jobserver_client;
-pub mod remote;
 pub mod signal;
 pub mod tasks;
+
+pub use binstalk_downloader::{download, remote};
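Note (not part of the commit): because of this re-export, existing imports through `binstalk::helpers` keep compiling and now resolve to the new crate. A small sketch, assuming `helpers` stays a public module:

use binstalk::helpers::{
    download::Download,
    remote::{Client, Url},
};

// Same paths as before the refactor; the items behind them now live in
// the binstalk-downloader crate.
fn build_download(client: Client, url: Url) -> Download {
    Download::new(client, url)
}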
@@ -1,112 +0,0 @@
-use std::{fmt::Debug, marker::PhantomData, path::Path};
-
-use digest::{Digest, FixedOutput, HashMarker, Output, OutputSizeUser, Update};
-use log::debug;
-
-use crate::{
-    errors::BinstallError,
-    helpers::remote::{Client, Url},
-    manifests::cargo_toml_binstall::{PkgFmt, PkgFmtDecomposed, TarBasedFmt},
-};
-
-pub use async_extracter::TarEntriesVisitor;
-use async_extracter::*;
-
-mod async_extracter;
-mod extracter;
-mod stream_readable;
-
-#[derive(Debug)]
-pub struct Download<D: Digest = NoDigest> {
-    client: Client,
-    url: Url,
-    _digest: PhantomData<D>,
-    _checksum: Vec<u8>,
-}
-
-impl Download {
-    pub fn new(client: Client, url: Url) -> Self {
-        Self {
-            client,
-            url,
-            _digest: PhantomData::default(),
-            _checksum: Vec::new(),
-        }
-    }
-
-    /// Download a file from the provided URL and extract part of it to
-    /// the provided path.
-    ///
-    /// * `filter` - If Some, then it will pass the path of the file to it
-    ///   and only extract ones which filter returns `true`.
-    ///
-    /// This does not support verifying a checksum due to the partial extraction
-    /// and will ignore one if specified.
-    pub async fn and_visit_tar<V: TarEntriesVisitor + Debug + Send + 'static>(
-        self,
-        fmt: TarBasedFmt,
-        visitor: V,
-    ) -> Result<V::Target, BinstallError> {
-        let stream = self.client.create_request(self.url).await?;
-
-        debug!("Downloading and extracting then in-memory processing");
-
-        let ret = extract_tar_based_stream_and_visit(stream, fmt, visitor).await?;
-
-        debug!("Download, extraction and in-memory procession OK");
-
-        Ok(ret)
-    }
-
-    /// Download a file from the provided URL and extract it to the provided path.
-    pub async fn and_extract(
-        self,
-        fmt: PkgFmt,
-        path: impl AsRef<Path>,
-    ) -> Result<(), BinstallError> {
-        let stream = self.client.create_request(self.url).await?;
-
-        let path = path.as_ref();
-        debug!("Downloading and extracting to: '{}'", path.display());
-
-        match fmt.decompose() {
-            PkgFmtDecomposed::Tar(fmt) => extract_tar_based_stream(stream, path, fmt).await?,
-            PkgFmtDecomposed::Bin => extract_bin(stream, path).await?,
-            PkgFmtDecomposed::Zip => extract_zip(stream, path).await?,
-        }
-
-        debug!("Download OK, extracted to: '{}'", path.display());
-
-        Ok(())
-    }
-}
-
-impl<D: Digest> Download<D> {
-    pub fn new_with_checksum(client: Client, url: Url, checksum: Vec<u8>) -> Self {
-        Self {
-            client,
-            url,
-            _digest: PhantomData::default(),
-            _checksum: checksum,
-        }
-    }
-
-    // TODO: implement checking the sum, may involve bringing (parts of) and_extract() back in here
-}
-
-#[derive(Clone, Copy, Debug, Default)]
-pub struct NoDigest;
-
-impl FixedOutput for NoDigest {
-    fn finalize_into(self, _out: &mut Output<Self>) {}
-}
-
-impl OutputSizeUser for NoDigest {
-    type OutputSize = generic_array::typenum::U0;
-}
-
-impl Update for NoDigest {
-    fn update(&mut self, _data: &[u8]) {}
-}
-
-impl HashMarker for NoDigest {}
@@ -1,11 +1,10 @@
-use std::io;
+use std::{future::pending, io};

-use futures_util::future::pending;
-use tokio::{signal, sync::OnceCell};
-
 use super::tasks::AutoAbortJoinHandle;
 use crate::errors::BinstallError;

+use tokio::{signal, sync::OnceCell};
+
 /// This function will poll the handle while listening for ctrl_c,
 /// `SIGINT`, `SIGHUP`, `SIGTERM` and `SIGQUIT`.
 ///

@@ -18,19 +17,24 @@ use crate::errors::BinstallError;
 pub async fn cancel_on_user_sig_term<T>(
     handle: AutoAbortJoinHandle<T>,
 ) -> Result<T, BinstallError> {
-    #[cfg(unix)]
-    unix::ignore_signals_on_unix()?;
+    ignore_signals()?;

     tokio::select! {
         res = handle => res,
         res = wait_on_cancellation_signal() => {
-            res
-                .map_err(BinstallError::Io)
+            res.map_err(BinstallError::Io)
                 .and(Err(BinstallError::UserAbort))
         }
     }
 }

+pub fn ignore_signals() -> io::Result<()> {
+    #[cfg(unix)]
+    unix::ignore_signals_on_unix()?;
+
+    Ok(())
+}
+
 /// If call to it returns `Ok(())`, then all calls to this function after
 /// that also returns `Ok(())`.
 pub async fn wait_on_cancellation_signal() -> Result<(), io::Error> {

@@ -86,7 +90,7 @@ mod unix {
         }
     }

-    pub fn ignore_signals_on_unix() -> Result<(), BinstallError> {
+    pub fn ignore_signals_on_unix() -> Result<(), io::Error> {
         drop(signal(SignalKind::user_defined1())?);
         drop(signal(SignalKind::user_defined2())?);
