Mirror of https://github.com/cargo-bins/cargo-binstall.git (synced 2025-04-24 22:30:03 +00:00)
Add phantom digest support to download (#315)
Parent: 280bc974eb
Commit: 1cf6076d62
6 changed files with 149 additions and 48 deletions
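In short, the free functions download_and_extract and download_tar_based_and_visit are replaced by a Download builder that carries an optional checksum and a phantom digest type. For orientation, a minimal caller-side sketch (assuming a reqwest::Client and the crate items shown in the hunks below; the URL and function name are placeholders, not a verbatim excerpt):

    use reqwest::Client;
    use url::Url;

    // Sketch only: Download, PkgFmt and BinstallError are the crate items from this diff.
    async fn fetch(client: &Client, dst: &std::path::Path) -> Result<(), BinstallError> {
        let url = Url::parse("https://example.com/package.tgz")?;

        // Before: download_and_extract(client, &url, PkgFmt::Tgz, dst).await
        // After:  build a Download first, then drive it with and_extract()/and_visit_tar().
        Download::new(client, url)
            .and_extract(PkgFmt::Tgz, dst)
            .await
    }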
@@ -18,9 +18,11 @@ clap = { version = "3.2.17", features = ["derive"] }
 compact_str = { version = "0.6.0", features = ["serde"] }
 crates_io_api = { version = "0.8.0", default-features = false }
 detect-targets = { version = "0.1.0", path = "../detect-targets" }
+digest = "0.10.3"
 flate2 = { version = "1.0.24", default-features = false }
 flock = { version = "0.1.0", path = "../flock" }
 futures-util = { version = "0.3.23", default-features = false, features = ["std"] }
+generic-array = "0.14.6"
 home = "0.5.3"
 itertools = "0.10.3"
 jobserver = "0.1.24"
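The two new dependencies back the digest machinery added in the download module below: digest provides the Digest/Update/FixedOutput/HashMarker traits used as bounds, and generic-array provides the typenum-based output sizes (such as U0 for the zero-length NoDigest output) behind Output<D>. A standalone sketch of what the digest 0.10 API looks like, assuming sha2 as an extra dependency for the demonstration only (it is not added by this commit):

    use digest::{Digest, OutputSizeUser};
    use sha2::Sha256; // assumed extra dependency, for illustration only

    fn main() {
        // Output sizes are typenum-backed generic-array lengths: 32 bytes for SHA-256.
        assert_eq!(<Sha256 as OutputSizeUser>::output_size(), 32);

        // Digest::digest hashes a byte slice in one call and returns Output<Sha256>.
        let hash = Sha256::digest(b"cargo-binstall");
        let hex: String = hash.iter().map(|b| format!("{b:02x}")).collect();
        println!("sha256 = {hex}");
    }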
@@ -9,7 +9,7 @@ use url::Url;
 
 use crate::{
     errors::BinstallError,
-    helpers::download::download_tar_based_and_visit,
+    helpers::download::Download,
     manifests::cargo_toml_binstall::{Meta, TarBasedFmt},
 };
 
@@ -52,11 +52,7 @@ pub async fn fetch_crate_cratesio(
     let manifest_dir_path: PathBuf = format!("{name}-{version_name}").into();
 
-    download_tar_based_and_visit(
-        client,
-        Url::parse(&crate_url)?,
-        TarBasedFmt::Tgz,
-        ManifestVisitor::new(manifest_dir_path),
-    )
-    .await
+    Download::new(client, Url::parse(&crate_url)?)
+        .and_visit_tar(TarBasedFmt::Tgz, ManifestVisitor::new(manifest_dir_path))
+        .await
 }
 
@@ -13,7 +13,7 @@ use url::Url;
 use crate::{
     errors::BinstallError,
     helpers::{
-        download::download_and_extract,
+        download::Download,
         remote::{get_redirected_final_url, remote_exists},
         tasks::AutoAbortJoinHandle,
     },
@@ -145,7 +145,9 @@ impl super::Fetcher for GhCrateMeta {
     async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError> {
         let (url, pkg_fmt) = self.resolution.get().unwrap(); // find() is called first
         debug!("Downloading package from: '{url}'");
-        download_and_extract(&self.client, url, *pkg_fmt, dst).await
+        Download::new(&self.client, url.clone())
+            .and_extract(self.pkg_fmt(), dst)
+            .await
     }
 
     fn pkg_fmt(&self) -> PkgFmt {
@@ -9,7 +9,7 @@ use url::Url;
 
 use crate::{
     errors::BinstallError,
-    helpers::{download::download_and_extract, remote::remote_exists},
+    helpers::{download::Download, remote::remote_exists},
     manifests::cargo_toml_binstall::{PkgFmt, PkgMeta},
 };
 
@@ -49,7 +49,9 @@ impl super::Fetcher for QuickInstall {
     async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError> {
        let url = self.package_url();
        debug!("Downloading package from: '{url}'");
-        download_and_extract(&self.client, &Url::parse(&url)?, self.pkg_fmt(), dst).await
+        Download::new(&self.client, Url::parse(&url)?)
+            .and_extract(self.pkg_fmt(), dst)
+            .await
     }
 
     fn pkg_fmt(&self) -> PkgFmt {
@@ -1,5 +1,6 @@
-use std::{fmt::Debug, path::Path};
+use std::{fmt::Debug, marker::PhantomData, path::Path};
 
+use digest::{Digest, FixedOutput, HashMarker, Output, OutputSizeUser, Update};
 use log::debug;
 use reqwest::{Client, Url};
 
@@ -15,47 +16,98 @@ use async_extracter::*;
 mod async_extracter;
 mod extracter;
 mod stream_readable;
 
-/// Download a file from the provided URL and extract it to the provided path.
-pub async fn download_and_extract<P: AsRef<Path>>(
-    client: &Client,
-    url: &Url,
-    fmt: PkgFmt,
-    path: P,
-) -> Result<(), BinstallError> {
-    let stream = create_request(client, url.clone()).await?;
-
-    let path = path.as_ref();
-    debug!("Downloading and extracting to: '{}'", path.display());
-
-    match fmt.decompose() {
-        PkgFmtDecomposed::Tar(fmt) => extract_tar_based_stream(stream, path, fmt).await?,
-        PkgFmtDecomposed::Bin => extract_bin(stream, path).await?,
-        PkgFmtDecomposed::Zip => extract_zip(stream, path).await?,
-    }
-
-    debug!("Download OK, extracted to: '{}'", path.display());
-
-    Ok(())
-}
-
-/// Download a file from the provided URL and extract part of it to
-/// the provided path.
-///
-/// * `filter` - If Some, then it will pass the path of the file to it
-/// and only extract ones which filter returns `true`.
-pub async fn download_tar_based_and_visit<V: TarEntriesVisitor + Debug + Send + 'static>(
-    client: &Client,
-    url: Url,
-    fmt: TarBasedFmt,
-    visitor: V,
-) -> Result<V::Target, BinstallError> {
-    let stream = create_request(client, url).await?;
-
-    debug!("Downloading and extracting then in-memory processing");
-
-    let ret = extract_tar_based_stream_and_visit(stream, fmt, visitor).await?;
-
-    debug!("Download, extraction and in-memory procession OK");
-
-    Ok(ret)
-}
+#[derive(Debug)]
+pub struct Download<'client, D: Digest = NoDigest> {
+    client: &'client Client,
+    url: Url,
+    _digest: PhantomData<D>,
+    _checksum: Vec<u8>,
+}
+
+impl<'client> Download<'client> {
+    pub fn new(client: &'client Client, url: Url) -> Self {
+        Self {
+            client,
+            url,
+            _digest: PhantomData::default(),
+            _checksum: Vec::new(),
+        }
+    }
+
+    /// Download a file from the provided URL and extract part of it to
+    /// the provided path.
+    ///
+    /// * `filter` - If Some, then it will pass the path of the file to it
+    /// and only extract ones which filter returns `true`.
+    ///
+    /// This does not support verifying a checksum due to the partial extraction
+    /// and will ignore one if specified.
+    pub async fn and_visit_tar<V: TarEntriesVisitor + Debug + Send + 'static>(
+        self,
+        fmt: TarBasedFmt,
+        visitor: V,
+    ) -> Result<V::Target, BinstallError> {
+        let stream = create_request(self.client, self.url).await?;
+
+        debug!("Downloading and extracting then in-memory processing");
+
+        let ret = extract_tar_based_stream_and_visit(stream, fmt, visitor).await?;
+
+        debug!("Download, extraction and in-memory procession OK");
+
+        Ok(ret)
+    }
+
+    /// Download a file from the provided URL and extract it to the provided path.
+    pub async fn and_extract(
+        self,
+        fmt: PkgFmt,
+        path: impl AsRef<Path>,
+    ) -> Result<(), BinstallError> {
+        let stream = create_request(self.client, self.url).await?;
+
+        let path = path.as_ref();
+        debug!("Downloading and extracting to: '{}'", path.display());
+
+        match fmt.decompose() {
+            PkgFmtDecomposed::Tar(fmt) => extract_tar_based_stream(stream, path, fmt).await?,
+            PkgFmtDecomposed::Bin => extract_bin(stream, path).await?,
+            PkgFmtDecomposed::Zip => extract_zip(stream, path).await?,
+        }
+
+        debug!("Download OK, extracted to: '{}'", path.display());
+
+        Ok(())
+    }
+}
+
+impl<'client, D: Digest> Download<'client, D> {
+    pub fn new_with_checksum(client: &'client Client, url: Url, checksum: Vec<u8>) -> Self {
+        Self {
+            client,
+            url,
+            _digest: PhantomData::default(),
+            _checksum: checksum,
+        }
+    }
+
+    // TODO: implement checking the sum, may involve bringing (parts of) and_extract() back in here
+}
+
+#[derive(Clone, Copy, Debug, Default)]
+pub struct NoDigest;
+
+impl FixedOutput for NoDigest {
+    fn finalize_into(self, _out: &mut Output<Self>) {}
+}
+
+impl OutputSizeUser for NoDigest {
+    type OutputSize = generic_array::typenum::U0;
+}
+
+impl Update for NoDigest {
+    fn update(&mut self, _data: &[u8]) {}
+}
+
+impl HashMarker for NoDigest {}
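For illustration, a rough caller-side sketch of the two constructors, assuming the items above plus sha2::Sha256 as the digest (a hypothetical choice; sha2 is not used anywhere in this commit). Per the TODO above, the checksum and digest type are only carried along for now; verification is not implemented yet:

    use reqwest::Client;
    use sha2::Sha256; // hypothetical digest choice for the sketch
    use url::Url;

    async fn sketch(client: &Client) -> Result<(), BinstallError> {
        // Without a checksum: D defaults to NoDigest and extraction works today.
        Download::new(client, Url::parse("https://example.com/pkg.tgz")?)
            .and_extract(PkgFmt::Tgz, "/tmp/pkg")
            .await?;

        // With a checksum: the phantom D records which digest to verify with,
        // but as of this commit the bytes are stored and not yet checked.
        let expected_sha256: Vec<u8> = vec![0; 32]; // placeholder value
        let _download: Download<'_, Sha256> =
            Download::new_with_checksum(client, Url::parse("https://example.com/pkg.tgz")?, expected_sha256);

        Ok(())
    }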