Mirror of https://github.com/cargo-bins/cargo-binstall.git, synced 2025-04-22 21:48:42 +00:00
Find best download source out of alternatives (format extension) (#236)
Commit adef01f3dd (parent 19266a4fb6)
7 changed files with 93 additions and 52 deletions
@@ -10,8 +10,8 @@ edition = "2021"
 license = "GPL-3.0"
 
 [package.metadata.binstall]
-pkg-url = "{ repo }/releases/download/v{ version }/{ name }-{ target }.{ format }"
-bin-dir = "{ bin }{ format }"
+pkg-url = "{ repo }/releases/download/v{ version }/{ name }-{ target }.{ archive-format }"
+bin-dir = "{ bin }{ binary-ext }"
 
 [package.metadata.binstall.overrides.x86_64-pc-windows-msvc]
 pkg-fmt = "zip"
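In the new template, { archive-format } is filled with whichever archive extension the fetcher settles on, and { binary-ext } with the platform binary suffix (.exe on Windows, empty otherwise), while { format } survives only as a soft-deprecated alias. As a rough illustration of how such a pkg-url expands, using a plain string substitution rather than the crate's own template renderer, and with example values that are assumptions, not taken from this diff:

// Illustration only: naive placeholder substitution, not binstall's template engine.
fn render_pkg_url() -> String {
    let template =
        "{ repo }/releases/download/v{ version }/{ name }-{ target }.{ archive-format }";
    template
        .replace("{ repo }", "https://github.com/cargo-bins/cargo-binstall") // assumed repo
        .replace("{ version }", "0.9.0") // assumed version
        .replace("{ name }", "cargo-binstall")
        .replace("{ target }", "x86_64-unknown-linux-gnu")
        .replace("{ archive-format }", "tgz") // one of the Tgz format's extensions
}

fn main() {
    // Prints one candidate download URL for a single (target, extension) pair.
    println!("{}", render_pkg_url());
}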
@@ -21,8 +21,15 @@ pub trait Fetcher: Send + Sync {
     /// Fetch a package and extract
     async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError>;
 
-    /// Check if a package is available for download
-    async fn check(&self) -> Result<bool, BinstallError>;
+    /// Find the package, if it is available for download
+    ///
+    /// This may look for multiple remote targets, but must write (using some form of interior
+    /// mutability) the best one to the implementing struct in some way so `fetch_and_extract` can
+    /// proceed without additional work.
+    ///
+    /// Must return `true` if a package is available, `false` if none is, and reserve errors to
+    /// fatal conditions only.
+    async fn find(&self) -> Result<bool, BinstallError>;
 
     /// Return the package format
     fn pkg_fmt(&self) -> PkgFmt;
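A minimal sketch of what this contract asks of an implementor: probe one or more candidates, stash the winner through interior mutability, and report availability. The fetcher type, URL, and error type below are placeholders, not the crate's own; its real implementations follow later in this commit.

// Sketch of the `find` contract with a hypothetical fetcher; error handling is
// simplified to url::ParseError instead of BinstallError.
use once_cell::sync::OnceCell;
use url::Url;

struct SomeFetcher {
    // Interior mutability: `find` writes the winning URL, `fetch_and_extract` reads it.
    url: OnceCell<Url>,
}

impl SomeFetcher {
    async fn find(&self) -> Result<bool, url::ParseError> {
        // In a real fetcher this would be a list of candidates checked remotely.
        let candidate = Url::parse("https://example.com/pkg.tgz")?;
        let exists = true; // stand-in for a remote HEAD check

        if exists {
            // Record the best URL so `fetch_and_extract` needs no extra work.
            let _ = self.url.set(candidate);
            return Ok(true);
        }
        Ok(false) // "not found" is not an error; errors are reserved for fatal conditions
    }
}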
@@ -56,7 +63,7 @@ impl MultiFetcher {
     pub fn add(&mut self, fetcher: Arc<dyn Fetcher>) {
         self.0.push((
             fetcher.clone(),
-            AutoAbortJoinHandle::new(tokio::spawn(async move { fetcher.check().await })),
+            AutoAbortJoinHandle::new(tokio::spawn(async move { fetcher.find().await })),
         ));
     }
 
@@ -2,25 +2,21 @@ use std::path::Path;
 use std::sync::Arc;
 
 use log::{debug, info, warn};
+use once_cell::sync::OnceCell;
 use reqwest::Client;
 use reqwest::Method;
 use serde::Serialize;
 use url::Url;
 
 use super::Data;
-use crate::{download_and_extract, remote_exists, BinstallError, PkgFmt, Template};
+use crate::{
+    download_and_extract, remote_exists, AutoAbortJoinHandle, BinstallError, PkgFmt, Template,
+};
 
 pub struct GhCrateMeta {
     client: Client,
     data: Data,
-}
-
-impl GhCrateMeta {
-    fn url(&self) -> Result<Url, BinstallError> {
-        let ctx = Context::from_data(&self.data);
-        debug!("Using context: {:?}", ctx);
-        ctx.render_url(&self.data.meta.pkg_url)
-    }
+    url: OnceCell<Url>,
 }
 
 #[async_trait::async_trait]
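The new url: OnceCell<Url> field is the interior-mutability slot the trait documentation calls for: find writes the winning URL once and later methods read it, which is why the unwrap() calls below carry the "find() is called first" comment. A small standalone illustration of the once_cell behaviour being relied on (values arbitrary):

use once_cell::sync::OnceCell;

fn main() {
    let cell: OnceCell<u32> = OnceCell::new();
    assert!(cell.get().is_none());     // empty until someone calls set()
    cell.set(42).unwrap();             // the first set succeeds
    assert_eq!(cell.get(), Some(&42)); // later reads see the stored value
    assert!(cell.set(7).is_err());     // a second set is rejected, the value stays 42
}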
@@ -29,24 +25,52 @@ impl super::Fetcher for GhCrateMeta {
         Arc::new(Self {
             client: client.clone(),
             data: data.clone(),
+            url: OnceCell::new(),
         })
     }
 
-    async fn check(&self) -> Result<bool, BinstallError> {
-        let url = self.url()?;
+    async fn find(&self) -> Result<bool, BinstallError> {
+        // build up list of potential URLs
+        let urls = self.data.meta.pkg_fmt.extensions().iter().map(|ext| {
+            let ctx = Context::from_data(&self.data, ext);
+            ctx.render_url(&self.data.meta.pkg_url)
+        });
+
+        // go check all potential URLs at once
+        let checks = urls
+            .map(|url| {
+                let client = self.client.clone();
+                AutoAbortJoinHandle::new(tokio::spawn(async move {
+                    let url = url?;
+                    info!("Checking for package at: '{url}'");
+                    remote_exists(client, url.clone(), Method::HEAD)
+                        .await
+                        .map(|exists| (url.clone(), exists))
+                }))
+            })
+            .collect::<Vec<_>>();
+
+        // get the first URL that exists
+        for check in checks {
+            let (url, exists) = check.await??;
+            if exists {
                 if url.scheme() != "https" {
                     warn!(
                         "URL is not HTTPS! This may become a hard error in the future, tell the upstream!"
                     );
                 }
 
-        info!("Checking for package at: '{url}'");
-        remote_exists(&self.client, url, Method::HEAD).await
+                info!("Winning URL is {url}");
+                self.url.set(url).unwrap(); // find() is called first
+                return Ok(true);
+            }
+        }
+
+        Ok(false)
     }
 
     async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError> {
-        let url = self.url()?;
+        let url = self.url.get().unwrap(); // find() is called first
         info!("Downloading package from: '{url}'");
         download_and_extract(&self.client, url, self.pkg_fmt(), dst).await
     }
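Each candidate check is spawned as its own task and wrapped in AutoAbortJoinHandle; judging by the name, dropping such a handle aborts the underlying task, so leftover probes get cancelled once their result is no longer needed. A generic sketch of that abort-on-drop pattern under those assumptions, not the crate's actual type:

// Generic abort-on-drop join handle; illustrates the pattern, not binstall's type.
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::task::{JoinError, JoinHandle};

struct AbortOnDrop<T>(JoinHandle<T>);

impl<T> Drop for AbortOnDrop<T> {
    fn drop(&mut self) {
        self.0.abort(); // cancel the spawned task if the handle is discarded
    }
}

impl<T> Future for AbortOnDrop<T> {
    type Output = Result<T, JoinError>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // JoinHandle is Unpin, so forwarding the poll is straightforward.
        Pin::new(&mut self.get_mut().0).poll(cx)
    }
}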
@@ -56,7 +80,8 @@ impl super::Fetcher for GhCrateMeta {
     }
 
     fn source_name(&self) -> String {
-        self.url()
+        self.url
+            .get()
             .map(|url| {
                 if let Some(domain) = url.domain() {
                     domain.to_string()
@@ -66,7 +91,7 @@ impl super::Fetcher for GhCrateMeta {
                     url.to_string()
                 }
             })
-            .unwrap_or_else(|_| "invalid url template".to_string())
+            .unwrap_or_else(|| "invalid url".to_string())
     }
 
     fn is_third_party(&self) -> bool {
@@ -87,11 +112,11 @@ struct Context<'c> {
     pub version: &'c str,
 
     /// Soft-deprecated alias for archive-format
-    pub format: String,
+    pub format: &'c str,
 
     /// Archive format e.g. tar.gz, zip
     #[serde(rename = "archive-format")]
-    pub archive_format: String,
+    pub archive_format: &'c str,
 
     /// Filename extension on the binary, i.e. .exe on Windows, nothing otherwise
     #[serde(rename = "binary-ext")]
@@ -101,15 +126,14 @@ struct Context<'c> {
 impl<'c> Template for Context<'c> {}
 
 impl<'c> Context<'c> {
-    pub(self) fn from_data(data: &'c Data) -> Self {
-        let pkg_fmt = data.meta.pkg_fmt.to_string();
+    pub(self) fn from_data(data: &'c Data, archive_format: &'c str) -> Self {
         Self {
             name: &data.name,
             repo: data.repo.as_ref().map(|s| &s[..]),
             target: &data.target,
             version: &data.version,
-            format: pkg_fmt.clone(),
-            archive_format: pkg_fmt,
+            format: archive_format,
+            archive_format,
             binary_ext: if data.target.contains("windows") {
                 ".exe"
             } else {
@@ -119,6 +143,7 @@ impl<'c> Context<'c> {
     }
 
     pub(self) fn render_url(&self, template: &str) -> Result<Url, BinstallError> {
+        debug!("Render {template:?} using context: {:?}", self);
         Ok(Url::parse(&self.render(template)?)?)
     }
 }
@@ -144,7 +169,7 @@ mod test {
             meta,
         };
 
-        let ctx = Context::from_data(&data);
+        let ctx = Context::from_data(&data, "tgz");
         assert_eq!(
             ctx.render_url(&data.meta.pkg_url).unwrap(),
             url("https://github.com/ryankurte/cargo-binstall/releases/download/v1.2.3/cargo-binstall-x86_64-unknown-linux-gnu-v1.2.3.tgz")
@@ -163,7 +188,7 @@ mod test {
             meta,
         };
 
-        let ctx = Context::from_data(&data);
+        let ctx = Context::from_data(&data, "tgz");
         ctx.render_url(&data.meta.pkg_url).unwrap();
     }
 
@@ -182,7 +207,7 @@ mod test {
             meta,
         };
 
-        let ctx = Context::from_data(&data);
+        let ctx = Context::from_data(&data, "tgz");
         assert_eq!(
             ctx.render_url(&data.meta.pkg_url).unwrap(),
             url("https://example.com/releases/download/v1.2.3/cargo-binstall-x86_64-unknown-linux-gnu-v1.2.3.tgz")
@@ -206,7 +231,7 @@ mod test {
             meta,
         };
 
-        let ctx = Context::from_data(&data);
+        let ctx = Context::from_data(&data, "tgz");
         assert_eq!(
             ctx.render_url(&data.meta.pkg_url).unwrap(),
             url("https://github.com/rust-iot/rust-radio-sx128x/releases/download/v0.14.1-alpha.5/sx128x-util-x86_64-unknown-linux-gnu-v0.14.1-alpha.5.tgz")
@@ -228,7 +253,7 @@ mod test {
             meta,
         };
 
-        let ctx = Context::from_data(&data);
+        let ctx = Context::from_data(&data, "tgz");
         assert_eq!(
             ctx.render_url(&data.meta.pkg_url).unwrap(),
             url("https://github.com/rust-iot/rust-radio-sx128x/releases/download/v0.14.1-alpha.5/sx128x-util-x86_64-unknown-linux-gnu-v0.14.1-alpha.5.tgz")
@@ -253,7 +278,7 @@ mod test {
             meta,
         };
 
-        let ctx = Context::from_data(&data);
+        let ctx = Context::from_data(&data, "txz");
         assert_eq!(
             ctx.render_url(&data.meta.pkg_url).unwrap(),
             url("https://github.com/watchexec/cargo-watch/releases/download/v9.0.0/cargo-watch-v9.0.0-aarch64-apple-darwin.tar.xz")
@@ -276,7 +301,7 @@ mod test {
             meta,
        };
 
-        let ctx = Context::from_data(&data);
+        let ctx = Context::from_data(&data, "bin");
         assert_eq!(
             ctx.render_url(&data.meta.pkg_url).unwrap(),
             url("https://github.com/watchexec/cargo-watch/releases/download/v9.0.0/cargo-watch-v9.0.0-aarch64-pc-windows-msvc.exe")
@@ -32,17 +32,17 @@ impl super::Fetcher for QuickInstall {
         })
     }
 
-    async fn check(&self) -> Result<bool, BinstallError> {
+    async fn find(&self) -> Result<bool, BinstallError> {
         let url = self.package_url();
         self.report();
         info!("Checking for package at: '{url}'");
-        remote_exists(&self.client, Url::parse(&url)?, Method::HEAD).await
+        remote_exists(self.client.clone(), Url::parse(&url)?, Method::HEAD).await
     }
 
     async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError> {
         let url = self.package_url();
         info!("Downloading package from: '{url}'");
-        download_and_extract(&self.client, Url::parse(&url)?, self.pkg_fmt(), dst).await
+        download_and_extract(&self.client, &Url::parse(&url)?, self.pkg_fmt(), dst).await
     }
 
     fn pkg_fmt(&self) -> PkgFmt {
@@ -1,11 +1,8 @@
 use serde::{Deserialize, Serialize};
-use strum_macros::{Display, EnumString, EnumVariantNames};
+use strum_macros::{Display, EnumString};
 
 /// Binary format enumeration
-#[derive(
-    Debug, Copy, Clone, PartialEq, Serialize, Deserialize, Display, EnumString, EnumVariantNames,
-)]
-#[strum(serialize_all = "snake_case")]
+#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize, EnumString)]
 #[serde(rename_all = "snake_case")]
 pub enum PkgFmt {
     /// Download format is TAR (uncompressed)
@@ -31,8 +28,7 @@ impl Default for PkgFmt {
 }
 
 impl PkgFmt {
-    /// If self is one of the tar based formats,
-    /// return Some.
+    /// If self is one of the tar based formats, return Some.
     pub fn decompose(self) -> PkgFmtDecomposed {
         match self {
             PkgFmt::Tar => PkgFmtDecomposed::Tar(TarBasedFmt::Tar),
|
||||||
PkgFmt::Zip => PkgFmtDecomposed::Zip,
|
PkgFmt::Zip => PkgFmtDecomposed::Zip,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// List of possible file extensions for the format.
|
||||||
|
pub fn extensions(self) -> &'static [&'static str] {
|
||||||
|
match self {
|
||||||
|
PkgFmt::Tar => &["tar"],
|
||||||
|
PkgFmt::Tbz2 => &["tbz2", "tar.bz2"],
|
||||||
|
PkgFmt::Tgz => &["tgz", "tar.gz"],
|
||||||
|
PkgFmt::Txz => &["txz", "tar.xz"],
|
||||||
|
PkgFmt::Tzstd => &["tzstd", "tzst", "tar.zst"],
|
||||||
|
PkgFmt::Bin => &["bin", "exe"],
|
||||||
|
PkgFmt::Zip => &["zip"],
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
#[derive(Debug, Copy, Clone, PartialEq)]
|
||||||
|
|
|
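extensions() is what lets a fetcher fan one PkgFmt out into several candidate file names, and thus several candidate URLs to probe. A small sketch of that expansion, assuming the crate's PkgFmt is in scope; the stem is an assumed example value:

// Sketch: expand one format into the candidate file names find() would probe.
fn candidate_names(stem: &str, fmt: PkgFmt) -> Vec<String> {
    fmt.extensions()
        .iter()
        .map(|ext| format!("{stem}.{ext}"))
        .collect()
}

// candidate_names("cargo-binstall-x86_64-unknown-linux-gnu", PkgFmt::Tgz)
// yields both the ".tgz" and the ".tar.gz" variant, each checked in turn.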
@@ -113,7 +113,7 @@ pub fn create_reqwest_client(
 }
 
 pub async fn remote_exists(
-    client: &Client,
+    client: Client,
     url: Url,
     method: Method,
 ) -> Result<bool, BinstallError> {
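remote_exists now takes Client by value so each spawned check can own its handle; reqwest::Client is reference-counted internally, so cloning is cheap and the clones share one connection pool. A standalone sketch of that pattern (the helper name and URL are illustrative, not the crate's API):

// Sketch: an owned Client clone can move into a spawned task.
use reqwest::{Client, Method};
use url::Url;

async fn head_exists(client: Client, url: Url) -> bool {
    client
        .request(Method::HEAD, url)
        .send()
        .await
        .map(|resp| resp.status().is_success())
        .unwrap_or(false)
}

#[tokio::main]
async fn main() {
    let client = Client::new();
    let url = Url::parse("https://example.com/pkg.tgz").unwrap(); // assumed URL
    // The clone is moved into the task; the original client stays usable here.
    let check = tokio::spawn(head_exists(client.clone(), url));
    println!("exists: {}", check.await.unwrap());
}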
@@ -147,11 +147,11 @@ async fn create_request(
 /// Download a file from the provided URL and extract it to the provided path.
 pub async fn download_and_extract<P: AsRef<Path>>(
     client: &Client,
-    url: Url,
+    url: &Url,
     fmt: PkgFmt,
     path: P,
 ) -> Result<(), BinstallError> {
-    let stream = create_request(client, url).await?;
+    let stream = create_request(client, url.clone()).await?;
 
     let path = path.as_ref();
     debug!("Downloading and extracting to: '{}'", path.display());
@@ -137,7 +137,7 @@ mod test {
 
         assert_eq!(
             &meta.pkg_url,
-            "{ repo }/releases/download/v{ version }/{ name }-{ target }.{ format }"
+            "{ repo }/releases/download/v{ version }/{ name }-{ target }.{ archive-format }"
         );
 
         assert_eq!(