Mirror of https://github.com/cargo-bins/cargo-binstall.git (synced 2025-04-24 22:30:03 +00:00)
Use Quickinstall new release schema (#867)
- Refactor: extract new fn `binstalk::helpers::remote::does_url_exist`
- Use the new quickinstall release schema in `binstalk::fetchers::QuickInstall`
- Optimize `fetchers::QuickInstall`: generate the URLs once in `Fetcher::new`, avoiding repeated string allocation plus `Url` parsing. This also makes changing package_url and stats_url easier.
- Optimize `QuickInstall::report`: use HEAD instead of GET

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>
This commit is contained in:
parent 01c8ecb778 · commit 72789874f0

5 changed files with 72 additions and 53 deletions
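The two networking changes in this commit, falling back to a plain HEAD/GET probe when the GitHub API cannot be used and switching `QuickInstall::report` from GET to HEAD, both amount to asking "does this URL exist?" as cheaply as possible. Below is a minimal standalone sketch of that idea in plain `reqwest`; it is illustrative only, `url_exists` is a made-up name, and the real helper is the `does_url_exist` function added in the diff below.

```rust
// Illustrative sketch (not binstalk's remote::Client API): probe a URL with a
// cheap HEAD request and only fall back to GET if the server rejects HEAD.
use reqwest::{Client, Method, StatusCode};
use url::Url;

async fn url_exists(client: &Client, url: Url) -> reqwest::Result<bool> {
    // HEAD transfers no response body, which is why existence checks and the
    // installation report prefer it over GET.
    let resp = client.request(Method::HEAD, url.clone()).send().await?;
    if resp.status() != StatusCode::METHOD_NOT_ALLOWED {
        return Ok(resp.status().is_success());
    }
    // Some servers do not implement HEAD; retry with a plain GET.
    Ok(client.get(url).send().await?.status().is_success())
}

#[tokio::main]
async fn main() -> reqwest::Result<()> {
    let client = Client::new();
    let url = Url::parse("https://example.com/pkg.tar.gz").expect("static URL");
    println!("exists: {}", url_exists(&client, url).await?);
    Ok(())
}
```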
@@ -15,8 +15,8 @@ use crate::{
     helpers::{
         download::{Download, ExtractedFiles},
         futures_resolver::FuturesResolver,
-        gh_api_client::{GhApiClient, GhReleaseArtifact, HasReleaseArtifact},
-        remote::Client,
+        gh_api_client::GhApiClient,
+        remote::{does_url_exist, Client},
         tasks::AutoAbortJoinHandle,
     },
     manifests::cargo_toml_binstall::{PkgFmt, PkgMeta},

@@ -69,28 +69,7 @@ impl GhCrateMeta {
             let gh_api_client = self.gh_api_client.clone();

             async move {
-                debug!("Checking for package at: '{url}'");
-
-                if let Some(artifact) = GhReleaseArtifact::try_extract_from_url(&url) {
-                    debug!("Using GitHub Restful API to check for existence of artifact, which will also cache the API response");
-
-                    match gh_api_client.has_release_artifact(artifact).await? {
-                        HasReleaseArtifact::Yes => return Ok(Some((url, pkg_fmt))),
-                        HasReleaseArtifact::No | HasReleaseArtifact::NoSuchRelease => return Ok(None),
-
-                        HasReleaseArtifact::RateLimit { retry_after } => {
-                            warn!("Your GitHub API token (if any) has reached its rate limit and cannot be used again until {retry_after:?}, so we will fallback to HEAD/GET on the url.");
-                            warn!("If you did not supply the github token, consider supply one since GitHub by default limit the number of requests for unauthoized user to 60 requests per hour per origin IP address.");
-                        }
-                        HasReleaseArtifact::Unauthorized => {
-                            warn!("GitHub API somehow requires a token for the API access, so we will fallback to HEAD/GET on the url.");
-                            warn!("Please consider supplying a token to cargo-binstall to speedup resolution.");
-                        }
-                    }
-                }
-
-                Ok(client
-                    .remote_gettable(url.clone())
+                Ok(does_url_exist(client, gh_api_client, &url)
                     .await?
                     .then_some((url, pkg_fmt)))
             }

@@ -9,7 +9,7 @@ use crate::{
     helpers::{
         download::{Download, ExtractedFiles},
         gh_api_client::GhApiClient,
-        remote::Client,
+        remote::{does_url_exist, Client, Method},
         tasks::AutoAbortJoinHandle,
     },
     manifests::cargo_toml_binstall::{PkgFmt, PkgMeta},

@@ -22,7 +22,12 @@ const STATS_URL: &str = "https://warehouse-clerk-tmp.vercel.app/api/crate";

 pub struct QuickInstall {
     client: Client,
+    gh_api_client: GhApiClient,
+
     package: String,
+    package_url: Url,
+    stats_url: Url,
+
     target_data: Arc<TargetData>,
 }

@@ -30,16 +35,28 @@ pub struct QuickInstall {
 impl super::Fetcher for QuickInstall {
     fn new(
         client: Client,
-        _gh_api_client: GhApiClient,
+        gh_api_client: GhApiClient,
         data: Arc<Data>,
         target_data: Arc<TargetData>,
     ) -> Arc<dyn super::Fetcher> {
         let crate_name = &data.name;
         let version = &data.version;
         let target = &target_data.target;
+
+        let package = format!("{crate_name}-{version}-{target}");
+
         Arc::new(Self {
             client,
-            package: format!("{crate_name}-{version}-{target}"),
+            gh_api_client,
+
+            package_url: Url::parse(&format!(
+                "{BASE_URL}/{crate_name}-{version}/{package}.tar.gz",
+            ))
+            .expect("package_url is pre-generated and should never be invalid url"),
+            stats_url: Url::parse(&format!("{STATS_URL}/{package}.tar.gz",))
+                .expect("stats_url is pre-generated and should never be invalid url"),
+            package,
+
             target_data,
         })
     }

@@ -60,16 +77,19 @@ impl super::Fetcher for QuickInstall {
                 });
             }

-            let url = self.package_url();
-            debug!("Checking for package at: '{url}'");
-            Ok(self.client.remote_gettable(Url::parse(&url)?).await?)
+            does_url_exist(
+                self.client.clone(),
+                self.gh_api_client.clone(),
+                &self.package_url,
+            )
+            .await
         })
     }

     async fn fetch_and_extract(&self, dst: &Path) -> Result<ExtractedFiles, BinstallError> {
-        let url = self.package_url();
+        let url = &self.package_url;
         debug!("Downloading package from: '{url}'");
-        Ok(Download::new(self.client.clone(), Url::parse(&url)?)
+        Ok(Download::new(self.client.clone(), url.clone())
             .and_extract(self.pkg_fmt(), dst)
             .await?)
     }

@@ -103,27 +123,11 @@ impl super::Fetcher for QuickInstall {
 }

 impl QuickInstall {
-    fn package_url(&self) -> String {
-        format!(
-            "{base_url}/{package}/{package}.tar.gz",
-            base_url = BASE_URL,
-            package = self.package
-        )
-    }
-
-    fn stats_url(&self) -> String {
-        format!(
-            "{stats_url}/{package}.tar.gz",
-            stats_url = STATS_URL,
-            package = self.package
-        )
-    }
-
     pub async fn report(&self) -> Result<(), BinstallError> {
-        let url = Url::parse(&self.stats_url())?;
+        let url = self.stats_url.clone();
         debug!("Sending installation report to quickinstall ({url})");

-        self.client.remote_gettable(url).await?;
+        self.client.request(Method::HEAD, url).send(true).await?;

         Ok(())
     }

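The hunks above move URL construction into `Fetcher::new`: the package and stats URLs are formatted and parsed exactly once, stored as `Url` fields, and merely cloned by `find`, `fetch_and_extract`, and `report`. A small self-contained sketch of the same pattern follows; the names and the base URL constant are invented for illustration and are not binstalk's types.

```rust
// Illustrative "parse once in the constructor" pattern: build and validate the
// URL a single time, then clone the cheap `Url` value wherever it is needed,
// instead of re-formatting and re-parsing a String on every call.
use url::Url;

// Hypothetical base URL, standing in for quickinstall's BASE_URL constant.
const BASE_URL: &str = "https://example.com/releases";

struct PackageSource {
    package: String,
    package_url: Url,
}

impl PackageSource {
    fn new(crate_name: &str, version: &str, target: &str) -> Self {
        let package = format!("{crate_name}-{version}-{target}");
        let package_url = Url::parse(&format!(
            "{BASE_URL}/{crate_name}-{version}/{package}.tar.gz"
        ))
        .expect("URL is built from trusted parts and should always parse");
        Self { package, package_url }
    }
}

fn main() {
    let src = PackageSource::new("ripgrep", "13.0.0", "x86_64-unknown-linux-gnu");
    // Cloning a pre-parsed `Url` is cheap compared to re-parsing on every use.
    println!("{} -> {}", src.package, src.package_url);
}
```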
@@ -1,6 +1,7 @@
 pub mod futures_resolver;
 pub mod jobserver_client;
+pub mod remote;
 pub mod signal;
 pub mod tasks;

-pub use binstalk_downloader::{download, gh_api_client, remote};
+pub use binstalk_downloader::{download, gh_api_client};

crates/binstalk/src/helpers/remote.rs (new file, 37 lines)

@@ -0,0 +1,37 @@
+pub use binstalk_downloader::remote::*;
+
+use binstalk_downloader::gh_api_client::{GhApiClient, GhReleaseArtifact, HasReleaseArtifact};
+use tracing::{debug, warn};
+
+use crate::errors::BinstallError;
+
+/// This function returns a future where its size should be at most size of
+/// 2 pointers.
+pub async fn does_url_exist(
+    client: Client,
+    gh_api_client: GhApiClient,
+    url: &Url,
+) -> Result<bool, BinstallError> {
+    debug!("Checking for package at: '{url}'");
+
+    if let Some(artifact) = GhReleaseArtifact::try_extract_from_url(url) {
+        debug!("Using GitHub Restful API to check for existence of artifact, which will also cache the API response");
+
+        // The future returned has the same size as a pointer
+        match gh_api_client.has_release_artifact(artifact).await? {
+            HasReleaseArtifact::Yes => return Ok(true),
+            HasReleaseArtifact::No | HasReleaseArtifact::NoSuchRelease => return Ok(false),
+
+            HasReleaseArtifact::RateLimit { retry_after } => {
+                warn!("Your GitHub API token (if any) has reached its rate limit and cannot be used again until {retry_after:?}, so we will fallback to HEAD/GET on the url.");
+                warn!("If you did not supply a github token, consider doing so: GitHub limits unauthorized users to 60 requests per hour per origin IP address.");
+            }
+            HasReleaseArtifact::Unauthorized => {
+                warn!("GitHub API somehow requires a token for the API access, so we will fallback to HEAD/GET on the url.");
+                warn!("Please consider supplying a token to cargo-binstall to speedup resolution.");
+            }
+        }
+    }
+
+    Ok(Box::pin(client.remote_gettable(url.clone())).await?)
+}
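The doc comment on `does_url_exist` says the returned future should stay at most about two pointers in size, and the final fallback wraps `remote_gettable` in `Box::pin` for that reason. The sketch below shows the general effect with invented names and sizes: awaiting a large future inline embeds it in the caller's future, while boxing it moves the bulk to the heap.

```rust
// Illustrative only: compare the size of a caller's future with and without
// Box::pin around a large inner future.
use std::mem::size_of_val;

async fn big_inner() -> u64 {
    // A large buffer held across an .await point inflates the future's size.
    let buf = [0u8; 4096];
    std::future::ready(()).await;
    buf.iter().map(|&b| b as u64).sum()
}

async fn inline_caller() -> u64 {
    // The inner future is stored directly inside this future's state.
    big_inner().await
}

async fn boxed_caller() -> u64 {
    // Only a heap pointer (Pin<Box<...>>) is stored in this future's state.
    Box::pin(big_inner()).await
}

fn main() {
    // We never poll the futures; we only measure how large their states are.
    println!("inline: {} bytes", size_of_val(&inline_caller()));
    println!("boxed:  {} bytes", size_of_val(&boxed_caller()));
}
```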