feat: Add GraphQL support to GhApiClient (#1124)

Fixed #868

- Add new fn `remote::Client::post`
- Add new fn `remote::RequestBuilder::body`
- Re-export `reqwest::Body` in `remote`
- Add dep percent-encoding v2.2.0 to binstalk-downloader
- Add dep serde-tuple-vec-map v1.0.1 to binstalk-downloader
- Add GraphQL to `GhApiClient`, fallback to Restful API if token is not provided or authorization failed.
- Fixed `GhReleaseArtifact::try_extract_artifact_from_str`: decode percent encoded http url path and add regression tests
- Added variant `GhApiError::Context` & `GhApiContextError`
- Added variant `GhApiError::GraphQLErrors` & `GhGraphQLErrors`

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>
This commit is contained in: parent e87e3534a8, commit 22b3419fce
8 changed files with 518 additions and 99 deletions
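
For orientation before the diff itself, here is a brief sketch of how the new client is meant to be used, distilled from the test helper added in this commit. The module paths in the `use` lines and the meaning of the individual `remote::Client::new` arguments are assumptions for illustration; the argument values are simply the ones the tests use. With a token, the GraphQL API is tried first; without one, or when authorization fails, the client falls back to the Restful API.

use std::{env, time::Duration};

use binstalk_downloader::{gh_api_client::GhApiClient, remote};

// A minimal sketch mirroring the async test helper added in this commit.
// It is async only so it cannot accidentally be called outside a tokio runtime.
async fn create_clients() -> Vec<GhApiClient> {
    let client = remote::Client::new(
        concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION")),
        None,
        Duration::from_millis(10),
        1.try_into().unwrap(),
        [],
    )
    .unwrap();

    // Without a token: only the Restful API is used.
    let mut gh_clients = vec![GhApiClient::new(client.clone(), None)];

    // With a token: the GraphQL API is used, falling back to the Restful API
    // if authorization fails.
    if let Ok(token) = env::var("GITHUB_TOKEN") {
        gh_clients.push(GhApiClient::new(client, Some(token.into())));
    }

    gh_clients
}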

Cargo.lock (generated)

@@ -265,8 +265,10 @@ dependencies = [
  "futures-util",
  "generic-array",
  "httpdate",
+ "percent-encoding",
  "reqwest",
  "serde",
+ "serde-tuple-vec-map",
  "serde_json",
  "tempfile",
  "thiserror",

@@ -24,7 +24,9 @@ futures-util = "0.3.28"
 generic-array = "0.14.7"
 httpdate = "1.0.2"
 reqwest = { version = "0.11.18", features = ["stream", "gzip", "brotli", "deflate"], default-features = false }
+percent-encoding = "2.2.0"
 serde = { version = "1.0.163", features = ["derive"], optional = true }
+serde-tuple-vec-map = "1.0.1"
 serde_json = { version = "1.0.96", optional = true }
 # Use a fork here since we need PAX support, but the upstream
 # does not hav the PR merged yet.

@@ -8,18 +8,43 @@ use std::{
     time::{Duration, Instant},
 };
 
-use compact_str::{CompactString, ToCompactString};
+use compact_str::CompactString;
+use percent_encoding::{
+    percent_decode_str, utf8_percent_encode, AsciiSet, PercentEncode, CONTROLS,
+};
 use tokio::sync::OnceCell;
 use tracing::{debug, warn};
 
 use crate::remote;
 
 mod request;
-pub use request::GhApiError;
+pub use request::{GhApiContextError, GhApiError, GhGraphQLErrors};
 
 /// default retry duration if x-ratelimit-reset is not found in response header
 const DEFAULT_RETRY_DURATION: Duration = Duration::from_secs(3);
 
+fn percent_encode_http_url_path(path: &str) -> PercentEncode<'_> {
+    /// https://url.spec.whatwg.org/#fragment-percent-encode-set
+    const FRAGMENT: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`');
+
+    /// https://url.spec.whatwg.org/#path-percent-encode-set
+    const PATH: &AsciiSet = &FRAGMENT.add(b'#').add(b'?').add(b'{').add(b'}');
+
+    const PATH_SEGMENT: &AsciiSet = &PATH.add(b'/').add(b'%');
+
+    // The backslash (\) character is treated as a path separator in special URLs
+    // so it needs to be additionally escaped in that case.
+    //
+    // http is considered to have special path.
+    const SPECIAL_PATH_SEGMENT: &AsciiSet = &PATH_SEGMENT.add(b'\\');
+
+    utf8_percent_encode(path, SPECIAL_PATH_SEGMENT)
+}
+
+fn percent_decode_http_url_path(input: &str) -> CompactString {
+    percent_decode_str(input).decode_utf8_lossy().into()
+}
+
 /// The keys required to identify a github release.
 #[derive(Clone, Eq, PartialEq, Hash, Debug)]
 pub struct GhRelease {
@@ -57,11 +82,11 @@ impl GhReleaseArtifact {
         (path_segments.next().is_none() && url.fragment().is_none() && url.query().is_none()).then(
             || Self {
                 release: GhRelease {
-                    owner: owner.to_compact_string(),
-                    repo: repo.to_compact_string(),
-                    tag: tag.to_compact_string(),
+                    owner: percent_decode_http_url_path(owner),
+                    repo: percent_decode_http_url_path(repo),
+                    tag: percent_decode_http_url_path(tag),
                 },
-                artifact_name: artifact_name.to_compact_string(),
+                artifact_name: percent_decode_http_url_path(artifact_name),
             },
         )
     }
@@ -258,6 +283,8 @@ pub enum HasReleaseArtifact {
 #[cfg(test)]
 mod test {
     use super::*;
+    use compact_str::{CompactString, ToCompactString};
+    use std::env;
 
     mod cargo_binstall_v0_20_1 {
        use super::{CompactString, GhRelease};
@@ -347,35 +374,34 @@ mod test {
 
     /// Mark this as an async fn so that you won't accidentally use it in
     /// sync context.
-    async fn create_client() -> GhApiClient {
-        GhApiClient::new(
-            remote::Client::new(
+    async fn create_client() -> Vec<GhApiClient> {
+        let client = remote::Client::new(
             concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION")),
             None,
             Duration::from_millis(10),
             1.try_into().unwrap(),
             [],
         )
-        .unwrap(),
-            None,
-        )
+        .unwrap();
+
+        let mut gh_clients = vec![GhApiClient::new(client.clone(), None)];
+
+        if let Ok(token) = env::var("GITHUB_TOKEN") {
+            gh_clients.push(GhApiClient::new(client, Some(token.into())));
+        }
+
+        gh_clients
     }
 
-    #[tokio::test]
-    async fn test_gh_api_client_cargo_binstall_v0_20_1() {
-        let client = create_client().await;
-
-        let release = cargo_binstall_v0_20_1::RELEASE;
-
-        let artifacts = cargo_binstall_v0_20_1::ARTIFACTS
-            .iter()
-            .map(ToCompactString::to_compact_string);
+    async fn test_specific_release(release: &GhRelease, artifacts: &[&str]) {
+        for client in create_client().await {
+            eprintln!("In client {client:?}");
 
         for artifact_name in artifacts {
             let ret = client
                 .has_release_artifact(GhReleaseArtifact {
                     release: release.clone(),
-                    artifact_name,
+                    artifact_name: artifact_name.to_compact_string(),
                 })
                 .await
                 .unwrap();
@@ -385,14 +411,14 @@ mod test {
                     ret,
                     HasReleaseArtifact::Yes | HasReleaseArtifact::RateLimit { .. }
                 ),
-                "ret = {:#?}",
+                "for '{artifact_name}': answer is {:#?}",
                 ret
             );
         }
 
         let ret = client
             .has_release_artifact(GhReleaseArtifact {
-                release,
+                release: release.clone(),
                 artifact_name: "123z".to_compact_string(),
             })
             .await
@@ -407,11 +433,20 @@ mod test {
             ret
         );
     }
+    }
 
+    #[tokio::test]
+    async fn test_gh_api_client_cargo_binstall_v0_20_1() {
+        test_specific_release(
+            &cargo_binstall_v0_20_1::RELEASE,
+            cargo_binstall_v0_20_1::ARTIFACTS,
+        )
+        .await
+    }
+
     #[tokio::test]
     async fn test_gh_api_client_cargo_binstall_no_such_release() {
-        let client = create_client().await;
+        for client in create_client().await {
 
         let release = GhRelease {
             owner: "cargo-bins".to_compact_string(),
             repo: "cargo-binstall".to_compact_string(),
@@ -438,3 +473,45 @@ mod test {
         );
     }
 }
+
+    mod cargo_audit_v_0_17_6 {
+        use super::*;
+
+        const RELEASE: GhRelease = GhRelease {
+            owner: CompactString::new_inline("rustsec"),
+            repo: CompactString::new_inline("rustsec"),
+            tag: CompactString::new_inline("cargo-audit/v0.17.6"),
+        };
+
+        const ARTIFACTS: &[&str] = &[
+            "cargo-audit-aarch64-unknown-linux-gnu-v0.17.6.tgz",
+            "cargo-audit-armv7-unknown-linux-gnueabihf-v0.17.6.tgz",
+            "cargo-audit-x86_64-apple-darwin-v0.17.6.tgz",
+            "cargo-audit-x86_64-pc-windows-msvc-v0.17.6.zip",
+            "cargo-audit-x86_64-unknown-linux-gnu-v0.17.6.tgz",
+            "cargo-audit-x86_64-unknown-linux-gnu-v0.17.6.tgz",
+        ];
+
+        #[test]
+        fn extract_with_escaped_characters() {
+            let release_artifact = try_extract_artifact_from_str(
+                "https://github.com/rustsec/rustsec/releases/download/cargo-audit%2Fv0.17.6/cargo-audit-aarch64-unknown-linux-gnu-v0.17.6.tgz"
+            ).unwrap();
+
+            assert_eq!(
+                release_artifact,
+                GhReleaseArtifact {
+                    release: RELEASE,
+                    artifact_name: CompactString::from(
+                        "cargo-audit-aarch64-unknown-linux-gnu-v0.17.6.tgz",
+                    )
+                }
+            );
+        }
+
+        #[tokio::test]
+        async fn test_gh_api_client_cargo_audit_v_0_17_6() {
+            test_specific_release(&RELEASE, ARTIFACTS).await
+        }
+    }
+}

@@ -1,17 +1,30 @@
 use std::{
     borrow::Borrow,
     collections::HashSet,
+    error, fmt,
     hash::{Hash, Hasher},
     io,
+    sync::OnceLock,
     time::Duration,
 };
 
-use compact_str::CompactString;
-use serde::Deserialize;
+use compact_str::{CompactString, ToCompactString};
+use reqwest::{header::HeaderMap, StatusCode};
+use serde::{de::Deserializer, Deserialize, Serialize};
+use serde_json::to_string as to_json_string;
 use thiserror::Error as ThisError;
+use tracing::debug;
 use url::Url;
 
-use super::{remote, GhRelease};
+use super::{percent_encode_http_url_path, remote, GhRelease};
+
+#[derive(ThisError, Debug)]
+#[error("Context: '{context}', err: '{err}'")]
+pub struct GhApiContextError {
+    context: CompactString,
+    #[source]
+    err: GhApiError,
+}
 
 #[derive(ThisError, Debug)]
 #[non_exhaustive]
@@ -24,6 +37,23 @@ pub enum GhApiError {
 
     #[error("Failed to parse url: {0}")]
     InvalidUrl(#[from] url::ParseError),
+
+    /// A wrapped error providing the context the error is about.
+    #[error(transparent)]
+    Context(Box<GhApiContextError>),
+
+    #[error("Remote failed to process GraphQL query: {0}")]
+    GraphQLErrors(#[from] GhGraphQLErrors),
+}
+
+impl GhApiError {
+    /// Attach context to [`GhApiError`]
+    pub fn context(self, context: impl fmt::Display) -> Self {
+        Self::Context(Box::new(GhApiContextError {
+            context: context.to_compact_string(),
+            err: self,
+        }))
+    }
 }
 
 // Only include fields we do care about
@@ -62,7 +92,7 @@ impl Borrow<str> for Artifact {
     }
 }
 
-#[derive(Debug, Deserialize)]
+#[derive(Debug, Default, Deserialize)]
 pub(super) struct Artifacts {
     assets: HashSet<Artifact>,
 }
@@ -80,15 +110,43 @@ pub(super) enum FetchReleaseRet {
     Unauthorized,
 }
 
-/// Returns 404 if not found
-pub(super) async fn fetch_release_artifacts(
+fn check_for_status(status: StatusCode, headers: &HeaderMap) -> Option<FetchReleaseRet> {
+    if status == remote::StatusCode::FORBIDDEN
+        && headers
+            .get("x-ratelimit-remaining")
+            .map(|val| val == "0")
+            .unwrap_or(false)
+    {
+        return Some(FetchReleaseRet::ReachedRateLimit {
+            retry_after: headers.get("x-ratelimit-reset").and_then(|value| {
+                let secs = value.to_str().ok()?.parse().ok()?;
+                Some(Duration::from_secs(secs))
+            }),
+        });
+    }
+
+    if status == remote::StatusCode::UNAUTHORIZED {
+        return Some(FetchReleaseRet::Unauthorized);
+    }
+
+    if status == remote::StatusCode::NOT_FOUND {
+        return Some(FetchReleaseRet::ReleaseNotFound);
+    }
+
+    None
+}
+
+async fn fetch_release_artifacts_restful_api(
     client: &remote::Client,
     GhRelease { owner, repo, tag }: &GhRelease,
     auth_token: Option<&str>,
 ) -> Result<FetchReleaseRet, GhApiError> {
     let mut request_builder = client
         .get(Url::parse(&format!(
-            "https://api.github.com/repos/{owner}/{repo}/releases/tags/{tag}"
+            "https://api.github.com/repos/{owner}/{repo}/releases/tags/{tag}",
+            owner = percent_encode_http_url_path(owner),
+            repo = percent_encode_http_url_path(repo),
+            tag = percent_encode_http_url_path(tag),
         ))?)
         .header("Accept", "application/vnd.github+json")
         .header("X-GitHub-Api-Version", "2022-11-28");
@@ -102,27 +160,291 @@ pub(super) async fn fetch_release_artifacts(
     let status = response.status();
     let headers = response.headers();
 
-    if status == remote::StatusCode::FORBIDDEN
-        && headers
-            .get("x-ratelimit-remaining")
-            .map(|val| val == "0")
-            .unwrap_or(false)
-    {
-        return Ok(FetchReleaseRet::ReachedRateLimit {
-            retry_after: headers.get("x-ratelimit-reset").and_then(|value| {
-                let secs = value.to_str().ok()?.parse().ok()?;
-                Some(Duration::from_secs(secs))
-            }),
-        });
-    }
-
-    if status == remote::StatusCode::UNAUTHORIZED {
-        return Ok(FetchReleaseRet::Unauthorized);
-    }
-
-    if status == remote::StatusCode::NOT_FOUND {
-        return Ok(FetchReleaseRet::ReleaseNotFound);
+    if let Some(ret) = check_for_status(status, headers) {
+        return Ok(ret);
     }
 
     Ok(FetchReleaseRet::Artifacts(response.json().await?))
 }
+
+#[derive(Deserialize)]
+enum GraphQLResponse {
+    #[serde(rename = "data")]
+    Data(GraphQLData),
+
+    #[serde(rename = "errors")]
+    Errors(GhGraphQLErrors),
+}
+
+#[derive(Debug, Deserialize)]
+pub struct GhGraphQLErrors(Box<[GraphQLError]>);
+
+impl GhGraphQLErrors {
+    fn is_rate_limited(&self) -> bool {
+        self.0
+            .iter()
+            .any(|error| matches!(error.error_type, GraphQLErrorType::RateLimited))
+    }
+}
+
+impl error::Error for GhGraphQLErrors {}
+
+impl fmt::Display for GhGraphQLErrors {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let last_error_index = self.0.len() - 1;
+
+        for (i, error) in self.0.iter().enumerate() {
+            write!(
+                f,
+                "type: '{error_type}', msg: '{msg}'",
+                error_type = error.error_type,
+                msg = error.message,
+            )?;
+
+            for location in error.locations.as_deref().into_iter().flatten() {
+                write!(
+                    f,
+                    ", occured on query line {line} col {col}",
+                    line = location.line,
+                    col = location.column
+                )?;
+            }
+
+            for (k, v) in &error.others {
+                write!(f, ", {k}: {v}")?;
+            }
+
+            if i < last_error_index {
+                f.write_str("\n")?;
+            }
+        }
+
+        Ok(())
+    }
+}
+
+#[derive(Debug, Deserialize)]
+struct GraphQLError {
+    message: CompactString,
+    locations: Option<Box<[GraphQLLocation]>>,
+
+    #[serde(rename = "type")]
+    error_type: GraphQLErrorType,
+
+    #[serde(flatten, with = "tuple_vec_map")]
+    others: Vec<(CompactString, serde_json::Value)>,
+}
+
+#[derive(Debug)]
+enum GraphQLErrorType {
+    RateLimited,
+    Other(CompactString),
+}
+
+impl fmt::Display for GraphQLErrorType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(match self {
+            GraphQLErrorType::RateLimited => "RATE_LIMITED",
+            GraphQLErrorType::Other(s) => s,
+        })
+    }
+}
+
+impl<'de> Deserialize<'de> for GraphQLErrorType {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let s = CompactString::deserialize(deserializer)?;
+        Ok(match &*s {
+            "RATE_LIMITED" => GraphQLErrorType::RateLimited,
+            _ => GraphQLErrorType::Other(s),
+        })
+    }
+}
+
+#[derive(Debug, Deserialize)]
+struct GraphQLLocation {
+    line: u64,
+    column: u64,
+}
+
+#[derive(Deserialize)]
+struct GraphQLData {
+    repository: Option<GraphQLRepo>,
+}
+
+#[derive(Deserialize)]
+struct GraphQLRepo {
+    release: Option<GraphQLRelease>,
+}
+
+#[derive(Deserialize)]
+struct GraphQLRelease {
+    #[serde(rename = "releaseAssets")]
+    assets: GraphQLReleaseAssets,
+}
+
+#[derive(Deserialize)]
+struct GraphQLReleaseAssets {
+    nodes: Vec<Artifact>,
+    #[serde(rename = "pageInfo")]
+    page_info: GraphQLPageInfo,
+}
+
+#[derive(Deserialize)]
+struct GraphQLPageInfo {
+    #[serde(rename = "endCursor")]
+    end_cursor: CompactString,
+    #[serde(rename = "hasNextPage")]
+    has_next_page: bool,
+}
+
+enum FilterCondition {
+    Init,
+    After(CompactString),
+}
+
+impl fmt::Display for FilterCondition {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            // GitHub imposes a limit of 100 for the value passed to param "first"
+            FilterCondition::Init => f.write_str("first:100"),
+            FilterCondition::After(end_cursor) => write!(f, r#"first:100,after:"{end_cursor}""#),
+        }
+    }
+}
+
+#[derive(Serialize)]
+struct GraphQLQuery {
+    query: String,
+}
+
+async fn fetch_release_artifacts_graphql_api(
+    client: &remote::Client,
+    GhRelease { owner, repo, tag }: &GhRelease,
+    auth_token: &str,
+) -> Result<FetchReleaseRet, GhApiError> {
+    static GRAPHQL_ENDPOINT: OnceLock<Url> = OnceLock::new();
+
+    let graphql_endpoint = GRAPHQL_ENDPOINT.get_or_init(|| {
+        Url::parse("https://api.github.com/graphql").expect("Literal provided must be a valid url")
+    });
+
+    let mut artifacts = Artifacts::default();
+    let mut cond = FilterCondition::Init;
+
+    loop {
+        let query = format!(
+            r#"
+query {{
+  repository(owner:"{owner}",name:"{repo}") {{
+    release(tagName:"{tag}") {{
+      releaseAssets({cond}) {{
+        nodes {{ name }}
+        pageInfo {{ endCursor hasNextPage }}
+      }}
+    }}
+  }}
+}}"#
+        );
+
+        let graphql_query = to_json_string(&GraphQLQuery { query }).map_err(remote::Error::from)?;
+
+        debug!("Sending graphql query to https://api.github.com/graphql: '{graphql_query}'");
+
+        let request_builder = client
+            .post(graphql_endpoint.clone(), graphql_query)
+            .header("Accept", "application/vnd.github+json")
+            .bearer_auth(&auth_token);
+
+        let response = request_builder.send(false).await?;
+
+        if let Some(ret) = check_for_status(response.status(), response.headers()) {
+            return Ok(ret);
+        }
+
+        let response: GraphQLResponse = response.json().await?;
+
+        let data = match response {
+            GraphQLResponse::Data(data) => data,
+            GraphQLResponse::Errors(errors) if errors.is_rate_limited() => {
+                return Ok(FetchReleaseRet::ReachedRateLimit { retry_after: None })
+            }
+            GraphQLResponse::Errors(errors) => return Err(errors.into()),
+        };
+
+        let assets = data
+            .repository
+            .and_then(|repository| repository.release)
+            .map(|release| release.assets);
+
+        if let Some(assets) = assets {
+            artifacts.assets.extend(assets.nodes);
+
+            let page_info = assets.page_info;
+            if !page_info.has_next_page {
+                break Ok(FetchReleaseRet::Artifacts(artifacts));
+            } else {
+                cond = FilterCondition::After(page_info.end_cursor);
+            }
+        } else {
+            break Ok(FetchReleaseRet::ReleaseNotFound);
+        }
+    }
+}
+
+pub(super) async fn fetch_release_artifacts(
+    client: &remote::Client,
+    release: &GhRelease,
+    auth_token: Option<&str>,
+) -> Result<FetchReleaseRet, GhApiError> {
+    if let Some(auth_token) = auth_token {
+        let res = fetch_release_artifacts_graphql_api(client, release, auth_token)
+            .await
+            .map_err(|err| err.context("GraphQL API"));
+
+        match res {
+            // Fallback to Restful API
+            Ok(FetchReleaseRet::Unauthorized) => (),
+            res => return res,
+        }
+    }
+
+    fetch_release_artifacts_restful_api(client, release, auth_token)
+        .await
+        .map_err(|err| err.context("Restful API"))
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use serde::de::value::{BorrowedStrDeserializer, Error};
+
+    macro_rules! assert_matches {
+        ($expression:expr, $pattern:pat $(if $guard:expr)? $(,)?) => {
+            match $expression {
+                $pattern $(if $guard)? => true,
+                expr => {
+                    panic!(
+                        "assertion failed: `{expr:?}` does not match `{}`",
+                        stringify!($pattern $(if $guard)?)
+                    )
+                }
+            }
+        }
+    }
+
+    #[test]
+    fn test_graph_ql_error_type() {
+        let deserialize = |input: &str| {
+            GraphQLErrorType::deserialize(BorrowedStrDeserializer::<'_, Error>::new(input)).unwrap()
+        };
+
+        assert_matches!(deserialize("RATE_LIMITED"), GraphQLErrorType::RateLimited);
+        assert_matches!(
+            deserialize("rATE_LIMITED"),
+            GraphQLErrorType::Other(val) if val == CompactString::new("rATE_LIMITED")
+        );
+    }
+}

@@ -26,7 +26,7 @@ mod certificate;
 pub use certificate::Certificate;
 
 mod request_builder;
-pub use request_builder::{RequestBuilder, Response};
+pub use request_builder::{Body, RequestBuilder, Response};
 
 #[cfg(feature = "json")]
 pub use request_builder::JsonError;
@@ -303,6 +303,7 @@ impl Client {
         Ok(self.get(url).send(true).await?.bytes_stream())
     }
 
+    /// Create a new request.
     pub fn request(&self, method: Method, url: Url) -> RequestBuilder {
         RequestBuilder {
             client: self.clone(),
@@ -310,9 +311,15 @@ impl Client {
         }
     }
 
+    /// Create a new GET request.
     pub fn get(&self, url: Url) -> RequestBuilder {
         self.request(Method::GET, url)
     }
+
+    /// Create a new POST request.
+    pub fn post(&self, url: Url, body: impl Into<Body>) -> RequestBuilder {
+        self.request(Method::POST, url).body(body.into())
+    }
 }
 
 fn parse_header_retry_after(headers: &HeaderMap) -> Option<Duration> {

@@ -6,6 +6,8 @@ use reqwest::Method;
 
 use super::{header, Client, Error, HttpError, StatusCode, Url};
 
+pub use reqwest::Body;
+
 #[cfg(feature = "json")]
 pub use serde_json::Error as JsonError;
 
@@ -16,20 +18,27 @@ pub struct RequestBuilder {
 }
 
 impl RequestBuilder {
-    pub fn bearer_auth(self, token: &dyn fmt::Display) -> RequestBuilder {
+    pub fn bearer_auth(self, token: &dyn fmt::Display) -> Self {
         Self {
             client: self.client,
             inner: self.inner.bearer_auth(token),
         }
     }
 
-    pub fn header(self, key: &str, value: &str) -> RequestBuilder {
+    pub fn header(self, key: &str, value: &str) -> Self {
         Self {
             client: self.client,
             inner: self.inner.header(key, value),
         }
     }
 
+    pub fn body(self, body: impl Into<Body>) -> Self {
+        Self {
+            client: self.client,
+            inner: self.inner.body(body.into()),
+        }
+    }
+
     pub async fn send(self, error_for_status: bool) -> Result<Response, Error> {
         let request = self.inner.build()?;
         let method = request.method().clone();

@@ -306,12 +306,12 @@ pub enum BinstallError {
     #[diagnostic(severity(error), code(binstall::invalid_pkg_fmt))]
     InvalidPkgFmt(Box<InvalidPkgFmtError>),
 
-    /// Request to GitHub Restful API failed
+    /// Request to GitHub API failed
     ///
-    /// - Code: `binstall::gh_restful_api_failure`
+    /// - Code: `binstall::gh_api_failure`
     /// - Exit: 96
-    #[error("Request to GitHub Restful API failed: {0}")]
-    #[diagnostic(severity(error), code(binstall::gh_restful_api_failure))]
+    #[error("Request to GitHub API failed: {0}")]
+    #[diagnostic(severity(error), code(binstall::gh_api_failure))]
     GhApiErr(#[source] Box<GhApiError>),
 
     /// A wrapped error providing the context of which crate the error is about.

@@ -15,7 +15,7 @@ pub async fn does_url_exist(
     debug!("Checking for package at: '{url}'");
 
     if let Some(artifact) = GhReleaseArtifact::try_extract_from_url(url) {
-        debug!("Using GitHub Restful API to check for existence of artifact, which will also cache the API response");
+        debug!("Using GitHub API to check for existence of artifact, which will also cache the API response");
 
         // The future returned has the same size as a pointer
         match gh_api_client.has_release_artifact(artifact).await? {