Initial signing support (#1345)

* Add CLI options

* Add manifest types

* Thread signature policy through to fetchers

* Thread signing section through from metadata

* Implement signing validation

* Clippy

* Attempt testing

* Yes and

* Why

* fmt

* Update crates/bin/src/args.rs

Co-authored-by: Jiahao XU <Jiahao_XU@outlook.com>

* Update crates/binstalk-fetchers/src/gh_crate_meta.rs

Co-authored-by: Jiahao XU <Jiahao_XU@outlook.com>

* Update crates/bin/src/args.rs

Co-authored-by: Jiahao XU <Jiahao_XU@outlook.com>

* Update crates/binstalk-fetchers/src/signing.rs

Co-authored-by: Jiahao XU <Jiahao_XU@outlook.com>

* Update crates/binstalk-fetchers/src/signing.rs

Co-authored-by: Jiahao XU <Jiahao_XU@outlook.com>

* Update crates/binstalk-fetchers/src/signing.rs

Co-authored-by: Jiahao XU <Jiahao_XU@outlook.com>

* Update crates/binstalk-fetchers/src/signing.rs

Co-authored-by: Jiahao XU <Jiahao_XU@outlook.com>

* fixes

* Finish feature

* Document

* Include all fields in the signing.file template

* Readme document

* Review fixes

* Fail on non-utf8 sig

* Thank goodness for tests

* Run test in ci

* Add rsign2 commands

* Log utf8 error

* Update e2e-tests/signing.sh

Co-authored-by: Jiahao XU <Jiahao_XU@outlook.com>

* Fix `e2e-tests/signing.sh` MacOS CI failure

Move the tls cert creation into `signing.sh` and sleep for 10s to wait
for https server to start.

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

* Refactor e2e-tests-signing files

 - Use a tempdir generated by `mktemp` for all certificates-related
   files
 - Put other checked-in files into `e2e-tests/signing`

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

* Fixed `e2e-tests-signing` connection err in MacOS CI

Wait for server to start up by trying to connect to it.

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

* Fix `e2e-tests-signing` passing `-subj` to `openssl` on Windows

Use single quote instead of double quote to avoid automatic expansion
from bash

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

* Fix `e2e-tests-signing` waiting for server to startup

Remove `timeout` since it is not supported on MacOS.

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

* Try to fix windows CI by setting `MSYS_NO_PATHCONV=1` on `openssl` cmds

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

* Fixed `e2e-tests-signing` on windows

By using double `//` for the value passed to option `-subj`

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

* Fixed infinite loop in `signing/wait-for-server` on Windows

Pass `--ssl-revoke-best-effort` to prevent schannel from checking ssl
revocation status.

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

* Add cap on retry attempt in `signing/wait-for-server.sh`

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

* Let `signing/server.py` print output to stderr

so that we can see the error message there.

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

* Fix running `signing/server.py` on MacOS CI

use `python3` since macos-latest still has python2 installed and
`python` is a symlink to `python2` there.

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>

---------

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>
Co-authored-by: Jiahao XU <Jiahao_XU@outlook.com>
This commit is contained in:
Félix Saparelli 2023-09-23 16:02:56 +12:00 committed by GitHub
parent efbd20857b
commit 32beba507b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
29 changed files with 723 additions and 150 deletions

View file

@ -286,12 +286,29 @@ pub struct Args {
/// specified (which is also shown by clap's auto generated doc below), or
/// try environment variable `GH_TOKEN`, which is also used by `gh` cli.
///
/// If none of them is present, then binstal will try to extract github
/// If none of them is present, then binstall will try to extract github
/// token from `$HOME/.git-credentials` or `$HOME/.config/gh/hosts.yml`
/// unless `--no-discover-github-token` is specified.
#[clap(help_heading = "Options", long, env = "GITHUB_TOKEN")]
pub(crate) github_token: Option<CompactString>,
/// Only install packages that are signed
///
/// The default is to verify signatures if they are available, but to allow
/// unsigned packages as well.
#[clap(help_heading = "Options", long)]
pub(crate) only_signed: bool,
/// Don't check any signatures
///
/// The default is to verify signatures if they are available. This option
/// disables that behaviour entirely, which will also stop downloading
/// signature files in the first place.
///
/// Note that this is insecure and not recommended outside of testing.
#[clap(help_heading = "Options", long, conflicts_with = "only_signed")]
pub(crate) skip_signatures: bool,
/// Print version information
#[clap(help_heading = "Meta", short = 'V')]
pub version: bool,

View file

@ -7,7 +7,7 @@ use std::{
use binstalk::{
errors::BinstallError,
fetchers::{Fetcher, GhCrateMeta, QuickInstall},
fetchers::{Fetcher, GhCrateMeta, QuickInstall, SignaturePolicy},
get_desired_targets,
helpers::{
gh_api_client::GhApiClient,
@ -88,6 +88,7 @@ pub fn install_crates(
pkg_url: args.pkg_url,
pkg_fmt: args.pkg_fmt,
bin_dir: args.bin_dir,
signing: None,
};
// Initialize reqwest client
@ -183,6 +184,14 @@ pub fn install_crates(
} else {
Default::default()
},
signature_policy: if args.only_signed {
SignaturePolicy::Require
} else if args.skip_signatures {
SignaturePolicy::Ignore
} else {
SignaturePolicy::IfPresent
},
});
// Destruct args before any async function to reduce size of the future

View file

@ -76,14 +76,17 @@ pub trait DataVerifier: Send + Sync {
/// This method can be called repeatedly for use with streaming messages,
/// it will be called in the order of the message received.
fn update(&mut self, data: &Bytes);
/// Finalise the data verification.
///
/// Return false if the data is invalid.
fn validate(&mut self) -> bool;
}
impl<T> DataVerifier for T
where
T: FnMut(&Bytes) + Send + Sync,
{
fn update(&mut self, data: &Bytes) {
(*self)(data)
impl DataVerifier for () {
fn update(&mut self, _: &Bytes) {}
fn validate(&mut self) -> bool {
true
}
}
@ -136,9 +139,7 @@ impl<'a> Download<'a> {
data_verifier: Some(data_verifier),
}
}
}
impl<'a> Download<'a> {
async fn get_stream(
self,
) -> Result<
@ -182,7 +183,7 @@ where
}
impl Download<'_> {
/// Download a file from the provided URL and process them in memory.
/// Download a file from the provided URL and process it in memory.
///
/// This does not support verifying a checksum due to the partial extraction
/// and will ignore one if specified.
@ -216,7 +217,7 @@ impl Download<'_> {
/// Download a file from the provided URL and extract it to the provided path.
///
/// NOTE that this would only extract directory and regular files.
/// NOTE that this will only extract directory and regular files.
#[instrument(skip(path))]
pub async fn and_extract(
self,
@ -257,6 +258,15 @@ impl Download<'_> {
inner(self, fmt, path.as_ref()).await
}
#[instrument]
pub async fn into_bytes(self) -> Result<Bytes, DownloadError> {
let bytes = self.client.get(self.url).send(true).await?.bytes().await?;
if let Some(verifier) = self.data_verifier {
verifier.update(&bytes);
}
Ok(bytes)
}
}
#[cfg(test)]

View file

@ -14,12 +14,14 @@ license = "GPL-3.0-only"
async-trait = "0.1.68"
binstalk-downloader = { version = "0.8.0", path = "../binstalk-downloader", default-features = false, features = ["gh-api-client"] }
binstalk-types = { version = "0.5.0", path = "../binstalk-types" }
bytes = "1.4.0"
compact_str = { version = "0.7.0" }
either = "1.8.1"
itertools = "0.11.0"
leon = { version = "2.0.1", path = "../leon" }
leon-macros = { version = "1.0.0", path = "../leon-macros" }
miette = "5.9.0"
minisign-verify = "0.2.1"
once_cell = "1.18.0"
strum = "0.25.0"
thiserror = "1.0.40"

View file

@ -1,16 +1,16 @@
use std::{borrow::Cow, fmt, iter, marker::PhantomData, path::Path, sync::Arc};
use std::{borrow::Cow, fmt, iter, path::Path, sync::Arc};
use compact_str::{CompactString, ToCompactString};
use either::Either;
use leon::Template;
use once_cell::sync::OnceCell;
use strum::IntoEnumIterator;
use tracing::{debug, warn};
use tracing::{debug, info, trace, warn};
use url::Url;
use crate::{
common::*, futures_resolver::FuturesResolver, Data, FetchError, InvalidPkgFmtError, RepoInfo,
TargetDataErased,
SignaturePolicy, SignatureVerifier, TargetDataErased,
};
pub(crate) mod hosting;
@ -20,13 +20,23 @@ pub struct GhCrateMeta {
gh_api_client: GhApiClient,
data: Arc<Data>,
target_data: Arc<TargetDataErased>,
resolution: OnceCell<(Url, PkgFmt)>,
signature_policy: SignaturePolicy,
resolution: OnceCell<Resolved>,
}
#[derive(Debug)]
struct Resolved {
url: Url,
pkg_fmt: PkgFmt,
archive_suffix: Option<String>,
repo: Option<String>,
subcrate: Option<String>,
}
impl GhCrateMeta {
fn launch_baseline_find_tasks(
&self,
futures_resolver: &FuturesResolver<(Url, PkgFmt), FetchError>,
futures_resolver: &FuturesResolver<Resolved, FetchError>,
pkg_fmt: PkgFmt,
pkg_url: &Template<'_>,
repo: Option<&str>,
@ -41,7 +51,7 @@ impl GhCrateMeta {
repo,
subcrate,
);
match ctx.render_url_with_compiled_tt(pkg_url) {
match ctx.render_url_with(pkg_url) {
Ok(url) => Some(url),
Err(err) => {
warn!("Failed to render url for {ctx:#?}: {err}");
@ -58,21 +68,30 @@ impl GhCrateMeta {
pkg_fmt
.extensions(is_windows)
.iter()
.filter_map(|ext| render_url(Some(ext))),
.filter_map(|ext| render_url(Some(ext)).map(|url| (url, Some(ext)))),
)
} else {
Either::Right(render_url(None).into_iter())
Either::Right(render_url(None).map(|url| (url, None)).into_iter())
};
// go check all potential URLs at once
futures_resolver.extend(urls.map(move |url| {
futures_resolver.extend(urls.map(move |(url, ext)| {
let client = self.client.clone();
let gh_api_client = self.gh_api_client.clone();
let repo = repo.map(ToString::to_string);
let subcrate = subcrate.map(ToString::to_string);
let archive_suffix = ext.map(ToString::to_string);
async move {
Ok(does_url_exist(client, gh_api_client, &url)
.await?
.then_some((url, pkg_fmt)))
.then_some(Resolved {
url,
pkg_fmt,
repo,
subcrate,
archive_suffix,
}))
}
}));
}
@ -85,12 +104,14 @@ impl super::Fetcher for GhCrateMeta {
gh_api_client: GhApiClient,
data: Arc<Data>,
target_data: Arc<TargetDataErased>,
signature_policy: SignaturePolicy,
) -> Arc<dyn super::Fetcher> {
Arc::new(Self {
client,
gh_api_client,
data,
target_data,
signature_policy,
resolution: OnceCell::new(),
})
}
@ -131,7 +152,8 @@ impl super::Fetcher for GhCrateMeta {
pkg_url: pkg_url.into(),
reason:
&"pkg-fmt is not specified, yet pkg-url does not contain format, \
archive-format or archive-suffix which is required for automatically deducing pkg-fmt",
archive-format or archive-suffix which is required for automatically \
deducing pkg-fmt",
}
.into());
}
@ -212,9 +234,9 @@ archive-format or archive-suffix which is required for automatically deducing pk
}
}
if let Some((url, pkg_fmt)) = resolver.resolve().await? {
debug!("Winning URL is {url}, with pkg_fmt {pkg_fmt}");
self.resolution.set((url, pkg_fmt)).unwrap(); // find() is called first
if let Some(resolved) = resolver.resolve().await? {
debug!(?resolved, "Winning URL found!");
self.resolution.set(resolved).unwrap(); // find() is called first
Ok(true)
} else {
Ok(false)
@ -223,18 +245,75 @@ archive-format or archive-suffix which is required for automatically deducing pk
}
async fn fetch_and_extract(&self, dst: &Path) -> Result<ExtractedFiles, FetchError> {
let (url, pkg_fmt) = self.resolution.get().unwrap(); // find() is called first
let resolved = self.resolution.get().unwrap(); // find() is called first
trace!(?resolved, "preparing to fetch");
let verifier = match (self.signature_policy, &self.target_data.meta.signing) {
(SignaturePolicy::Ignore, _) | (SignaturePolicy::IfPresent, None) => {
SignatureVerifier::Noop
}
(SignaturePolicy::Require, None) => {
debug_assert!(false, "missing signing section should be caught earlier");
return Err(FetchError::MissingSignature);
}
(_, Some(config)) => {
let template = match config.file.as_deref() {
Some(file) => Template::parse(file)?,
None => leon_macros::template!("{ url }.sig"),
};
trace!(?template, "parsed signature file template");
let sign_url = Context::from_data_with_repo(
&self.data,
&self.target_data.target,
&self.target_data.target_related_info,
resolved.archive_suffix.as_deref(),
resolved.repo.as_deref(),
resolved.subcrate.as_deref(),
)
.with_url(&resolved.url)
.render_url_with(&template)?;
debug!(?sign_url, "Downloading signature");
let signature = Download::new(self.client.clone(), sign_url)
.into_bytes()
.await?;
trace!(?signature, "got signature contents");
SignatureVerifier::new(config, &signature)?
}
};
debug!(
"Downloading package from: '{url}' dst:{} fmt:{pkg_fmt:?}",
dst.display()
url=%resolved.url,
dst=%dst.display(),
fmt=?resolved.pkg_fmt,
"Downloading package",
);
Ok(Download::new(self.client.clone(), url.clone())
.and_extract(*pkg_fmt, dst)
.await?)
let mut data_verifier = verifier.data_verifier()?;
let files = Download::new_with_data_verifier(
self.client.clone(),
resolved.url.clone(),
data_verifier.as_mut(),
)
.and_extract(resolved.pkg_fmt, dst)
.await?;
trace!("validating signature (if any)");
if data_verifier.validate() {
if let Some(info) = verifier.info() {
info!(
"Verified signature for package '{}': {info}",
self.data.name
);
}
Ok(files)
} else {
Err(FetchError::InvalidSignature)
}
}
fn pkg_fmt(&self) -> PkgFmt {
self.resolution.get().unwrap().1
self.resolution.get().unwrap().pkg_fmt
}
fn target_meta(&self) -> PkgMeta {
@ -246,13 +325,13 @@ archive-format or archive-suffix which is required for automatically deducing pk
fn source_name(&self) -> CompactString {
self.resolution
.get()
.map(|(url, _pkg_fmt)| {
if let Some(domain) = url.domain() {
.map(|resolved| {
if let Some(domain) = resolved.url.domain() {
domain.to_compact_string()
} else if let Some(host) = url.host_str() {
} else if let Some(host) = resolved.url.host_str() {
host.to_compact_string()
} else {
url.to_compact_string()
resolved.url.to_compact_string()
}
})
.unwrap_or_else(|| "invalid url".into())
@ -294,49 +373,24 @@ struct Context<'c> {
/// Workspace of the crate inside the repository.
subcrate: Option<&'c str>,
/// Url of the file being downloaded (only for signing.file)
url: Option<&'c Url>,
target_related_info: &'c dyn leon::Values,
}
impl fmt::Debug for Context<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
#[allow(dead_code)]
#[derive(Debug)]
struct Context<'c> {
name: &'c str,
repo: Option<&'c str>,
target: &'c str,
version: &'c str,
archive_format: Option<&'c str>,
archive_suffix: Option<&'c str>,
binary_ext: &'c str,
subcrate: Option<&'c str>,
target_related_info: PhantomData<&'c dyn leon::Values>,
}
fmt::Debug::fmt(
&Context {
name: self.name,
repo: self.repo,
target: self.target,
version: self.version,
archive_format: self.archive_format,
archive_suffix: self.archive_suffix,
binary_ext: self.binary_ext,
subcrate: self.subcrate,
target_related_info: PhantomData,
},
f,
)
f.debug_struct("Context")
.field("name", &self.name)
.field("repo", &self.repo)
.field("target", &self.target)
.field("version", &self.version)
.field("archive_format", &self.archive_format)
.field("binary_ext", &self.binary_ext)
.field("subcrate", &self.subcrate)
.field("url", &self.url)
.finish_non_exhaustive()
}
}
@ -359,6 +413,8 @@ impl leon::Values for Context<'_> {
"subcrate" => self.subcrate.map(Cow::Borrowed),
"url" => self.url.map(|url| Cow::Borrowed(url.as_str())),
key => self.target_related_info.get_value(key),
}
}
@ -398,24 +454,25 @@ impl<'c> Context<'c> {
""
},
subcrate,
url: None,
target_related_info,
}
}
/// * `tt` - must have added a template named "pkg_url".
fn render_url_with_compiled_tt(&self, tt: &Template<'_>) -> Result<Url, FetchError> {
debug!("Render {tt:#?} using context: {self:?}");
fn with_url(&mut self, url: &'c Url) -> &mut Self {
self.url = Some(url);
self
}
Ok(Url::parse(&tt.render(self)?)?)
fn render_url_with(&self, template: &Template<'_>) -> Result<Url, FetchError> {
debug!(?template, context=?self, "render url template");
Ok(Url::parse(&template.render(self)?)?)
}
#[cfg(test)]
fn render_url(&self, template: &str) -> Result<Url, FetchError> {
debug!("Render {template} using context in render_url: {self:?}");
let tt = Template::parse(template)?;
self.render_url_with_compiled_tt(&tt)
self.render_url_with(&Template::parse(template)?)
}
}

View file

@ -5,6 +5,7 @@ use std::{path::Path, sync::Arc};
use binstalk_downloader::{
download::DownloadError, gh_api_client::GhApiError, remote::Error as RemoteError,
};
use binstalk_types::cargo_toml_binstall::SigningAlgorithm;
use thiserror::Error as ThisError;
use tokio::sync::OnceCell;
pub use url::ParseError as UrlParseError;
@ -20,6 +21,9 @@ pub use quickinstall::*;
mod common;
use common::*;
mod signing;
use signing::*;
mod futures_resolver;
use gh_crate_meta::hosting::RepositoryHost;
@ -57,6 +61,15 @@ pub enum FetchError {
#[error("Failed to parse url: {0}")]
UrlParse(#[from] UrlParseError),
#[error("Signing algorithm not supported: {0:?}")]
UnsupportedSigningAlgorithm(SigningAlgorithm),
#[error("No signature present")]
MissingSignature,
#[error("Failed to verify signature")]
InvalidSignature,
}
impl From<RemoteError> for FetchError {
@ -80,6 +93,7 @@ pub trait Fetcher: Send + Sync {
gh_api_client: GhApiClient,
data: Arc<Data>,
target_data: Arc<TargetDataErased>,
signature_policy: SignaturePolicy,
) -> Arc<dyn Fetcher>
where
Self: Sized;
@ -133,6 +147,19 @@ struct RepoInfo {
subcrate: Option<CompactString>,
}
/// What to do about package signatures
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum SignaturePolicy {
/// Don't process any signing information at all
Ignore,
/// Verify and fail if a signature is found, but pass a signature-less package
IfPresent,
/// Require signatures to be present (and valid)
Require,
}
/// Data required to fetch a package
#[derive(Clone, Debug)]
pub struct Data {

View file

@ -5,7 +5,7 @@ use binstalk_types::cargo_toml_binstall::{PkgFmt, PkgMeta};
use tokio::sync::OnceCell;
use url::Url;
use crate::{common::*, Data, FetchError, TargetDataErased};
use crate::{common::*, Data, FetchError, SignaturePolicy, TargetDataErased};
const BASE_URL: &str = "https://github.com/cargo-bins/cargo-quickinstall/releases/download";
const STATS_URL: &str = "https://warehouse-clerk-tmp.vercel.app/api/crate";
@ -51,6 +51,7 @@ pub struct QuickInstall {
package: String,
package_url: Url,
stats_url: Url,
signature_policy: SignaturePolicy,
target_data: Arc<TargetDataErased>,
}
@ -76,6 +77,7 @@ impl super::Fetcher for QuickInstall {
gh_api_client: GhApiClient,
data: Arc<Data>,
target_data: Arc<TargetDataErased>,
signature_policy: SignaturePolicy,
) -> Arc<dyn super::Fetcher> {
let crate_name = &data.name;
let version = &data.version;
@ -95,6 +97,7 @@ impl super::Fetcher for QuickInstall {
stats_url: Url::parse(&format!("{STATS_URL}/{package}.tar.gz",))
.expect("stats_url is pre-generated and should never be invalid url"),
package,
signature_policy,
target_data,
})
@ -102,6 +105,11 @@ impl super::Fetcher for QuickInstall {
fn find(self: Arc<Self>) -> JoinHandle<Result<bool, FetchError>> {
tokio::spawn(async move {
// until quickinstall supports signatures, blanket deny:
if self.signature_policy == SignaturePolicy::Require {
return Err(FetchError::MissingSignature);
}
if !self.is_supported().await? {
return Ok(false);
}

View file

@ -0,0 +1,91 @@
use binstalk_downloader::download::DataVerifier;
use binstalk_types::cargo_toml_binstall::{PkgSigning, SigningAlgorithm};
use bytes::Bytes;
use minisign_verify::{PublicKey, Signature, StreamVerifier};
use tracing::{error, trace};
use crate::FetchError;
/// A verifier for a downloaded package's signature, chosen based on the
/// package's `signing` configuration and the effective signature policy.
pub enum SignatureVerifier {
/// No signature verification is performed.
Noop,
/// Verify with minisign, using a parsed public key and signature.
/// Boxed to keep the enum small when `Noop` is the common case.
Minisign(Box<MinisignVerifier>),
}
impl SignatureVerifier {
/// Build a verifier from the package's signing config and the raw bytes of
/// the downloaded signature file.
///
/// Returns `FetchError::UnsupportedSigningAlgorithm` for any algorithm
/// other than minisign (the enum is `#[non_exhaustive]`, so new variants
/// may appear without being supported here yet).
pub fn new(config: &PkgSigning, signature: &[u8]) -> Result<Self, FetchError> {
match config.algorithm {
SigningAlgorithm::Minisign => MinisignVerifier::new(config, signature)
.map(Box::new)
.map(Self::Minisign),
algorithm => Err(FetchError::UnsupportedSigningAlgorithm(algorithm)),
}
}
/// Produce a streaming `DataVerifier` to feed the package bytes through.
///
/// For `Noop` this is the unit verifier (`()`), which accepts everything;
/// for minisign it is a stream verifier borrowing from `self`.
pub fn data_verifier(&self) -> Result<Box<dyn DataVerifier + '_>, FetchError> {
match self {
Self::Noop => Ok(Box::new(())),
Self::Minisign(v) => v.data_verifier(),
}
}
/// Human-readable info about the verified signature, if any: for minisign
/// this is the signature's trusted comment; `Noop` has nothing to report.
pub fn info(&self) -> Option<String> {
match self {
Self::Noop => None,
Self::Minisign(v) => Some(v.signature.trusted_comment().into()),
}
}
}
/// Holds a parsed minisign public key and decoded signature, ready to
/// create stream verifiers for downloaded data.
pub struct MinisignVerifier {
// Public key parsed from the package's base64-encoded `signing.pubkey`.
pubkey: PublicKey,
// Signature decoded from the downloaded `.sig` file contents.
signature: Signature,
}
impl MinisignVerifier {
/// Parse the base64 public key from the signing config and decode the
/// signature file bytes.
///
/// All parse failures are logged via `error!` and collapsed into
/// `FetchError::InvalidSignature`; a non-UTF-8 signature file is rejected
/// explicitly (minisign signatures are text).
pub fn new(config: &PkgSigning, signature: &[u8]) -> Result<Self, FetchError> {
trace!(key=?config.pubkey, "parsing public key");
let pubkey = PublicKey::from_base64(&config.pubkey).map_err(|err| {
error!("Package public key is invalid: {err}");
FetchError::InvalidSignature
})?;
trace!(?signature, "parsing signature");
// Signature must be valid UTF-8 before it can be decoded; log the raw
// bytes on failure to aid debugging.
let signature = Signature::decode(std::str::from_utf8(signature).map_err(|err| {
error!(?signature, "Signature file is not UTF-8! {err}");
FetchError::InvalidSignature
})?)
.map_err(|err| {
error!("Signature file is invalid: {err}");
FetchError::InvalidSignature
})?;
Ok(Self { pubkey, signature })
}
/// Create a streaming verifier borrowing this key/signature pair.
///
/// Fails with `FetchError::InvalidSignature` if the key and signature are
/// incompatible (e.g. mismatched key IDs), logging the underlying error.
pub fn data_verifier(&self) -> Result<Box<dyn DataVerifier + '_>, FetchError> {
self.pubkey
.verify_stream(&self.signature)
.map(|vs| Box::new(MinisignDataVerifier(vs)) as _)
.map_err(|err| {
error!("Failed to setup stream verifier: {err}");
FetchError::InvalidSignature
})
}
}
/// Adapter implementing `DataVerifier` on top of minisign's `StreamVerifier`,
/// so package bytes can be hashed incrementally while they are downloaded.
pub struct MinisignDataVerifier<'a>(StreamVerifier<'a>);
impl<'a> DataVerifier for MinisignDataVerifier<'a> {
/// Feed a chunk of downloaded data into the stream verifier.
fn update(&mut self, data: &Bytes) {
self.0.update(data);
}
/// Finalize the stream verification; returns `false` (after logging) if
/// the accumulated data does not match the signature.
fn validate(&mut self) -> bool {
if let Err(err) = self.0.finalize() {
error!("Failed to finalize signature verify: {err}");
false
} else {
true
}
}
}

View file

@ -23,17 +23,35 @@ pub(super) struct RegistryConfig {
pub(super) dl: CompactString,
}
struct Sha256Digest(Sha256);
struct Sha256Digest {
expected: Vec<u8>,
actual: Option<Vec<u8>>,
state: Option<Sha256>,
}
impl Default for Sha256Digest {
fn default() -> Self {
Sha256Digest(Sha256::new())
impl Sha256Digest {
fn new(checksum: Vec<u8>) -> Self {
Self {
expected: checksum,
actual: None,
state: Some(Sha256::new()),
}
}
}
impl DataVerifier for Sha256Digest {
fn update(&mut self, data: &Bytes) {
self.0.update(data);
if let Some(ref mut state) = &mut self.state {
state.update(data);
}
}
fn validate(&mut self) -> bool {
if let Some(state) = self.state.take() {
self.actual = Some(state.finalize().to_vec());
}
self.actual.as_ref().unwrap() == &self.expected
}
}
@ -49,18 +67,16 @@ pub(super) async fn parse_manifest(
let mut manifest_visitor = ManifestVisitor::new(format!("{crate_name}-{version}").into());
let checksum = decode_base16(cksum.as_bytes()).map_err(RegistryError::from)?;
let mut sha256_digest = Sha256Digest::default();
let mut digest = Sha256Digest::new(checksum);
Download::new_with_data_verifier(client, crate_url, &mut sha256_digest)
Download::new_with_data_verifier(client, crate_url, &mut digest)
.and_visit_tar(TarBasedFmt::Tgz, &mut manifest_visitor)
.await?;
let digest_checksum = sha256_digest.0.finalize();
if digest_checksum.as_slice() != checksum.as_slice() {
if !digest.validate() {
Err(RegistryError::UnmatchedChecksum {
expected: cksum.into(),
actual: encode_base16(digest_checksum.as_slice()).into(),
expected: encode_base16(digest.expected.as_slice()).into(),
actual: encode_base16(digest.actual.unwrap().as_slice()).into(),
})
} else {
manifest_visitor.load_manifest()

View file

@ -34,8 +34,8 @@ pub struct PkgMeta {
/// Path template for binary files in packages
pub bin_dir: Option<String>,
/// Public key for package verification (base64 encoded)
pub pub_key: Option<String>,
/// Package signing configuration
pub signing: Option<PkgSigning>,
/// Target specific overrides
pub overrides: BTreeMap<String, PkgOverride>,
@ -76,11 +76,16 @@ impl PkgMeta {
.or(self.pkg_fmt),
bin_dir: pkg_overrides
.clone()
.into_iter()
.find_map(|pkg_override| pkg_override.bin_dir.clone())
.or_else(|| self.bin_dir.clone()),
pub_key: self.pub_key.clone(),
signing: pkg_overrides
.into_iter()
.find_map(|pkg_override| pkg_override.signing.clone())
.or_else(|| self.signing.clone()),
overrides: Default::default(),
}
}
@ -100,6 +105,9 @@ pub struct PkgOverride {
/// Path template override for binary files in packages
pub bin_dir: Option<String>,
/// Package signing configuration
pub signing: Option<PkgSigning>,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
@ -107,6 +115,29 @@ pub struct PkgOverride {
pub struct BinMeta {
/// Binary name
pub name: String,
/// Binary template path (within package)
/// Binary template (path within package)
pub path: String,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct PkgSigning {
/// Signing algorithm supported by Binstall.
pub algorithm: SigningAlgorithm,
/// Signing public key
pub pubkey: String,
/// Signature file override template (url to download)
#[serde(default)]
pub file: Option<String>,
}
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
#[non_exhaustive]
pub enum SigningAlgorithm {
/// [minisign](https://jedisct1.github.io/minisign/)
Minisign,
}

View file

@ -72,6 +72,25 @@ pub enum BinstallError {
#[diagnostic(severity(info), code(binstall::user_abort))]
UserAbort,
/// Package is not signed and policy requires it.
///
/// - Code: `binstall::signature::invalid`
/// - Exit: 40
#[error("Crate {crate_name} is signed and package {package_name} failed verification")]
#[diagnostic(severity(error), code(binstall::signature::invalid))]
InvalidSignature {
crate_name: CompactString,
package_name: CompactString,
},
/// Package is not signed and policy requires it.
///
/// - Code: `binstall::signature::missing`
/// - Exit: 41
#[error("Crate {0} does not have signing information")]
#[diagnostic(severity(error), code(binstall::signature::missing))]
MissingSignature(CompactString),
/// A URL is invalid.
///
/// This may be the result of a template in a Cargo manifest.
@ -333,6 +352,8 @@ impl BinstallError {
let code: u8 = match self {
TaskJoinError(_) => 17,
UserAbort => 32,
InvalidSignature { .. } => 40,
MissingSignature(_) => 41,
UrlParse(_) => 65,
TemplateParseError(..) => 67,
FetchError(..) => 68,

View file

@ -5,6 +5,7 @@ use tokio::sync::OnceCell;
use crate::errors::BinstallError;
#[derive(Debug)]
pub struct LazyJobserverClient(OnceCell<Client>);
impl LazyJobserverClient {

View file

@ -5,7 +5,7 @@ use std::{path::PathBuf, sync::Arc};
use semver::VersionReq;
use crate::{
fetchers::{Data, Fetcher, TargetDataErased},
fetchers::{Data, Fetcher, SignaturePolicy, TargetDataErased},
helpers::{
self, gh_api_client::GhApiClient, jobserver_client::LazyJobserverClient, remote::Client,
},
@ -16,8 +16,10 @@ use crate::{
pub mod resolve;
pub type Resolver = fn(Client, GhApiClient, Arc<Data>, Arc<TargetDataErased>) -> Arc<dyn Fetcher>;
pub type Resolver =
fn(Client, GhApiClient, Arc<Data>, Arc<TargetDataErased>, SignaturePolicy) -> Arc<dyn Fetcher>;
#[derive(Debug)]
#[non_exhaustive]
pub enum CargoTomlFetchOverride {
#[cfg(feature = "git")]
@ -25,6 +27,7 @@ pub enum CargoTomlFetchOverride {
Path(PathBuf),
}
#[derive(Debug)]
pub struct Options {
pub no_symlinks: bool,
pub dry_run: bool,
@ -49,4 +52,6 @@ pub struct Options {
pub gh_api_client: GhApiClient,
pub jobserver_client: LazyJobserverClient,
pub registry: Registry,
pub signature_policy: SignaturePolicy,
}

View file

@ -19,7 +19,7 @@ use tracing::{debug, error, info, instrument, warn};
use crate::{
bins,
errors::{BinstallError, VersionParseError},
fetchers::{Data, Fetcher, TargetData},
fetchers::{Data, Fetcher, SignaturePolicy, TargetData},
helpers::{
self, cargo_toml::Manifest, cargo_toml_workspace::load_manifest_from_workspace,
download::ExtractedFiles, remote::Client, target_triple::TargetTriple,
@ -83,6 +83,10 @@ async fn resolve_inner(
return Ok(Resolution::AlreadyUpToDate);
};
if opts.signature_policy == SignaturePolicy::Require && !package_info.signing {
return Err(BinstallError::MissingSignature(package_info.name));
}
let desired_targets = opts
.desired_targets
.get()
@ -126,6 +130,7 @@ async fn resolve_inner(
opts.gh_api_client.clone(),
data.clone(),
target_data,
opts.signature_policy,
);
(fetcher.clone(), AutoAbortJoinHandle::new(fetcher.find()))
}),
@ -216,36 +221,11 @@ async fn download_extract_and_verify(
// Download and extract it.
// If that fails, then ignore this fetcher.
let extracted_files = fetcher.fetch_and_extract(bin_path).await?;
debug!("extracted_files = {extracted_files:#?}");
// Build final metadata
let meta = fetcher.target_meta();
#[cfg(incomplete)]
{
// Fetch and check package signature if available
if let Some(pub_key) = meta.as_ref().map(|m| m.pub_key.clone()).flatten() {
debug!("Found public key: {pub_key}");
// Generate signature file URL
let mut sig_ctx = ctx.clone();
sig_ctx.format = "sig".to_string();
let sig_url = sig_ctx.render(&pkg_url)?;
debug!("Fetching signature file: {sig_url}");
// Download signature file
let sig_path = temp_dir.join(format!("{pkg_name}.sig"));
download(&sig_url, &sig_path).await?;
// TODO: do the signature check
unimplemented!()
} else {
warn!("No public key found, package signature could not be validated");
}
}
// Verify that all non-optional bin_files exist
let bin_files = collect_bin_files(
fetcher,
@ -357,6 +337,7 @@ struct PackageInfo {
version: Version,
repo: Option<String>,
overrides: BTreeMap<String, PkgOverride>,
signing: bool,
}
struct Bin {
@ -465,6 +446,7 @@ impl PackageInfo {
} else {
Ok(Some(Self {
overrides: mem::take(&mut meta.overrides),
signing: meta.signing.is_some(),
meta,
binaries,
name,