diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 3b26bb66..ad99d9a5 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -12,7 +12,7 @@ env:
 
 jobs:
   build:
-    name: Build
+    name: Build and Test
     runs-on: ${{ matrix.os }}
 
     strategy:
@@ -24,36 +24,43 @@ jobs:
             output: cargo-binstall
             archive: tgz
             use-cross: false
+            test: true
           - target: x86_64-apple-darwin
             os: macos-latest
             output: cargo-binstall
             archive: zip
             use-cross: false
+            test: true
           - target: aarch64-apple-darwin
             os: macos-latest
             output: cargo-binstall
             archive: zip
             use-cross: false
+            test: false
           - target: x86_64-pc-windows-msvc
             os: windows-latest
             output: cargo-binstall.exe
             archive: zip
             use-cross: false
+            test: false
           - target: x86_64-unknown-linux-musl
             os: ubuntu-latest
             output: cargo-binstall
             archive: tgz
             use-cross: false
+            test: true
           - target: armv7-unknown-linux-musleabihf
             os: ubuntu-20.04
             output: cargo-binstall
             archive: tgz
             use-cross: true
+            test: false
           - target: aarch64-unknown-linux-musl
             os: ubuntu-latest
             output: cargo-binstall
             archive: tgz
             use-cross: true
+            test: false
 
     steps:
       - uses: actions/checkout@v2
@@ -68,12 +75,11 @@ jobs:
 
       - name: Configure caching
         uses: actions/cache@v2
-        # Caching disabled on macos due to https://github.com/actions/cache/issues/403
-        if: ${{ matrix.os != 'macos-latest' }}
         with:
           key: ${{ matrix.os }}-${{ matrix.target }}
           path: |
-            ${{ env.HOME }}/.cargo
+            ${{ env.HOME }}/.cargo/git
+            ${{ env.HOME }}/.cargo/registry
             target
 
       - name: Install musl-tools
@@ -90,6 +96,21 @@ jobs:
       - name: Copy and rename utility
         run: cp target/${{ matrix.target }}/release/${{ matrix.output }} ${{ matrix.output }}
 
+      - name: Test (Unix)
+        if: ${{ matrix.test && matrix.os != 'windows-latest' }}
+        run: |
+          set -euxo pipefail
+          for bin in $bins; do ./${{ matrix.output }} binstall --no-confirm $bin; done
+          ./${{ matrix.output }} binstall --manifest-path . --no-confirm cargo-binstall
+        env:
+          bins: cargo-deb cargo-llvm-cov cargo-binstall
+
+      - name: Test (Windows)
+        if: ${{ matrix.os == 'windows-latest' }}
+        run: |
+          ./${{ matrix.output }} binstall --no-confirm cargo-binstall
+          ./${{ matrix.output }} binstall --manifest-path . --no-confirm cargo-binstall
+
       - name: Create archive (tgz, linux)
         if: ${{ matrix.os != 'macos-latest' && matrix.os != 'windows-latest' }}
         run: tar -czvf cargo-binstall-${{ matrix.target }}.tgz ${{ matrix.output }}
@@ -117,51 +138,3 @@ jobs:
           asset_name: cargo-binstall-${{ matrix.target }}.${{ matrix.archive }}
           tag: ${{ github.ref }}
           overwrite: true
-
-  test:
-    name: Test
-    runs-on: ${{ matrix.os }}
-    needs: build
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - target: x86_64-unknown-linux-gnu
-            os: ubuntu-latest
-            output: cargo-binstall
-            archive: tgz
-          - target: x86_64-apple-darwin
-            os: macos-latest
-            output: cargo-binstall
-            archive: zip
-          - target: x86_64-pc-windows-msvc
-            os: windows-latest
-            output: cargo-binstall.exe
-            archive: zip
-          - target: x86_64-unknown-linux-musl
-            os: ubuntu-latest
-            output: cargo-binstall
-            archive: tgz
-
-    steps:
-      - uses: actions/checkout@v2
-      - uses: FranzDiebold/github-env-vars-action@v1.2.1
-
-      - uses: actions/download-artifact@v2
-        with:
-          name: cargo-binstall-${{ matrix.target }}.${{ matrix.archive }}
-
-      - name: "Extract build artifact (tgz, linux)"
-        if: ${{ matrix.os != 'windows-latest' && matrix.os != 'macos-latest' }}
-        run: tar -xvf cargo-binstall-${{ matrix.target }}.tgz
-
-      - name: "Extract build artifact (zip, windows)"
-        if: ${{ matrix.os == 'windows-latest' }}
-        run: tar.exe -xvf cargo-binstall-${{ matrix.target }}.zip
-
-      - name: "Extract build artifact (zip, macos)"
-        if: ${{ matrix.os == 'macos-latest' }}
-        run: unzip cargo-binstall-${{ matrix.target }}.zip
-
-      - name: "Run binstall"
-        run: ./${{ matrix.output }} cargo-binstall --manifest-path . --no-confirm
diff --git a/src/drivers.rs b/src/drivers.rs
index 3ddf654d..b16513d9 100644
--- a/src/drivers.rs
+++ b/src/drivers.rs
@@ -102,16 +102,22 @@ pub async fn fetch_crate_cratesio(
 
     debug!("Fetching crate from: {crate_url} and extracting Cargo.toml from it");
 
-    download_and_extract(
+    let crate_dir: PathBuf = format!("{name}-{version_name}").into();
+    let crate_path = temp_dir.join(&crate_dir);
+
+    let cargo_toml = crate_dir.join("Cargo.toml");
+    let src = crate_dir.join("src");
+    let main = src.join("main.rs");
+    let bin = src.join("bin");
+
+    download_and_extract_with_filter(
         Url::parse(&crate_url)?,
         PkgFmt::Tgz,
         &temp_dir,
-        Some([Path::new("Cargo.toml").into()]),
+        Some(move |path: &Path| path == cargo_toml || path == main || path.starts_with(&bin)),
     )
     .await?;
 
-    let crate_path = temp_dir.join(format!("{name}-{version_name}"));
-
     // Return crate directory
     Ok(crate_path)
 }
diff --git a/src/fetchers/gh_crate_meta.rs b/src/fetchers/gh_crate_meta.rs
index c6ed669b..82bdf3bc 100644
--- a/src/fetchers/gh_crate_meta.rs
+++ b/src/fetchers/gh_crate_meta.rs
@@ -43,7 +43,7 @@ impl super::Fetcher for GhCrateMeta {
     async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError> {
         let url = self.url()?;
         info!("Downloading package from: '{url}'");
-        download_and_extract::<_, 0>(url, self.pkg_fmt(), dst, None).await
+        download_and_extract(url, self.pkg_fmt(), dst).await
     }
 
     fn pkg_fmt(&self) -> PkgFmt {
diff --git a/src/fetchers/quickinstall.rs b/src/fetchers/quickinstall.rs
index f048de03..7d6ed14a 100644
--- a/src/fetchers/quickinstall.rs
+++ b/src/fetchers/quickinstall.rs
@@ -40,7 +40,7 @@ impl super::Fetcher for QuickInstall {
     async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError> {
         let url = self.package_url();
         info!("Downloading package from: '{url}'");
-        download_and_extract::<_, 0>(Url::parse(&url)?, self.pkg_fmt(), dst, None).await
+        download_and_extract(Url::parse(&url)?, self.pkg_fmt(), dst).await
     }
 
     fn pkg_fmt(&self) -> PkgFmt {
diff --git a/src/helpers.rs b/src/helpers.rs
index da0a54c0..679328cd 100644
--- a/src/helpers.rs
+++ b/src/helpers.rs
@@ -1,5 +1,4 @@
 use std::{
-    borrow::Cow,
     io::{stderr, stdin, Write},
     path::{Path, PathBuf},
 };
@@ -44,16 +43,30 @@ pub async fn remote_exists(url: Url, method: Method) -> Result<bool, BinstallError> {
     Ok(req.status().is_success())
 }
 
-/// Download a file from the provided URL and extract it to the provided path.
-///
-///  * `desired_outputs - If Some(_), then it will filter the tar
-///    and only extract files specified in it.
-///    Note that this is a best-effort and it only works when `fmt` is not `PkgFmt::Bin` or `PkgFmt::Zip`.
-pub async fn download_and_extract<P: AsRef<Path>, const N: usize>(
+/// Download a file from the provided URL and extract it to the provided path.
+pub async fn download_and_extract<P: AsRef<Path>>(
     url: Url,
     fmt: PkgFmt,
     path: P,
-    desired_outputs: Option<[Cow<'static, Path>; N]>,
+) -> Result<(), BinstallError> {
+    download_and_extract_with_filter::<fn(&Path) -> bool, _>(url, fmt, path.as_ref(), None).await
+}
+
+/// Download a file from the provided URL and extract part of it to
+/// the provided path.
+///
+///  * `filter` - If Some, then it will pass the path of the file to it
+///    and only extract ones which filter returns `true`.
+///    Note that this is a best-effort and it only works when `fmt`
+///    is not `PkgFmt::Bin` or `PkgFmt::Zip`.
+pub async fn download_and_extract_with_filter<
+    Filter: FnMut(&Path) -> bool + Send + 'static,
+    P: AsRef<Path>,
+>(
+    url: Url,
+    fmt: PkgFmt,
+    path: P,
+    filter: Option<Filter>,
 ) -> Result<(), BinstallError> {
     debug!("Downloading from: '{url}'");
 
@@ -69,7 +82,7 @@ pub async fn download_and_extract<P: AsRef<Path>, const N: usize>(
     let path = path.as_ref();
     debug!("Downloading to file: '{}'", path.display());
 
-    extract_archive_stream(resp.bytes_stream(), path, fmt, desired_outputs).await?;
+    extract_archive_stream(resp.bytes_stream(), path, fmt, filter).await?;
 
     debug!("Download OK, written to file: '{}'", path.display());
 
diff --git a/src/helpers/async_extracter.rs b/src/helpers/async_extracter.rs
index a6a139b9..7f858a1b 100644
--- a/src/helpers/async_extracter.rs
+++ b/src/helpers/async_extracter.rs
@@ -1,4 +1,3 @@
-use std::borrow::Cow;
 use std::fs;
 use std::io::{self, Seek, Write};
 use std::path::Path;
@@ -32,12 +31,14 @@ struct AsyncExtracterInner {
 }
 
 impl AsyncExtracterInner {
-    /// * `desired_outputs - If Some(_), then it will filter the tar
-    ///   and only extract files specified in it.
-    fn new<const N: usize>(
+    /// * `filter` - If Some, then it will pass the path of the file to it
+    ///   and only extract ones which filter returns `true`.
+    ///   Note that this is a best-effort and it only works when `fmt`
+    ///   is not `PkgFmt::Bin` or `PkgFmt::Zip`.
+    fn new<Filter: FnMut(&Path) -> bool + Send + 'static>(
         path: &Path,
         fmt: PkgFmt,
-        desired_outputs: Option<[Cow<'static, Path>; N]>,
+        filter: Option<Filter>,
     ) -> Self {
         let path = path.to_owned();
         let (tx, mut rx) = mpsc::channel::<Content>(100);
@@ -71,12 +72,9 @@ impl AsyncExtracterInner {
 
                     unzip(file, &path)?;
                 }
-                _ => extract_compressed_from_readable(
-                    ReadableRx::new(&mut rx),
-                    fmt,
-                    &path,
-                    desired_outputs.as_ref().map(|arr| &arr[..]),
-                )?,
+                _ => {
+                    extract_compressed_from_readable(ReadableRx::new(&mut rx), fmt, &path, filter)?
+                }
             }
 
             Ok(())
@@ -177,16 +175,16 @@ impl AsyncExtracter {
     ///   for the bin.
     ///   Otherwise, it is the directory where the extracted content will be put.
     /// * `fmt` - The format of the archive to feed in.
-    /// * `desired_outputs - If Some(_), then it will filter the tar and
-    ///   only extract files specified in it.
+    /// * `filter` - If Some, then it will pass the path of the file to it
+    ///   and only extract ones which filter returns `true`.
    ///   Note that this is a best-effort and it only works when `fmt`
    ///   is not `PkgFmt::Bin` or `PkgFmt::Zip`.
-    fn new<const N: usize>(
+    fn new<Filter: FnMut(&Path) -> bool + Send + 'static>(
         path: &Path,
         fmt: PkgFmt,
-        desired_outputs: Option<[Cow<'static, Path>; N]>,
+        filter: Option<Filter>,
     ) -> Self {
-        let inner = AsyncExtracterInner::new(path, fmt, desired_outputs);
+        let inner = AsyncExtracterInner::new(path, fmt, filter);
         Self(guard(inner, AsyncExtracterInner::abort))
     }
 
@@ -205,20 +203,20 @@
 ///   for the bin.
 ///   Otherwise, it is the directory where the extracted content will be put.
 /// * `fmt` - The format of the archive to feed in.
-/// * `desired_outputs - If Some(_), then it will filter the tar and
-///   only extract files specified in it.
+/// * `filter` - If Some, then it will pass the path of the file to it
+///   and only extract ones which filter returns `true`.
 ///   Note that this is a best-effort and it only works when `fmt`
 ///   is not `PkgFmt::Bin` or `PkgFmt::Zip`.
-pub async fn extract_archive_stream<E, const N: usize>(
+pub async fn extract_archive_stream<Filter: FnMut(&Path) -> bool + Send + 'static, E>(
     mut stream: impl Stream<Item = Result<Bytes, E>> + Unpin,
     output: &Path,
     fmt: PkgFmt,
-    desired_outputs: Option<[Cow<'static, Path>; N]>,
+    filter: Option<Filter>,
 ) -> Result<(), BinstallError>
 where
     BinstallError: From<E>,
 {
-    let mut extracter = AsyncExtracter::new(output, fmt, desired_outputs);
+    let mut extracter = AsyncExtracter::new(output, fmt, filter);
 
     while let Some(res) = stream.next().await {
         extracter.feed(res?).await?;
diff --git a/src/helpers/extracter.rs b/src/helpers/extracter.rs
index 41797210..fbb9d5c0 100644
--- a/src/helpers/extracter.rs
+++ b/src/helpers/extracter.rs
@@ -1,5 +1,4 @@
-use std::borrow::Cow;
-use std::fs::File;
+use std::fs::{self, File};
 use std::io::Read;
 use std::path::Path;
 
@@ -12,27 +11,36 @@ use zstd::stream::Decoder as ZstdDecoder;
 
 use crate::{BinstallError, PkgFmt};
 
-/// * `desired_outputs - If Some(_), then it will filter the tar
-///   and only extract files specified in it.
-fn untar(
+/// * `filter` - If Some, then it will pass the path of the file to it
+///   and only extract ones which filter returns `true`.
+///   Note that this is a best-effort and it only works when `fmt`
+///   is not `PkgFmt::Bin` or `PkgFmt::Zip`.
+fn untar<Filter: FnMut(&Path) -> bool>(
     dat: impl Read,
     path: &Path,
-    desired_outputs: Option<&[Cow<'_, Path>]>,
+    filter: Option<Filter>,
 ) -> Result<(), BinstallError> {
     let mut tar = Archive::new(dat);
 
-    if let Some(desired_outputs) = desired_outputs {
+    if let Some(mut filter) = filter {
+        debug!("Untaring with filter");
+
         for res in tar.entries()? {
             let mut entry = res?;
             let entry_path = entry.path()?;
 
-            if desired_outputs.contains(&entry_path) {
+            if filter(&entry_path) {
+                debug!("Extracting {entry_path:#?}");
+
                 let dst = path.join(entry_path);
 
+                fs::create_dir_all(dst.parent().unwrap())?;
+
                 entry.unpack(dst)?;
             }
         }
     } else {
+        debug!("Untaring entire tar");
         tar.unpack(path)?;
     }
 
@@ -42,34 +50,36 @@ fn untar(
 /// Extract files from the specified source onto the specified path.
 ///
 /// * `fmt` - must not be `PkgFmt::Bin` or `PkgFmt::Zip`.
-/// * `desired_outputs - If Some(_), then it will filter the tar
-///   and only extract files specified in it.
-pub(crate) fn extract_compressed_from_readable(
+/// * `filter` - If Some, then it will pass the path of the file to it
+///   and only extract ones which filter returns `true`.
+///   Note that this is a best-effort and it only works when `fmt`
+///   is not `PkgFmt::Bin` or `PkgFmt::Zip`.
+pub(crate) fn extract_compressed_from_readable<Filter: FnMut(&Path) -> bool>(
     dat: impl Read,
     fmt: PkgFmt,
     path: &Path,
-    desired_outputs: Option<&[Cow<'_, Path>]>,
+    filter: Option<Filter>,
 ) -> Result<(), BinstallError> {
     match fmt {
         PkgFmt::Tar => {
             // Extract to install dir
             debug!("Extracting from tar archive to `{path:?}`");
 
-            untar(dat, path, desired_outputs)?
+            untar(dat, path, filter)?
         }
         PkgFmt::Tgz => {
             // Extract to install dir
             debug!("Decompressing from tgz archive to `{path:?}`");
 
             let tar = GzDecoder::new(dat);
-            untar(tar, path, desired_outputs)?;
+            untar(tar, path, filter)?;
         }
         PkgFmt::Txz => {
             // Extract to install dir
             debug!("Decompressing from txz archive to `{path:?}`");
 
             let tar = XzDecoder::new(dat);
-            untar(tar, path, desired_outputs)?;
+            untar(tar, path, filter)?;
         }
         PkgFmt::Tzstd => {
             // Extract to install dir
@@ -80,7 +90,7 @@ pub(crate) fn extract_compressed_from_readable(
             // as &[] by ZstdDecoder::new, thus ZstdDecoder::new
             // should not return any error.
             let tar = ZstdDecoder::new(dat)?;
-            untar(tar, path, desired_outputs)?;
+            untar(tar, path, filter)?;
         }
         PkgFmt::Zip => panic!("Unexpected PkgFmt::Zip!"),
         PkgFmt::Bin => panic!("Unexpected PkgFmt::Bin!"),
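For reference, below is a minimal usage sketch of the filter-based API this patch introduces (download_and_extract_with_filter in src/helpers.rs), mirroring the closure built in fetch_crate_cratesio in src/drivers.rs. The function fetch_manifest_only and the exact import paths are illustrative and not part of the patch; only the signature of download_and_extract_with_filter, PkgFmt, and BinstallError come from the diff above.

use std::path::{Path, PathBuf};

use url::Url;

// Import paths are illustrative; in the crate these items may be re-exported
// from the root rather than referenced through `helpers`.
use crate::helpers::download_and_extract_with_filter;
use crate::{BinstallError, PkgFmt};

// Hypothetical caller (not part of the patch): download a .crate tarball and
// keep only the files needed to inspect the manifest.
async fn fetch_manifest_only(
    crate_url: &str,
    temp_dir: &Path,
    crate_dir: PathBuf,
) -> Result<(), BinstallError> {
    // Entry paths inside the tarball that should be extracted.
    let cargo_toml = crate_dir.join("Cargo.toml");
    let main = crate_dir.join("src").join("main.rs");
    let bin = crate_dir.join("src").join("bin");

    // The filter closure is invoked with each entry's path; returning `true`
    // extracts that entry, `false` skips it. Per the doc comments above, this
    // is best-effort and only applies to tar-based formats (not Bin or Zip).
    download_and_extract_with_filter(
        Url::parse(crate_url)?,
        PkgFmt::Tgz,
        temp_dir,
        Some(move |path: &Path| {
            path == cargo_toml || path == main || path.starts_with(&bin)
        }),
    )
    .await
}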