Use download_and_extract in fetchers

to improve efficiency by avoiding disk I/O (except for `PkgFmt::Zip`
and `PkgFmt::Bin`) and running the decompressor in parallel with the downloader.

Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>
Jiahao XU 2022-06-09 14:46:00 +10:00
parent b6bfd40c3a
commit c9b0d45a24
4 changed files with 19 additions and 24 deletions
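The message above captures the core idea: instead of writing the compressed archive to disk and extracting it in a second pass, the response body is streamed straight into the decompressor, so extraction proceeds while bytes are still arriving and the archive itself never touches the disk. The following is a minimal sketch of that streaming approach for a `.tar.gz` download. It is illustrative only, not the crate's actual implementation: it assumes the `reqwest` (blocking feature), `flate2`, and `tar` crates, runs as a single-threaded pipeline rather than the commit's parallel downloader/decompressor, and the function name, URL, and destination are hypothetical.

```rust
use std::path::Path;

// Stream an HTTP response through gzip decoding and tar unpacking without
// ever writing the compressed archive to disk. Hypothetical helper, not
// cargo-binstall's `download_and_extract`.
fn download_and_extract_tgz(url: &str, dst: &Path) -> Result<(), Box<dyn std::error::Error>> {
    // `reqwest::blocking::Response` implements `std::io::Read`, so it can be
    // handed directly to the gzip decoder with no intermediate file.
    let resp = reqwest::blocking::get(url)?.error_for_status()?;
    let gz = flate2::read::GzDecoder::new(resp);
    let mut archive = tar::Archive::new(gz);
    // Unpacking pulls from the decoder, which pulls from the network stream,
    // so download and decompression are naturally pipelined.
    archive.unpack(dst)?;
    Ok(())
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical URL and destination, only to show the call shape.
    download_and_extract_tgz(
        "https://example.com/some-package.tar.gz",
        Path::new("/tmp/extracted"),
    )
}
```

The carve-outs in the message make sense under this model: `PkgFmt::Zip` presumably still needs a temporary file because the zip format's central directory sits at the end of the archive, and `PkgFmt::Bin` is the binary itself, which must be written to disk anyway.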
src/fetchers

@@ -7,7 +7,7 @@ use serde::Serialize;
 use url::Url;
 
 use super::Data;
-use crate::{download, remote_exists, BinstallError, PkgFmt, Template};
+use crate::{download_and_extract, remote_exists, BinstallError, PkgFmt, Template};
 
 pub struct GhCrateMeta {
     data: Data,
@@ -40,10 +40,10 @@ impl super::Fetcher for GhCrateMeta {
         remote_exists(url, Method::HEAD).await
     }
 
-    async fn fetch(&self, dst: &Path) -> Result<(), BinstallError> {
+    async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError> {
         let url = self.url()?;
         info!("Downloading package from: '{url}'");
-        download(url.as_str(), dst).await
+        download_and_extract::<_, 0>(url, self.pkg_fmt(), dst, None).await
     }
 
     fn pkg_fmt(&self) -> PkgFmt {
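For a sense of how the renamed trait method reads from the caller's side, here is an illustrative sketch. The trait below is a reduced stand-in for the real `Fetcher` trait (which has more methods than shown in this diff), `BinstallError` is stubbed so the snippet stands alone, and the `async-trait` crate is assumed for the async trait method; none of this is the crate's exact code.

```rust
use std::path::Path;

// Stand-in for the crate's real error type, so the sketch is self-contained.
#[derive(Debug)]
struct BinstallError;

// Reduced stand-in for the real `Fetcher` trait.
#[async_trait::async_trait]
trait Fetcher {
    /// Download the package and extract it directly into `dst`,
    /// replacing the previous two-step "fetch, then extract" flow.
    async fn fetch_and_extract(&self, dst: &Path) -> Result<(), BinstallError>;
}

// Callers no longer need a temporary path for the compressed archive: they
// hand the final destination to the fetcher and get extracted files back.
async fn install(fetcher: &dyn Fetcher, bin_dir: &Path) -> Result<(), BinstallError> {
    fetcher.fetch_and_extract(bin_dir).await
}
```

Per the commit message, fetchers that can stream (tar-based formats) avoid materializing the archive on disk entirely, while `PkgFmt::Zip` and `PkgFmt::Bin` remain the exceptions.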