use crate::{utils, versions::patches::*};
use displaydoc::Display;
use flate2::read::GzDecoder;
use fslock;
use hex::ToHex;
use reqwest;
use sha2::{Digest, Sha256};
use tar;
use thiserror::Error;
use tokio::{
fs,
io::{self, AsyncReadExt, AsyncWriteExt},
task,
};
use std::{
env,
path::{Path, PathBuf},
};
/// Errors that can occur while fetching, verifying, or unpacking spack.
///
/// `displaydoc::Display` uses each variant's doc comment as its
/// `Display` format string, so every variant must carry one.
#[derive(Debug, Display, Error)]
pub enum SummoningError {
  /// http request failed: {0}
  Http(#[from] reqwest::Error),
  /// i/o error: {0}
  Io(#[from] io::Error),
  /// checksum mismatch for {0}: expected sha256 {1}, but computed {2}
  Checksum(String, String, String),
  /// unknown error: {0}
  UnknownError(String),
}
/// Handle to the on-disk cache directory where spack tarballs and checkouts
/// are stored (`$HOME/.spack/summonings` — see `get_or_create`).
#[derive(Clone, Debug)]
pub struct CacheDir {
// Absolute path of the cache directory root.
location: PathBuf,
}
impl CacheDir {
pub async fn get_or_create() -> Result<Self, SummoningError> {
let path = PathBuf::from(env::var("HOME").expect("$HOME should always be defined!"))
.join(".spack")
.join("summonings");
let p = path.clone();
task::spawn_blocking(move || utils::safe_create_dir_all_ioerror(&p))
.await
.unwrap()?;
Ok(Self { location: path })
}
pub fn location(&self) -> &Path { &self.location }
pub fn dirname(&self) -> String { PATCHES_SHA256SUM.encode_hex() }
pub fn unpacking_path(&self) -> PathBuf { self.location.join(PATCHES_TOPLEVEL_COMPONENT) }
pub fn tarball_path(&self) -> PathBuf { self.location.join(format!("{}.tar.gz", self.dirname())) }
pub fn repo_root(&self) -> PathBuf { self.unpacking_path().join(PATCHES_TOPLEVEL_COMPONENT) }
pub fn spack_script(&self) -> PathBuf { self.repo_root().join("bin").join("spack") }
}
/// A spack source tarball that has been downloaded (or found cached) and
/// whose sha256 digest matched the pinned checksum.
struct SpackTarball {
// Filesystem location of the verified `.tar.gz` file.
downloaded_location: PathBuf,
}
impl SpackTarball {
/// Path the verified tarball was written to (or found at) on disk.
pub fn downloaded_path(&self) -> &Path { self.downloaded_location.as_ref() }
/// Read the whole file at `tgz_path` and verify its sha256 digest against the
/// pinned [`PATCHES_SHA256SUM`].
///
/// On a match, returns a `SpackTarball` pointing at `tgz_path`; on mismatch,
/// returns [`SummoningError::Checksum`] carrying a `file://` URL plus the
/// expected and actual hex digests.
async fn check_tarball_digest(
tgz_path: &Path,
tgz: &mut fs::File,
) -> Result<Self, SummoningError> {
// NOTE(review): reads the entire archive into memory before hashing —
// presumably fine for a source tarball, but worth confirming for large files.
let mut tarball_bytes: Vec<u8> = vec![];
tgz.read_to_end(&mut tarball_bytes).await?;
let mut hasher = Sha256::new();
hasher.update(&tarball_bytes);
let checksum: [u8; 32] = hasher.finalize().into();
if checksum == PATCHES_SHA256SUM {
Ok(Self {
downloaded_location: tgz_path.to_path_buf(),
})
} else {
Err(SummoningError::Checksum(
format!("file://{}", tgz_path.display()),
PATCHES_SHA256SUM.encode_hex(),
checksum.encode_hex(),
))
}
}
/// Produce a checksum-verified spack tarball inside `cache_dir`.
///
/// Fast path: if the tarball file already exists, re-verify its digest and
/// return. Slow path (file not found): take an exclusive `fslock` keyed to
/// the tarball name, re-check for the file under the lock (another process
/// may have finished downloading while we waited), and otherwise download
/// from [`PATCHES_SPACK_URL`], verify the sha256 in memory, and only then
/// write the bytes to disk — so a file with a bad digest is never left in
/// the cache.
pub async fn fetch_spack_tarball(cache_dir: CacheDir) -> Result<Self, SummoningError> {
let tgz_path = cache_dir.tarball_path();
match fs::File::open(&tgz_path).await {
Ok(mut tgz) => Self::check_tarball_digest(&tgz_path, &mut tgz).await,
Err(e) if e.kind() == io::ErrorKind::NotFound => {
let lockfile_name: PathBuf = format!("{}.tgz.lock", cache_dir.dirname()).into();
let lockfile_path = cache_dir.location().join(lockfile_name);
// fslock uses blocking syscalls, so open and acquire it off the async runtime.
let mut lockfile = task::spawn_blocking(move || fslock::LockFile::open(&lockfile_path))
.await
.unwrap()?;
// The returned guard is held until this match arm ends, keeping the
// download section exclusive across processes.
let _lockfile = task::spawn_blocking(move || {
lockfile.lock_with_pid()?;
Ok::<_, io::Error>(lockfile)
})
.await
.unwrap()?;
// Double-check under the lock: someone else may have completed the
// download while we were blocked on acquisition.
if let Ok(mut tgz) = fs::File::open(&tgz_path).await {
return Self::check_tarball_digest(&tgz_path, &mut tgz).await;
}
eprintln!(
"downloading spack {} from {}...",
PATCHES_TOPLEVEL_COMPONENT, PATCHES_SPACK_URL,
);
let resp = reqwest::get(PATCHES_SPACK_URL).await?;
let tarball_bytes = resp.bytes().await?;
let mut hasher = Sha256::new();
hasher.update(&tarball_bytes);
let checksum: [u8; 32] = hasher.finalize().into();
if checksum == PATCHES_SHA256SUM {
// Only write to the cache after the digest has been verified.
let mut tgz = fs::File::create(&tgz_path).await?;
tgz.write_all(&tarball_bytes).await?;
tgz.sync_all().await?;
Ok(Self {
downloaded_location: tgz_path.to_path_buf(),
})
} else {
Err(SummoningError::Checksum(
PATCHES_SPACK_URL.to_string(),
PATCHES_SHA256SUM.encode_hex(),
checksum.encode_hex(),
))
}
},
Err(e) => Err(e.into()),
}
}
}
/// An unpacked, ready-to-use spack checkout.
#[derive(Debug, Clone)]
pub struct SpackRepo {
/// Path to the `bin/spack` script inside the checkout.
pub script_path: PathBuf,
/// Root directory of the unpacked spack repository.
pub repo_path: PathBuf,
// Cache directory the checkout lives under; retained for later lookups.
cache_dir: CacheDir,
}
impl SpackRepo {
pub(crate) fn cache_location(&self) -> &Path { self.cache_dir.location() }
pub(crate) fn unzip_archive(from: &Path, into: &Path) -> Result<Option<()>, SummoningError> {
match std::fs::File::open(from) {
Ok(tgz) => {
let gz_decoded = GzDecoder::new(tgz);
let mut archive = tar::Archive::new(gz_decoded);
Ok(Some(archive.unpack(into)?))
},
Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(None),
Err(e) => Err(e.into()),
}
}
async fn unzip_spack_archive(cache_dir: CacheDir) -> Result<Option<()>, SummoningError> {
let from = cache_dir.tarball_path();
let into = cache_dir.unpacking_path();
task::spawn_blocking(move || Self::unzip_archive(&from, &into))
.await
.unwrap()
}
pub(crate) async fn get_spack_script(cache_dir: CacheDir) -> Result<Self, SummoningError> {
let path = cache_dir.spack_script();
let _ = fs::File::open(&path).await?;
Ok(Self {
script_path: path,
repo_path: cache_dir.repo_root(),
cache_dir,
})
}
async fn ensure_unpacked(
current_link_path: PathBuf,
cache_dir: &CacheDir,
) -> Result<(), SummoningError> {
match fs::read_dir(¤t_link_path).await {
Ok(_) => Ok(()),
Err(e) if e.kind() == io::ErrorKind::NotFound => {
let lockfile_name: PathBuf = format!("{}.lock", cache_dir.dirname()).into();
let lockfile_path = cache_dir.location().join(lockfile_name);
let mut lockfile = task::spawn_blocking(move || fslock::LockFile::open(&lockfile_path))
.await
.unwrap()?;
let _lockfile = task::spawn_blocking(move || {
lockfile.lock_with_pid()?;
Ok::<_, io::Error>(lockfile)
})
.await
.unwrap()?;
match fs::read_dir(¤t_link_path).await {
Ok(_) => Ok::<_, SummoningError>(()),
Err(e) if e.kind() == io::ErrorKind::NotFound => {
eprintln!("extracting spack {}...", PATCHES_TOPLEVEL_COMPONENT,);
assert!(Self::unzip_spack_archive(cache_dir.clone())
.await?
.is_some());
Ok(())
},
Err(e) => Err(e.into()),
}
},
Err(e) => Err(e.into()),
}
}
pub async fn summon(cache_dir: CacheDir) -> Result<Self, SummoningError> {
let spack_tarball = SpackTarball::fetch_spack_tarball(cache_dir.clone()).await?;
dbg!(spack_tarball.downloaded_path());
let current_link_path = cache_dir.unpacking_path();
Self::ensure_unpacked(current_link_path, &cache_dir).await?;
Self::get_spack_script(cache_dir).await
}
}