diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index 370aea37..f7ffe3c2 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -150,7 +150,7 @@ jobs: path: dist - name: pytest (native) if: ${{ matrix.platform.native }} - uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # v3 + uses: maus007/docker-run-action-fork@5ddaad0f7eedd03f64e412b1931852bd3031b273 # v1 with: image: ${{ matrix.platform.target == 'x86' && 'i386/alpine:latest' || 'alpine:latest' }} options: -v ${{ github.workspace }}:/io -w /io diff --git a/Cargo.lock b/Cargo.lock index a2331472..d77825fe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -601,6 +601,12 @@ version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + [[package]] name = "encoding_rs" version = "0.8.35" @@ -2021,6 +2027,15 @@ version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.17" @@ -3439,6 +3454,7 @@ dependencies = [ "serde_json", "sha2", "spdx", + "sysand-macros", "tempfile", "thiserror 2.0.18", "tokio", @@ -3488,6 +3504,16 @@ dependencies = [ "web-sys", ] +[[package]] +name = "sysand-macros" +version = "0.0.9" +dependencies = [ + "itertools", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "sysand-py" version = "0.0.9" diff --git a/Cargo.toml b/Cargo.toml index 5499ecaa..c597f919 
100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,13 @@ [workspace] resolver = "3" -members = ["sysand", "core", "bindings/py", "bindings/js", "bindings/java"] +members = [ + "sysand", + "core", + "macros", + "bindings/py", + "bindings/js", + "bindings/java", +] [workspace.package] version = "0.0.9" diff --git a/bindings/java/src/lib.rs b/bindings/java/src/lib.rs index a893a68e..f85902e9 100644 --- a/bindings/java/src/lib.rs +++ b/bindings/java/src/lib.rs @@ -162,6 +162,7 @@ pub extern "system" fn Java_com_sensmetry_sysand_Sysand_infoPath<'local>( return JObject::default(); }; let project = LocalSrcProject { + nominal_path: None, project_path: Utf8PathBuf::from(&path), }; @@ -334,6 +335,7 @@ pub extern "system" fn Java_com_sensmetry_sysand_Sysand_buildProject<'local>( return; }; let project = LocalSrcProject { + nominal_path: None, project_path: Utf8PathBuf::from(project_path), }; let command_result = sysand_core::commands::build::do_build_kpar(&project, &output_path, true); diff --git a/bindings/py/src/lib.rs b/bindings/py/src/lib.rs index a540cbc4..aa26c975 100644 --- a/bindings/py/src/lib.rs +++ b/bindings/py/src/lib.rs @@ -33,6 +33,7 @@ use sysand_core::{ ProjectRead as _, local_kpar::LocalKParProject, local_src::{LocalSrcError, LocalSrcProject}, + utils::wrapfs, }, remove::do_remove, resolve::standard::standard_resolver, @@ -113,6 +114,7 @@ fn do_info_py_path( let _ = pyo3_log::try_init(); let project = LocalSrcProject { + nominal_path: None, project_path: path.into(), }; @@ -184,6 +186,7 @@ fn do_build_py(output_path: String, project_path: Option) -> PyResult<() return Err(pyo3::exceptions::PyNotImplementedError::new_err("TODO")); }; let project = LocalSrcProject { + nominal_path: None, project_path: current_project_path.into(), }; @@ -336,6 +339,7 @@ pub fn do_sources_project_py( let mut result = vec![]; let current_project = LocalSrcProject { + nominal_path: None, project_path: path.into(), }; @@ -400,6 +404,7 @@ fn do_add_py(path: String, iri: String, 
version: Option) -> PyResult<()> let _ = pyo3_log::try_init(); let mut project = LocalSrcProject { + nominal_path: None, project_path: path.into(), }; @@ -414,6 +419,7 @@ fn do_remove_py(path: String, iri: String) -> PyResult<()> { let _ = pyo3_log::try_init(); let mut project = LocalSrcProject { + nominal_path: None, project_path: path.into(), }; @@ -436,6 +442,7 @@ fn do_include_py( let _ = pyo3_log::try_init(); let mut project = LocalSrcProject { + nominal_path: None, project_path: path.into(), }; @@ -470,6 +477,7 @@ fn do_exclude_py(path: String, src_path: String) -> PyResult<()> { let _ = pyo3_log::try_init(); let mut project = LocalSrcProject { + nominal_path: None, project_path: path.into(), }; @@ -491,7 +499,9 @@ fn do_env_install_path_py(env_path: String, iri: String, location: String) -> Py environment_path: env_path.into(), }; - if location.is_file() { + let metadata = + wrapfs::metadata(&location).map_err(|e| PyErr::new::(e.to_string()))?; + if metadata.is_file() { let project = LocalKParProject::new_guess_root(&location) .map_err(|e| PyErr::new::(e.to_string()))?; @@ -509,8 +519,9 @@ fn do_env_install_path_py(env_path: String, iri: String, location: String) -> Py clone_project(&project, to, true).map(|_| ()) }) .map_err(|e| PyRuntimeError::new_err(e.to_string()))?; - } else if location.is_dir() { + } else if metadata.is_dir() { let project = LocalSrcProject { + nominal_path: None, project_path: location, }; diff --git a/core/Cargo.toml b/core/Cargo.toml index 371fa3cc..32b84663 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -46,6 +46,7 @@ serde = { version = "1.0.228", features = ["derive"] } serde_json = { version = "1.0.145", default-features = false, features = ["preserve_order"] } sha2 = { version = "0.10.9", default-features = false } spdx = "0.13.2" +sysand-macros = { path = "../macros"} thiserror = { version = "2.0.17", default-features = false } toml = "0.9.8" typed-path = { version = "0.12.0", default-features = false } diff --git 
a/core/src/commands/add.rs b/core/src/commands/add.rs index 74068d8a..c68b41df 100644 --- a/core/src/commands/add.rs +++ b/core/src/commands/add.rs @@ -2,7 +2,10 @@ // SPDX-License-Identifier: MIT OR Apache-2.0 use thiserror::Error; -use crate::{model::InterchangeProjectValidationError, project::ProjectMut}; +use crate::{ + model::{InterchangeProjectUsageRaw, InterchangeProjectValidationError}, + project::ProjectMut, +}; #[derive(Error, Debug)] pub enum AddError { @@ -19,19 +22,18 @@ pub fn do_add>( iri: S, versions_constraint: Option, ) -> Result<(), AddError> { - let usage: crate::model::InterchangeProjectUsageRaw = - crate::model::InterchangeProjectUsageRaw { - resource: iri.as_ref().to_owned(), - version_constraint: versions_constraint.clone(), - } - .validate()? - .into(); + let iri = iri.as_ref(); + let usage = InterchangeProjectUsageRaw { + resource: iri.to_owned(), + version_constraint: versions_constraint.clone(), + } + .validate()? + .into(); let adding = "Adding"; let header = crate::style::get_style_config().header; log::info!( - "{header}{adding:>12}{header:#} usage: `{}` {}", - iri.as_ref(), + "{header}{adding:>12}{header:#} usage: `{iri}` {}", versions_constraint .as_ref() .map(|vr| vr.to_string()) diff --git a/core/src/commands/build.rs b/core/src/commands/build.rs index 37953277..918c8c28 100644 --- a/core/src/commands/build.rs +++ b/core/src/commands/build.rs @@ -169,7 +169,8 @@ pub fn do_build_workspace_kpars>( }; for project in projects { let project = LocalSrcProject { - project_path: workspace.workspace_path.join(project.path), + nominal_path: None, + project_path: workspace.root_path().join(&project.path), }; let file_name = default_kpar_file_name(&project)?; let output_path = path.as_ref().join(file_name); diff --git a/core/src/commands/env/mod.rs b/core/src/commands/env/mod.rs index 2b3cb865..6e862584 100644 --- a/core/src/commands/env/mod.rs +++ b/core/src/commands/env/mod.rs @@ -1,15 +1,18 @@ // SPDX-FileCopyrightText: © 2025 Sysand 
contributors // SPDX-License-Identifier: MIT OR Apache-2.0 -use crate::env::{ - memory::{MemoryStorageEnvironment, MemoryWriteError}, - utils::ErrorBound, -}; #[cfg(feature = "filesystem")] use crate::{ env::local_directory::{ENTRIES_PATH, LocalDirectoryEnvironment, LocalWriteError}, project::utils::{ToPathBuf, wrapfs}, }; +use crate::{ + env::{ + memory::{MemoryStorageEnvironment, MemoryWriteError}, + utils::ErrorBound, + }, + project::memory::InMemoryProject, +}; #[cfg(feature = "filesystem")] use camino::Utf8Path; @@ -33,7 +36,8 @@ pub enum EnvError { Write(#[from] WriteError), } -pub fn do_env_memory() -> Result> { +pub fn do_env_memory() +-> Result, EnvError> { Ok(MemoryStorageEnvironment::default()) } diff --git a/core/src/commands/init.rs b/core/src/commands/init.rs index 770cecb3..d3fd01ee 100644 --- a/core/src/commands/init.rs +++ b/core/src/commands/init.rs @@ -13,7 +13,7 @@ use crate::{ }; #[cfg(feature = "filesystem")] -use crate::project::local_src::LocalSrcProject; +use crate::project::local_src::{LocalSrcError, LocalSrcProject}; use thiserror::Error; @@ -27,14 +27,14 @@ pub enum InitError { SPDXLicenseParse(Box, spdx::error::ParseError), } -pub fn do_init_ext( +pub fn do_init_ext( name: String, version: String, no_semver: bool, license: Option, no_spdx: bool, - storage: &mut S, -) -> Result<(), InitError> { + storage: &mut P, +) -> Result<(), InitError> { if !no_semver { Version::parse(&version).map_err(|e| InitError::SemVerParse(version.as_str().into(), e))?; } @@ -54,9 +54,9 @@ pub fn do_init_ext( storage.put_project( &InterchangeProjectInfoRaw { - name, + name: name.to_owned(), description: None, - version, + version: version.to_owned(), license, maintainer: vec![], topic: vec![], @@ -78,23 +78,28 @@ pub fn do_init_ext( Ok(()) } -pub fn do_init( +pub fn do_init( name: String, version: String, license: Option, - storage: &mut S, -) -> Result<(), InitError> { + storage: &mut P, +) -> Result<(), InitError> { do_init_ext(name, version, false, license, 
false, storage) } -pub fn do_init_memory( - name: String, - version: String, +pub fn do_init_memory, V: AsRef>( + name: N, + version: V, license: Option, ) -> Result> { let mut storage = InMemoryProject::default(); - do_init(name, version, license, &mut storage)?; + do_init( + name.as_ref().to_owned(), + version.as_ref().to_owned(), + license, + &mut storage, + )?; Ok(storage) } @@ -105,8 +110,11 @@ pub fn do_init_local_file( version: String, license: Option, path: Utf8PathBuf, -) -> Result> { - let mut storage = LocalSrcProject { project_path: path }; +) -> Result> { + let mut storage = LocalSrcProject { + nominal_path: None, + project_path: path, + }; do_init(name, version, license, &mut storage)?; diff --git a/core/src/commands/lock.rs b/core/src/commands/lock.rs index 93e927ff..a8731d82 100644 --- a/core/src/commands/lock.rs +++ b/core/src/commands/lock.rs @@ -14,7 +14,7 @@ use crate::project::{editable::EditableProject, local_src::LocalSrcProject, util use crate::{ lock::{Lock, Project, Usage}, model::{InterchangeProjectUsage, InterchangeProjectValidationError}, - project::{CanonicalisationError, ProjectRead, utils::FsIoError}, + project::{CanonicalizationError, ProjectRead, utils::FsIoError}, resolve::ResolveRead, solve::pubgrub::{SolverError, solve}, }; @@ -24,7 +24,7 @@ pub enum LockProjectError), + InputProjectCanonicalizationError(CanonicalizationError), #[error(transparent)] LockError(#[from] LockError), } @@ -34,7 +34,7 @@ pub enum LockError { #[error(transparent)] DependencyProject(PD::Error), #[error(transparent)] - DependencyProjectCanonicalisation(CanonicalisationError), + DependencyProjectCanonicalization(CanonicalizationError), #[error(transparent)] Io(#[from] Box), #[error("incomplete project{0}")] @@ -86,16 +86,19 @@ pub fn do_lock_projects< let canonical_hash = project .checksum_canonical_hex() - .map_err(LockProjectError::InputProjectCanonicalisationError)? + .map_err(LockProjectError::InputProjectCanonicalizationError)? 
.ok_or_else(|| LockError::IncompleteInputProject(format!("\n{:?}", project)))?; + let sources = project.sources(); + debug_assert!(!sources.is_empty()); + lock.projects.push(Project { name: Some(info.name), version: info.version, - exports: meta.index.keys().cloned().collect(), + exports: meta.index.into_keys().collect(), identifiers: vec![], checksum: canonical_hash, - sources: project.sources(), + sources, usages: info.usage.iter().cloned().map(Usage::from).collect(), }); @@ -131,17 +134,20 @@ pub fn do_lock_extend< for (iri, (info, meta, project)) in solution { let canonical_hash = project .checksum_canonical_hex() - .map_err(LockError::DependencyProjectCanonicalisation)? + .map_err(LockError::DependencyProjectCanonicalization)? .ok_or_else(|| LockError::IncompleteInputProject(format!("\n{:?}", project)))?; + let sources = project.sources(); + debug_assert!(!sources.is_empty()); + lock.projects.push(Project { name: Some(info.name), version: info.version.to_string(), - exports: meta.index.keys().cloned().collect(), + exports: meta.index.into_keys().collect(), identifiers: vec![iri.to_string()], checksum: canonical_hash, - sources: project.sources(), - usages: info.usage.iter().cloned().map(Usage::from).collect(), + sources, + usages: info.usage.into_iter().map(Usage::from).collect(), }); dependencies.push((iri, project)); @@ -157,17 +163,28 @@ pub type EditableLocalSrcProject = EditableProject; #[cfg(feature = "filesystem")] pub fn do_lock_local_editable< P: AsRef, + PR: AsRef, PD: ProjectRead + Debug, R: ResolveRead + Debug, >( path: P, + project_root: PR, resolver: R, ) -> Result, LockProjectError> { let project = EditableProject::new( // TODO: this is incorrect if project is in a subdir of workspace ".".into(), LocalSrcProject { - project_path: path.to_path_buf(), + nominal_path: Some(path.to_path_buf()), + project_path: project_root + .as_ref() + .join(&path) + .canonicalize_utf8() + .map_err(|e| { + LockError::Io( + 
FsIoError::Canonicalize(project_root.as_ref().join(path), e).into(), + ) + })?, }, ); diff --git a/core/src/commands/root.rs b/core/src/commands/root.rs index cf69d8ef..be587711 100644 --- a/core/src/commands/root.rs +++ b/core/src/commands/root.rs @@ -3,8 +3,9 @@ use camino::{Utf8Path, Utf8PathBuf}; -use crate::discover::discover_project; +use crate::{discover::discover_project, project::utils::FsIoError}; -pub fn do_root>(path: P) -> Option { - discover_project(path).map(|e| e.root_path()) +pub fn do_root>(path: P) -> Result, Box> { + let root = discover_project(path)?.map(|e| e.root_path().to_owned()); + Ok(root) } diff --git a/core/src/commands/sync.rs b/core/src/commands/sync.rs index 7202c6de..809cbe1a 100644 --- a/core/src/commands/sync.rs +++ b/core/src/commands/sync.rs @@ -14,7 +14,7 @@ use crate::{ }; #[derive(Error, Debug)] -pub enum SyncError { +pub enum SyncError { #[error("incorrect checksum for project with IRI `{0}` in lockfile")] BadChecksum(String), #[error("project with IRI `{0}` is missing `.project.json` or `.meta.json`")] @@ -29,6 +29,8 @@ pub enum SyncError { MissingIriLocalKparPath(Box), #[error("no IRI given for project with remote_kpar = `{0}` in lockfile")] MissingIriRemoteKparPath(Box), + #[error("no IRI given for project with remote_git = `{0}` in lockfile")] + MissingIriRemoteGitPath(Box), #[error( "cannot handle project with IRI `{0}` residing in local file (type `local_src`) storage" )] @@ -43,6 +45,12 @@ pub enum SyncError { "cannot handle project with IRI `{0}` residing in remote kpar (type `remote_kpar`) storage" )] MissingRemoteKparStorage(Box), + #[error( + "cannot handle project with IRI `{0}` residing in remote git repo (type `remote_git`) storage" + )] + MissingRemoteGitStorage(Box), + #[error("failed to download git project from {0}: {1}")] + GitDownload(Box, GitError), #[error("invalid remote source URL `{0}`:\n{1}")] InvalidRemoteSource(Box, UrlParseError), #[error("no supported sources for project with IRI `{0}`")] @@ 
-61,6 +69,8 @@ pub enum SyncError { ProjectRead(String), } +// TODO: Use AnyProject::try_from_source to avoid having so many arguments +#[allow(clippy::too_many_arguments)] pub fn do_sync< Environment, CreateSrcPathStorage, @@ -72,6 +82,9 @@ pub fn do_sync< CreateRemoteKParStorage, RemoteKParStorage, UrlParseError: ErrorBound, + CreateRemoteGitStorage, + RemoteGitStorage, + GitError: ErrorBound, >( lockfile: &Lock, env: &mut Environment, @@ -79,8 +92,9 @@ pub fn do_sync< remote_src_storage: Option, kpar_path_storage: Option, remote_kpar_storage: Option, + remote_git_storage: Option, provided_iris: &HashMap>, -) -> Result<(), SyncError> +) -> Result<(), SyncError> where Environment: ReadEnvironment + WriteEnvironment, CreateSrcPathStorage: Fn(&Utf8Path) -> SrcPathStorage, @@ -91,6 +105,8 @@ where KParPathStorage: ProjectRead, CreateRemoteKParStorage: Fn(String) -> Result, RemoteKParStorage: ProjectRead, + CreateRemoteGitStorage: Fn(String) -> Result, + RemoteGitStorage: ProjectRead, { let syncing = "Syncing"; let header = crate::style::get_style_config().header; @@ -117,15 +133,14 @@ where project_version.checksum_canonical_hex().ok().flatten() { if checksum == &provided_checksum { - log::debug!("`{}` is marked as provided, skipping installation", iri); + log::debug!("`{iri}` is marked as provided, skipping installation"); continue 'main_loop; } provided.push(provided_checksum); } else { log::debug!( - "failed to get checksum for provided project: {:?}", - project_version + "failed to get checksum for provided project: {project_version:?}" ); } } @@ -146,7 +161,7 @@ where for uri in &project.identifiers { if is_installed(uri, &project.checksum, env)? 
{ - log::debug!("{} found in sysand_env", &uri); + log::debug!("`{uri}` found in sysand_env"); continue 'main_loop; } } @@ -209,6 +224,18 @@ where log::debug!("trying to install `{uri}` from remote_kpar: {remote_kpar}"); try_install(uri, &project.checksum, storage, env)?; } + Source::RemoteGit { remote_git } => { + let uri = main_uri.as_ref().ok_or_else(|| { + SyncError::MissingIriRemoteGitPath(remote_git.as_str().into()) + })?; + let remote_git_storage = remote_git_storage.as_ref().ok_or_else(|| { + SyncError::MissingRemoteGitStorage(remote_git.as_str().into()) + })?; + let storage = remote_git_storage(remote_git.clone()) + .map_err(|e| SyncError::GitDownload(remote_git.as_str().into(), e))?; + log::debug!("trying to install `{uri}` from remote_git: {remote_git}"); + try_install(uri, &project.checksum, storage, env)?; + } _ => supported = false, } if supported { @@ -228,11 +255,11 @@ where Ok(()) } -fn is_installed, P: AsRef>( +fn is_installed, P: AsRef>( uri: S, checksum: P, env: &E, -) -> Result> { +) -> Result> { if !env .has(&uri) .map_err(|e| SyncError::ProjectRead(e.to_string()))? @@ -247,7 +274,7 @@ fn is_installed, P: AsRef> let project_checksum = env .get_project(&uri, version) .map_err(|e| SyncError::ProjectRead(e.to_string()))? - .checksum_noncanonical_hex() + .checksum_non_canonical_hex() .map_err(|e| SyncError::ProjectRead(e.to_string()))? .ok_or_else(|| SyncError::BadProject(uri.as_ref().to_owned()))?; if checksum.as_ref() == project_checksum { @@ -261,6 +288,7 @@ fn try_install< E: ReadEnvironment + WriteEnvironment, P: ProjectRead, U: ErrorBound, + G: ErrorBound, S1: AsRef, S2: AsRef, >( @@ -268,24 +296,26 @@ fn try_install< checksum: S2, storage: P, env: &mut E, -) -> Result<(), SyncError> { +) -> Result<(), SyncError> { + let uri = uri.as_ref(); + let checksum = checksum.as_ref(); let project_checksum = storage .checksum_canonical_hex() .map_err(|e| SyncError::ProjectRead(e.to_string()))? 
- .ok_or_else(|| SyncError::BadProject(uri.as_ref().to_owned()))?; - if checksum.as_ref() == project_checksum { + .ok_or_else(|| SyncError::BadProject(uri.to_owned()))?; + if checksum == project_checksum { // TODO: Need to decide how to handle existing installations and possible flags to modify behavior - do_env_install_project(&uri, &storage, env, true, true).map_err(|e| { + do_env_install_project(uri, &storage, env, true, true).map_err(|e| { SyncError::InstallFail { - uri: uri.as_ref().into(), + uri: uri.into(), cause: e.to_string(), } })?; } else { - log::debug!("incorrect checksum for `{}` in lockfile", uri.as_ref()); - log::debug!("lockfile checksum = `{}`", checksum.as_ref()); - log::debug!("project checksum = `{}`", project_checksum); - return Err(SyncError::BadChecksum(uri.as_ref().into())); + log::debug!("incorrect checksum for `{uri}` in lockfile"); + log::debug!("lockfile checksum = `{checksum}`"); + log::debug!("project checksum = `{project_checksum}`"); + return Err(SyncError::BadChecksum(uri.into())); } Ok(()) } @@ -347,8 +377,10 @@ mod tests { let env = MemoryStorageEnvironment::new(); assert!( - !is_installed::(uri, checksum, &env) - .unwrap() + !is_installed::, Infallible, Infallible, _, _>( + uri, checksum, &env + ) + .unwrap() ); } @@ -357,7 +389,7 @@ mod tests { let storage = storage_example(); let uri = "urn:kpar:install_test"; - let checksum = storage.checksum_noncanonical_hex().unwrap().unwrap(); + let checksum = storage.checksum_non_canonical_hex().unwrap().unwrap(); let mut env = MemoryStorageEnvironment::new(); env.put_project(uri, "1,2,3", |p| { clone_project(&storage, p, true).map(|_| ()) @@ -365,17 +397,24 @@ mod tests { .unwrap(); assert!( - is_installed::(uri, &checksum, &env) - .unwrap() + is_installed::, Infallible, Infallible, _, _>( + uri, &checksum, &env + ) + .unwrap() ); assert!( - !is_installed::(uri, "00", &env).unwrap() + !is_installed::, Infallible, Infallible, _, _>( + uri, "00", &env + ) + .unwrap() ); assert!( - 
!is_installed::("not_uri", &checksum, &env) - .unwrap() + !is_installed::, Infallible, Infallible, _, _>( + "not_uri", &checksum, &env + ) + .unwrap() ); } @@ -384,12 +423,17 @@ mod tests { let storage = storage_example(); let uri = "urn:kpar:install_test"; - let checksum = storage.checksum_noncanonical_hex().unwrap().unwrap(); + let checksum = storage.checksum_non_canonical_hex().unwrap().unwrap(); let mut env = MemoryStorageEnvironment::new(); - try_install::( - uri, &checksum, storage, &mut env, - ) + try_install::< + MemoryStorageEnvironment, + InMemoryProject, + Infallible, + Infallible, + _, + _, + >(uri, &checksum, storage, &mut env) .unwrap(); let uris = env.uris().unwrap(); @@ -411,12 +455,15 @@ mod tests { let checksum = "00"; let mut env = MemoryStorageEnvironment::new(); - let SyncError::BadChecksum(msg) = - try_install::( - &uri, &checksum, storage, &mut env, - ) - .unwrap_err() - else { + let SyncError::BadChecksum(msg) = try_install::< + MemoryStorageEnvironment, + InMemoryProject, + Infallible, + Infallible, + _, + _, + >(&uri, &checksum, storage, &mut env) + .unwrap_err() else { panic!() }; diff --git a/core/src/config/local_fs.rs b/core/src/config/local_fs.rs index 42f12c59..446bf98c 100644 --- a/core/src/config/local_fs.rs +++ b/core/src/config/local_fs.rs @@ -1,13 +1,17 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 -use std::{fs, path::Path}; +use std::{fs, io::ErrorKind, str::FromStr}; -use camino::Utf8Path; +use camino::{Utf8Path, Utf8PathBuf}; use thiserror::Error; +use toml_edit::{ArrayOfTables, DocumentMut, Item, Table, Value}; use super::Config; -use crate::project::utils::FsIoError; +use crate::{ + lock::{Source, multiline_array}, + project::utils::{FsIoError, wrapfs}, +}; pub const CONFIG_DIR: &str = "sysand"; pub const CONFIG_FILE: &str = "sysand.toml"; @@ -26,19 +30,14 @@ impl From for ConfigReadError { } } -pub fn get_config>(path: P) -> Result { - if path.as_ref().is_file() { 
+pub fn get_config>(path: P) -> Result { + if wrapfs::is_file(path.as_ref())? { let contents = { - fs::read_to_string(path.as_ref()).map_err(|e| { - Box::new(FsIoError::ReadFile( - path.as_ref().to_string_lossy().into_owned().into(), - e, - )) - }) + fs::read_to_string(path.as_ref()) + .map_err(|e| Box::new(FsIoError::ReadFile(path.as_ref().to_owned(), e))) }?; - Ok(toml::from_str(&contents).map_err(|e| { - ConfigReadError::Toml(Utf8Path::new(&path.as_ref().to_string_lossy()).into(), e) - })?) + Ok(toml::from_str(&contents) + .map_err(|e| ConfigReadError::Toml(path.as_ref().to_owned().into(), e))?) } else { Ok(Config::default()) } @@ -50,7 +49,7 @@ pub fn load_configs>(working_dir: P) -> Result>(working_dir: P) -> Result), + #[error("`{0}` is not a file")] + NotAFile(String), + #[error("failed to parse configuration file at `{0}`:\n{1}")] + TomlEdit(Utf8PathBuf, toml_edit::TomlError), + #[error("{0}")] + InvalidProjects(String), +} + +pub fn add_project_source_to_config, S: AsRef>( + config_path: P, + iri: S, + source: &Source, +) -> Result<(), ConfigProjectSourceError> { + let config_path = config_path.as_ref(); + let sources = multiline_array(std::iter::once(source.to_toml())); + let contents = match wrapfs::metadata(config_path) { + Ok(metadata) if metadata.is_file() => wrapfs::read_to_string(config_path)?, + Ok(_) => { + return Err(ConfigProjectSourceError::NotAFile(config_path.to_string())); + } + Err(err) if matches!(err.as_ref(), FsIoError::Metadata(_, e) if e.kind() == ErrorKind::NotFound) => + { + let creating = "Creating"; + let header = crate::style::get_style_config().header; + log::info!( + "{header}{creating:>12}{header:#} configuration file at `{}`", + config_path, + ); + String::new() + } + Err(err) => return Err(ConfigProjectSourceError::Io(err)), + }; + let mut config = DocumentMut::from_str(&contents) + .map_err(|err| ConfigProjectSourceError::TomlEdit(config_path.to_owned(), err))?; + let projects = config + .as_table_mut() + .entry("project") + 
.or_insert(Item::ArrayOfTables(ArrayOfTables::new())) + .as_array_of_tables_mut() + .ok_or(ConfigProjectSourceError::InvalidProjects( + "`project` should always be an array of tables".to_string(), + ))?; + + if let Some(project) = projects.iter_mut().find(|table| { + table + .get("identifiers") + .and_then(|n| n.as_array()) + .is_some_and(|arr| { + arr.iter() + .any(|identifier| identifier.as_str() == Some(iri.as_ref())) + }) + }) { + project["sources"] = toml_edit::value(sources); + } else { + let mut project = Table::new(); + project["identifiers"] = + toml_edit::value(multiline_array(std::iter::once(Value::from(iri.as_ref())))); + project["sources"] = toml_edit::value(sources); + + projects.push(project); + } + + let adding = "Adding"; + let header = crate::style::get_style_config().header; + log::info!( + "{header}{adding:>12}{header:#} source for `{}` to configuration file at `{}`", + iri.as_ref(), + config_path, + ); + + wrapfs::write(config_path, config.to_string())?; + + Ok(()) +} + +pub fn remove_project_source_from_config, S: AsRef>( + config_path: P, + iri: S, +) -> Result { + let config_path = config_path.as_ref(); + let contents = match wrapfs::metadata(config_path) { + Ok(metadata) if metadata.is_file() => wrapfs::read_to_string(config_path)?, + Ok(_) => { + return Err(ConfigProjectSourceError::NotAFile(config_path.to_string())); + } + Err(err) if matches!(err.as_ref(), FsIoError::Metadata(_, e) if e.kind() == ErrorKind::NotFound) => + { + return Ok(false); + } + Err(err) => return Err(ConfigProjectSourceError::Io(err)), + }; + let mut config = DocumentMut::from_str(&contents) + .map_err(|err| ConfigProjectSourceError::TomlEdit(config_path.to_owned(), err))?; + let Some(projects) = config + .as_table_mut() + .get_mut("project") + .and_then(Item::as_array_of_tables_mut) + else { + return Ok(false); + }; + + let remove_index = projects.iter().position(|project| { + project + .get("identifiers") + .and_then(|n| n.as_array()) + .is_some_and(|arr| { + 
arr.iter() + .any(|identifier| identifier.as_str() == Some(iri.as_ref())) + }) + }); + + if let Some(index) = remove_index { + let removing = "Removing"; + let header = crate::style::get_style_config().header; + log::info!( + "{header}{removing:>12}{header:#} source for `{}` from configuration file at `{}`", + iri.as_ref(), + config_path, + ); + + projects.remove(index); + let contents = config.to_string(); + + if contents.is_empty() { + let removing = "Removing"; + log::info!( + "{header}{removing:>12}{header:#} empty configuration file at `{}`", + config_path, + ); + wrapfs::remove_file(config_path)?; + } else { + wrapfs::write(config_path, contents)?; + } + + return Ok(true); + } + + Ok(false) +} + #[cfg(test)] mod tests { - use std::io::Write; + use std::{error::Error, io::Write}; - use crate::config::{Config, Index, local_fs}; use camino_tempfile::tempdir; + use crate::{ + config::{Config, ConfigProject, Index, local_fs}, + lock::Source, + project::utils::wrapfs, + }; + #[test] - fn load_configs() { - let dir = tempdir().unwrap(); + fn load_configs() -> Result<(), Box> { + let dir = tempdir()?; let config_path = dir.path().join(local_fs::CONFIG_FILE); - let mut config_file = std::fs::File::create(config_path).unwrap(); + let mut config_file = wrapfs::File::create(config_path)?; let config = Config { - quiet: Some(true), - verbose: Some(false), - index: Some(vec![Index { + indexes: vec![Index { url: "http://www.example.com".to_string(), ..Default::default() - }]), + }], + projects: vec![], // auth: None, }; - config_file - .write_all(toml::to_string_pretty(&config).unwrap().as_bytes()) - .unwrap(); + config_file.write_all(toml::to_string_pretty(&config)?.as_bytes())?; - let config_read = local_fs::load_configs(dir.path()).unwrap(); + let config_read = local_fs::load_configs(dir.path())?; assert_eq!(config_read, config); + + Ok(()) + } + + #[test] + fn add_project_source_to_config() -> Result<(), Box> { + let dir = tempdir()?; + let config_path = 
dir.path().join(local_fs::CONFIG_FILE); + let iri = "urn:kpar:test"; + let source = Source::LocalSrc { + src_path: "local/test".into(), + }; + + local_fs::add_project_source_to_config(&config_path, iri, &source)?; + + let config = Config { + indexes: vec![], + projects: vec![ConfigProject { + identifiers: vec![iri.to_string()], + sources: vec![source], + }], + }; + + assert_eq!( + config, + toml::from_str(wrapfs::read_to_string(config_path)?.as_str())?, + ); + + Ok(()) + } + + #[test] + fn remove_project_source_from_config() -> Result<(), Box> { + let dir = tempdir()?; + let config_path = dir.path().join(local_fs::CONFIG_FILE); + let mut config_file = wrapfs::File::create(&config_path)?; + let iri = "urn:kpar:test"; + let source = Source::LocalSrc { + src_path: "local/test".into(), + }; + let config = Config { + indexes: vec![], + projects: vec![ConfigProject { + identifiers: vec![iri.to_string()], + sources: vec![source], + }], + }; + config_file.write_all(toml::to_string_pretty(&config)?.as_bytes())?; + + local_fs::remove_project_source_from_config(&config_path, iri)?; + + assert!(!config_path.is_file()); + + Ok(()) } } diff --git a/core/src/config/mod.rs b/core/src/config/mod.rs index 6e0d9ae7..f3130cd6 100644 --- a/core/src/config/mod.rs +++ b/core/src/config/mod.rs @@ -4,22 +4,36 @@ use serde::{Deserialize, Serialize}; use url::Url; +use crate::lock::Source; + #[cfg(feature = "filesystem")] pub mod local_fs; #[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] pub struct Config { - pub quiet: Option, - pub verbose: Option, - pub index: Option>, + #[serde(rename = "index", skip_serializing_if = "Vec::is_empty", default)] + pub indexes: Vec, + #[serde(rename = "project", skip_serializing_if = "Vec::is_empty", default)] + pub projects: Vec, // pub auth: Option>, } +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ConfigProject { + #[serde(skip_serializing_if = "Vec::is_empty", default)] + pub identifiers: Vec, + 
#[serde(skip_serializing_if = "Vec::is_empty", default)] + pub sources: Vec, +} + impl Config { pub fn merge(&mut self, config: Config) { - self.quiet = self.quiet.or(config.quiet); - self.verbose = self.verbose.or(config.verbose); - extend_option_vec(&mut self.index, config.index); + let Config { + mut indexes, + mut projects, + } = config; + self.indexes.append(&mut indexes); + self.projects.append(&mut projects); // if let Some(auth) = config.auth { // self.auth = Some(auth.clone()); @@ -44,7 +58,7 @@ impl Config { index_urls: Vec, default_urls: Vec, ) -> Result, url::ParseError> { - let mut indexes: Vec<_> = self.index.iter().flat_map(|v| v.iter()).collect(); + let mut indexes = self.indexes.clone(); indexes.sort_by_key(|i| i.default.unwrap_or(false)); @@ -53,11 +67,7 @@ impl Config { .and_then(|index| index.default) .unwrap_or(false); - let end: Vec = if has_default { - std::iter::empty::().collect() - } else { - default_urls - }; + let end = if has_default { vec![] } else { default_urls }; index_urls .iter() @@ -77,9 +87,8 @@ impl Config { .iter() .map(|url| url.as_str()) .chain( - self.index + self.indexes .iter() - .flat_map(|v| v.iter()) .filter(|i| !i.default.unwrap_or(false)) .map(|i| i.url.as_str()), ) @@ -89,12 +98,6 @@ impl Config { } } -fn extend_option_vec(target: &mut Option>, src: Option>) { - if let Some(mut src_vec) = src { - target.get_or_insert_with(Vec::new).append(&mut src_vec); - } -} - #[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] pub struct Index { pub name: Option, @@ -113,15 +116,17 @@ pub enum AuthSource { mod tests { use url::Url; - use crate::config::{Config, Index}; + use crate::{ + config::{Config, ConfigProject, Index}, + lock::Source, + }; #[test] fn default_config() { let config = Config::default(); - assert_eq!(config.quiet, None); - assert_eq!(config.verbose, None); - assert_eq!(config.index, None); + assert_eq!(config.indexes, vec![]); + assert_eq!(config.projects, vec![]); } #[test] @@ -138,12 +143,16 
@@ mod tests { fn merge() { let mut defaults = Config::default(); let config = Config { - quiet: Some(true), - verbose: Some(false), - index: Some(vec![Index { + indexes: vec![Index { url: "http://www.example.com".to_string(), ..Default::default() - }]), + }], + projects: vec![ConfigProject { + identifiers: vec!["urn:kpar:test".to_string()], + sources: vec![Source::LocalSrc { + src_path: "./path/to project".into(), + }], + }], // auth: None, }; defaults.merge(config.clone()); @@ -154,10 +163,10 @@ mod tests { #[test] fn index_urls_without_default() { let config = Config { - index: Some(vec![Index { + indexes: vec![Index { url: "http://www.index.com".to_string(), ..Default::default() - }]), + }], ..Default::default() }; let index = vec!["http://www.extra-index.com".to_string()]; @@ -181,7 +190,7 @@ mod tests { #[test] fn index_urls_with_default() { let config = Config { - index: Some(vec![ + indexes: vec![ Index { url: "http://www.config-default.com".to_string(), default: Some(true), @@ -191,7 +200,7 @@ mod tests { url: "http://www.index.com".to_string(), ..Default::default() }, - ]), + ], ..Default::default() }; let index = vec!["http://www.extra-index.com".to_string()]; @@ -215,7 +224,7 @@ mod tests { #[test] fn index_urls_with_override() { let config = Config { - index: Some(vec![ + indexes: vec![ Index { url: "http://www.config-default.com".to_string(), default: Some(true), @@ -225,7 +234,7 @@ mod tests { url: "http://www.index.com".to_string(), ..Default::default() }, - ]), + ], ..Default::default() }; let index = vec!["http://www.extra-index.com".to_string()]; diff --git a/core/src/discover.rs b/core/src/discover.rs index 380548d8..4b6c67c8 100644 --- a/core/src/discover.rs +++ b/core/src/discover.rs @@ -12,55 +12,65 @@ use crate::{ }; pub fn current_project() -> Result, Box> { - Ok(discover_project(wrapfs::current_dir()?)) + discover_project(wrapfs::current_dir()?) 
} -pub fn discover_project>(working_directory: P) -> Option { - let path = discover(working_directory, |path| { - path.join(".project.json").is_file() || path.join(".meta.json").is_file() - })?; - Some(LocalSrcProject { project_path: path }) +fn is_project_file(path: &Utf8Path) -> Result> { + Ok(wrapfs::is_file(path.join(".project.json"))? || wrapfs::is_file(path.join(".meta.json"))?) +} + +pub fn discover_project>( + working_directory: P, +) -> Result, Box> { + let project = discover(working_directory, is_project_file)?.map(|path| LocalSrcProject { + nominal_path: Some(Utf8PathBuf::from(".")), + project_path: path, + }); + Ok(project) } pub fn current_workspace() -> Result, Box> { - Ok(discover_workspace(wrapfs::current_dir()?)) + discover_workspace(wrapfs::current_dir()?) } -pub fn discover_workspace>(working_directory: P) -> Option { - let path = discover(working_directory, |path| { - path.join(".workspace.json").is_file() - })?; - Some(Workspace { +pub fn discover_workspace>( + working_directory: P, +) -> Result, Box> { + let workspace = discover(working_directory, |path| { + wrapfs::is_file(path.join(".workspace.json")) + })? + .map(|path| Workspace { workspace_path: path, - }) + }); + Ok(workspace) } // TODO: Improve the logic here, this is probably too simple -fn discover, F: Fn(&Utf8Path) -> bool>( +fn discover, F: Fn(&Utf8Path) -> Result>>( working_directory: P, predicate: F, -) -> Option { +) -> Result, Box> { let mut current = working_directory.to_path_buf(); log::debug!("trying to discover project in `{}`", current); - while !predicate(¤t) { + while !predicate(¤t)? 
{ match current.parent() { Some(parent) if parent.as_str().is_empty() => { - log::debug!("hit empty relative path, trying to canonicalise"); + log::debug!("hit empty relative path, trying to canonicalize"); match current.canonicalize_utf8() { Ok(current_canonical) => match current_canonical.parent() { Some(parent_canonical) => current = parent_canonical.to_path_buf(), None => { log::debug!( - "canonicalised path `{}` has no parent either", + "canonicalized path `{}` has no parent either", current_canonical ); - return None; + return Ok(None); } }, Err(e) => { - log::debug!("unable to canonicalise path `{}`: {e}", current); + log::debug!("unable to canonicalize path `{}`: {e}", current); } } } @@ -69,10 +79,10 @@ fn discover, F: Fn(&Utf8Path) -> bool>( current = parent.to_path_buf(); } None => { - return None; + return Ok(None); } } } - Some(current) + Ok(Some(current)) } diff --git a/core/src/env/local_directory.rs b/core/src/env/local_directory.rs index 22303ae9..6e96feb6 100644 --- a/core/src/env/local_directory.rs +++ b/core/src/env/local_directory.rs @@ -283,8 +283,8 @@ fn try_move_files(paths: &Vec<(&Utf8Path, &Utf8Path)>) -> Result<(), TryMoveErro } impl LocalDirectoryEnvironment { - pub fn root_path(&self) -> Utf8PathBuf { - self.environment_path.clone() + pub fn root_path(&self) -> &Utf8Path { + &self.environment_path } pub fn entries_path(&self) -> Utf8PathBuf { @@ -375,10 +375,18 @@ impl ReadEnvironment for LocalDirectoryEnvironment { uri: S, version: T, ) -> Result { - let path = self.project_path(uri, version); + let path = self.project_path(&uri, version); let project_path = wrapfs::canonicalize(path)?; - - Ok(LocalSrcProject { project_path }) + let root_path = wrapfs::canonicalize(self.root_path())?; + let nominal_path = root_path + .parent() + .and_then(|r| project_path.strip_prefix(r).ok()) + .map(|p| p.to_path_buf()); + + Ok(LocalSrcProject { + nominal_path, + project_path, + }) } } @@ -503,6 +511,7 @@ impl WriteEnvironment for 
LocalDirectoryEnvironment { .map_err(|e| LocalWriteError::from(FsIoError::MkTempDir(e)))?; let mut tentative_project = LocalSrcProject { + nominal_path: None, project_path: project_temp.path().to_path_buf(), }; @@ -525,7 +534,13 @@ impl WriteEnvironment for LocalDirectoryEnvironment { ]) .map_err(LocalWriteError::from)?; - Ok(LocalSrcProject { project_path }) + Ok(LocalSrcProject { + nominal_path: project_path + .parent() + .and_then(|p| p.strip_prefix(self.root_path()).ok()) + .map(|p| p.to_path_buf()), + project_path, + }) } fn del_project_version, T: AsRef>( diff --git a/core/src/env/memory.rs b/core/src/env/memory.rs index ec124bdb..d7dd855b 100644 --- a/core/src/env/memory.rs +++ b/core/src/env/memory.rs @@ -3,21 +3,243 @@ use crate::{ env::{PutProjectError, ReadEnvironment, WriteEnvironment}, - project::memory::InMemoryProject, + project::{ProjectMut, ProjectRead}, +}; +use std::{ + collections::{HashMap, hash_map::Entry}, + fmt::Debug, }; -use std::collections::{HashMap, hash_map::Entry}; use thiserror::Error; -/// Project stored in a local directory -#[derive(Clone, Default, Debug)] -pub struct MemoryStorageEnvironment { - pub projects: HashMap>, +#[derive(Debug)] +pub struct MemoryStorageEnvironment { + pub projects: HashMap>, } -impl MemoryStorageEnvironment { +impl Default for MemoryStorageEnvironment { + fn default() -> Self { + Self { + projects: HashMap::default(), + } + } +} + +impl MemoryStorageEnvironment { pub fn new() -> Self { - Self::default() + Default::default() + } + + pub fn try_from_iter>( + iter: T, + ) -> Result> { + let mut map = HashMap::>::new(); + for (iri, project) in iter { + if let Some(version) = project.version().map_err(TryFromError::Read)? 
{ + map.entry(iri).or_default().insert(version, project); + } else { + return Err(TryFromError::MissingVersion(iri)); + } + } + Ok(Self { projects: map }) + } +} + +#[derive(Error, Debug)] +pub enum TryFromError { + #[error(transparent)] + Read(Project::Error), + #[error("missing version for project with IRI `{0}`")] + MissingVersion(String), +} + +/// Try to construct a `MemoryStorageEnvironment` from an array of pairs of IRIs +/// and project storages. +/// +/// All projects must have versions. +/// +/// # Returns +/// +/// - `Ok(env)` where `env` is a `MemoryStorageEnvironment` with all +/// projects given. +/// - `Err(error)` where `error` is +/// - `TryFromError::Read` if cannot be read +/// - `TryFromError::MissingVersion` if version is `None` +/// +/// # Example +/// +/// ```rust +/// # use sysand_core::commands::init::do_init_memory; +/// # use sysand_core::env::memory::MemoryStorageEnvironment; +/// # use sysand_core::env::ReadEnvironment; +/// # use sysand_core::project::memory::InMemoryProject; +/// let project1 = do_init_memory("First", "0.0.1", None).unwrap(); +/// let project2 = do_init_memory("First", "0.1.0", None).unwrap(); +/// let env = MemoryStorageEnvironment::::try_from([ +/// ("urn:kpar:first".into(), project1.clone()), +/// ("urn:kpar:first".into(), project2.clone()), +/// ]) +/// .unwrap(); +/// +/// assert_eq!( +/// project1, +/// env.get_project("urn:kpar:first", "0.0.1").unwrap() +/// ); +/// assert_eq!( +/// project2, +/// env.get_project("urn:kpar:first", "0.1.0").unwrap() +/// ); +/// ``` +impl TryFrom<[(String, Project); N]> + for MemoryStorageEnvironment +{ + type Error = TryFromError; + + fn try_from(value: [(String, Project); N]) -> Result { + Self::try_from_iter(value) + } +} + +/// Try to construct a `MemoryStorageEnvironment` from a Vec of pairs of IRIs and +/// project storages. +/// +/// All projects must have versions. 
+/// +/// # Returns +/// +/// - `Ok(env)` where `env` is a `MemoryStorageEnvironment` with all +/// projects given. +/// - `Err(error)` where `error` is +/// - `TryFromError::Read` if cannot be read +/// - `TryFromError::MissingVersion` if version is `None` +/// +/// # Example +/// +/// ```rust +/// # use sysand_core::commands::init::do_init_memory; +/// # use sysand_core::env::memory::MemoryStorageEnvironment; +/// # use sysand_core::env::ReadEnvironment; +/// # use sysand_core::project::memory::InMemoryProject; +/// let project1 = do_init_memory("First", "0.0.1", None).unwrap(); +/// let project2 = do_init_memory("First", "0.1.0", None).unwrap(); +/// let env = MemoryStorageEnvironment::::try_from(vec![ +/// ("urn:kpar:first".into(), project1.clone()), +/// ("urn:kpar:first".into(), project2.clone()), +/// ]) +/// .unwrap(); +/// +/// assert_eq!( +/// project1, +/// env.get_project("urn:kpar:first", "0.0.1").unwrap() +/// ); +/// assert_eq!( +/// project2, +/// env.get_project("urn:kpar:first", "0.1.0").unwrap() +/// ); +/// ``` +impl TryFrom> + for MemoryStorageEnvironment +{ + type Error = TryFromError; + + fn try_from(value: Vec<(String, Project)>) -> Result { + Self::try_from_iter(value) + } +} + +impl FromIterator<(String, String, Project)> + for MemoryStorageEnvironment +{ + fn from_iter>(iter: T) -> Self { + let mut map = HashMap::>::new(); + for (iri, version, project) in iter { + map.entry(iri).or_default().insert(version, project); + } + Self { projects: map } + } +} + +/// Construct a `MemoryStorageEnvironment` from an array of triples of IRIs, versions +/// and project storages. +/// +/// All projects must have versions. +/// +/// # Returns +/// +/// A `MemoryStorageEnvironment` with all projects given. 
+/// +/// # Example +/// +/// ```rust +/// # use sysand_core::commands::init::do_init_memory; +/// # use sysand_core::env::memory::MemoryStorageEnvironment; +/// # use sysand_core::env::ReadEnvironment; +/// # use sysand_core::project::memory::InMemoryProject; +/// let version1 = "0.0.1".to_string(); +/// let version2 = "0.1.0".to_string(); +/// let project1 = do_init_memory("First", &version1, None).unwrap(); +/// let project2 = do_init_memory("First", &version2, None).unwrap(); +/// let env = MemoryStorageEnvironment::::from([ +/// ("urn:kpar:first".into(), version1.clone(), project1.clone()), +/// ("urn:kpar:first".into(), version2.clone(), project2.clone()), +/// ]); +/// +/// assert_eq!( +/// project1, +/// env.get_project("urn:kpar:first", version1).unwrap() +/// ); +/// assert_eq!( +/// project2, +/// env.get_project("urn:kpar:first", version2).unwrap() +/// ); +/// ``` +impl From<[(String, String, Project); N]> + for MemoryStorageEnvironment +{ + fn from(value: [(String, String, Project); N]) -> Self { + Self::from_iter(value) + } +} + +/// Construct a `MemoryStorageEnvironment` from Vec of triples of IRIs, versions and +/// project storages. +/// +/// All projects must have versions. +/// +/// # Returns +/// +/// A `MemoryStorageEnvironment` with all projects given. 
+/// +/// # Example +/// +/// ```rust +/// # use sysand_core::commands::init::do_init_memory; +/// # use sysand_core::env::memory::MemoryStorageEnvironment; +/// # use sysand_core::env::ReadEnvironment; +/// # use sysand_core::project::memory::InMemoryProject; +/// let version1 = "0.0.1".to_string(); +/// let version2 = "0.1.0".to_string(); +/// let project1 = do_init_memory("First", &version1, None).unwrap(); +/// let project2 = do_init_memory("First", &version2, None).unwrap(); +/// let env = MemoryStorageEnvironment::::from(vec![ +/// ("urn:kpar:first".into(), version1.clone(), project1.clone()), +/// ("urn:kpar:first".into(), version2.clone(), project2.clone()), +/// ]); +/// +/// assert_eq!( +/// project1, +/// env.get_project("urn:kpar:first", version1).unwrap() +/// ); +/// assert_eq!( +/// project2, +/// env.get_project("urn:kpar:first", version2).unwrap() +/// ); +/// ``` +impl From> + for MemoryStorageEnvironment +{ + fn from(value: Vec<(String, String, Project)>) -> Self { + Self::from_iter(value) } } @@ -25,10 +247,10 @@ impl MemoryStorageEnvironment { #[derive(Error, Debug)] pub enum MemoryWriteError {} -impl WriteEnvironment for MemoryStorageEnvironment { +impl WriteEnvironment for MemoryStorageEnvironment { type WriteError = MemoryWriteError; - type InterchangeProjectMut = InMemoryProject; + type InterchangeProjectMut = Project; fn put_project, T: AsRef, F, E>( &mut self, @@ -39,7 +261,7 @@ impl WriteEnvironment for MemoryStorageEnvironment { where F: FnOnce(&mut Self::InterchangeProjectMut) -> Result<(), E>, { - let mut tentative_project = InMemoryProject::default(); + let mut tentative_project = Project::default(); write_project(&mut tentative_project).map_err(PutProjectError::Callback)?; @@ -60,6 +282,9 @@ impl WriteEnvironment for MemoryStorageEnvironment { match &mut self.projects.entry(uri.as_ref().to_string()) { Entry::Occupied(occupied_entry) => { occupied_entry.get_mut().remove(version.as_ref()); + if occupied_entry.get().is_empty() { + 
self.projects.remove(uri.as_ref()); + } Ok(()) } Entry::Vacant(_) => Ok(()), @@ -80,7 +305,7 @@ pub enum MemoryReadError { MissingVersion(String, String), } -impl ReadEnvironment for MemoryStorageEnvironment { +impl ReadEnvironment for MemoryStorageEnvironment { type ReadError = MemoryReadError; type UriIter = Vec>; @@ -106,7 +331,7 @@ impl ReadEnvironment for MemoryStorageEnvironment { Ok(version_vec) } - type InterchangeProjectRead = InMemoryProject; + type InterchangeProjectRead = Project; fn get_project, T: AsRef>( &self, @@ -127,3 +352,155 @@ impl ReadEnvironment for MemoryStorageEnvironment { .clone()) } } + +#[cfg(test)] +mod test { + use std::collections::HashMap; + + use crate::{ + env::{ + ReadEnvironment, WriteEnvironment, + memory::MemoryStorageEnvironment, + utils::{CloneError, clone_project}, + }, + init::do_init_memory, + project::memory::{InMemoryError, InMemoryProject}, + }; + + #[test] + fn write_environment() { + let uri1 = "urn:kpar:first".to_string(); + let uri2 = "urn:kpar:second".to_string(); + let version = "0.0.1".to_string(); + let project1 = do_init_memory("First", &version, None).unwrap(); + let project2 = do_init_memory("Second", &version, None).unwrap(); + let mut env = MemoryStorageEnvironment::::new(); + + env.put_project(&uri1, &version, |p| { + clone_project(&project1, p, true)?; + + Ok::<(), CloneError>(()) + }) + .unwrap(); + + assert_eq!(env.projects.len(), 1); + assert_eq!( + &project1, + env.projects.get(&uri1).unwrap().get(&version).unwrap() + ); + + env.put_project(&uri2, &version, |p| { + clone_project(&project2, p, true)?; + + Ok::<(), CloneError>(()) + }) + .unwrap(); + + assert_eq!(env.projects.len(), 2); + assert_eq!( + &project2, + env.projects.get(&uri2).unwrap().get(&version).unwrap() + ); + + env.del_project_version(&uri1, version).unwrap(); + + assert_eq!(env.projects.len(), 1); + assert!(!env.projects.contains_key(&uri1)); + + env.del_uri(&uri2).unwrap(); + + assert!(env.projects.is_empty()); + 
assert!(!env.projects.contains_key(&uri2)); + } + + #[test] + fn read_environment() { + let iri = "urn:kpar:first".to_string(); + let version = "0.0.1".to_string(); + let project = do_init_memory("First", &version, None).unwrap(); + let env = MemoryStorageEnvironment { + projects: HashMap::from([( + iri.clone(), + HashMap::from([(version.clone(), project.clone())]), + )]), + }; + + let uris = env.uris().unwrap(); + assert_eq!( + vec![&iri], + uris.iter() + .map(|uri| uri.as_ref().unwrap()) + .collect::>() + ); + + let versions = env.versions(&iri).unwrap(); + assert_eq!( + vec![&version], + versions + .iter() + .map(|version| version.as_ref().unwrap()) + .collect::>() + ); + + let get_project = env.get_project(iri, version).unwrap(); + assert_eq!(project, get_project); + } + + #[test] + fn from() { + let version1 = "0.0.1".to_string(); + let version2 = "0.1.0".to_string(); + let version3 = "0.0.1".to_string(); + let project1 = do_init_memory("First 0.0.1", &version1, None).unwrap(); + let project2 = do_init_memory("First 0.1.0", &version2, None).unwrap(); + let project3 = do_init_memory("Second", &version3, None).unwrap(); + let env = MemoryStorageEnvironment::::from([ + ("urn:kpar:first".into(), version1.clone(), project1.clone()), + ("urn:kpar:first".into(), version2.clone(), project2.clone()), + ("urn:kpar:second".into(), version3.clone(), project3.clone()), + ]); + + assert_eq!( + project1, + env.get_project("urn:kpar:first", version1).unwrap() + ); + assert_eq!( + project2, + env.get_project("urn:kpar:first", version2).unwrap() + ); + assert_eq!( + project3, + env.get_project("urn:kpar:second", version3).unwrap() + ); + assert_eq!(env.projects.len(), 2); + assert_eq!(env.projects.get("urn:kpar:first").unwrap().len(), 2); + } + + #[test] + fn try_from() { + let project1 = do_init_memory("First 0.0.1", "0.0.1", None).unwrap(); + let project2 = do_init_memory("First 0.1.0", "0.1.0", None).unwrap(); + let project3 = do_init_memory("Second", "0.0.1", 
None).unwrap(); + let env = MemoryStorageEnvironment::::try_from([ + ("urn:kpar:first".into(), project1.clone()), + ("urn:kpar:first".into(), project2.clone()), + ("urn:kpar:second".into(), project3.clone()), + ]) + .unwrap(); + + assert_eq!( + project1, + env.get_project("urn:kpar:first", "0.0.1").unwrap() + ); + assert_eq!( + project2, + env.get_project("urn:kpar:first", "0.1.0").unwrap() + ); + assert_eq!( + project3, + env.get_project("urn:kpar:second", "0.0.1").unwrap() + ); + assert_eq!(env.projects.len(), 2); + assert_eq!(env.projects.get("urn:kpar:first").unwrap().len(), 2); + } +} diff --git a/core/src/lock.rs b/core/src/lock.rs index 40086982..dc9773e6 100644 --- a/core/src/lock.rs +++ b/core/src/lock.rs @@ -9,14 +9,18 @@ use std::{ }; use semver::{Version, VersionReq}; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use thiserror::Error; use toml_edit::{ Array, ArrayOfTables, DocumentMut, Formatted, InlineTable, Item, Table, Value, value, }; use typed_path::Utf8UnixPathBuf; -use crate::{env::ReadEnvironment, project::ProjectRead}; +use crate::{ + env::ReadEnvironment, + model::{InterchangeProjectUsage, InterchangeProjectUsageRaw}, + project::ProjectRead, +}; pub const LOCKFILE_PREFIX: &str = "# This file is automatically generated by Sysand and is not intended to be edited manually.\n\n"; pub const CURRENT_LOCK_VERSION: &str = "0.2"; @@ -81,7 +85,7 @@ impl FromStr for Lock { for (field, _) in document.iter() { if !LOCKFILE_ENTRIES.contains(&field) { - log::warn!("unknown field '{}' in lockfile", field); + log::warn!("unknown field '{field}' in lockfile"); } } for (field, value, name) in document @@ -270,9 +274,13 @@ impl Lock { let version = Version::parse(&project.version) .inspect_err(|err| { log::warn!( - "invalid semantic version '{}' for project '{:?}'\n{}", + "invalid semantic version '{}' for project {}\n{}", project.version, - project.name, + project + .name + .as_ref() + .map(|n| format!("`{n}`")) + .unwrap_or("without 
name".to_owned()), err ); }) @@ -404,19 +412,19 @@ impl Project { table.insert("name", value(name)); } table.insert("version", value(&self.version)); - let exports = multiline_list(self.exports.iter().map(Value::from)); + let exports = multiline_array(self.exports.iter().map(Value::from)); if !exports.is_empty() { table.insert("exports", value(exports)); } - let identifiers = multiline_list(self.identifiers.iter().map(Value::from)); + let identifiers = multiline_array(self.identifiers.iter().map(Value::from)); if !identifiers.is_empty() { table.insert("identifiers", value(identifiers)); } - let usages = multiline_list(self.usages.iter().map(|u| u.to_toml())); + let usages = multiline_array(self.usages.iter().map(|u| u.to_toml())); if !usages.is_empty() { table.insert("usages", value(usages)); } - let sources = multiline_list(self.sources.iter().map(|s| s.to_toml())); + let sources = multiline_array(self.sources.iter().map(|s| s.to_toml())); if !sources.is_empty() { table.insert("sources", value(sources)); } @@ -437,20 +445,29 @@ const SOURCE_ENTRIES: &[&str] = &[ "remote_api", ]; -#[derive(Clone, Eq, Debug, Deserialize, Ord, PartialEq, PartialOrd)] +#[derive(Clone, Eq, Debug, Deserialize, Ord, PartialEq, PartialOrd, Serialize)] #[serde(untagged)] pub enum Source { // Path must be a Unix path relative to workspace root Editable { - #[serde(deserialize_with = "parse_unix_path")] + #[serde( + deserialize_with = "parse_unix_path", + serialize_with = "serialize_unix_path" + )] editable: Utf8UnixPathBuf, }, LocalSrc { - #[serde(deserialize_with = "parse_unix_path")] + #[serde( + deserialize_with = "parse_unix_path", + serialize_with = "serialize_unix_path" + )] src_path: Utf8UnixPathBuf, }, LocalKpar { - #[serde(deserialize_with = "parse_unix_path")] + #[serde( + deserialize_with = "parse_unix_path", + serialize_with = "serialize_unix_path" + )] kpar_path: Utf8UnixPathBuf, }, Registry { @@ -471,6 +488,13 @@ pub enum Source { }, } +fn serialize_unix_path(x: 
&Utf8UnixPathBuf, s: S) -> Result +where + S: serde::Serializer, +{ + s.serialize_str(x.as_str()) +} + fn parse_unix_path<'de, D>(deserializer: D) -> Result where D: serde::Deserializer<'de>, @@ -542,8 +566,8 @@ impl Usage { } } -impl From for Usage { - fn from(value: crate::model::InterchangeProjectUsageRaw) -> Usage { +impl From for Usage { + fn from(value: InterchangeProjectUsageRaw) -> Usage { Usage { resource: value.resource, version_constraint: value.version_constraint, @@ -551,8 +575,8 @@ impl From for Usage { } } -impl From for Usage { - fn from(value: crate::model::InterchangeProjectUsage) -> Usage { +impl From for Usage { + fn from(value: InterchangeProjectUsage) -> Usage { Usage { resource: value.resource.to_string(), version_constraint: value.version_constraint.map(|vr| vr.to_string()), @@ -560,7 +584,7 @@ impl From for Usage { } } -fn multiline_list(elements: impl Iterator>) -> Array { +pub fn multiline_array(elements: impl Iterator>) -> Array { let mut array: Array = elements .map(|item| { let mut value = item.into(); diff --git a/core/src/project/any.rs b/core/src/project/any.rs new file mode 100644 index 00000000..8117696d --- /dev/null +++ b/core/src/project/any.rs @@ -0,0 +1,120 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use std::{result::Result, sync::Arc}; + +use camino::Utf8Path; +use reqwest_middleware::ClientWithMiddleware; +use thiserror::Error; +use typed_path::Utf8UnixPath; + +use crate::{ + auth::HTTPAuthentication, + env::memory::MemoryStorageEnvironment, + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + project::{ + AsSyncProjectTokio, ProjectRead, ProjectReadAsync, + editable::EditableProject, + gix_git_download::{GixDownloadedError, GixDownloadedProject}, + local_kpar::LocalKParProject, + local_src::LocalSrcProject, + reference::ProjectReference, + reqwest_kpar_download::{ReqwestKparDownloadedError, ReqwestKparDownloadedProject}, + 
reqwest_src::ReqwestSrcProjectAsync, + utils::FsIoError, + }, + resolve::memory::{AcceptAll, MemoryResolver}, +}; + +#[derive(Debug, ProjectRead)] +pub enum AnyProject { + Editable(EditableProject), + LocalSrc(LocalSrcProject), + LocalKpar(LocalKParProject), + RemoteSrc(AsSyncProjectTokio>), + RemoteKpar(AsSyncProjectTokio>), + RemoteGit(GixDownloadedProject), +} + +#[derive(Error, Debug)] +pub enum TryFromSourceError { + #[error("unsupported source\n{0}")] + UnsupportedSource(String), + #[error(transparent)] + LocalKpar(Box), + #[error(transparent)] + RemoteKpar(ReqwestKparDownloadedError), + #[error(transparent)] + RemoteSrc(url::ParseError), + #[error(transparent)] + RemoteGit(GixDownloadedError), +} + +// TODO: Find a better solution going from source to project. +impl AnyProject { + pub fn try_from_source>( + source: Source, + project_root: P, + auth_policy: Arc, + client: ClientWithMiddleware, + runtime: Arc, + ) -> Result { + match source { + Source::Editable { editable } => { + let project = LocalSrcProject { + nominal_path: Some(editable.to_string().into()), + project_path: project_root.as_ref().join(editable.as_str()), + }; + Ok(AnyProject::Editable( + EditableProject::::new(editable.as_str().into(), project), + )) + } + Source::LocalKpar { kpar_path } => Ok(AnyProject::LocalKpar( + LocalKParProject::new_guess_root_nominal( + project_root.as_ref().join(kpar_path.as_str()), + kpar_path.as_str(), + ) + .map_err(TryFromSourceError::LocalKpar)?, + )), + Source::LocalSrc { src_path } => { + let nominal_path = src_path.into_string().into(); + let project_path = project_root.as_ref().join(&nominal_path); + Ok(AnyProject::LocalSrc(LocalSrcProject { + nominal_path: Some(nominal_path), + project_path, + })) + } + Source::RemoteKpar { + remote_kpar, + remote_kpar_size: _, + } => Ok(AnyProject::RemoteKpar( + ReqwestKparDownloadedProject::::new_guess_root( + remote_kpar, + client, + auth_policy, + ) + .map_err(TryFromSourceError::RemoteKpar)? 
+ .to_tokio_sync(runtime), + )), + Source::RemoteSrc { remote_src } => Ok(AnyProject::RemoteSrc( + ReqwestSrcProjectAsync:: { + client, + url: reqwest::Url::parse(&remote_src).map_err(TryFromSourceError::RemoteSrc)?, + auth_policy, + } + .to_tokio_sync(runtime), + )), + Source::RemoteGit { remote_git } => Ok(AnyProject::RemoteGit( + GixDownloadedProject::new(remote_git).map_err(TryFromSourceError::RemoteGit)?, + )), + _ => Err(TryFromSourceError::UnsupportedSource(format!("{source:?}"))), + } + } +} + +pub type OverrideProject = ProjectReference>; + +pub type OverrideEnvironment = MemoryStorageEnvironment>; + +pub type OverrideResolver = MemoryResolver>; diff --git a/core/src/project/cached.rs b/core/src/project/cached.rs new file mode 100644 index 00000000..203cbb85 --- /dev/null +++ b/core/src/project/cached.rs @@ -0,0 +1,61 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use crate::{ + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + project::{ProjectRead, Utf8UnixPath}, +}; + +/// Pair of project storages where `local` and `remote` contain the same project +/// content, while `local` is easier and faster to access. The CachedProject is +/// to be used in place of `remote` (in particular they return the same sources, +/// unlike `local`) but faster access. +#[derive(Clone, Debug)] +pub struct CachedProject { + local: Local, + remote: Remote, +} + +impl CachedProject { + /// Create a new CachedProject. Assume that `local` is a cached version of remote. 
+ pub fn new(local: Local, remote: Remote) -> Self { + CachedProject:: { local, remote } + } +} + +impl ProjectRead for CachedProject { + type Error = Local::Error; + + fn get_project( + &self, + ) -> Result< + ( + Option, + Option, + ), + Self::Error, + > { + self.local.get_project() + } + + type SourceReader<'a> + = Local::SourceReader<'a> + where + Self: 'a; + + fn read_source>( + &self, + path: P, + ) -> Result, Self::Error> { + self.local.read_source(path) + } + + fn sources(&self) -> Vec { + self.remote.sources() + } + + fn is_definitely_invalid(&self) -> bool { + self.local.is_definitely_invalid() + } +} diff --git a/core/src/project/editable.rs b/core/src/project/editable.rs index 0eb8df5f..d0f5cf76 100644 --- a/core/src/project/editable.rs +++ b/core/src/project/editable.rs @@ -3,7 +3,11 @@ use camino::{Utf8Path, Utf8PathBuf}; -use crate::project::ProjectRead; +use crate::{ + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + project::ProjectRead, +}; #[derive(Debug)] /// Treat a project type `P` as an "Editable" project. This simply adds @@ -46,8 +50,8 @@ impl ProjectRead for EditableProject

{ &self, ) -> Result< ( - Option, - Option, + Option, + Option, ), Self::Error, > { @@ -66,7 +70,7 @@ impl ProjectRead for EditableProject

{ self.inner.read_source(path) } - fn sources(&self) -> Vec { + fn sources(&self) -> Vec { let mut inner_sources = if self.include_original_sources { self.inner.sources() } else { @@ -75,7 +79,7 @@ impl ProjectRead for EditableProject

{ inner_sources.insert( 0, - crate::lock::Source::Editable { + Source::Editable { editable: self.nominal_path.as_str().into(), }, ); diff --git a/core/src/project/gix_git_download.rs b/core/src/project/gix_git_download.rs index 3ceb8a40..460d3763 100644 --- a/core/src/project/gix_git_download.rs +++ b/core/src/project/gix_git_download.rs @@ -3,7 +3,8 @@ use gix::prepare_clone; use thiserror::Error; use crate::{ - lock, + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, project::{ ProjectRead, local_src::{LocalSrcError, LocalSrcProject, PathError}, @@ -70,6 +71,7 @@ impl GixDownloadedProject { url: gix::url::parse(url.as_ref().into()) .map_err(|e| GixDownloadedError::UrlParse(url.as_ref().into(), Box::new(e)))?, inner: LocalSrcProject { + nominal_path: None, project_path: wrapfs::canonicalize(tmp_dir.path())?, }, tmp_dir, @@ -102,8 +104,8 @@ impl ProjectRead for GixDownloadedProject { &self, ) -> Result< ( - Option, - Option, + Option, + Option, ), Self::Error, > { @@ -126,8 +128,8 @@ impl ProjectRead for GixDownloadedProject { Ok(FileWithLifetime::new(self.inner.read_source(path)?)) } - fn sources(&self) -> Vec { - vec![lock::Source::RemoteGit { + fn sources(&self) -> Vec { + vec![Source::RemoteGit { remote_git: self.url.to_string(), }] } diff --git a/core/src/project/local_kpar.rs b/core/src/project/local_kpar.rs index e0912744..3705068f 100644 --- a/core/src/project/local_kpar.rs +++ b/core/src/project/local_kpar.rs @@ -1,20 +1,21 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 -use crate::{ - model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, - project::{self, ProjectRead, editable::GetPath, utils::ZipArchiveError}, -}; use std::io::Write as _; use camino::{Utf8Path, Utf8PathBuf}; use camino_tempfile::{Utf8TempDir, tempdir}; use sha2::Digest as _; -use typed_path::{Utf8Component, Utf8UnixPath}; - use thiserror::Error; +use typed_path::{Utf8Component, 
Utf8UnixPath}; use zip::ZipArchive; +use crate::{ + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + project::{self, ProjectRead, editable::GetPath, utils::ZipArchiveError}, +}; + use super::utils::{FsIoError, ProjectDeserializationError, ToPathBuf, wrapfs}; /// Project stored in as a KPar (Zip) archive in the local filesystem. @@ -23,7 +24,7 @@ use super::utils::{FsIoError, ProjectDeserializationError, ToPathBuf, wrapfs}; /// guessed based on the location of the `.project.json`-file. /// /// Paths used in the archive are expected to match those used in the metadata -/// manifest (.meta.json)! Sysand *MAY* try to normalise paths in order +/// manifest (.meta.json)! Sysand *MAY* try to normalize paths in order /// to match filenames, but no guarantees are made. /// /// Use `LocalKParProject::new_guess_root` to guess `root` based on the @@ -32,8 +33,18 @@ use super::utils::{FsIoError, ProjectDeserializationError, ToPathBuf, wrapfs}; /// The archive is read directly without extracting it. #[derive(Debug)] pub struct LocalKParProject { - pub tmp_dir: Utf8TempDir, + /// Temporary directory for unpacking files in archive. + tmp_dir: Utf8TempDir, + /// Path used in `Source::LocalSrc` returned by `.sources()`. + /// If `None` no source will be given. + /// E.g. if used in lockfile would be the path relative to the lockfile. + // TODO: Consider removing this and replacing it with some way of + // relativizing `archive_path` at the call site of .sources(). + pub nominal_path: Option, + /// Path used when locating the project archive internally. + /// Should be absolute. pub archive_path: Utf8PathBuf, + /// Optionally specify name of root directory inside archive. 
pub root: Option, } @@ -136,6 +147,19 @@ impl LocalKParProject { ) -> Result> { Ok(LocalKParProject { tmp_dir: tempdir().map_err(FsIoError::MkTempDir)?, + nominal_path: None, + archive_path: path.to_path_buf(), + root: Some(root.to_path_buf()), + }) + } + pub fn new_nominal, Q: AsRef, N: AsRef>( + path: P, + root: Q, + nominal: N, + ) -> Result> { + Ok(LocalKParProject { + tmp_dir: tempdir().map_err(FsIoError::MkTempDir)?, + nominal_path: Some(nominal.to_path_buf()), archive_path: path.to_path_buf(), root: Some(root.to_path_buf()), }) @@ -144,11 +168,34 @@ impl LocalKParProject { pub fn new_guess_root>(path: P) -> Result> { Ok(LocalKParProject { tmp_dir: tempdir().map_err(FsIoError::MkTempDir)?, + nominal_path: None, + archive_path: path.to_path_buf(), + root: None, + }) + } + + pub fn new_guess_root_nominal, N: AsRef>( + path: P, + nominal: N, + ) -> Result> { + Ok(LocalKParProject { + tmp_dir: tempdir().map_err(FsIoError::MkTempDir)?, + nominal_path: Some(nominal.to_path_buf()), archive_path: path.to_path_buf(), root: None, }) } + pub fn new_temporary() -> Result> { + let tmp_dir = tempdir().map_err(FsIoError::MkTempDir)?; + Ok(LocalKParProject { + nominal_path: None, + archive_path: tmp_dir.path().join("project.kpar"), + tmp_dir, + root: None, + }) + } + pub fn from_project>( from: &Pr, path: P, @@ -278,15 +325,7 @@ impl ProjectRead for LocalKParProject { path: P, ) -> Result, Self::Error> { let tmp_name = format!("{:X}", sha2::Sha256::digest(path.as_ref())); - let tmp_file_path = { - let mut p = self - .tmp_dir - .path() - .canonicalize_utf8() - .map_err(|e| FsIoError::Canonicalize(self.tmp_dir.to_path_buf(), e))?; - p.push(tmp_name); - p - }; + let tmp_file_path = self.tmp_dir.path().join(tmp_name); if !tmp_file_path.is_file() { let mut tmp_file = wrapfs::File::create(&tmp_file_path)?; @@ -310,10 +349,15 @@ impl ProjectRead for LocalKParProject { // Ok(KparFile { archive: archive, file: &mut archive.by_index(idx)? 
}) } - fn sources(&self) -> Vec { - vec![crate::lock::Source::LocalKpar { - kpar_path: self.archive_path.as_str().into(), - }] + fn sources(&self) -> Vec { + self.nominal_path + .as_ref() + .map(|path| { + vec![Source::LocalKpar { + kpar_path: path.as_str().into(), + }] + }) + .expect("`LocalKparProject` without `nominal_path` does not have any project sources") } } diff --git a/core/src/project/local_src.rs b/core/src/project/local_src.rs index 018e8121..1b8150c5 100644 --- a/core/src/project/local_src.rs +++ b/core/src/project/local_src.rs @@ -1,15 +1,6 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 -use crate::{ - env::utils::{CloneError, clone_project}, - model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, - project::{ - ProjectMut, ProjectRead, - editable::GetPath, - utils::{ToPathBuf, wrapfs}, - }, -}; use std::{ collections::HashSet, fs::File, @@ -17,17 +8,35 @@ use std::{ }; use camino::{Utf8Path, Utf8PathBuf}; +use thiserror::Error; use typed_path::{Utf8UnixPath, Utf8UnixPathBuf}; -use thiserror::Error; +use crate::{ + env::utils::{CloneError, clone_project}, + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + project::{ + ProjectMut, ProjectRead, + editable::GetPath, + utils::{ToPathBuf, wrapfs}, + }, +}; use super::utils::{FsIoError, ProjectDeserializationError, ProjectSerializationError}; /// Project stored in a local directory as an extracted kpar archive. -/// Source file paths with (unix) segments `segment1/.../segmentn` are +/// Source file paths with (unix) segments `segment1/.../segmentN` are /// re-interpreted as filesystem-native paths relative to `project_path`. #[derive(Clone, Debug)] pub struct LocalSrcProject { + /// Path used in `Source::LocalSrc` returned by `.sources()`. + /// If `None` no source will be given. + /// E.g. if used in lockfile would be the path relative to the lockfile. 
+ // TODO: Consider removing this and replacing it with some way of + // relativizing `project_path` at the call site of .sources(). + pub nominal_path: Option, + /// Path used when locating the project internally. + /// Should be absolute. pub project_path: Utf8PathBuf, } @@ -37,10 +46,10 @@ impl GetPath for LocalSrcProject { } } -/// Tries to canonicalise the (longest possible) prefix of a path. +/// Tries to canonicalize the (longest possible) prefix of a path. /// Useful if you have /path/to/file/that/does/not/exist -/// but where some prefix, say, /path/to/file can be canonicalised. -fn canonicalise_prefix>(path: P) -> Utf8PathBuf { +/// but where some prefix, say, /path/to/file can be canonicalized. +fn canonicalize_prefix>(path: P) -> Utf8PathBuf { let mut relative_part = Utf8PathBuf::new(); let mut absolute_part = path.to_path_buf(); @@ -65,25 +74,25 @@ fn canonicalise_prefix>(path: P) -> Utf8PathBuf { absolute_part } -fn relativise_path, Q: AsRef>( +fn relativize_path, Q: AsRef>( path: P, relative_to: Q, ) -> Option { let path = if !path.as_ref().is_absolute() { let path = camino::absolute_utf8(path.as_ref()).ok()?; - canonicalise_prefix(path) + canonicalize_prefix(path) } else { - canonicalise_prefix(path) + canonicalize_prefix(path) }; - path.strip_prefix(canonicalise_prefix(relative_to)) + path.strip_prefix(canonicalize_prefix(relative_to)) .ok() .map(|x| x.to_path_buf()) } impl LocalSrcProject { - pub fn root_path(&self) -> Utf8PathBuf { - self.project_path.clone() + pub fn root_path(&self) -> &Utf8Path { + &self.project_path } pub fn info_path(&self) -> Utf8PathBuf { @@ -109,9 +118,9 @@ impl LocalSrcProject { let root_path = self.root_path(); let project_path = root_path .canonicalize_utf8() - .map_err(|e| UnixPathError::Canonicalize(root_path, e))?; + .map_err(|e| UnixPathError::Canonicalize(root_path.to_owned(), e))?; - let path = relativise_path(&path, project_path) + let path = relativize_path(&path, project_path) .ok_or_else(|| 
UnixPathError::PathOutsideProject(path.to_path_buf()))?; let mut unix_path = Utf8UnixPathBuf::new(); @@ -131,22 +140,22 @@ impl LocalSrcProject { &self, path: P, ) -> Result { - let utf_path = if path.as_ref().is_absolute() { + let path = path.as_ref(); + let utf_path = if path.is_absolute() { if !cfg!(feature = "lenient_checks") { - return Err(PathError::AbsolutePath(path.as_ref().to_owned())); + return Err(PathError::AbsolutePath(path.to_owned())); } // This should never fail, as the only way for a Unix path to be absolute is to begin // at root /. - path.as_ref() - .strip_prefix("/") + path.strip_prefix("/") .expect("internal path processing error") } else { - path.as_ref() + path }; assert!(utf_path.is_relative()); - let mut final_path = self.root_path(); + let mut final_path = self.root_path().to_owned(); let mut added_components = 0; for component in utf_path.components() { match component { @@ -215,6 +224,7 @@ impl LocalSrcProject { > { let tmp = camino_tempfile::tempdir().map_err(FsIoError::MkTempDir)?; let mut tmp_project = Self { + nominal_path: None, project_path: wrapfs::canonicalize(tmp.path())?, }; @@ -401,9 +411,14 @@ impl ProjectRead for LocalSrcProject { Ok(f) } - fn sources(&self) -> Vec { - vec![crate::lock::Source::LocalSrc { - src_path: self.project_path.as_str().into(), - }] + fn sources(&self) -> Vec { + self.nominal_path + .as_ref() + .map(|path| { + vec![Source::LocalSrc { + src_path: path.as_str().into(), + }] + }) + .expect("`LocalSrcProject` without `nominal_path` does not have any project sources") } } diff --git a/core/src/project/memory.rs b/core/src/project/memory.rs index fcb8e26e..2244963a 100644 --- a/core/src/project/memory.rs +++ b/core/src/project/memory.rs @@ -1,11 +1,6 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 -use crate::{ - env::utils::{CloneError, clone_project}, - model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, - project::{ProjectMut, 
ProjectRead}, -}; use std::{ collections::{HashMap, hash_map::Entry}, io::Read, @@ -14,13 +9,20 @@ use std::{ use thiserror::Error; use typed_path::{Utf8UnixPath, Utf8UnixPathBuf}; +use crate::{ + env::utils::{CloneError, clone_project}, + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + project::{ProjectMut, ProjectRead}, +}; + /// Project stored in a local directory -#[derive(Clone, Default, Debug)] +#[derive(Clone, Eq, Default, Debug, PartialEq)] pub struct InMemoryProject { pub info: Option, pub meta: Option, pub files: HashMap, - pub nominal_sources: Vec, + pub nominal_sources: Vec, } impl InMemoryProject { @@ -154,7 +156,7 @@ impl ProjectRead for InMemoryProject { Ok(contents.as_bytes()) } - fn sources(&self) -> Vec { - vec![] + fn sources(&self) -> Vec { + panic!("`InMemoryProject` cannot have any project sources") } } diff --git a/core/src/project/mod.rs b/core/src/project/mod.rs index a5eba207..88336c33 100644 --- a/core/src/project/mod.rs +++ b/core/src/project/mod.rs @@ -1,13 +1,6 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 -use crate::{ - env::utils::ErrorBound, - model::{ - InterchangeProjectChecksumRaw, InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, - InterchangeProjectUsageRaw, KerMlChecksumAlg, ProjectHash, project_hash_raw, - }, -}; use futures::io::{AsyncBufReadExt as _, AsyncRead}; use indexmap::IndexMap; use sha2::{Digest, Sha256}; @@ -18,10 +11,24 @@ use std::{ sync::Arc, }; use thiserror::Error; -use typed_path::Utf8UnixPath; use utils::FsIoError; +pub use sysand_macros::ProjectMut; +pub use sysand_macros::ProjectRead; +pub use typed_path::Utf8UnixPath; + +use crate::{ + env::utils::ErrorBound, + lock::Source, + model::{ + InterchangeProjectChecksumRaw, InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, + InterchangeProjectUsageRaw, KerMlChecksumAlg, ProjectHash, project_hash_raw, + }, +}; + // Implementations +#[cfg(all(feature = 
"filesystem", feature = "networking"))] +pub mod any; pub mod editable; #[cfg(all(feature = "filesystem", feature = "networking"))] pub mod gix_git_download; @@ -39,6 +46,10 @@ pub mod reqwest_kpar_download; #[cfg(feature = "networking")] pub mod reqwest_src; +// Generic implementations +pub mod cached; +pub mod reference; + pub mod utils; fn hash_reader(reader: &mut R) -> Result { @@ -82,7 +93,7 @@ async fn hash_reader_async(reader: &mut R) -> Result { +pub enum CanonicalizationError { #[error(transparent)] ProjectRead(ReadError), #[error("failed to read from file\n `{0}`:\n {1}")] @@ -136,8 +147,8 @@ pub trait ProjectRead { /// multiple ones are listed they should aim to be in /// some typical order of preference. /// - /// May be empty if no valid sources are known. - fn sources(&self) -> Vec; + /// Should panic if no sources are available. + fn sources(&self) -> Vec; // Optional and helpers @@ -177,13 +188,13 @@ pub trait ProjectRead { Ok(self.get_meta()?.and_then(|meta| meta.checksum)) } - /// Produces canonicalised project metadata, replacing all source file hashes by SHA256. + /// Produces canonicalized project metadata, replacing all source file hashes by SHA256. fn canonical_meta( &self, - ) -> Result, CanonicalisationError> { + ) -> Result, CanonicalizationError> { let Some(mut meta) = self .get_meta() - .map_err(CanonicalisationError::ProjectRead)? + .map_err(CanonicalizationError::ProjectRead)? else { return Ok(None); }; @@ -200,11 +211,11 @@ pub trait ProjectRead { let mut src = self .read_source(path) - .map_err(CanonicalisationError::ProjectRead)?; + .map_err(CanonicalizationError::ProjectRead)?; checksum.value = format!( "{:x}", hash_reader(&mut src) - .map_err(|e| CanonicalisationError::FileRead(path.as_str().into(), e))? + .map_err(|e| CanonicalizationError::FileRead(path.as_str().into(), e))? 
); } else { checksum.value = checksum.value.to_lowercase(); @@ -214,19 +225,19 @@ pub trait ProjectRead { Ok(Some(meta)) } - /// Produces a project hash based on project information and the *non-canonicalised* metadata. - fn checksum_noncanonical_hex(&self) -> Result, Self::Error> { + /// Produces a project hash based on project information and the *non-canonicalized* metadata. + fn checksum_non_canonical_hex(&self) -> Result, Self::Error> { Ok(self .get_project() .map(|(info, meta)| info.zip(meta))? .map(|(info, meta)| format!("{:x}", project_hash_raw(&info, &meta)))) } - /// Produces a project hash based on project information and the *canonicalised* metadata. - fn checksum_canonical_hex(&self) -> Result, CanonicalisationError> { + /// Produces a project hash based on project information and the *canonicalized* metadata. + fn checksum_canonical_hex(&self) -> Result, CanonicalizationError> { let info = self .get_info() - .map_err(CanonicalisationError::ProjectRead)?; + .map_err(CanonicalizationError::ProjectRead)?; let meta = self.canonical_meta()?; Ok(info @@ -271,7 +282,7 @@ impl ProjectRead for &T { (*self).read_source(path) } - fn sources(&self) -> Vec { + fn sources(&self) -> Vec { (*self).sources() } @@ -307,15 +318,15 @@ impl ProjectRead for &T { fn canonical_meta( &self, - ) -> Result, CanonicalisationError> { + ) -> Result, CanonicalizationError> { (*self).canonical_meta() } - fn checksum_noncanonical_hex(&self) -> Result, Self::Error> { - (*self).checksum_noncanonical_hex() + fn checksum_non_canonical_hex(&self) -> Result, Self::Error> { + (*self).checksum_non_canonical_hex() } - fn checksum_canonical_hex(&self) -> Result, CanonicalisationError> { + fn checksum_canonical_hex(&self) -> Result, CanonicalizationError> { (*self).checksum_canonical_hex() } } @@ -347,7 +358,7 @@ impl ProjectRead for &mut T { (**self).read_source(path) } - fn sources(&self) -> Vec { + fn sources(&self) -> Vec { (**self).sources() } @@ -383,15 +394,15 @@ impl ProjectRead for 
&mut T { fn canonical_meta( &self, - ) -> Result, CanonicalisationError> { + ) -> Result, CanonicalizationError> { (**self).canonical_meta() } - fn checksum_noncanonical_hex(&self) -> Result, Self::Error> { - (**self).checksum_noncanonical_hex() + fn checksum_non_canonical_hex(&self) -> Result, Self::Error> { + (**self).checksum_non_canonical_hex() } - fn checksum_canonical_hex(&self) -> Result, CanonicalisationError> { + fn checksum_canonical_hex(&self) -> Result, CanonicalizationError> { (**self).checksum_canonical_hex() } } @@ -431,7 +442,7 @@ pub trait ProjectReadAsync { /// some typical order of preference. /// /// May be empty if no valid sources are known. - fn sources_async(&self) -> impl Future>; + fn sources_async(&self) -> impl Future>; // Optional and helpers @@ -478,17 +489,17 @@ pub trait ProjectReadAsync { async { Ok(self.get_meta_async().await?.and_then(|meta| meta.checksum)) } } - /// Produces canonicalised project metadata, replacing all source file hashes by SHA256. + /// Produces canonicalized project metadata, replacing all source file hashes by SHA256. fn canonical_meta_async( &self, ) -> impl Future< - Output = Result, CanonicalisationError>, + Output = Result, CanonicalizationError>, > { async move { let Some(mut meta) = self .get_meta_async() .await - .map_err(CanonicalisationError::ProjectRead)? + .map_err(CanonicalizationError::ProjectRead)? else { return Ok(None); }; @@ -502,11 +513,11 @@ pub trait ProjectReadAsync { let mut src = self .read_source_async(&path) .await - .map_err(CanonicalisationError::ProjectRead)?; + .map_err(CanonicalizationError::ProjectRead)?; checksum.value = format!( "{:x}", hash_reader_async(&mut src).await.map_err(|e| { - CanonicalisationError::FileRead(path.to_string().into(), e) + CanonicalizationError::FileRead(path.clone().into(), e) })? ); } else { @@ -529,8 +540,8 @@ pub trait ProjectReadAsync { } } - /// Produces a project hash based on project information and the *non-canonicalised* metadata. 
- fn checksum_noncanonical_hex_async( + /// Produces a project hash based on project information and the *non-canonicalized* metadata. + fn checksum_non_canonical_hex_async( &self, ) -> impl Future, Self::Error>> { async { @@ -542,15 +553,15 @@ pub trait ProjectReadAsync { } } - /// Produces a project hash based on project information and the *canonicalised* metadata. + /// Produces a project hash based on project information and the *canonicalized* metadata. fn checksum_canonical_hex_async( &self, - ) -> impl Future, CanonicalisationError>> { + ) -> impl Future, CanonicalizationError>> { async { let info = self .get_info_async() .await - .map_err(CanonicalisationError::ProjectRead)?; + .map_err(CanonicalizationError::ProjectRead)?; let meta = self.canonical_meta_async().await?; Ok(info @@ -600,7 +611,7 @@ impl ProjectReadAsync for &T { (**self).read_source_async(path) } - fn sources_async(&self) -> impl Future> { + fn sources_async(&self) -> impl Future> { (**self).sources_async() } @@ -644,20 +655,20 @@ impl ProjectReadAsync for &T { fn canonical_meta_async( &self, ) -> impl Future< - Output = Result, CanonicalisationError>, + Output = Result, CanonicalizationError>, > { (**self).canonical_meta_async() } - fn checksum_noncanonical_hex_async( + fn checksum_non_canonical_hex_async( &self, ) -> impl Future, Self::Error>> { - (**self).checksum_noncanonical_hex_async() + (**self).checksum_non_canonical_hex_async() } fn checksum_canonical_hex_async( &self, - ) -> impl Future, CanonicalisationError>> { + ) -> impl Future, CanonicalizationError>> { (**self).checksum_canonical_hex_async() } } @@ -691,7 +702,7 @@ impl ProjectReadAsync for &mut T { (**self).read_source_async(path) } - fn sources_async(&self) -> impl Future> { + fn sources_async(&self) -> impl Future> { (**self).sources_async() } @@ -735,20 +746,20 @@ impl ProjectReadAsync for &mut T { fn canonical_meta_async( &self, ) -> impl Future< - Output = Result, CanonicalisationError>, + Output = Result, 
CanonicalizationError>, > { (**self).canonical_meta_async() } - fn checksum_noncanonical_hex_async( + fn checksum_non_canonical_hex_async( &self, ) -> impl Future, Self::Error>> { - (**self).checksum_noncanonical_hex_async() + (**self).checksum_non_canonical_hex_async() } fn checksum_canonical_hex_async( &self, - ) -> impl Future, CanonicalisationError>> { + ) -> impl Future, CanonicalizationError>> { (**self).checksum_canonical_hex_async() } } @@ -818,26 +829,27 @@ pub trait ProjectMut: ProjectRead { compute_checksum: bool, overwrite: bool, ) -> Result<(), ProjectOrIOError> { + let path = path.as_ref(); let mut meta = self .get_meta() .map_err(ProjectOrIOError::Project)? .unwrap_or_else(InterchangeProjectMetadataRaw::generate_blank); { - let mut reader = self.read_source(&path).map_err(ProjectOrIOError::Project)?; + let mut reader = self.read_source(path).map_err(ProjectOrIOError::Project)?; if compute_checksum { let sha256_checksum = hash_reader(&mut reader) - .map_err(|e| FsIoError::ReadFile(path.as_ref().as_str().into(), e))?; + .map_err(|e| FsIoError::ReadFile(path.as_str().into(), e))?; meta.add_checksum( - &path, + path, KerMlChecksumAlg::Sha256, format!("{:x}", sha256_checksum), overwrite, ); } else { - meta.add_checksum(&path, KerMlChecksumAlg::None, "", overwrite); + meta.add_checksum(path, KerMlChecksumAlg::None, "", overwrite); } } @@ -1019,7 +1031,7 @@ where }) } - async fn sources_async(&self) -> Vec { + async fn sources_async(&self) -> Vec { self.inner.sources() } } @@ -1079,7 +1091,7 @@ impl ProjectRead for AsSyncProjectTokio { }) } - fn sources(&self) -> Vec { + fn sources(&self) -> Vec { self.runtime.block_on(self.inner.sources_async()) } @@ -1118,10 +1130,10 @@ mod tests { } #[test] - fn test_canonicalisation_no_checksums() -> Result<(), Box> { + fn test_canonicalization_no_checksums() -> Result<(), Box> { let project = InMemoryProject { info: Some(InterchangeProjectInfoRaw { - name: "test_canonicalisation".to_string(), + name: 
"test_canonicalization".to_string(), description: None, version: "1.2.3".to_string(), license: None, @@ -1172,3 +1184,54 @@ mod tests { Ok(()) } } + +#[cfg(test)] +mod macro_tests { + use crate::project::{ProjectMut, ProjectRead, memory::InMemoryProject}; + + // Have to have these in scope for ProjectRead + // TODO: Find a better solution (that works both inside and outside sysand_core) + use crate::lock::Source; + use crate::model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}; + use typed_path::Utf8UnixPath; + + #[derive(ProjectRead)] + enum NonGenericProjectRead { + Variant(InMemoryProject), + } + + #[test] + fn test_macro_read() { + let _project = NonGenericProjectRead::Variant(InMemoryProject::new()); + } + + #[derive(ProjectRead, ProjectMut)] + enum NonGenericProjectMut { + Variant(InMemoryProject), + } + + #[test] + fn test_macro_mut() { + let _project = NonGenericProjectMut::Variant(InMemoryProject::new()); + } + + #[derive(ProjectRead)] + enum GenericProjectRead { + Variant(SomeProject), + } + + #[test] + fn test_macro_generic_read() { + let _project = GenericProjectRead::::Variant(InMemoryProject::new()); + } + + #[derive(ProjectRead, ProjectMut)] + enum GenericProjectMut { + Variant(SomeProject), + } + + #[test] + fn test_macro_generic_mut() { + let _project = GenericProjectMut::::Variant(InMemoryProject::new()); + } +} diff --git a/core/src/project/null.rs b/core/src/project/null.rs index baefe635..5500c4f9 100644 --- a/core/src/project/null.rs +++ b/core/src/project/null.rs @@ -7,11 +7,15 @@ use std::{ pin::Pin, }; -use crate::project::{ProjectRead, ProjectReadAsync}; - use futures::AsyncRead; use thiserror::Error; +use crate::{ + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + project::{ProjectRead, ProjectReadAsync}, +}; + #[derive(Debug)] pub struct NullProject { nothing: Infallible, @@ -50,8 +54,8 @@ impl ProjectRead for NullProject { &self, ) -> Result< ( - Option, - Option, + Option, + Option, ), 
Self::Error, > { @@ -70,7 +74,7 @@ impl ProjectRead for NullProject { match self.nothing {} } - fn sources(&self) -> Vec { + fn sources(&self) -> Vec { match self.nothing {} } } @@ -82,8 +86,8 @@ impl ProjectReadAsync for NullProject { &self, ) -> Result< ( - Option, - Option, + Option, + Option, ), Self::Error, > { @@ -102,7 +106,7 @@ impl ProjectReadAsync for NullProject { match self.nothing {} } - async fn sources_async(&self) -> Vec { + async fn sources_async(&self) -> Vec { match self.nothing {} } } diff --git a/core/src/project/reference.rs b/core/src/project/reference.rs new file mode 100644 index 00000000..ada099d3 --- /dev/null +++ b/core/src/project/reference.rs @@ -0,0 +1,76 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use std::sync::Arc; + +use crate::{ + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + project::ProjectRead, +}; + +// Arc wrapper around project to make cloning possible +// (necessary for compatibility with `MemoryResolver`) +#[derive(Debug)] +pub struct ProjectReference { + project: Arc, +} + +impl ProjectReference { + pub fn new(project: Project) -> Self { + Self { + project: Arc::new(project), + } + } +} + +impl Clone for ProjectReference { + fn clone(&self) -> Self { + Self { + project: self.project.clone(), + } + } +} + +impl ProjectRead for ProjectReference { + type Error = Project::Error; + + fn get_project( + &self, + ) -> Result< + ( + Option, + Option, + ), + Self::Error, + > { + self.project.get_project() + } + + type SourceReader<'a> + = Project::SourceReader<'a> + where + Self: 'a; + + fn read_source>( + &self, + path: P, + ) -> Result, Self::Error> { + self.project.read_source(path) + } + + fn sources(&self) -> Vec { + self.project.sources() + } +} + +#[cfg(feature = "filesystem")] +#[cfg(test)] +mod test { + use crate::project::{local_kpar::LocalKParProject, reference::ProjectReference}; + #[test] + fn test_kpar() { + let 
kpar = ProjectReference::new(LocalKParProject::new("path", "root").unwrap()); + let _clone = kpar.clone(); + } +} diff --git a/core/src/project/reqwest_kpar_download.rs b/core/src/project/reqwest_kpar_download.rs index ecf75c3d..baa2a48e 100644 --- a/core/src/project/reqwest_kpar_download.rs +++ b/core/src/project/reqwest_kpar_download.rs @@ -8,13 +8,14 @@ use std::{ sync::Arc, }; -use camino_tempfile::tempdir; use futures::AsyncRead; use reqwest_middleware::{ClientWithMiddleware, RequestBuilder}; use thiserror::Error; use crate::{ auth::HTTPAuthentication, + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, project::{ ProjectRead, ProjectReadAsync, local_kpar::{LocalKParError, LocalKParProject}, @@ -69,20 +70,10 @@ impl ReqwestKparDownloadedProject { client: reqwest_middleware::ClientWithMiddleware, auth_policy: Arc, ) -> Result { - let tmp_dir = tempdir().map_err(FsIoError::MkTempDir)?; - Ok(ReqwestKparDownloadedProject { url: reqwest::Url::parse(url.as_ref()) .map_err(|e| ReqwestKparDownloadedError::ParseUrl(url.as_ref().into(), e))?, - inner: LocalKParProject { - archive_path: { - let mut p = wrapfs::canonicalize(tmp_dir.path())?; - p.push("project.kpar"); - p - }, - tmp_dir, - root: None, - }, + inner: LocalKParProject::new_temporary()?, client, auth_policy, }) @@ -154,8 +145,8 @@ impl ProjectReadAsync for ReqwestKparDownloadedProje &self, ) -> Result< ( - Option, - Option, + Option, + Option, ), Self::Error, > { @@ -180,8 +171,8 @@ impl ProjectReadAsync for ReqwestKparDownloadedProje }) } - async fn sources_async(&self) -> Vec { - vec![crate::lock::Source::RemoteKpar { + async fn sources_async(&self) -> Vec { + vec![Source::RemoteKpar { remote_kpar: self.url.to_string(), remote_kpar_size: self.inner.file_size().ok(), }] diff --git a/core/src/project/reqwest_src.rs b/core/src/project/reqwest_src.rs index da000fef..c8cef939 100644 --- a/core/src/project/reqwest_src.rs +++ b/core/src/project/reqwest_src.rs @@ -14,6 +14,7 @@ use 
typed_path::Utf8UnixPath; use crate::{ auth::HTTPAuthentication, + lock::Source, model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, project::ProjectReadAsync, }; @@ -207,8 +208,8 @@ impl ProjectReadAsync for ReqwestSrcProjectAsync Vec { - vec![crate::lock::Source::RemoteSrc { + async fn sources_async(&self) -> Vec { + vec![Source::RemoteSrc { remote_src: self.url.to_string(), }] } diff --git a/core/src/project/utils.rs b/core/src/project/utils.rs index 346249e3..d411014d 100644 --- a/core/src/project/utils.rs +++ b/core/src/project/utils.rs @@ -1,7 +1,7 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 -use camino::{Utf8Path, Utf8PathBuf}; +use camino::{Utf8Component, Utf8Path, Utf8PathBuf}; use thiserror::Error; #[cfg(feature = "filesystem")] use zip::{self, result::ZipError}; @@ -85,6 +85,10 @@ pub enum FsIoError { RmDir(Utf8PathBuf, io::Error), #[error("failed to get path to current directory:\n {0}")] CurrentDir(io::Error), + #[error("failed to get metadata to determine if\n `{0}` is a regular file:\n {1}")] + IsFile(Utf8PathBuf, io::Error), + #[error("failed to get metadata to determine if\n `{0}` is a directory:\n {1}")] + IsDir(Utf8PathBuf, io::Error), } /// Wrappers for filesystem I/O functions to return `FsIoError`. @@ -93,6 +97,7 @@ pub mod wrapfs { use std::fs; use std::io; + use std::io::ErrorKind; use camino::Utf8Path; use camino::Utf8PathBuf; @@ -203,6 +208,54 @@ pub mod wrapfs { }) .map_err(|e| Box::new(FsIoError::CurrentDir(e))) } + + /// Returns `true` if the given path exists and is a regular file. + /// + /// This function attempts to retrieve the metadata for `path` and checks + /// whether it represents a regular file. + /// + /// # Returns + /// + /// - `Ok(true)` if the path exists and is a regular file. + /// - `Ok(false)` if the path does not exist or it is not a regular file. 
+ /// - `Err(_)` if an I/O error occurs while retrieving metadata for reasons + /// other than the path not being found (e.g., permission denied). + /// + /// # Errors + /// + /// Returns an [`FsIoError`] if metadata retrieval fails for any reason + /// other than [`std::io::ErrorKind::NotFound`]. + pub fn is_file>(path: P) -> Result> { + match fs::metadata(path.as_ref()) { + Ok(metadata) => Ok(metadata.is_file()), + Err(err) if err.kind() == ErrorKind::NotFound => Ok(false), + Err(err) => Err(Box::new(FsIoError::IsFile(path.as_ref().into(), err))), + } + } + + /// Returns `true` if the given path exists and is a directory. + /// + /// This function attempts to retrieve the metadata for `path` and checks + /// whether it represents a directory. + /// + /// # Returns + /// + /// - `Ok(true)` if the path exists and is a directory. + /// - `Ok(false)` if the path does not exist or it is not a directory. + /// - `Err(_)` if an I/O error occurs while retrieving metadata for reasons + /// other than the path not being found (e.g., permission denied). + /// + /// # Errors + /// + /// Returns an [`FsIoError`] if metadata retrieval fails for any reason + /// other than [`std::io::ErrorKind::NotFound`]. 
+ pub fn is_dir>(path: P) -> Result> { + match fs::metadata(path.as_ref()) { + Ok(metadata) => Ok(metadata.is_dir()), + Err(err) if err.kind() == ErrorKind::NotFound => Ok(false), + Err(err) => Err(Box::new(FsIoError::IsDir(path.as_ref().into(), err))), + } + } } #[derive(Debug, Error)] pub enum ZipArchiveError { @@ -250,3 +303,351 @@ pub enum ZipArchiveError { #[error("failed to finish creating zip archive at `{0}`: {1}")] Finish(Box, ZipError), } + +#[derive(Debug, Error)] +pub enum RelativizePathError { + #[error("path `{0}` is not absolute")] + RelativePath(Utf8PathBuf), + #[error("root `{0}` is not absolute")] + RelativeRoot(Utf8PathBuf), + #[error("path `{0}` contains invalid components (`.` or `..`)")] + NonCanonicalPath(Utf8PathBuf), + #[error("root `{0}` contains invalid components (`.` or `..`)")] + NonCanonicalRoot(Utf8PathBuf), + #[error("unable to relativize path `{path}` with respect to `{root}`")] + NoCommonPrefix { + path: Utf8PathBuf, + root: Utf8PathBuf, + }, +} + +// Note: `components()` ignores non-leading `CurDir` so paths like `/a/./b` +// will not register as non-canonical. +fn contains_non_canonical_components(path: &Utf8Path) -> bool { + path.components() + .any(|c| matches!(c, Utf8Component::CurDir | Utf8Component::ParentDir)) +} + +/// Computes the relative path from `root` to `path`. +/// +/// Both `path` and `root` must be absolute and structurally canonical: +/// +/// - They must be absolute paths. +/// - They must not contain `.` (`CurDir`) components. +/// - They must not contain `..` (`ParentDir`) components. +/// - They must share the same path prefix (e.g., drive letter on Windows). +/// +/// This function performs purely syntactic path manipulation. It does **not** +/// access the filesystem and does not resolve symlinks. Callers are expected +/// to pass paths that have been canonicalized beforehand (e.g., via +/// [`wrapfs::canonicalize`] or equivalent). 
+/// +/// # Returns +/// +/// - `Ok(relative_path)` if a relative path from `root` to `path` can be computed. +/// - `Err(RelativizePathError)` if: +/// - Either input path is relative. +/// - Either input contains `.` or `..` components. +/// - The paths do not share a common prefix. +/// +/// If `path` and `root` are identical, a `Utf8PathBuf` with a single `.` component +/// is returned. +/// +/// # Examples +/// +/// ```rust +/// # use camino::Utf8Path; +/// # use sysand_core::project::utils::relativize_path; +/// let path = if cfg!(windows) { +/// Utf8Path::new(r"C:\a\b\c") +/// } else { +/// Utf8Path::new("/a/b/c") +/// }; +/// let root = if cfg!(windows) { +/// Utf8Path::new(r"C:\a\b") +/// } else { +/// Utf8Path::new("/a/b") +/// }; +/// +/// let relative = relativize_path(path, root).unwrap(); +/// assert_eq!(relative, "c"); +/// ``` +/// +/// ```rust +/// # use camino::Utf8Path; +/// # use sysand_core::project::utils::relativize_path; +/// let path = if cfg!(windows) { +/// Utf8Path::new(r"C:\a\b") +/// } else { +/// Utf8Path::new("/a/b") +/// }; +/// let root = if cfg!(windows) { +/// Utf8Path::new(r"C:\a\b\c") +/// } else { +/// Utf8Path::new("/a/b/c") +/// }; +/// +/// let relative = relativize_path(path, root).unwrap(); +/// assert_eq!(relative, ".."); +/// ``` +pub fn relativize_path, R: AsRef>( + path: P, + root: R, +) -> Result { + let path = path.as_ref(); + let root = root.as_ref(); + + if path.is_relative() { + return Err(RelativizePathError::RelativePath(path.to_path_buf())); + } + if root.is_relative() { + return Err(RelativizePathError::RelativeRoot(root.to_path_buf())); + } + + if contains_non_canonical_components(path) { + return Err(RelativizePathError::NonCanonicalPath(path.to_path_buf())); + } + + if contains_non_canonical_components(root) { + return Err(RelativizePathError::NonCanonicalRoot(root.to_path_buf())); + } + + let mut path_iter = path.components().peekable(); + let mut root_iter = root.components().peekable(); + + // Both 
paths are absolute, so prefixes can only differ on Windows in the cases when: + // - paths point to different drives + // - path kinds differ (regular vs UNC vs DOS device paths) + // Failure should not be possible on Unix-like systems, since absolute + // paths all start with the common prefix `/`. + match (path_iter.peek(), root_iter.peek()) { + (Some(p0), Some(r0)) if p0 == r0 => { + path_iter.next(); + root_iter.next(); + } + _ => { + return Err(RelativizePathError::NoCommonPrefix { + path: path.to_path_buf(), + root: root.to_path_buf(), + }); + } + } + + while let (Some(p), Some(r)) = (path_iter.peek(), root_iter.peek()) { + if p == r { + path_iter.next(); + root_iter.next(); + } else { + break; + } + } + + let mut result = Utf8PathBuf::new(); + + for r in root_iter { + if let Utf8Component::Normal(_) = r { + result.push(".."); + } + } + + for p in path_iter { + result.push(p.as_str()); + } + + if result.as_str().is_empty() { + result.push("."); + } + + Ok(result) +} + +#[cfg(test)] +mod tests { + use std::error::Error; + + use camino::Utf8Path; + + use crate::project::utils::{RelativizePathError, relativize_path}; + + #[test] + fn simple_relativize_path() -> Result<(), Box> { + let path = if cfg!(windows) { + Utf8Path::new(r"C:\a\b\c") + } else { + Utf8Path::new("/a/b/c") + }; + let root = if cfg!(windows) { + Utf8Path::new(r"C:\") + } else { + Utf8Path::new("/") + }; + let relative = if cfg!(windows) { + Utf8Path::new(r"a\b\c") + } else { + Utf8Path::new("a/b/c") + }; + assert_eq!(relativize_path(path, root)?, relative.as_str()); + Ok(()) + } + + #[test] + fn backtracking_relativize_path() -> Result<(), Box> { + let path = if cfg!(windows) { + Utf8Path::new(r"C:\a\b\c") + } else { + Utf8Path::new("/a/b/c") + }; + let root = if cfg!(windows) { + Utf8Path::new(r"C:\d\e\f") + } else { + Utf8Path::new("/d/e/f") + }; + let relative = if cfg!(windows) { + Utf8Path::new(r"..\..\..\a\b\c") + } else { + Utf8Path::new("../../../a/b/c") + }; + 
assert_eq!(relativize_path(path, root)?, relative.as_str()); + Ok(()) + } + + #[test] + fn trivial_relativize_path() -> Result<(), Box> { + let path = if cfg!(windows) { + Utf8Path::new(r"C:\a\b\c") + } else { + Utf8Path::new("/a/b/c") + }; + let root = if cfg!(windows) { + Utf8Path::new(r"C:\a\b\c") + } else { + Utf8Path::new("/a/b/c") + }; + let relative = Utf8Path::new("."); + assert_eq!(relativize_path(path, root)?, relative.as_str()); + Ok(()) + } + + #[test] + fn relativize_path_error_relative_path() -> Result<(), Box> { + let path = if cfg!(windows) { + Utf8Path::new(r"a\b\c") + } else { + Utf8Path::new("a/b/c") + }; + let root = if cfg!(windows) { + Utf8Path::new(r"C:\a\b\c") + } else { + Utf8Path::new("/a/b/c") + }; + let Err(err) = relativize_path(path, root) else { + panic!("`relativize_path` did not return error"); + }; + let RelativizePathError::RelativePath(err_path) = err else { + panic!( + "expected `RelativizePathError::RelativePath`, got:\n{:?}", + err + ); + }; + assert_eq!(err_path, path); + Ok(()) + } + + #[test] + fn relativize_path_error_relative_root() -> Result<(), Box> { + let path = if cfg!(windows) { + Utf8Path::new(r"C:\a\b\c") + } else { + Utf8Path::new("/a/b/c") + }; + let root = if cfg!(windows) { + Utf8Path::new(r"a\b\c") + } else { + Utf8Path::new("a/b/c") + }; + let Err(err) = relativize_path(path, root) else { + panic!("`relativize_path` did not return error"); + }; + let RelativizePathError::RelativeRoot(err_root) = err else { + panic!( + "expected `RelativizePathError::RelativeRoot`, got:\n{:?}", + err + ); + }; + assert_eq!(err_root, root); + Ok(()) + } + + #[test] + fn relativize_path_error_non_canonical() -> Result<(), Box> { + let path = if cfg!(windows) { + Utf8Path::new(r"C:\a\..\c") + } else { + Utf8Path::new("/a/../c") + }; + let root = if cfg!(windows) { + Utf8Path::new(r"C:\a\b\c") + } else { + Utf8Path::new("/a/b/c") + }; + let Err(err) = relativize_path(path, root) else { + panic!("`relativize_path` did not return 
error"); + }; + let RelativizePathError::NonCanonicalPath(err_path) = err else { + panic!( + "expected `RelativizePathError::NonCanonicalPath`, got:\n{:?}", + err + ); + }; + assert_eq!(err_path, path); + Ok(()) + } + + #[test] + fn relativize_path_error_non_canonical_root() -> Result<(), Box> { + let path = if cfg!(windows) { + Utf8Path::new(r"C:\a\b\c") + } else { + Utf8Path::new("/a/b/c") + }; + let root = if cfg!(windows) { + Utf8Path::new(r"C:\a\..\c") + } else { + Utf8Path::new("/a/../c") + }; + let Err(err) = relativize_path(path, root) else { + panic!("`relativize_path` did not return error"); + }; + let RelativizePathError::NonCanonicalRoot(err_root) = err else { + panic!( + "expected `RelativizePathError::NonCanonicalRoot`, got:\n{:?}", + err + ); + }; + assert_eq!(err_root, root); + Ok(()) + } + + #[cfg(target_os = "windows")] + #[test] + fn relativize_path_error_non_common_prefix() -> Result<(), Box> { + let path = Utf8Path::new(r"C:\a\b\c"); + let root = Utf8Path::new(r"D:\a\b\c"); + let Err(err) = relativize_path(path, root) else { + panic!("`relativize_path` did not return error"); + }; + let RelativizePathError::NoCommonPrefix { + path: err_path, + root: err_root, + } = err + else { + panic!( + "expected `RelativizePathError::NoCommonPrefix`, got:\n{:?}", + err + ); + }; + assert_eq!(err_path, path); + assert_eq!(err_root, root); + Ok(()) + } +} diff --git a/core/src/resolve/combined.rs b/core/src/resolve/combined.rs index 12dde267..c3e71f69 100644 --- a/core/src/resolve/combined.rs +++ b/core/src/resolve/combined.rs @@ -1,17 +1,18 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 -use std::{ - io::{self, Read}, - iter::Peekable, -}; +use std::{fmt::Debug, iter::Peekable}; use indexmap::IndexMap; use thiserror::Error; +use typed_path::Utf8UnixPath; use crate::{ - model::{ProjectHash, project_hash_raw}, - project::ProjectRead, + lock::Source, + model::{ + InterchangeProjectInfoRaw, 
InterchangeProjectMetadataRaw, ProjectHash, project_hash_raw, + }, + project::{ProjectRead, cached::CachedProject}, resolve::{ResolutionOutcome, ResolveRead, null::NullResolver}, }; @@ -70,171 +71,23 @@ pub enum CombinedResolverError { - #[error(transparent)] - File(FileError), - #[error(transparent)] - Local(LocalError), - #[error(transparent)] - Remote(RemoteError), - #[error(transparent)] - Registry(RegistryError), -} - /// Outcome of a standard resolution remembers the (resolver) source of the project. /// Can either be taken apart or used directly as a project storage. -#[derive(Debug)] +#[derive(Debug, ProjectRead)] pub enum CombinedProjectStorage< - FileProjectStorage, - LocalProjectStorage, - RemoteProjectStorage, - RegistryProjectStorage, + FileProjectStorage: ProjectRead, + LocalProjectStorage: ProjectRead, + RemoteProjectStorage: ProjectRead, + RegistryProjectStorage: ProjectRead, > { FileProject(FileProjectStorage), RemoteProject(RemoteProjectStorage), RegistryProject(RegistryProjectStorage), - CachedRemoteProject(LocalProjectStorage, RemoteProjectStorage), - CachedRegistryProject(LocalProjectStorage, RegistryProjectStorage), + CachedRemoteProject(CachedProject), + CachedRegistryProject(CachedProject), DanglingLocalProject(LocalProjectStorage), } -pub enum CombinedSourceReader { - FileProject(FileReader), - LocalProject(LocalReader), - RemoteProject(RemoteReader), - RegistryProject(RegistryReader), -} - -impl Read - for CombinedSourceReader -{ - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match self { - CombinedSourceReader::FileProject(reader) => reader.read(buf), - CombinedSourceReader::LocalProject(reader) => reader.read(buf), - CombinedSourceReader::RemoteProject(reader) => reader.read(buf), - CombinedSourceReader::RegistryProject(reader) => reader.read(buf), - } - } -} - -impl< - FileProjectStorage: ProjectRead, - LocalProjectStorage: ProjectRead, - RemoteProjectStorage: ProjectRead, - RegistryProjectStorage: ProjectRead, -> ProjectRead - 
for CombinedProjectStorage< - FileProjectStorage, - LocalProjectStorage, - RemoteProjectStorage, - RegistryProjectStorage, - > -{ - type Error = CombinedReadError< - FileProjectStorage::Error, - LocalProjectStorage::Error, - RemoteProjectStorage::Error, - RegistryProjectStorage::Error, - >; - - fn get_project( - &self, - ) -> Result< - ( - Option, - Option, - ), - Self::Error, - > { - match self { - CombinedProjectStorage::FileProject(project) => { - project.get_project().map_err(CombinedReadError::File) - } - CombinedProjectStorage::RemoteProject(project) => { - project.get_project().map_err(CombinedReadError::Remote) - } - CombinedProjectStorage::RegistryProject(project) => { - project.get_project().map_err(CombinedReadError::Registry) - } - CombinedProjectStorage::CachedRemoteProject(project, _) => { - project.get_project().map_err(CombinedReadError::Local) - } - CombinedProjectStorage::CachedRegistryProject(project, _) => { - project.get_project().map_err(CombinedReadError::Local) - } - CombinedProjectStorage::DanglingLocalProject(project) => { - project.get_project().map_err(CombinedReadError::Local) - } - } - } - - type SourceReader<'a> - = CombinedSourceReader< - FileProjectStorage::SourceReader<'a>, - LocalProjectStorage::SourceReader<'a>, - RemoteProjectStorage::SourceReader<'a>, - RegistryProjectStorage::SourceReader<'a>, - > - where - Self: 'a; - - fn read_source>( - &self, - path: P, - ) -> Result, Self::Error> { - match self { - CombinedProjectStorage::FileProject(project) => project - .read_source(path) - .map_err(CombinedReadError::File) - .map(CombinedSourceReader::FileProject), - CombinedProjectStorage::RemoteProject(project) => project - .read_source(path) - .map_err(CombinedReadError::Remote) - .map(CombinedSourceReader::RemoteProject), - CombinedProjectStorage::RegistryProject(project) => project - .read_source(path) - .map_err(CombinedReadError::Registry) - .map(CombinedSourceReader::RegistryProject), - 
CombinedProjectStorage::CachedRemoteProject(project, _) => project - .read_source(path) - .map_err(CombinedReadError::Local) - .map(CombinedSourceReader::LocalProject), - CombinedProjectStorage::CachedRegistryProject(project, _) => project - .read_source(path) - .map_err(CombinedReadError::Local) - .map(CombinedSourceReader::LocalProject), - CombinedProjectStorage::DanglingLocalProject(project) => project - .read_source(path) - .map_err(CombinedReadError::Local) - .map(CombinedSourceReader::LocalProject), - } - } - - fn is_definitely_invalid(&self) -> bool { - match self { - CombinedProjectStorage::FileProject(proj) => proj.is_definitely_invalid(), - CombinedProjectStorage::RemoteProject(proj) => proj.is_definitely_invalid(), - CombinedProjectStorage::RegistryProject(proj) => proj.is_definitely_invalid(), - CombinedProjectStorage::CachedRemoteProject(proj, _) => proj.is_definitely_invalid(), - CombinedProjectStorage::CachedRegistryProject(proj, _) => proj.is_definitely_invalid(), - CombinedProjectStorage::DanglingLocalProject(proj) => proj.is_definitely_invalid(), - } - } - - fn sources(&self) -> Vec { - match self { - CombinedProjectStorage::FileProject(proj) => proj.sources(), - CombinedProjectStorage::RemoteProject(proj) => proj.sources(), - CombinedProjectStorage::RegistryProject(proj) => proj.sources(), - CombinedProjectStorage::CachedRemoteProject(_, proj) => proj.sources(), - CombinedProjectStorage::CachedRegistryProject(_, proj) => proj.sources(), - CombinedProjectStorage::DanglingLocalProject(proj) => proj.sources(), - } - } -} - pub enum CombinedIteratorState< FileResolver: ResolveRead, RemoteResolver: ResolveRead, @@ -307,7 +160,10 @@ impl< }); if let Some(local_project) = cached { - CombinedProjectStorage::CachedRemoteProject(local_project, project) + CombinedProjectStorage::CachedRemoteProject(CachedProject::new( + local_project, + project, + )) } else { CombinedProjectStorage::RemoteProject(project) } @@ -328,7 +184,10 @@ impl< }); if let 
Some(local_project) = cached { - CombinedProjectStorage::CachedRegistryProject(local_project, project) + CombinedProjectStorage::CachedRegistryProject(CachedProject::new( + local_project, + project, + )) } else { CombinedProjectStorage::RegistryProject(project) } @@ -377,13 +236,12 @@ impl< // use it. // TODO: autodetect git (and possibly other VCSs), and use appropriate (e.g. git) resolver for them. if let Some(file_resolver) = &self.file_resolver { - let mut rejected = vec![]; match file_resolver .resolve_read(uri) .map_err(CombinedResolverError::File)? { ResolutionOutcome::UnsupportedIRIType(msg) => { - log::debug!("File resolver rejects IRI '{}' due to: {}", uri, msg); + log::debug!("file resolver rejects IRI `{uri}` due to: {msg}"); } // Just continue ResolutionOutcome::Resolved(r) => { //at_least_one_supports = true; @@ -393,16 +251,11 @@ impl< })); } ResolutionOutcome::Unresolvable(msg) => { - rejected.push(msg); + return Ok(ResolutionOutcome::Unresolvable(format!( + "failed to resolve as file: {msg}" + ))); } } - - if !rejected.is_empty() { - return Ok(ResolutionOutcome::Unresolvable(format!( - "failed to resolve as file: {:?}", - rejected - ))); - } } // Collect local cached projects @@ -419,9 +272,7 @@ impl< match res { Err(err) => { log::debug!( - "Local resolver rejected project with IRI {} due to: {:?}", - uri, - err + "local resolver rejected project with IRI `{uri}` due to: {err}" ); } Ok(project) => match project.get_project() { @@ -430,15 +281,12 @@ impl< } Ok(_) => { log::debug!( - "Local resolver rejected project with IRI {} due to missing project/info", - uri + "local resolver rejected project with IRI `{uri}` due to missing project/info" ); } Err(err) => { log::debug!( - "Local resolver rejected project with IRI {} due to: {:?}", - uri, - err + "local resolver rejected project with IRI `{uri}` due to: {err}" ); } }, @@ -446,15 +294,11 @@ impl< } } ResolutionOutcome::UnsupportedIRIType(msg) => { - log::debug!("Local resolver rejected IRI {} due 
to: {}", uri, msg); + log::debug!("local resolver rejected IRI `{uri}` due to: {msg}"); } ResolutionOutcome::Unresolvable(msg) => { at_least_one_supports = true; - log::debug!( - "Local resolver unable to resolve IRI {} due to: {}", - uri, - msg - ); + log::debug!("local resolver unable to resolve IRI `{uri}` due to: {msg}"); } }; } @@ -469,15 +313,11 @@ impl< .map_err(CombinedResolverError::Remote)? { ResolutionOutcome::UnsupportedIRIType(msg) => { - log::debug!("Remote resolver rejects IRI {} due to: {}", uri, msg); + log::debug!("remote resolver rejects IRI `{uri}` due to: {msg}"); } ResolutionOutcome::Unresolvable(msg) => { at_least_one_supports = true; - log::debug!( - "Remote resolver unable to resolve IRI {} due to: {}", - uri, - msg - ); + log::debug!("remote resolver unable to resolve IRI `{uri}` due to: {msg}"); } ResolutionOutcome::Resolved(remote_projects) => { at_least_one_supports = true; @@ -488,9 +328,7 @@ impl< match remote_projects.peek() { Some(Err(err)) => { log::debug!( - "Remote resolver skipping projrect for IRI {} due to: {}", - uri, - err + "remote resolver skipping project for IRI `{uri}` due to: {err}" ); remote_projects.next(); } @@ -512,16 +350,13 @@ impl< } Ok(_) => { log::debug!( - "Remote resolver skipping projrect for IRI {} due to missing info/meta", - uri + "remote resolver skipping project for IRI `{uri}` due to missing info/meta" ); remote_projects.next(); } Err(err) => { log::debug!( - "Remote resolver skipping projrect for IRI {} due to: {:?}", - uri, - err + "remote resolver skipping project for IRI `{uri}` due to: {err}" ); remote_projects.next(); } @@ -529,8 +364,7 @@ impl< } None => { log::debug!( - "Remote resolver unable to find valid project for IRI {}", - uri + "remote resolver unable to find valid project for IRI `{uri}`" ); break; } @@ -553,15 +387,11 @@ impl< })); } ResolutionOutcome::UnsupportedIRIType(msg) => { - log::debug!("Registry resolver rejects IRI {} due to: {}", uri, msg); + log::debug!("registry resolver 
rejects IRI `{uri}` due to: {msg}"); } ResolutionOutcome::Unresolvable(msg) => { at_least_one_supports = true; - log::debug!( - "Registry resolver unable to resolve IRI {} due to: {}", - uri, - msg - ); + log::debug!("registry resolver unable to resolve IRI `{uri}` due to: {msg}"); } }; } @@ -569,11 +399,11 @@ impl< // As a last resort, use only locally cached projects, if any were found if !at_least_one_supports { Ok(ResolutionOutcome::UnsupportedIRIType( - "No resolver accepted the IRI".to_string(), + "no resolver accepted the IRI".to_owned(), )) } else if locals.is_empty() { Ok(ResolutionOutcome::Unresolvable( - "No resolver was able to resolve the IRI".to_string(), + "no resolver was able to resolve the IRI".to_owned(), )) } else { Ok(ResolutionOutcome::Resolved(CombinedIterator { diff --git a/core/src/resolve/file.rs b/core/src/resolve/file.rs index 12c4752b..35f708d4 100644 --- a/core/src/resolve/file.rs +++ b/core/src/resolve/file.rs @@ -8,7 +8,12 @@ use std::{ path::PathBuf, }; +use camino::{Utf8Path, Utf8PathBuf}; +use fluent_uri::component::Scheme; +use thiserror::Error; + use crate::{ + lock::Source, model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, project::{ self, ProjectRead, @@ -20,10 +25,6 @@ use crate::{ resolve::{ResolutionOutcome, ResolveRead}, }; -use camino::{Utf8Path, Utf8PathBuf}; -use fluent_uri::component::Scheme; -use thiserror::Error; - /// Resolver for resolving `file://` URIs. #[derive(Debug)] pub struct FileResolver { @@ -259,7 +260,7 @@ impl ProjectRead for FileResolverProject { } } - fn sources(&self) -> Vec { + fn sources(&self) -> Vec { match self { FileResolverProject::LocalSrcProject(proj) => proj.sources(), FileResolverProject::LocalKParProject(proj) => proj.sources(), @@ -281,6 +282,7 @@ impl ResolveRead for FileResolver { Ok(match self.resolve_general(uri)? 
{ ResolutionOutcome::Resolved(path) => ResolutionOutcome::Resolved(vec![ Ok(FileResolverProject::LocalSrcProject(LocalSrcProject { + nominal_path: None, project_path: path.clone(), })), Ok(FileResolverProject::LocalKParProject( diff --git a/core/src/resolve/memory.rs b/core/src/resolve/memory.rs index b941c2dd..a3ea2ecb 100644 --- a/core/src/resolve/memory.rs +++ b/core/src/resolve/memory.rs @@ -3,7 +3,7 @@ use std::{collections::HashMap, convert::Infallible}; -use fluent_uri::component::Scheme; +use fluent_uri::{Iri, component::Scheme}; use crate::{ project::ProjectRead, @@ -13,14 +13,41 @@ use crate::{ #[derive(Debug)] pub struct MemoryResolver { pub iri_predicate: Predicate, - pub projects: HashMap, Vec>, + pub projects: HashMap, Vec>, +} + +impl FromIterator<(Iri, Vec)> + for MemoryResolver +{ + fn from_iter, Vec)>>(iter: T) -> Self { + Self { + iri_predicate: AcceptAll {}, + projects: HashMap::from_iter(iter), + } + } +} + +impl From<[(Iri, Vec); N]> + for MemoryResolver +{ + fn from(value: [(Iri, Vec); N]) -> Self { + Self::from_iter(value) + } +} + +impl From, Vec)>> + for MemoryResolver +{ + fn from(value: Vec<(Iri, Vec)>) -> Self { + Self::from_iter(value) + } } pub trait IRIPredicate { - fn accept_iri(&self, iri: &fluent_uri::Iri) -> bool; + fn accept_iri(&self, iri: &Iri) -> bool; fn accept_iri_raw(&self, iri: &str) -> bool { - match fluent_uri::Iri::parse(iri.to_string()) { + match Iri::parse(iri.to_string()) { Ok(iri) => self.accept_iri(&iri), Err(_) => false, } @@ -31,7 +58,7 @@ pub trait IRIPredicate { pub struct AcceptAll {} impl IRIPredicate for AcceptAll { - fn accept_iri(&self, _iri: &fluent_uri::Iri) -> bool { + fn accept_iri(&self, _iri: &Iri) -> bool { true } } @@ -42,7 +69,7 @@ pub struct AcceptScheme<'a> { } impl IRIPredicate for AcceptScheme<'_> { - fn accept_iri(&self, iri: &fluent_uri::Iri) -> bool { + fn accept_iri(&self, iri: &Iri) -> bool { iri.scheme() == self.scheme } } @@ -58,7 +85,7 @@ impl ResolveRead fn resolve_read( &self, - 
uri: &fluent_uri::Iri, + uri: &Iri, ) -> Result, Self::Error> { if !self.iri_predicate.accept_iri(uri) { return Ok(ResolutionOutcome::UnsupportedIRIType(format!( diff --git a/core/src/resolve/priority.rs b/core/src/resolve/priority.rs index 6a47f6bc..55b59af6 100644 --- a/core/src/resolve/priority.rs +++ b/core/src/resolve/priority.rs @@ -5,7 +5,13 @@ use std::{ use thiserror::Error; -use crate::{env::utils::ErrorBound, project::ProjectRead, resolve::ResolveRead}; +use crate::{ + env::utils::ErrorBound, + lock::Source, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + project::ProjectRead, + resolve::ResolveRead, +}; /// Resolver that overrides the resolution of some underlying (lower priority) /// resolver by that of another (higher priority) resolver. @@ -100,8 +106,8 @@ impl ProjectRead &self, ) -> Result< ( - Option, - Option, + Option, + Option, ), Self::Error, > { @@ -136,7 +142,7 @@ impl ProjectRead } } - fn sources(&self) -> Vec { + fn sources(&self) -> Vec { match self { PriorityProject::HigherProject(project) => project.sources(), PriorityProject::LowerProject(project) => project.sources(), diff --git a/core/src/resolve/standard.rs b/core/src/resolve/standard.rs index 49419b70..46faff8b 100644 --- a/core/src/resolve/standard.rs +++ b/core/src/resolve/standard.rs @@ -3,6 +3,9 @@ use std::{fmt, result::Result, sync::Arc}; +use camino::Utf8PathBuf; +use reqwest_middleware::ClientWithMiddleware; + use crate::{ auth::HTTPAuthentication, env::{local_directory::LocalDirectoryEnvironment, reqwest_http::HTTPEnvironmentAsync}, @@ -17,8 +20,6 @@ use crate::{ sequential::SequentialResolver, }, }; -use camino::Utf8PathBuf; -use reqwest_middleware::ClientWithMiddleware; pub type LocalEnvResolver = EnvResolver; @@ -32,9 +33,9 @@ type StandardResolverInner = CombinedResolver< AsSyncResolveTokio>, >; -pub struct StandardResolver(StandardResolverInner); +pub struct StandardResolver(StandardResolverInner); -impl fmt::Debug for StandardResolver { +impl 
fmt::Debug for StandardResolver { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("CliResolver").field(&self.0).finish() } @@ -117,10 +118,10 @@ pub fn standard_resolver( auth_policy: Arc, ) -> StandardResolver { let file_resolver = standard_file_resolver(cwd); + let local_resolver = local_env_path.map(standard_local_resolver); let remote_resolver = client .clone() .map(|x| standard_remote_resolver(x, runtime.clone(), auth_policy.clone())); - let local_resolver = local_env_path.map(standard_local_resolver); let index_resolver = client .zip(index_urls) .map(|(client, urls)| standard_index_resolver(client, urls, runtime, auth_policy)); diff --git a/core/src/solve/pubgrub.rs b/core/src/solve/pubgrub.rs index 35cd46c8..5ad58167 100644 --- a/core/src/solve/pubgrub.rs +++ b/core/src/solve/pubgrub.rs @@ -546,7 +546,7 @@ mod tests { fn simple_resolver_environment( structure: &[(&str, &[InMemoryProject])], - ) -> EnvResolver { + ) -> EnvResolver> { EnvResolver { env: MemoryStorageEnvironment { projects: structure diff --git a/core/src/workspace.rs b/core/src/workspace.rs index be1ae876..d0b6cda5 100644 --- a/core/src/workspace.rs +++ b/core/src/workspace.rs @@ -1,4 +1,4 @@ -use camino::Utf8PathBuf; +use camino::{Utf8Path, Utf8PathBuf}; #[cfg(feature = "python")] use pyo3::{FromPyObject, IntoPyObject}; use serde::{Deserialize, Serialize}; @@ -50,8 +50,8 @@ pub struct Workspace { } impl Workspace { - pub fn root_path(&self) -> Utf8PathBuf { - self.workspace_path.clone() + pub fn root_path(&self) -> &Utf8Path { + &self.workspace_path } pub fn info_path(&self) -> Utf8PathBuf { diff --git a/core/tests/filesystem_env.rs b/core/tests/filesystem_env.rs index 629aa9d0..4d9ad276 100644 --- a/core/tests/filesystem_env.rs +++ b/core/tests/filesystem_env.rs @@ -64,7 +64,6 @@ mod filesystem_tests { if path.is_dir() { assert_eq!(path.strip_prefix(&cwd)?, env_path); } else { - // if path.is_file() assert_eq!(path.strip_prefix(&cwd)?, env_path.join("entries.txt")); 
} } diff --git a/core/tests/memory_init.rs b/core/tests/memory_init.rs index 27fc0ed4..cb3e0e87 100644 --- a/core/tests/memory_init.rs +++ b/core/tests/memory_init.rs @@ -8,11 +8,7 @@ use sysand_core::{commands::init::do_init, init::do_init_memory, model::Intercha /// and .meta.json files in the current working directory. (Non-interactive use) #[test] fn init_basic() -> Result<(), Box> { - let memory_storage = do_init_memory( - "init_basic".to_string(), - "1.2.3".to_string(), - Some("Apache-2.0".to_string()), - )?; + let memory_storage = do_init_memory("init_basic", "1.2.3", Some("Apache-2.0".to_string()))?; assert_eq!( memory_storage.info.unwrap(), @@ -59,8 +55,8 @@ fn init_basic() -> Result<(), Box> { #[test] fn init_fail_on_double_init() -> Result<(), Box> { let mut memory_storage = do_init_memory( - "init_fail_on_double_init".to_string(), - "1.2.3".to_string(), + "init_fail_on_double_init", + "1.2.3", Some("Apache-2.0 OR MIT".to_string()), )?; diff --git a/core/tests/project_derive.rs b/core/tests/project_derive.rs new file mode 100644 index 00000000..249cac36 --- /dev/null +++ b/core/tests/project_derive.rs @@ -0,0 +1,180 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use std::{ + collections::HashMap, + io::{Cursor, Read}, +}; + +use sysand_core::{ + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + project::{ProjectMut, ProjectRead, memory::InMemoryProject}, +}; + +// Have to have these in scope for ProjectRead +// TODO: Find a better solution (that works both inside and outside sysand_core) +use sysand_core::lock::Source; +use typed_path::Utf8UnixPath; + +#[derive(ProjectRead)] +enum OneVariantProjectRead { + Variant(InMemoryProject), +} + +#[derive(ProjectRead)] +enum TwoVariantProjectRead { + First(InMemoryProject), + Second(InMemoryProject), +} + +#[derive(ProjectRead, ProjectMut)] +enum OneVariantProjectMut { + Variant(InMemoryProject), +} + +#[test] +fn 
test_macro_one_variant() { + let _project = OneVariantProjectRead::Variant(InMemoryProject::new()); +} + +#[test] +fn test_macro_two_variants() { + let _project_first = TwoVariantProjectRead::First(InMemoryProject::new()); + let _project_second = TwoVariantProjectRead::Second(InMemoryProject::new()); +} + +#[test] +fn test_error_to_string() { + let error = ::Error::Variant( + ::Error::AlreadyExists("project".to_string()), + ); + let _string = error.to_string(); +} + +#[test] +fn test_macro_get_project() { + let info = InterchangeProjectInfoRaw { + name: "get_project".to_string(), + description: None, + version: "1.2.3".to_string(), + license: None, + maintainer: vec![], + website: None, + topic: vec![], + usage: vec![], + }; + let meta = InterchangeProjectMetadataRaw { + index: indexmap::IndexMap::new(), + created: "0000-00-00T00:00:00.123456789Z".to_string(), + metamodel: None, + includes_derived: None, + includes_implied: None, + checksum: None, + }; + let test_double = OneVariantProjectRead::Variant(InMemoryProject { + info: Some(info.clone()), + meta: Some(meta.clone()), + files: HashMap::new(), + nominal_sources: vec![], + }); + + assert_eq!(test_double.get_project().unwrap(), (Some(info), Some(meta))); +} + +#[test] +fn test_macro_read_source() { + let mut files = HashMap::new(); + let path = "path"; + let file_content = "file content".to_string(); + files.insert(path.into(), file_content.clone()); + let project = OneVariantProjectRead::Variant(InMemoryProject { + info: None, + meta: None, + files, + nominal_sources: vec![], + }); + + let mut buffer = String::new(); + + project + .read_source(path) + .unwrap() + .read_to_string(&mut buffer) + .unwrap(); + + assert_eq!(buffer, file_content); +} + +#[test] +#[should_panic] +fn test_macro_sources() { + let project = OneVariantProjectRead::Variant(InMemoryProject::new()); + + project.sources(); +} + +#[test] +fn test_macro_put_info() { + let info = InterchangeProjectInfoRaw { + name: 
"single_get_info".to_string(), + description: None, + version: "1.2.3".to_string(), + license: None, + maintainer: vec![], + website: None, + topic: vec![], + usage: vec![], + }; + let mut project = OneVariantProjectMut::Variant(InMemoryProject::new()); + + assert!(project.get_info().unwrap().is_none()); + + project.put_info(&info, false).unwrap(); + + assert_eq!(project.get_info().unwrap().unwrap(), info); +} + +#[test] +fn test_macro_put_meta() { + let meta = InterchangeProjectMetadataRaw { + index: indexmap::IndexMap::new(), + created: "0000-00-00T00:00:00.123456789Z".to_string(), + metamodel: None, + includes_derived: None, + includes_implied: None, + checksum: None, + }; + let mut project = OneVariantProjectMut::Variant(InMemoryProject::new()); + + assert!(project.get_meta().unwrap().is_none()); + + project.put_meta(&meta, false).unwrap(); + + assert_eq!(project.get_meta().unwrap().unwrap(), meta); +} + +#[test] +fn test_macro_write_source() { + let path = "path"; + let file_content = "file content".to_string(); + let mut project = OneVariantProjectMut::Variant(InMemoryProject { + info: None, + meta: None, + files: HashMap::new(), + nominal_sources: vec![], + }); + + project + .write_source(path, &mut Cursor::new(file_content.as_str()), false) + .unwrap(); + + let mut buffer = String::new(); + + project + .read_source(path) + .unwrap() + .read_to_string(&mut buffer) + .unwrap(); + + assert_eq!(buffer, file_content); +} diff --git a/core/tests/project_no_derive.rs b/core/tests/project_no_derive.rs new file mode 100644 index 00000000..ee018b32 --- /dev/null +++ b/core/tests/project_no_derive.rs @@ -0,0 +1,190 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use std::io::Read; + +use thiserror::Error; + +use sysand_core::project::{ProjectMut, ProjectRead, memory::InMemoryProject}; + +pub enum GenericProject +where + A: ProjectRead, + B: ProjectRead, +{ + Variant1(A), + Variant2(B), + Variant3(Box), +} + 
+// What comes after there should essentially be what the ProjectRead and ProjectMut macros expand to, +// so in case the macros are not working properly first make sure this here works. + +#[derive(Debug, Error)] +pub enum GenericProjectError { + #[error(transparent)] + Variant1(Variant1), + #[error(transparent)] + Variant2(Variant2), + #[error(transparent)] + Variant3(Variant3), +} + +pub enum GenericProjectSourceReader { + Variant1(Variant1), + Variant2(Variant2), + Variant3(Variant3), +} + +impl Read + for GenericProjectSourceReader +{ + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + match self { + GenericProjectSourceReader::Variant1(reader) => reader.read(buf), + GenericProjectSourceReader::Variant2(reader) => reader.read(buf), + GenericProjectSourceReader::Variant3(reader) => reader.read(buf), + } + } +} + +impl ProjectRead for GenericProject +where + A: ProjectRead, + B: ProjectRead, +{ + type Error = GenericProjectError< + ::Error, + ::Error, + ::Error, + >; + + fn get_project( + &self, + ) -> Result< + ( + Option, + Option, + ), + Self::Error, + > { + match self { + GenericProject::Variant1(project) => { + project.get_project().map_err(GenericProjectError::Variant1) + } + GenericProject::Variant2(project) => { + project.get_project().map_err(GenericProjectError::Variant2) + } + GenericProject::Variant3(project) => { + project.get_project().map_err(GenericProjectError::Variant3) + } + } + } + + type SourceReader<'a> + = GenericProjectSourceReader< + ::SourceReader<'a>, + ::SourceReader<'a>, + ::SourceReader<'a>, + > + where + Self: 'a; + + fn read_source>( + &self, + path: P, + ) -> Result, Self::Error> { + match self { + GenericProject::Variant1(project) => project + .read_source(path) + .map(GenericProjectSourceReader::Variant1) + .map_err(GenericProjectError::Variant1), + GenericProject::Variant2(project) => project + .read_source(path) + .map(GenericProjectSourceReader::Variant2) + .map_err(GenericProjectError::Variant2), + 
GenericProject::Variant3(project) => project + .read_source(path) + .map(GenericProjectSourceReader::Variant3) + .map_err(GenericProjectError::Variant3), + } + } + + fn sources(&self) -> Vec { + match self { + GenericProject::Variant1(project) => project.sources(), + GenericProject::Variant2(project) => project.sources(), + GenericProject::Variant3(project) => project.sources(), + } + } +} + +impl ProjectMut for GenericProject +where + A: ProjectMut, + B: ProjectMut, +{ + fn put_info( + &mut self, + info: &sysand_core::model::InterchangeProjectInfoRaw, + overwrite: bool, + ) -> Result<(), Self::Error> { + match self { + GenericProject::Variant1(project) => project + .put_info(info, overwrite) + .map_err(GenericProjectError::Variant1), + GenericProject::Variant2(project) => project + .put_info(info, overwrite) + .map_err(GenericProjectError::Variant2), + GenericProject::Variant3(project) => project + .put_info(info, overwrite) + .map_err(GenericProjectError::Variant3), + } + } + fn put_meta( + &mut self, + meta: &sysand_core::model::InterchangeProjectMetadataRaw, + overwrite: bool, + ) -> Result<(), Self::Error> { + match self { + GenericProject::Variant1(project) => project + .put_meta(meta, overwrite) + .map_err(GenericProjectError::Variant1), + GenericProject::Variant2(project) => project + .put_meta(meta, overwrite) + .map_err(GenericProjectError::Variant2), + GenericProject::Variant3(project) => project + .put_meta(meta, overwrite) + .map_err(GenericProjectError::Variant3), + } + } + fn write_source, R: Read>( + &mut self, + path: P, + source: &mut R, + overwrite: bool, + ) -> Result<(), Self::Error> { + match self { + GenericProject::Variant1(project) => project + .write_source(path, source, overwrite) + .map_err(GenericProjectError::Variant1), + GenericProject::Variant2(project) => project + .write_source(path, source, overwrite) + .map_err(GenericProjectError::Variant2), + GenericProject::Variant3(project) => project + .write_source(path, source, overwrite) 
+ .map_err(GenericProjectError::Variant3), + } + } +} + +#[test] +fn test_basic() { + let _project1 = + GenericProject::::Variant1(InMemoryProject::new()); + let _project2 = + GenericProject::::Variant2(InMemoryProject::new()); + let _project3 = GenericProject::::Variant3(Box::new( + InMemoryProject::new(), + )); +} diff --git a/docs/src/commands/add.md b/docs/src/commands/add.md index b69cc9f2..bbacef84 100644 --- a/docs/src/commands/add.md +++ b/docs/src/commands/add.md @@ -14,6 +14,20 @@ Adds IRI and optional version constraint to list of usages in the project information file `.project.json`. By default this will also update the lockfile and sync the local environment (creating one if not already present). +When adding a usage with one of the `--from-*` or `--as-*` flags the +configuration file will be automatically updated with a project source +override as described in [Dependencies](../config/dependencies.md). If one of +the `--from-*` flags are used, Sysand will attempt to guess the type of +project source, while the `--as-*` flags let you specify the type explicitly. +Sysand cannot determine if a project is to be editable, so for that you need to +specify the path with the `--as-editable` flag. + +The affected configuration file will either be the one given with +`--config-file` or (if `--no-config` is not present) the `sysand.toml` at the +root of the project. If no configuration file is given and `--no-config` is +set, the usage will be added to the project, but no source will be configured, +so future syncing will not take this into account. + ## Arguments - ``: IRI/URI/URL identifying the project to be used. See @@ -26,6 +40,22 @@ and sync the local environment (creating one if not already present). 
- `--no-lock`: Do not automatically resolve usages (and generate lockfile) - `--no-sync`: Do not automatically install dependencies +- `--from-path <PATH>`: Add usage as a local interchange project at PATH and + update configuration file attempting to guess the source from the PATH +- `--from-url <URL>`: Add usage as a remote interchange project at URL and + update configuration file attempting to guess the source from the URL +- `--as-editable <PATH>`: Add usage as an editable interchange project at PATH + and update configuration file with appropriate source +- `--as-local-src <PATH>`: Add usage as a local interchange project at PATH and + update configuration file with appropriate source +- `--as-local-kpar <PATH>`: Add usage as a local interchange project archive at + PATH and update configuration file with appropriate source +- `--as-remote-src <URL>`: Add usage as a remote interchange project at URL and + update configuration file with appropriate source +- `--as-remote-kpar <URL>`: Add usage as a remote interchange project archive at + URL and update configuration file with appropriate source +- `--as-remote-git <URL>`: Add usage as a remote git interchange project at URL + and update configuration file with appropriate source {{#include ./partials/resolution_opts.md}} diff --git a/docs/src/commands/remove.md b/docs/src/commands/remove.md index 48afcdbc..f515e569 100644 --- a/docs/src/commands/remove.md +++ b/docs/src/commands/remove.md @@ -2,6 +2,8 @@ Remove usage from project information +Will also remove project source overrides from configuration file if available. + ## Usage ```sh diff --git a/docs/src/config.md b/docs/src/config.md index 0dbc32f1..4f70f67c 100644 --- a/docs/src/config.md +++ b/docs/src/config.md @@ -23,3 +23,4 @@ can be used to specify the path to an additional config file.
## Configurable options - [Indexes](config/indexes.md) +- [Dependencies](config/dependencies.md) diff --git a/docs/src/config/dependencies.md b/docs/src/config/dependencies.md new file mode 100644 index 00000000..b9d68b88 --- /dev/null +++ b/docs/src/config/dependencies.md @@ -0,0 +1,132 @@ +# Dependencies + +Sometimes you may wish to use a project that isn't resolvable through an +available index or you want to override the dependency resolution for other +reasons. Or you may just want to replace a URL usage with a URN usage for +better readability. In any case you can do this by adding the appropriate IRI +and `sources` to a `project` entry in the `sysand.toml` configuration file at +the root of your project. This follows the same structure as found in the +lockfile, where `identifiers` are given as a list of IRIs and `sources` are a +list of sources. A project may have multiple identifiers in case it is referred +to differently by different projects, and multiple sources where the additional +ones after the first serve as backups in case the previous ones fail to +resolve. Note that these should be sources of the exact same project as +determined by its checksum, as otherwise you are likely to run into problems +when syncing against a lockfile. + +Below we describe how to add overriding sources directly to the configuration +file, but it is also possible to do through the command line interface with the +[`sysand add`](../commands/add.md) command by using one of the `--from-*` flags +to have Sysand try to guess a project source from the path/URL, or using one of +the `--as-*` flags if you want to specify exactly which type of project source +you want. + +All paths in project sources are assumed to be relative to the root of your +project. + +## Local projects + +To specify the source of a project that you have locally in a directory +`./path/to/project` by the identifier `urn:kpar:my-project`, is done by adding +the following entry to your `sysand.toml`. 
+ +```toml +[[project]] +identifiers = [ + "urn:kpar:my-project", +] +sources = [ + { src_path = "path/to/project" }, +] +``` + +This source corresponds to the `--as-local-src` flag. + +## Local editable projects + +Normally when you add a project as a usage, Sysand will copy and install it, +so any changes made to the original project afterwards will not affect the +installed project. For local projects you also have the option to add them as +"editable" usages, meaning the project won't be copied and will instead just be +referred to where it is originally located. A local project is specified as +editable in `sysand.toml` by adding + +```toml +[[project]] +identifiers = [ + "urn:kpar:my-project", +] +sources = [ + { editable = "path/to/project" }, +] +``` + +This source corresponds to the `--as-editable` flag. + +## Local KPARs + +If you have a project locally available as a compressed KPAR, this can be identified +by `urn:kpar:my-kpar-project` by adding + +```toml +[[project]] +identifiers = [ + "urn:kpar:my-kpar-project", +] +sources = [ + { kpar_path = "path/to/project.kpar" }, +] +``` + +to your `sysand.toml`. This source corresponds to the `--as-local-kpar` flag. + +## Remote projects + +Remote projects are those available through URLs, and similar to local projects +the way to specify them as sources depends on which format they come in. + +To specify a KPAR available at a URL as a source, add + +```toml +[[project]] +identifiers = [ + "urn:kpar:remote-kpar-project", +] +sources = [ + { remote_kpar = "https://www.example.com/path/to/project.kpar" }, +] +``` + +to your `sysand.toml`. This source corresponds to the `--as-remote-kpar` flag. + +For projects that are not packaged you can either use + +```toml +[[project]] +identifiers = [ + "urn:kpar:remote-project", +] +sources = [ + { remote_src = "https://www.example.com/path/to/project" }, +] +``` + +or, if the project is hosted on a Git forge like GitHub, GitLab etc.
use + +```toml +[[project]] +identifiers = [ + "urn:kpar:remote-git-project", +] +sources = [ + { remote_git = "https://github.com/my_user/project.git" }, +] +``` + +These sources correspond to the `--as-remote-src` and `--as-remote-git` flags +respectively. + +> [!note] +> Currently there is no way to specify a particular git reference like e.g. a +> branch, tag or commit. +> It is also currently not possible to specify a project path in a repository. diff --git a/macros/Cargo.toml b/macros/Cargo.toml new file mode 100644 index 00000000..738e0007 --- /dev/null +++ b/macros/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "sysand-macros" +version.workspace = true +edition.workspace = true +rust-version.workspace = true +publish.workspace = true +authors.workspace = true +license.workspace = true +description.workspace = true +repository.workspace = true + +[lib] +proc-macro = true + +[dependencies] +itertools = { version = "0.14.0", default-features = false } +proc-macro2 = "1.0.95" +quote = "1.0.40" +syn = "2.0.104" diff --git a/macros/scripts/run_chores.sh b/macros/scripts/run_chores.sh new file mode 100755 index 00000000..934a4b79 --- /dev/null +++ b/macros/scripts/run_chores.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +set -eu + +# Compute the root directory based on the location of this script. +SCRIPT_DIR=$(dirname "$(realpath "$0")") +PACKAGE_DIR=$(dirname "$SCRIPT_DIR") + +cd "$PACKAGE_DIR" + +cargo fmt +cargo clippy --all-targets -- --deny warnings diff --git a/macros/src/lib.rs b/macros/src/lib.rs new file mode 100644 index 00000000..0fe6d63b --- /dev/null +++ b/macros/src/lib.rs @@ -0,0 +1,416 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use itertools::Itertools; +use proc_macro::TokenStream; +use quote::quote; +use syn::{Data, DataEnum, DeriveInput, parse_macro_input}; + +/// Derives `ProjectRead` for an enum by delegating to its variants and +/// synthesizing unified associated types.
+/// +/// This macro implements `ProjectRead` for an enum whose variants each +/// contain a type that already implements `ProjectRead`. The derived +/// implementation delegates all trait methods to the active variant. +/// +/// In addition, the macro generates two new enums to unify associated types +/// across variants: +/// +/// - `Error` +/// - `SourceReader<'a>` +/// +/// These enums contain one variant per original enum variant and wrap the +/// corresponding associated types from each inner `ProjectRead` implementation. +/// +/// # Generated Associated Types +/// +/// The derived implementation defines: +/// +/// - `type Error = Error` +/// - `type SourceReader<'a> = SourceReader<'a>` +/// +/// where: +/// +/// - `Error` is an enum with one variant per original enum variant, +/// wrapping that variant’s `ProjectRead::Error` type. +/// - `SourceReader<'a>` is an enum with one variant per original +/// enum variant, wrapping that variant’s `ProjectRead::SourceReader<'a>` +/// type. +/// +/// This allows each variant to use its own concrete error and reader types, +/// while presenting a single unified `ProjectRead` implementation for the +/// outer enum. +/// +/// # Method Delegation +/// +/// - [`ProjectRead::get_project`] delegates to the active variant, mapping +/// errors into `Error`. +/// - [`ProjectRead::read_source`] delegates to the active variant and wraps +/// the returned reader in `SourceReader<'_>`. +/// - [`ProjectRead::sources`] delegates directly to the active variant. +/// +/// All other methods are handled by the default implementation of the +/// `ProjectRead` trait. +/// +/// All delegation is performed via a `match` on `self`. No dynamic dispatch +/// is introduced. +/// +/// # Requirements +/// +/// - Each variant must contain exactly one value whose type implements +/// `ProjectRead`. +/// - No additional fields are permitted in a variant. 
+/// - The enum may be generic, provided the generics are compatible with the +/// synthesized associated types. +/// +/// # Design Rationale +/// +/// This derive is useful when modeling multiple project backends behind a +/// single enum type while preserving static dispatch and allowing each +/// backend to retain its own concrete error and reader types. +/// +/// The generated implementation is zero-cost beyond the enum match required +/// for delegation and wrapping. +#[proc_macro_derive(ProjectRead)] +pub fn project_read_derive(input: TokenStream) -> TokenStream { + let ast = parse_macro_input!(input as DeriveInput); + + let Data::Enum(DataEnum { variants, .. }) = &ast.data else { + return syn::Error::new_spanned(&ast.ident, "ProjectRead can only be derived on an enum") + .to_compile_error() + .into(); + }; + + let (impl_generics, type_generics, where_clause) = ast.generics.split_for_impl(); + let enum_ident = &ast.ident; + let error_ident = syn::Ident::new(format!("{}Error", enum_ident).as_str(), enum_ident.span()); + let source_reader_ident = syn::Ident::new( + format!("{}SourceReader", enum_ident).as_str(), + enum_ident.span(), + ); + + let variant_parts: Result, _> = variants + .iter() + .map(|variant| { + let variant_ident = variant.ident.clone(); + let variant_type = match &variant.fields { + syn::Fields::Unnamed(fields) if fields.unnamed.len() != 1 => { + return Err(syn::Error::new_spanned( + &variant.ident, + "each variant must contain exactly one field", + )); + } + syn::Fields::Unnamed(fields) => fields.unnamed.first().unwrap().ty.clone(), + _ => { + return Err(syn::Error::new_spanned( + &variant.ident, + "only tuple variants supported", + )); + } + }; + Ok(( + // variant_list + quote! { + #variant_ident + }, + // error_variants + quote! { + #[error(transparent)] + #variant_ident(#variant_ident) + }, + // error_args + quote! { + <#variant_type as ProjectRead>::Error + }, + // source_reader_variants + quote! 
{ + #variant_ident(#variant_ident) + }, + // variants_read + quote! { + #variant_ident: ::std::io::Read + }, + // source_reader_match + quote! { + #source_reader_ident::#variant_ident(reader) => reader.read(buf) + }, + // source_reader_args + quote! { + <#variant_type as ProjectRead>::SourceReader<'a> + }, + // get_project_match + quote! { + #enum_ident::#variant_ident(project) => project + .get_project() + .map_err(#error_ident::#variant_ident) + }, + // read_source_match + quote! { + #enum_ident::#variant_ident(project) => project + .read_source(path) + .map(#source_reader_ident::#variant_ident) + .map_err(#error_ident::#variant_ident) + }, + // sources_match + quote! { + #enum_ident::#variant_ident(project) => project.sources() + }, + )) + }) + .collect(); + + let variant_parts = match variant_parts { + Ok(var) => var, + Err(err) => { + return err.to_compile_error().into(); + } + }; + + let ( + variant_list, + error_variants, + error_args, + source_reader_variants, + variants_read, + source_reader_match, + source_reader_args, + get_project_match, + read_source_match, + sources_match, + ): ( + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + Vec<_>, + ) = variant_parts.iter().cloned().multiunzip(); + + let expanded = quote! 
{ + #[derive(::std::fmt::Debug, ::thiserror::Error)] + pub enum #error_ident< + #( #variant_list ),* + > { + #( #error_variants ),* + } + + pub enum #source_reader_ident< + #( #variant_list ),* + > { + #( #source_reader_variants ),* + } + + impl< + #( #variants_read ),* + > ::std::io::Read + for #source_reader_ident< + #( #variant_list ),* + > { + fn read(&mut self, buf: &mut [u8]) -> ::std::io::Result { + match self { + #( #source_reader_match ),* + } + } + } + + impl #impl_generics ProjectRead for #enum_ident #type_generics #where_clause { + type Error = #error_ident< + #( #error_args ),* + >; + + fn get_project( + &self, + ) -> ::std::result::Result< + ( + ::std::option::Option, + ::std::option::Option, + ), + Self::Error, + > { + match self { + #( #get_project_match ),* + } + } + + type SourceReader<'a> + = #source_reader_ident< + #( #source_reader_args ),* + > + where + Self: 'a; + + fn read_source>( + &self, + path: P, + ) -> ::std::result::Result, Self::Error> { + match self { + #( #read_source_match ),* + } + } + + fn sources(&self) -> ::std::vec::Vec { + match &self { + #( #sources_match ),* + } + } + } + }; + + TokenStream::from(expanded) +} + +/// Derives `ProjectMut` for an enum by delegating to its variants. +/// +/// This macro implements `ProjectMut` for an enum whose variants each +/// contain a type that already implements `ProjectMut`. All trait methods +/// are delegated to the active variant. +/// +/// Because `ProjectMut` extends `ProjectRead`, this derive requires +/// that the enum also implement `ProjectRead`. In typical usage, this is +/// provided by the corresponding [`ProjectRead`] derive macro. +/// +/// # Associated Types +/// +/// This derive does **not** introduce new associated types. +/// +/// Instead, it reuses the `Error` type defined by the enum’s +/// `ProjectRead` implementation (typically the synthesized +/// `Error` type generated by the [`ProjectRead`] derive). 
+/// +/// All errors produced by delegated methods are forwarded unchanged. +/// +/// # Method Delegation +/// +/// - [`ProjectMut::put_info`] delegates to the active variant. +/// - [`ProjectMut::put_meta`] delegates to the active variant. +/// - [`ProjectMut::write_source`] delegates to the active variant. +/// +/// All other methods are handled by the default implementation of the +/// `ProjectMut` trait. +/// +/// Delegation is implemented via a `match` on `self`. No dynamic dispatch +/// is introduced. +/// +/// # Requirements +/// +/// - Each variant must contain exactly one value whose type implements +/// `ProjectMut`. +/// - The enum must also implement `ProjectRead` (typically via the +/// corresponding derive macro). +/// - No additional fields are permitted in a variant. +/// +/// # Design Rationale +/// +/// This derive enables modeling multiple mutable project backends behind +/// a single enum while preserving static dispatch and maintaining a unified +/// error type. +/// +/// The generated implementation is zero-cost beyond the enum match required +/// for delegation. +#[proc_macro_derive(ProjectMut)] +pub fn project_mut_derive(input: TokenStream) -> TokenStream { + let ast = parse_macro_input!(input as DeriveInput); + + let Data::Enum(DataEnum { variants, .. 
}) = &ast.data else { + return syn::Error::new_spanned(&ast.ident, "ProjectMut can only be derived on an enum") + .to_compile_error() + .into(); + }; + + let (impl_generics, type_generics, where_clause) = ast.generics.split_for_impl(); + let enum_ident = &ast.ident; + let error_ident = syn::Ident::new(format!("{}Error", enum_ident).as_str(), enum_ident.span()); + + let variant_parts: Result, _> = variants + .iter() + .map(|variant| { + let variant_ident = variant.ident.clone(); + match &variant.fields { + syn::Fields::Unnamed(fields) if fields.unnamed.len() != 1 => { + return Err(syn::Error::new_spanned( + &variant.ident, + "each variant must contain exactly one field", + )); + } + syn::Fields::Unnamed(_) => {} + _ => { + return Err(syn::Error::new_spanned( + &variant.ident, + "only tuple variants supported", + )); + } + }; + Ok(( + // put_info_match + quote! { + #enum_ident::#variant_ident(project) => project + .put_info(info, overwrite) + .map_err(#error_ident::#variant_ident) + }, + // put_meta_match + quote! { + #enum_ident::#variant_ident(project) => project + .put_meta(meta, overwrite) + .map_err(#error_ident::#variant_ident) + }, + // write_source_match + quote! { + #enum_ident::#variant_ident(project) => project + .write_source(path, source, overwrite) + .map_err(#error_ident::#variant_ident) + }, + )) + }) + .collect(); + + let variant_parts = match variant_parts { + Ok(var) => var, + Err(err) => { + return err.to_compile_error().into(); + } + }; + + let (put_info_match, put_meta_match, write_source_match): (Vec<_>, Vec<_>, Vec<_>) = + variant_parts.iter().cloned().multiunzip(); + + let expanded = quote! 
{ + impl #impl_generics ProjectMut for #enum_ident #type_generics #where_clause { + fn put_info( + &mut self, + info: &InterchangeProjectInfoRaw, + overwrite: bool, + ) -> ::std::result::Result<(), Self::Error> { + match self { + #( #put_info_match ),* + } + } + + fn put_meta( + &mut self, + meta: &InterchangeProjectMetadataRaw, + overwrite: bool, + ) -> ::std::result::Result<(), Self::Error> { + match self { + #( #put_meta_match ),* + } + } + + fn write_source, R: ::std::io::Read>( + &mut self, + path: P, + source: &mut R, + overwrite: bool, + ) -> ::std::result::Result<(), Self::Error> { + match self { + #( #write_source_match ),* + } + } + } + }; + + TokenStream::from(expanded) +} diff --git a/scripts/run_chores.sh b/scripts/run_chores.sh index d2859aa9..86e61c60 100755 --- a/scripts/run_chores.sh +++ b/scripts/run_chores.sh @@ -8,6 +8,8 @@ ROOT_DIR=$(dirname "$SCRIPT_DIR") "$ROOT_DIR"/core/scripts/run_chores.sh +"$ROOT_DIR"/macros/scripts/run_chores.sh + "$ROOT_DIR"/sysand/scripts/run_chores.sh "$ROOT_DIR"/bindings/py/scripts/run_chores.sh diff --git a/sysand/src/cli.rs b/sysand/src/cli.rs index 77993ee3..ccb96c2c 100644 --- a/sysand/src/cli.rs +++ b/sysand/src/cli.rs @@ -9,6 +9,7 @@ use std::{ use camino::Utf8PathBuf; use clap::{ValueEnum, builder::StyledStr, crate_authors}; +use fluent_uri::Iri; use semver::VersionReq; use crate::env_vars; @@ -96,6 +97,8 @@ pub enum Command { #[command(flatten)] resolution_opts: ResolutionOptions, + #[command(flatten)] + source_opts: Box, }, /// Remove usage from project information #[clap(alias = "rm")] @@ -1270,6 +1273,44 @@ pub struct ResolutionOptions { pub include_std: bool, } +#[derive(clap::Args, Debug, Clone)] +pub struct ProjectSourceOptions { + /// Add usage as a local interchange project at PATH and + /// update configuration file attempting to guess the + /// source from the PATH + #[arg(long, value_name = "PATH", group = "source")] + pub from_path: Option, + /// Add usage as a remote interchange project at URL 
and + /// update configuration file attempting to guess the + /// source from the URL + #[arg(long, value_name = "URL", group = "source")] + pub from_url: Option>, + /// Add usage as an editable interchange project at PATH and + /// update configuration file with appropriate source + #[arg(long, value_name = "PATH", group = "source")] + pub as_editable: Option, + /// Add usage as a local interchange project at PATH and + /// update configuration file with appropriate source + #[arg(long, value_name = "PATH", group = "source")] + pub as_local_src: Option, + /// Add usage as a local interchange project archive at PATH + /// and update configuration file with appropriate source + #[arg(long, value_name = "PATH", group = "source")] + pub as_local_kpar: Option, + /// Add usage as a remote interchange project at URL and + /// update configuration file with appropriate source + #[arg(long, value_name = "URL", group = "source")] + pub as_remote_src: Option>, + /// Add usage as a remote interchange project archive at URL + /// and update configuration file with appropriate source + #[arg(long, value_name = "URL", group = "source")] + pub as_remote_kpar: Option>, + /// Add usage as a remote git interchange project at URL and + /// update configuration file with appropriate source + #[arg(long, value_name = "URL", group = "source")] + pub as_remote_git: Option>, +} + #[derive(clap::Args, Debug, Clone)] pub struct SourcesOptions { /// Do not include sources for dependencies @@ -1311,12 +1352,6 @@ pub struct GlobalOptions { pub help: Option, } -impl GlobalOptions { - pub fn sets_log_level(&self) -> bool { - self.verbose || self.quiet - } -} - /// Parse an IRI. Tolerates missing IRI scheme, uses /// `https://` scheme in that case. 
fn parse_https_iri(s: &str) -> Result, fluent_uri::ParseError> { diff --git a/sysand/src/commands/add.rs b/sysand/src/commands/add.rs index 35564386..dbce297f 100644 --- a/sysand/src/commands/add.rs +++ b/sysand/src/commands/add.rs @@ -3,17 +3,30 @@ use std::{collections::HashMap, str::FromStr, sync::Arc}; -use anyhow::Result; +use anyhow::{Result, bail}; +use camino::{Utf8Path, Utf8PathBuf}; use sysand_core::{ add::do_add, auth::HTTPAuthentication, - config::Config, + config::{ + Config, ConfigProject, + local_fs::{CONFIG_FILE, add_project_source_to_config}, + }, lock::Lock, - project::{local_src::LocalSrcProject, utils::wrapfs}, + project::{ + ProjectRead, + local_src::LocalSrcProject, + utils::{relativize_path, wrapfs}, + }, + resolve::{ResolutionOutcome, ResolveRead, standard::standard_resolver}, }; -use crate::{CliError, cli::ResolutionOptions, command_sync}; +use crate::{ + CliError, DEFAULT_INDEX_URL, + cli::{ProjectSourceOptions, ResolutionOptions}, + command_sync, +}; // TODO: Collect common arguments #[allow(clippy::too_many_arguments)] @@ -23,18 +36,130 @@ pub fn command_add, Policy: HTTPAuthentication>( no_lock: bool, no_sync: bool, resolution_opts: ResolutionOptions, - config: &Config, + source_opts: Box, + mut config: Config, + config_file: Option, + no_config: bool, current_project: Option, client: reqwest_middleware::ClientWithMiddleware, runtime: Arc, auth_policy: Arc, ) -> Result<()> { + let iri = iri.as_ref(); let mut current_project = current_project.ok_or(CliError::MissingProjectCurrentDir)?; - let project_root = current_project.root_path(); + let project_root = current_project.root_path().to_owned(); + + #[allow(clippy::manual_map)] // For readability and compactness + let source = if let Some(path) = source_opts.from_path { + let metadata = wrapfs::metadata(&path)?; + if metadata.is_dir() { + Some(sysand_core::lock::Source::LocalSrc { + src_path: get_relative(path, &project_root)?.as_str().into(), + }) + } else if metadata.is_file() { + 
Some(sysand_core::lock::Source::LocalKpar { + kpar_path: get_relative(path, &project_root)?.as_str().into(), + }) + } else { + bail!("path `{path}` is neither a directory nor a file"); + } + } else if let Some(url) = source_opts.from_url { + let ResolutionOptions { + index, + default_index, + no_index, + include_std: _, + } = resolution_opts.clone(); + + let index_urls = if no_index { + None + } else { + Some(config.index_urls(index, vec![DEFAULT_INDEX_URL.to_string()], default_index)?) + }; + let std_resolver = standard_resolver( + None, + None, + Some(client.clone()), + index_urls, + runtime.clone(), + auth_policy.clone(), + ); + let outcome = std_resolver.resolve_read_raw(&url)?; + let mut source = None; + match outcome { + ResolutionOutcome::Resolved(alternatives) => { + for candidate in alternatives { + match candidate { + Ok(project) => { + source = project.sources().first().cloned(); + if source.is_some() { + break; + } + } + Err(err) => { + log::debug!("skipping candidate project: {err}"); + } + } + } + } + ResolutionOutcome::UnsupportedIRIType(e) => bail!("unsupported URL `{url}`:\n{e}"), + ResolutionOutcome::Unresolvable(e) => { + bail!("failed to resolve URL `{url}`:\n{e}") + } + } + if source.is_none() { + bail!("unable to find project at URL `{url}`") + } + source + } else if let Some(editable) = source_opts.as_editable { + Some(sysand_core::lock::Source::Editable { + editable: get_relative(editable, &project_root)?.as_str().into(), + }) + } else if let Some(src_path) = source_opts.as_local_src { + Some(sysand_core::lock::Source::LocalSrc { + src_path: get_relative(src_path, &project_root)?.as_str().into(), + }) + } else if let Some(kpar_path) = source_opts.as_local_kpar { + Some(sysand_core::lock::Source::LocalKpar { + kpar_path: get_relative(kpar_path, &project_root)?.as_str().into(), + }) + } else if let Some(remote_src) = source_opts.as_remote_src { + Some(sysand_core::lock::Source::RemoteSrc { + remote_src: remote_src.into_string(), + }) + } else 
if let Some(remote_kpar) = source_opts.as_remote_kpar { + Some(sysand_core::lock::Source::RemoteKpar { + remote_kpar: remote_kpar.into_string(), + remote_kpar_size: None, + }) + } else if let Some(remote_git) = source_opts.as_remote_git { + Some(sysand_core::lock::Source::RemoteGit { + remote_git: remote_git.into_string(), + }) + } else { + None + }; + + if let Some(source) = source { + let config_path = config_file + .map(Utf8PathBuf::from) + .or((!no_config).then(|| project_root.join(CONFIG_FILE))); + + if let Some(path) = config_path { + add_project_source_to_config(&path, iri, &source)?; + } else { + log::warn!("project source for `{iri}` not added to any config file"); + } + + config.projects.push(ConfigProject { + identifiers: vec![iri.to_owned()], + sources: vec![source], + }); + } let provided_iris = if !resolution_opts.include_std { let sysml_std = crate::known_std_libs(); - if sysml_std.contains_key(iri.as_ref()) { + if sysml_std.contains_key(iri) { crate::logger::warn_std(iri); return Ok(()); } @@ -49,14 +174,15 @@ pub fn command_add, Policy: HTTPAuthentication>( crate::commands::lock::command_lock( ".", resolution_opts, - config, + &config, + &project_root, client.clone(), runtime.clone(), auth_policy.clone(), )?; if !no_sync { - let mut env = crate::get_or_create_env(project_root.as_path())?; + let mut env = crate::get_or_create_env(&project_root)?; let lock = Lock::from_str(&wrapfs::read_to_string( project_root.join(sysand_core::commands::lock::DEFAULT_LOCKFILE_NAME), )?)?; @@ -74,3 +200,21 @@ pub fn command_add, Policy: HTTPAuthentication>( Ok(()) } + +/// `project_root` must be absolute. On Windows, its kind (DOS/UNC) +/// must match the kind of `current_dir()` +fn get_relative + AsRef>( + src_path: P, + project_root: &Utf8Path, +) -> Result { + let src_path = if src_path.as_ref().is_absolute() || wrapfs::current_dir()? != project_root { + let path = relativize_path(wrapfs::canonicalize(src_path.as_ref())?, project_root)?; + if path == "." 
{ + bail!("cannot add current project as usage of itself"); + } + path + } else { + src_path.into() + }; + Ok(src_path) +} diff --git a/sysand/src/commands/clone.rs b/sysand/src/commands/clone.rs index ad907785..fdfbb6f1 100644 --- a/sysand/src/commands/clone.rs +++ b/sysand/src/commands/clone.rs @@ -76,7 +76,7 @@ pub fn command_clone( } (canonical, DirCleaner(&target)) }; - if let Some(existing_project) = discover_project(&project_path) { + if let Some(existing_project) = discover_project(&project_path)? { log::warn!( "found an existing project in one of target path's parent\n\ {:>8} directories `{}`", @@ -114,7 +114,10 @@ pub fn command_clone( let cloned = "Cloned"; let header = sysand_core::style::get_style_config().header; - let mut local_project = LocalSrcProject { project_path }; + let mut local_project = LocalSrcProject { + nominal_path: None, + project_path, + }; let std_resolver = standard_resolver( None, None, @@ -142,6 +145,7 @@ pub fn command_clone( } ProjectLocator::Path(path) => { let remote_project = LocalSrcProject { + nominal_path: None, project_path: path.into(), }; if let Some(version) = version { diff --git a/sysand/src/commands/env.rs b/sysand/src/commands/env.rs index 25eab94e..a116cf96 100644 --- a/sysand/src/commands/env.rs +++ b/sysand/src/commands/env.rs @@ -7,6 +7,7 @@ use anyhow::{Result, anyhow, bail}; use camino::{Utf8Path, Utf8PathBuf}; use fluent_uri::Iri; + use sysand_core::{ auth::HTTPAuthentication, commands::{env::do_env_local_dir, lock::LockOutcome}, @@ -30,6 +31,7 @@ use crate::{ DEFAULT_INDEX_URL, cli::{InstallOptions, ResolutionOptions}, commands::sync::command_sync, + get_overrides, }; pub fn command_env>(path: P) -> Result { @@ -81,17 +83,28 @@ pub fn command_env_install( Some(config.index_urls(index, vec![DEFAULT_INDEX_URL.to_string()], default_index)?) 
}; + let overrides = get_overrides( + config, + &project_root, + &client, + runtime.clone(), + auth_policy.clone(), + )?; + let mut memory_projects = HashMap::default(); for (k, v) in &provided_iris { memory_projects.insert(fluent_uri::Iri::parse(k.clone()).unwrap(), v.to_vec()); } - - // TODO: Move out the runtime - let resolver = PriorityResolver::new( + let override_resolver = PriorityResolver::new( + MemoryResolver::from(overrides), MemoryResolver { iri_predicate: AcceptAll {}, projects: memory_projects, }, + ); + // TODO: Move out the runtime + let resolver = PriorityResolver::new( + override_resolver, standard_resolver( None, None, @@ -177,13 +190,16 @@ pub fn command_env_install_path, Policy: HTTPAuthentication>( include_std, } = resolution_opts; - let m = wrapfs::metadata(&path)?; - let project = if m.is_dir() { + let metadata = wrapfs::metadata(&path)?; + let project = if metadata.is_dir() { FileResolverProject::LocalSrcProject(LocalSrcProject { + nominal_path: None, project_path: path.as_str().into(), }) - } else if m.is_file() { - FileResolverProject::LocalKParProject(LocalKParProject::new_guess_root(&path)?) + } else if metadata.is_file() { + FileResolverProject::LocalKParProject(LocalKParProject::new_guess_root_nominal( + &path, &path, + )?) 
} else { bail!("path `{path}` is neither a directory nor a file"); }; @@ -227,17 +243,28 @@ pub fn command_env_install_path, Policy: HTTPAuthentication>( if !no_deps { let project = EditableProject::new(Utf8PathBuf::new(), project); + let overrides = get_overrides( + config, + &project_root, + &client, + runtime.clone(), + auth_policy.clone(), + )?; + let mut memory_projects = HashMap::default(); for (k, v) in provided_iris.iter() { memory_projects.insert(fluent_uri::Iri::parse(k.clone()).unwrap(), v.to_vec()); } - - // TODO: Move out the runtime - let resolver = PriorityResolver::new( + let override_resolver = PriorityResolver::new( + MemoryResolver::from(overrides), MemoryResolver { iri_predicate: AcceptAll {}, projects: memory_projects, }, + ); + // TODO: Move out the runtime + let resolver = PriorityResolver::new( + override_resolver, standard_resolver( Some(path), None, diff --git a/sysand/src/commands/info.rs b/sysand/src/commands/info.rs index 7bafcac9..ceb21673 100644 --- a/sysand/src/commands/info.rs +++ b/sysand/src/commands/info.rs @@ -15,8 +15,11 @@ use sysand_core::{ model::{ InterchangeProjectChecksumRaw, InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, }, - project::{ProjectMut, ProjectRead, utils::ToPathBuf}, - resolve::{file::FileResolverProject, standard::standard_resolver}, + project::{ProjectMut, ProjectRead, any::OverrideProject}, + resolve::{ + file::FileResolverProject, memory::MemoryResolver, priority::PriorityResolver, + standard::standard_resolver, + }, }; use anstream::{print, println}; @@ -71,11 +74,13 @@ pub fn pprint_interchange_project( } fn interpret_project_path>(path: P) -> Result { - Ok(if path.as_ref().is_file() { + let metadata = wrapfs::metadata(&path)?; + Ok(if metadata.is_file() { FileResolverProject::LocalKParProject(LocalKParProject::new_guess_root(path)?) 
- } else if path.as_ref().is_dir() { + } else if metadata.is_dir() { FileResolverProject::LocalSrcProject(LocalSrcProject { - project_path: path.to_path_buf(), + nominal_path: None, + project_path: path.as_ref().as_str().into(), }) } else { // TODO: NoResolve is for IRIs, this is a path @@ -99,12 +104,14 @@ pub fn command_info_path>( } } +#[allow(clippy::too_many_arguments)] pub fn command_info_uri( uri: Iri, _normalise: bool, client: reqwest_middleware::ClientWithMiddleware, index_urls: Option>, excluded_iris: &HashSet, + overrides: Vec<(Iri, Vec>)>, runtime: Arc, auth_policy: Arc, ) -> Result<()> { @@ -112,17 +119,20 @@ pub fn command_info_uri( let local_env_path = Utf8Path::new(".").join(DEFAULT_ENV_NAME); - let combined_resolver = standard_resolver( - cwd, - if local_env_path.is_dir() { - Some(local_env_path) - } else { - None - }, - Some(client), - index_urls, - runtime, - auth_policy, + let combined_resolver = PriorityResolver::new( + MemoryResolver::from(overrides), + standard_resolver( + cwd, + if wrapfs::is_dir(&local_env_path)? { + Some(local_env_path) + } else { + None + }, + Some(client), + index_urls, + runtime, + auth_policy, + ), ); let mut found = false; @@ -188,12 +198,14 @@ pub fn command_info_verb_path>( } } +#[allow(clippy::too_many_arguments)] pub fn command_info_verb_uri( uri: Iri, verb: InfoCommandVerb, numbered: bool, client: reqwest_middleware::ClientWithMiddleware, index_urls: Option>, + overrides: Vec<(Iri, Vec>)>, runtime: Arc, auth_policy: Arc, ) -> Result<()> { @@ -203,17 +215,20 @@ pub fn command_info_verb_uri( let local_env_path = Utf8Path::new(".").join(DEFAULT_ENV_NAME); - let combined_resolver = standard_resolver( - cwd, - if local_env_path.is_dir() { - Some(local_env_path) - } else { - None - }, - Some(client), - index_urls, - runtime, - auth_policy, + let combined_resolver = PriorityResolver::new( + MemoryResolver::from(overrides), + standard_resolver( + cwd, + if wrapfs::is_dir(&local_env_path)? 
{ + Some(local_env_path) + } else { + None + }, + Some(client), + index_urls, + runtime, + auth_policy, + ), ); let mut found = false; diff --git a/sysand/src/commands/init.rs b/sysand/src/commands/init.rs index a2425973..77bf77a5 100644 --- a/sysand/src/commands/init.rs +++ b/sysand/src/commands/init.rs @@ -34,7 +34,10 @@ pub fn command_init( no_semver, license, no_spdx, - &mut sysand_core::project::local_src::LocalSrcProject { project_path: path }, + &mut sysand_core::project::local_src::LocalSrcProject { + nominal_path: None, + project_path: path, + }, )?; Ok(()) } diff --git a/sysand/src/commands/lock.rs b/sysand/src/commands/lock.rs index ec5a0eb5..7b8512cc 100644 --- a/sysand/src/commands/lock.rs +++ b/sysand/src/commands/lock.rs @@ -25,16 +25,17 @@ use sysand_core::{ stdlib::known_std_libs, }; -use crate::{DEFAULT_INDEX_URL, cli::ResolutionOptions}; +use crate::{DEFAULT_INDEX_URL, cli::ResolutionOptions, get_overrides}; /// Generate a lockfile for project at `path`. /// `path` must be relative to workspace root. // TODO: this will not work properly if run in subdir of workspace, // as `path` will then refer to a deeper subdir -pub fn command_lock, Policy: HTTPAuthentication>( +pub fn command_lock, Policy: HTTPAuthentication, R: AsRef>( path: P, resolution_opts: ResolutionOptions, config: &Config, + project_root: R, client: reqwest_middleware::ClientWithMiddleware, runtime: Arc, auth_policy: Arc, @@ -47,8 +48,6 @@ pub fn command_lock, Policy: HTTPAuthentication>( include_std, } = resolution_opts; - let cwd = wrapfs::current_dir().ok(); - let local_env_path = path.as_ref().join(DEFAULT_ENV_NAME); let index_urls = if no_index { @@ -57,6 +56,14 @@ pub fn command_lock, Policy: HTTPAuthentication>( Some(config.index_urls(index, vec![DEFAULT_INDEX_URL.to_string()], default_index)?) 
}; + let overrides = get_overrides( + config, + &project_root, + &client, + runtime.clone(), + auth_policy.clone(), + )?; + let provided_iris = if !include_std { known_std_libs() } else { @@ -69,14 +76,18 @@ pub fn command_lock, Policy: HTTPAuthentication>( memory_projects.insert(fluent_uri::Iri::parse(k.clone()).unwrap(), v.to_vec()); } - let wrapped_resolver = PriorityResolver::new( + let override_resolver = PriorityResolver::new( + MemoryResolver::from(overrides), MemoryResolver { iri_predicate: AcceptAll {}, projects: memory_projects, }, + ); + let wrapped_resolver = PriorityResolver::new( + override_resolver, standard_resolver( - cwd, - if local_env_path.is_dir() { + None, + if wrapfs::is_dir(&local_env_path)? { Some(local_env_path) } else { None @@ -91,7 +102,7 @@ pub fn command_lock, Policy: HTTPAuthentication>( let LockOutcome { lock, dependencies: _dependencies, - } = match do_lock_local_editable(&path, wrapped_resolver) { + } = match do_lock_local_editable(&path, &project_root, wrapped_resolver) { Ok(lock_outcome) => lock_outcome, Err(LockProjectError::LockError(lock_error)) => { if let LockError::Solver(solver_error) = lock_error { diff --git a/sysand/src/commands/print_root.rs b/sysand/src/commands/print_root.rs index 294fa52b..68a6fc78 100644 --- a/sysand/src/commands/print_root.rs +++ b/sysand/src/commands/print_root.rs @@ -9,7 +9,7 @@ use sysand_core::root::do_root; use crate::CliError; pub fn command_print_root>(path: P) -> Result<()> { - match do_root(path) { + match do_root(path)? 
{ Some(root) => { println!("{}", root.canonicalize()?.display()); Ok(()) diff --git a/sysand/src/commands/remove.rs b/sysand/src/commands/remove.rs index e1f9d26f..ee26b3b2 100644 --- a/sysand/src/commands/remove.rs +++ b/sysand/src/commands/remove.rs @@ -2,16 +2,32 @@ // SPDX-License-Identifier: MIT OR Apache-2.0 use anyhow::Result; -use sysand_core::{project::local_src::LocalSrcProject, remove::do_remove}; +use camino::Utf8PathBuf; + +use sysand_core::{ + config::local_fs::{CONFIG_FILE, remove_project_source_from_config}, + project::local_src::LocalSrcProject, + remove::do_remove, +}; use crate::CliError; pub fn command_remove>( iri: S, current_project: Option, + config_file: Option, + no_config: bool, ) -> Result<()> { let mut current_project = current_project.ok_or(CliError::MissingProjectCurrentDir)?; + let config_path = config_file + .map(Utf8PathBuf::from) + .or((!no_config).then(|| current_project.root_path().join(CONFIG_FILE))); + + if let Some(path) = config_path { + remove_project_source_from_config(path, &iri)?; + } + let usages = do_remove(&mut current_project, &iri)?; let removed = "Removed"; diff --git a/sysand/src/commands/sync.rs b/sysand/src/commands/sync.rs index 5753d62a..a89bb19c 100644 --- a/sysand/src/commands/sync.rs +++ b/sysand/src/commands/sync.rs @@ -12,9 +12,13 @@ use sysand_core::{ env::local_directory::LocalDirectoryEnvironment, lock::Lock, project::{ - AsSyncProjectTokio, ProjectReadAsync, local_kpar::LocalKParProject, - local_src::LocalSrcProject, memory::InMemoryProject, - reqwest_kpar_download::ReqwestKparDownloadedProject, reqwest_src::ReqwestSrcProjectAsync, + AsSyncProjectTokio, ProjectReadAsync, + gix_git_download::{GixDownloadedError, GixDownloadedProject}, + local_kpar::LocalKParProject, + local_src::LocalSrcProject, + memory::InMemoryProject, + reqwest_kpar_download::ReqwestKparDownloadedProject, + reqwest_src::ReqwestSrcProjectAsync, }, }; @@ -31,6 +35,7 @@ pub fn command_sync, Policy: HTTPAuthentication>( lock, env, 
Some(|src_path: &Utf8Path| LocalSrcProject { + nominal_path: Some(src_path.to_path_buf()), project_path: project_root.as_ref().join(src_path), }), Some( @@ -44,7 +49,7 @@ pub fn command_sync, Policy: HTTPAuthentication>( }, ), // TODO: Fix error handling here - Some(|kpar_path: &Utf8Path| LocalKParProject::new_guess_root(kpar_path).unwrap()), + Some(|kpar_path: &Utf8Path| LocalKParProject::new_guess_root_nominal(project_root.as_ref().join(kpar_path), kpar_path).unwrap()), Some( |remote_kpar: String| -> Result>, ParseError> { Ok( @@ -55,6 +60,9 @@ pub fn command_sync, Policy: HTTPAuthentication>( ) }, ), + Some(|remote_git: String| -> Result { + GixDownloadedProject::new(remote_git) + }), provided_iris, )?; Ok(()) diff --git a/sysand/src/lib.rs b/sysand/src/lib.rs index 89e6f55b..a97a2b73 100644 --- a/sysand/src/lib.rs +++ b/sysand/src/lib.rs @@ -15,12 +15,13 @@ use std::{ use anstream::{eprint, eprintln}; use anyhow::{Result, bail}; +use fluent_uri::Iri; use camino::{Utf8Path, Utf8PathBuf}; use clap::Parser; use sysand_core::{ - auth::StandardHTTPAuthenticationBuilder, + auth::{HTTPAuthentication, StandardHTTPAuthenticationBuilder}, config::{ Config, local_fs::{get_config, load_configs}, @@ -28,7 +29,11 @@ use sysand_core::{ env::local_directory::{DEFAULT_ENV_NAME, LocalDirectoryEnvironment}, init::InitError, lock::Lock, - project::utils::wrapfs, + project::{ + any::{AnyProject, OverrideProject}, + reference::ProjectReference, + utils::wrapfs, + }, stdlib::known_std_libs, }; @@ -112,15 +117,26 @@ fn set_panic_hook() { pub fn run_cli(args: cli::Args) -> Result<()> { sysand_core::style::set_style_config(crate::style::CONFIG); + let log_level = get_log_level(args.global_opts.verbose, args.global_opts.quiet); + if logger::init(log_level).is_err() { + let warn = style::WARN; + eprintln!( + "{warn}warning{warn:#}: failed to set up logger because it has already been set up;\n\ + {:>8} log messages may not be formatted properly", + ' ' + ); + log::set_max_level(log_level); 
+ } + let current_workspace = sysand_core::discover::current_workspace()?; let current_project = sysand_core::discover::current_project()?; let cwd = wrapfs::current_dir()?; - let project_root = current_project.clone().map(|p| p.root_path()).clone(); + let project_root = current_project.as_ref().map(|p| p.root_path().to_owned()); let current_environment = { let dir = project_root.as_ref().unwrap_or(&cwd); - crate::get_env(dir) + crate::get_env(dir)? }; let auto_config = if args.global_opts.no_config { @@ -137,22 +153,6 @@ pub fn run_cli(args: cli::Args) -> Result<()> { config.merge(auto_config); - let (verbose, quiet) = if args.global_opts.sets_log_level() { - (args.global_opts.verbose, args.global_opts.quiet) - } else { - get_config_verbose_quiet(&config) - }; - let log_level = get_log_level(verbose, quiet); - if logger::init(log_level).is_err() { - let warn = style::WARN; - eprintln!( - "{warn}warning{warn:#}: failed to set up logger because it has already been set up;\n\ - {:>8} log messages may not be formatted properly", - ' ' - ); - log::set_max_level(log_level); - } - let client = reqwest_middleware::ClientBuilder::new(reqwest::Client::new()).build(); let runtime = Arc::new( @@ -333,11 +333,12 @@ pub fn run_cli(args: cli::Args) -> Result<()> { } }, cli::Command::Lock { resolution_opts } => { - if project_root.is_some() { + if let Some(project_root) = project_root { crate::commands::lock::command_lock( ".", resolution_opts, &config, + project_root, client, runtime, basic_auth_policy, @@ -362,11 +363,12 @@ pub fn run_cli(args: cli::Args) -> Result<()> { }; let project_root = project_root.unwrap_or(cwd); let lockfile = project_root.join(sysand_core::commands::lock::DEFAULT_LOCKFILE_NAME); - if !lockfile.is_file() { + if !wrapfs::is_file(&lockfile)? 
{ command_lock( ".", resolution_opts, &config, + project_root.clone(), client.clone(), runtime.clone(), basic_auth_policy.clone(), @@ -427,6 +429,15 @@ pub fn run_cli(args: cli::Args) -> Result<()> { HashSet::default() }; + let project_root = project_root.unwrap_or(cwd); + let overrides = get_overrides( + &config, + &project_root, + &client, + runtime.clone(), + basic_auth_policy.clone(), + )?; + enum Location { WorkDir, Iri(fluent_uri::Iri), @@ -517,6 +528,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { client, index_urls, &excluded_iris, + overrides, runtime, basic_auth_policy, ), @@ -529,6 +541,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { numbered, client, index_urls, + overrides, runtime, basic_auth_policy, ) @@ -547,19 +560,28 @@ pub fn run_cli(args: cli::Args) -> Result<()> { no_lock, no_sync, resolution_opts, + source_opts, } => command_add( iri, version_constraint, no_lock, no_sync, resolution_opts, - &config, + source_opts, + config, + args.global_opts.config_file, + args.global_opts.no_config, current_project, client, runtime, basic_auth_policy, ), - cli::Command::Remove { iri } => command_remove(iri, current_project), + cli::Command::Remove { iri } => command_remove( + iri, + current_project, + args.global_opts.config_file, + args.global_opts.no_config, + ), cli::Command::Include { paths, compute_checksum: add_checksum, @@ -574,11 +596,11 @@ pub fn run_cli(args: cli::Args) -> Result<()> { } else { let mut output_dir = current_workspace .as_ref() - .map(|workspace| &workspace.workspace_path) + .map(|workspace| workspace.root_path()) .unwrap_or_else(|| ¤t_project.project_path) .join("output"); let name = sysand_core::build::default_kpar_file_name(¤t_project)?; - if !output_dir.is_dir() { + if !wrapfs::is_dir(&output_dir)? 
{ wrapfs::create_dir(&output_dir)?; } output_dir.push(name); @@ -592,8 +614,8 @@ pub fn run_cli(args: cli::Args) -> Result<()> { let current_workspace = current_workspace.ok_or(CliError::MissingProjectCurrentDir)?; let output_dir = - path.unwrap_or_else(|| current_workspace.workspace_path.join("output")); - if !output_dir.is_dir() { + path.unwrap_or_else(|| current_workspace.root_path().join("output")); + if !wrapfs::is_dir(&output_dir)? { wrapfs::create_dir(&output_dir)?; } command_build_for_workspace(output_dir, current_workspace) @@ -638,30 +660,21 @@ pub fn run_cli(args: cli::Args) -> Result<()> { } } -pub fn get_env(project_root: impl AsRef) -> Option { +pub fn get_env(project_root: impl AsRef) -> Result> { let environment_path = project_root.as_ref().join(DEFAULT_ENV_NAME); - if !environment_path.is_dir() { - None - } else { - Some(LocalDirectoryEnvironment { environment_path }) - } + let env = wrapfs::is_dir(&environment_path)? + .then_some(LocalDirectoryEnvironment { environment_path }); + Ok(env) } pub fn get_or_create_env(project_root: impl AsRef) -> Result { let project_root = project_root.as_ref(); - match get_env(project_root) { + match get_env(project_root)? 
{ Some(env) => Ok(env), None => command_env(project_root.join(DEFAULT_ENV_NAME)), } } -fn get_config_verbose_quiet(config: &Config) -> (bool, bool) { - ( - config.verbose.unwrap_or_default(), - config.quiet.unwrap_or_default(), - ) -} - fn get_log_level(verbose: bool, quiet: bool) -> log::LevelFilter { match (verbose, quiet) { (true, true) => unreachable!(), @@ -670,3 +683,31 @@ fn get_log_level(verbose: bool, quiet: bool) -> log::LevelFilter { (false, false) => log::LevelFilter::Info, } } + +pub type Overrides = Vec<(Iri, Vec>)>; + +pub fn get_overrides, Policy: HTTPAuthentication>( + config: &Config, + project_root: P, + client: &reqwest_middleware::ClientWithMiddleware, + runtime: Arc, + auth_policy: Arc, +) -> Result> { + let mut overrides = Vec::new(); + for config_project in &config.projects { + for identifier in &config_project.identifiers { + let mut projects = Vec::new(); + for source in &config_project.sources { + projects.push(ProjectReference::new(AnyProject::try_from_source( + source.clone(), + &project_root, + auth_policy.clone(), + client.clone(), + runtime.clone(), + )?)); + } + overrides.push((Iri::parse(identifier.as_str())?.into(), projects)); + } + } + Ok(overrides) +} diff --git a/sysand/tests/cfg_base.rs b/sysand/tests/cfg_base.rs deleted file mode 100644 index 8c4a37b7..00000000 --- a/sysand/tests/cfg_base.rs +++ /dev/null @@ -1,47 +0,0 @@ -// SPDX-FileCopyrightText: © 2025 Sysand contributors -// SPDX-License-Identifier: MIT OR Apache-2.0 - -use assert_cmd::prelude::*; -use predicates::prelude::*; - -// pub due to https://github.com/rust-lang/rust/issues/46379 -mod common; -pub use common::*; - -#[test] -fn cfg_set_quiet() -> Result<(), Box> { - let (_, _, out_normal) = run_sysand(["init", "cfg_set_quiet"], None)?; - - out_normal - .assert() - .success() - .stderr(predicate::str::contains( - "Creating interchange project `cfg_set_quiet`", - )); - - let (_, _, out_quiet_flag) = run_sysand(["init", "--quiet", "cfg_set_quiet"], None)?; - - 
out_quiet_flag - .assert() - .success() - .stderr(predicate::str::contains("Creating interchange project `cfg_set_quiet`").not()); - - let (_temp_dir, cwd) = new_temp_cwd()?; - - let quiet_cfg = toml::to_string(&sysand_core::config::Config { - quiet: Some(true), - verbose: None, - index: None, - // auth: None, - })?; - - let out_quiet_local_config = - run_sysand_in(&cwd, ["init", "cfg_set_quiet"], Some(quiet_cfg.as_str()))?; - - out_quiet_local_config - .assert() - .success() - .stderr(predicate::str::contains("Creating interchange project `cfg_set_quiet`").not()); - - Ok(()) -} diff --git a/sysand/tests/cli_add_remove.rs b/sysand/tests/cli_add_remove.rs index fd3f2738..2c92d4c9 100644 --- a/sysand/tests/cli_add_remove.rs +++ b/sysand/tests/cli_add_remove.rs @@ -61,6 +61,709 @@ fn add_and_remove_without_lock() -> Result<(), Box> { Ok(()) } +#[test] +fn add_and_remove_as_editable() -> Result<(), Box> { + let (_temp_dir, cwd, out) = run_sysand( + ["init", "--version", "1.2.3", "--name", "add_and_remove"], + None, + )?; + + out.assert().success(); + + let config_path = cwd.join("sysand.toml"); + + let out = run_sysand_in( + &cwd, + [ + "add", + "--no-lock", + "urn:kpar:test", + "--as-editable", + "local/test", + ], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Creating configuration file at `{config_path}` + Adding source for `urn:kpar:test` to configuration file at `{config_path}` + Adding usage: `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [ + { + "resource": "urn:kpar:test" + } + ] +} +"# + ); + + let config = std::fs::read_to_string(&config_path)?; + + assert_eq!( + config, + r#"[[project]] +identifiers = [ + "urn:kpar:test", +] +sources = [ + { editable = "local/test" }, +] +"# + ); + + let out = run_sysand_in( + &cwd, + ["remove", 
"urn:kpar:test"], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Removing source for `urn:kpar:test` from configuration file at `{config_path}` + Removing empty configuration file at `{config_path}` + Removing `urn:kpar:test` from usages + Removed `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [] +} +"# + ); + + assert!(!config_path.is_file()); + + Ok(()) +} + +#[test] +fn add_and_remove_as_local_src() -> Result<(), Box> { + let (_temp_dir, cwd, out) = run_sysand( + ["init", "--version", "1.2.3", "--name", "add_and_remove"], + None, + )?; + + out.assert().success(); + + let config_path = cwd.join("sysand.toml"); + + let out = run_sysand_in( + &cwd, + [ + "add", + "--no-lock", + "urn:kpar:test", + "--as-local-src", + "local/test", + ], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Creating configuration file at `{config_path}` + Adding source for `urn:kpar:test` to configuration file at `{config_path}` + Adding usage: `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [ + { + "resource": "urn:kpar:test" + } + ] +} +"# + ); + + let config = std::fs::read_to_string(&config_path)?; + + assert_eq!( + config, + r#"[[project]] +identifiers = [ + "urn:kpar:test", +] +sources = [ + { src_path = "local/test" }, +] +"# + ); + + let out = run_sysand_in( + &cwd, + ["remove", "urn:kpar:test"], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Removing source for `urn:kpar:test` from configuration file at `{config_path}` + Removing empty configuration file at `{config_path}` + Removing 
`urn:kpar:test` from usages + Removed `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [] +} +"# + ); + + assert!(!config_path.is_file()); + + Ok(()) +} + +#[test] +fn add_and_remove_as_local_kpar() -> Result<(), Box> { + let (_temp_dir, cwd, out) = run_sysand( + ["init", "--version", "1.2.3", "--name", "add_and_remove"], + None, + )?; + + out.assert().success(); + + let config_path = cwd.join("sysand.toml"); + + let out = run_sysand_in( + &cwd, + [ + "add", + "--no-lock", + "urn:kpar:test", + "--as-local-kpar", + "local/test.kpar", + ], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Creating configuration file at `{config_path}` + Adding source for `urn:kpar:test` to configuration file at `{config_path}` + Adding usage: `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [ + { + "resource": "urn:kpar:test" + } + ] +} +"# + ); + + let config = std::fs::read_to_string(&config_path)?; + + assert_eq!( + config, + r#"[[project]] +identifiers = [ + "urn:kpar:test", +] +sources = [ + { kpar_path = "local/test.kpar" }, +] +"# + ); + + let out = run_sysand_in( + &cwd, + ["remove", "urn:kpar:test"], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Removing source for `urn:kpar:test` from configuration file at `{config_path}` + Removing empty configuration file at `{config_path}` + Removing `urn:kpar:test` from usages + Removed `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [] +} +"# + ); + + 
assert!(!config_path.is_file()); + + Ok(()) +} + +#[test] +fn add_and_remove_as_remote_src() -> Result<(), Box> { + let (_temp_dir, cwd, out) = run_sysand( + ["init", "--version", "1.2.3", "--name", "add_and_remove"], + None, + )?; + + out.assert().success(); + + let config_path = cwd.join("sysand.toml"); + + let out = run_sysand_in( + &cwd, + [ + "add", + "--no-lock", + "urn:kpar:test", + "--as-remote-src", + "https://www.example.com/test", + ], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Creating configuration file at `{config_path}` + Adding source for `urn:kpar:test` to configuration file at `{config_path}` + Adding usage: `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [ + { + "resource": "urn:kpar:test" + } + ] +} +"# + ); + + let config = std::fs::read_to_string(&config_path)?; + + assert_eq!( + config, + r#"[[project]] +identifiers = [ + "urn:kpar:test", +] +sources = [ + { remote_src = "https://www.example.com/test" }, +] +"# + ); + + let out = run_sysand_in( + &cwd, + ["remove", "urn:kpar:test"], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Removing source for `urn:kpar:test` from configuration file at `{config_path}` + Removing empty configuration file at `{config_path}` + Removing `urn:kpar:test` from usages + Removed `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [] +} +"# + ); + + assert!(!config_path.is_file()); + + Ok(()) +} + +#[test] +fn add_and_remove_as_remote_kpar() -> Result<(), Box> { + let (_temp_dir, cwd, out) = run_sysand( + ["init", "--version", "1.2.3", "--name", "add_and_remove"], + None, + )?; + + 
out.assert().success(); + + let config_path = cwd.join("sysand.toml"); + + let out = run_sysand_in( + &cwd, + [ + "add", + "--no-lock", + "urn:kpar:test", + "--as-remote-kpar", + "https://www.example.com/test.kpar", + ], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Creating configuration file at `{config_path}` + Adding source for `urn:kpar:test` to configuration file at `{config_path}` + Adding usage: `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [ + { + "resource": "urn:kpar:test" + } + ] +} +"# + ); + + let config = std::fs::read_to_string(&config_path)?; + + assert_eq!( + config, + r#"[[project]] +identifiers = [ + "urn:kpar:test", +] +sources = [ + { remote_kpar = "https://www.example.com/test.kpar" }, +] +"# + ); + + let out = run_sysand_in( + &cwd, + ["remove", "urn:kpar:test"], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Removing source for `urn:kpar:test` from configuration file at `{config_path}` + Removing empty configuration file at `{config_path}` + Removing `urn:kpar:test` from usages + Removed `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [] +} +"# + ); + + assert!(!config_path.is_file()); + + Ok(()) +} + +#[test] +fn add_and_remove_as_remote_git() -> Result<(), Box> { + let (_temp_dir, cwd, out) = run_sysand( + ["init", "--version", "1.2.3", "--name", "add_and_remove"], + None, + )?; + + out.assert().success(); + + let config_path = cwd.join("sysand.toml"); + + let out = run_sysand_in( + &cwd, + [ + "add", + "--no-lock", + "urn:kpar:test", + "--as-remote-git", + "https://www.example.com/test.git", + ], + 
Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Creating configuration file at `{config_path}` + Adding source for `urn:kpar:test` to configuration file at `{config_path}` + Adding usage: `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [ + { + "resource": "urn:kpar:test" + } + ] +} +"# + ); + + let config = std::fs::read_to_string(&config_path)?; + + assert_eq!( + config, + r#"[[project]] +identifiers = [ + "urn:kpar:test", +] +sources = [ + { remote_git = "https://www.example.com/test.git" }, +] +"# + ); + + let out = run_sysand_in( + &cwd, + ["remove", "urn:kpar:test"], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Removing source for `urn:kpar:test` from configuration file at `{config_path}` + Removing empty configuration file at `{config_path}` + Removing `urn:kpar:test` from usages + Removed `urn:kpar:test`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [] +} +"# + ); + + assert!(!config_path.is_file()); + + Ok(()) +} + +#[test] +fn add_and_remove_from_path() -> Result<(), Box> { + let (_temp_dir, cwd, out) = run_sysand( + ["init", "--version", "1.2.3", "--name", "add_and_remove"], + None, + )?; + + out.assert().success(); + + let config_path = cwd.join("sysand.toml"); + + std::fs::create_dir_all(cwd.join("local/test"))?; + + let out = run_sysand_in( + &cwd, + [ + "add", + "--no-lock", + "urn:kpar:test-src", + "--from-path", + "local/test", + ], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Creating configuration file at `{config_path}` + Adding source for `urn:kpar:test-src` to 
configuration file at `{config_path}` + Adding usage: `urn:kpar:test-src`"# + ))); + + std::fs::File::create_new(cwd.join("local/test.kpar"))?; + + let out = run_sysand_in( + &cwd, + [ + "add", + "--no-lock", + "urn:kpar:test-kpar", + "--from-path", + "local/test.kpar", + ], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Adding source for `urn:kpar:test-kpar` to configuration file at `{config_path}` + Adding usage: `urn:kpar:test-kpar`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": "add_and_remove", + "version": "1.2.3", + "usage": [ + { + "resource": "urn:kpar:test-src" + }, + { + "resource": "urn:kpar:test-kpar" + } + ] +} +"# + ); + + let config = std::fs::read_to_string(&config_path)?; + + assert_eq!( + config, + r#"[[project]] +identifiers = [ + "urn:kpar:test-src", +] +sources = [ + { src_path = "local/test" }, +] + +[[project]] +identifiers = [ + "urn:kpar:test-kpar", +] +sources = [ + { kpar_path = "local/test.kpar" }, +] +"# + ); + + let out = run_sysand_in( + &cwd, + ["remove", "urn:kpar:test-src"], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Removing source for `urn:kpar:test-src` from configuration file at `{config_path}` + Removing `urn:kpar:test-src` from usages + Removed `urn:kpar:test-src`"# + ))); + + let out = run_sysand_in( + &cwd, + ["remove", "urn:kpar:test-kpar"], + Some(config_path.as_str()), + )?; + + out.assert() + .success() + .stderr(predicate::str::contains(format!( + r#"Removing source for `urn:kpar:test-kpar` from configuration file at `{config_path}` + Removing empty configuration file at `{config_path}` + Removing `urn:kpar:test-kpar` from usages + Removed `urn:kpar:test-kpar`"# + ))); + + let info_json = std::fs::read_to_string(cwd.join(".project.json"))?; + + assert_eq!( + info_json, + r#"{ + "name": 
"add_and_remove", + "version": "1.2.3", + "usage": [] +} +"# + ); + + assert!(!config_path.is_file()); + + Ok(()) +} + +// TODO: Add +// #[test] +// fn add_and_remove_from_url() -> Result<(), Box> { ... } + #[test] fn add_and_remove_with_lock_preinstall() -> Result<(), Box> { let (_temp_dir_dep, cwd_dep, out) = run_sysand( diff --git a/sysand/tests/cli_info.rs b/sysand/tests/cli_info.rs index f2f28b4f..31c01751 100644 --- a/sysand/tests/cli_info.rs +++ b/sysand/tests/cli_info.rs @@ -1250,6 +1250,8 @@ fn info_multi_index_url_config() -> Result<(), Box> { .expect_at_most(2) // TODO: Reduce this to 1 .create(); + let (_temp_dir, cwd) = new_temp_cwd()?; + let cfg = format!( r#" [[index]] @@ -1263,9 +1265,12 @@ fn info_multi_index_url_config() -> Result<(), Box> { &server_alt.url() ); + let cfg_path = cwd.join(sysand_core::config::local_fs::CONFIG_FILE); + std::fs::write(&cfg_path, cfg)?; + let (_, _, out) = run_sysand( ["info", "--iri", "urn:kpar:info_multi_index_url_config"], - Some(cfg.as_str()), + Some(cfg_path.as_str()), )?; out.assert() @@ -1277,7 +1282,7 @@ fn info_multi_index_url_config() -> Result<(), Box> { let (_, _, out) = run_sysand( ["info", "--iri", "urn:kpar:info_multi_index_url_config_alt"], - Some(cfg.as_str()), + Some(cfg_path.as_str()), )?; out.assert() diff --git a/sysand/tests/cli_lock.rs b/sysand/tests/cli_lock.rs index 1ca1b1f6..e4d0a1fe 100644 --- a/sysand/tests/cli_lock.rs +++ b/sysand/tests/cli_lock.rs @@ -52,6 +52,49 @@ fn lock_trivial() -> Result<(), Box> { Ok(()) } +#[test] +fn lock_local_source() -> Result<(), Box> { + let (_temp_dir, cwd, out) = run_sysand( + ["init", "--name", "lock_local_source", "--version", "1.2.3"], + None, + )?; + + out.assert().success().stdout(predicate::str::is_empty()); + + let out = run_sysand_in(&cwd, ["init", "--version", "1.0.0", "local_dep"], None)?; + + out.assert().success().stdout(predicate::str::is_empty()); + + let out = run_sysand_in(&cwd, ["add", "urn:kpar:local_dep", "--no-lock"], None)?; + + 
out.assert().success().stdout(predicate::str::is_empty()); + + let cfg = toml::to_string(&sysand_core::config::Config { + indexes: vec![], + projects: vec![sysand_core::config::ConfigProject { + identifiers: vec!["urn:kpar:local_dep".to_string()], + sources: vec![sysand_core::lock::Source::LocalSrc { + src_path: cwd.join("local_dep").as_str().into(), + }], + }], + })?; + + let cfg_path = cwd.join(sysand_core::config::local_fs::CONFIG_FILE); + std::fs::write(&cfg_path, cfg)?; + + let out = run_sysand_in(&cwd, ["lock"], Some(cfg_path.as_str()))?; + + out.assert().success().stdout(predicate::str::is_empty()); + + let lock_file: Lock = + toml::from_str(&std::fs::read_to_string(cwd.join(DEFAULT_LOCKFILE_NAME))?)?; + let projects = lock_file.projects; + + assert_eq!(projects.len(), 2); + + Ok(()) +} + fn mock_project< P: AsRef, N: AsRef, diff --git a/sysand/tests/common/mod.rs b/sysand/tests/common/mod.rs index 17396ab3..7be8b8bd 100644 --- a/sysand/tests/common/mod.rs +++ b/sysand/tests/common/mod.rs @@ -10,7 +10,6 @@ use rexpect::session::{PtySession, spawn_command}; use std::os::unix::process::ExitStatusExt; use std::{ error::Error, - io::Write, process::{Command, Output}, }; @@ -27,23 +26,13 @@ pub fn fixture_path(name: &str) -> Utf8PathBuf { pub fn sysand_cmd_in_with<'a, I: IntoIterator>( cwd: &Utf8Path, args: I, - cfg: Option<&str>, + cfg: Option<&'a str>, env: &IndexMap, impl AsRef>, ) -> Result> { - let cfg_args = if let Some(config) = cfg { - let config_path = cwd.join("sysand.toml"); - let mut config_file = std::fs::File::create_new(&config_path)?; - config_file.write_all(config.as_bytes())?; - vec!["--config-file".to_string(), config_path.to_string()] - } else { - vec![] - }; - let args = [ - args.into_iter().map(|s| s.to_string()).collect(), - vec!["--no-config".to_string()], - cfg_args, - ] - .concat(); + let args = args + .into_iter() + .chain(["--no-config"]) + .chain(cfg.iter().flat_map(|cfg| ["--config-file", cfg])); // NOTE had trouble getting 
test-temp-dir crate working, but would be better let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("sysand")); @@ -60,7 +49,7 @@ pub fn sysand_cmd_in_with<'a, I: IntoIterator>( pub fn sysand_cmd_in<'a, I: IntoIterator>( cwd: &Utf8Path, args: I, - cfg: Option<&str>, + cfg: Option<&'a str>, ) -> Result> { sysand_cmd_in_with(cwd, args, cfg, &IndexMap::<&str, &str>::default()) } @@ -79,7 +68,7 @@ pub fn new_temp_cwd() -> Result<(Utf8TempDir, Utf8PathBuf), Box> { pub fn sysand_cmd<'a, I: IntoIterator>( args: I, - cfg: Option<&str>, + cfg: Option<&'a str>, env: &IndexMap, impl AsRef>, ) -> Result<(Utf8TempDir, Utf8PathBuf, Command), Box> { // NOTE had trouble getting test-temp-dir crate working, but would be better @@ -92,7 +81,7 @@ pub fn sysand_cmd<'a, I: IntoIterator>( pub fn run_sysand_in_with<'a, I: IntoIterator>( cwd: &Utf8Path, args: I, - cfg: Option<&str>, + cfg: Option<&'a str>, env: &IndexMap, impl AsRef>, ) -> Result> { Ok(sysand_cmd_in_with(cwd, args, cfg, env)?.output()?) @@ -101,14 +90,14 @@ pub fn run_sysand_in_with<'a, I: IntoIterator>( pub fn run_sysand_in<'a, I: IntoIterator>( cwd: &Utf8Path, args: I, - cfg: Option<&str>, + cfg: Option<&'a str>, ) -> Result> { Ok(sysand_cmd_in(cwd, args, cfg)?.output()?) 
} pub fn run_sysand_with<'a, I: IntoIterator>( args: I, - cfg: Option<&str>, + cfg: Option<&'a str>, env: &IndexMap, impl AsRef>, ) -> Result<(Utf8TempDir, Utf8PathBuf, Output), Box> { let (temp_dir, cwd, mut cmd) = sysand_cmd(args /*, stdin*/, cfg, env)?; @@ -118,7 +107,7 @@ pub fn run_sysand_with<'a, I: IntoIterator>( pub fn run_sysand<'a, I: IntoIterator>( args: I, - cfg: Option<&str>, + cfg: Option<&'a str>, ) -> Result<(Utf8TempDir, Utf8PathBuf, Output), Box> { run_sysand_with(args, cfg, &IndexMap::<&str, &str>::default()) } @@ -129,7 +118,7 @@ pub fn run_sysand_interactive_in<'a, I: IntoIterator>( cwd: &Utf8Path, args: I, timeout_ms: Option, - cfg: Option<&str>, + cfg: Option<&'a str>, ) -> Result> { let cmd = sysand_cmd_in(cwd, args, cfg)?; @@ -141,7 +130,7 @@ pub fn run_sysand_interactive_in<'a, I: IntoIterator>( pub fn run_sysand_interactive_with<'a, I: IntoIterator>( args: I, timeout_ms: Option, - cfg: Option<&str>, + cfg: Option<&'a str>, env: &IndexMap, impl AsRef>, ) -> Result<(Utf8TempDir, Utf8PathBuf, PtySession), Box> { let (temp_dir, cwd, cmd) = sysand_cmd(args, cfg, env)?; @@ -153,7 +142,7 @@ pub fn run_sysand_interactive_with<'a, I: IntoIterator>( pub fn run_sysand_interactive<'a, I: IntoIterator>( args: I, timeout_ms: Option, - cfg: Option<&str>, + cfg: Option<&'a str>, ) -> Result<(Utf8TempDir, Utf8PathBuf, PtySession), Box> { run_sysand_interactive_with(args, timeout_ms, cfg, &IndexMap::<&str, &str>::default()) }