diff options
author | Matthias Beyer <mail@beyermatthias.de> | 2021-03-10 11:49:10 +0100 |
---|---|---|
committer | Matthias Beyer <mail@beyermatthias.de> | 2021-03-10 11:49:10 +0100 |
commit | fd05d457f8c89a6da8ca241a972254c02a150fe7 (patch) | |
tree | 6ab6dd5fc7aab1605bd3e80e17460b0cf6d7bb24 /src | |
parent | 7022b92db47341da9e93c044666d77fce477e404 (diff) | |
parent | 64c01a9b3337088fcee07a9d69e4f1c3e60aad5c (diff) |
Merge branch 'pathes-to-absolute'
Diffstat (limited to 'src')
-rw-r--r-- | src/endpoint/configured.rs | 49 | ||||
-rw-r--r-- | src/repository/repository.rs | 92 |
2 files changed, 138 insertions, 3 deletions
diff --git a/src/endpoint/configured.rs b/src/endpoint/configured.rs index 219b6f9..7265dd6 100644 --- a/src/endpoint/configured.rs +++ b/src/endpoint/configured.rs @@ -395,8 +395,9 @@ impl<'a> PreparedContainer<'a> { let create_info = Self::build_container(endpoint, &job).await?; let container = endpoint.docker.containers().get(&create_info.id); - let (cpysrc, cpyart, cpyscr) = tokio::join!( + let (cpysrc, cpypch, cpyart, cpyscr) = tokio::join!( Self::copy_source_to_container(&container, &job), + Self::copy_patches_to_container(&container, &job), Self::copy_artifacts_to_container(&container, &job, staging_store, &release_stores), Self::copy_script_to_container(&container, &script) ); @@ -409,6 +410,14 @@ impl<'a> PreparedContainer<'a> { ) })?; + let _ = cpypch.with_context(|| { + anyhow!( + "Copying the patches to container {} on '{}'", + create_info.id, + endpoint.name + ) + })?; + let _ = cpyart.with_context(|| { anyhow!( "Copying the artifacts to container {} on '{}'", @@ -517,6 +526,44 @@ impl<'a> PreparedContainer<'a> { .map_err(Error::from) } + async fn copy_patches_to_container<'ca>( + container: &Container<'ca>, + job: &RunnableJob, + ) -> Result<()> { + use tokio::io::AsyncReadExt; + + log::debug!("Copying patches to container: {:?}", job.package().patches()); + job.package() + .patches() + .iter() + .map(|patch| async move { + let destination = PathBuf::from("/patches").join(patch); + trace!("Copying patch {} to container at /patches/{}", patch.display(), destination.display()); + + let mut buf = vec![]; + tokio::fs::OpenOptions::new() + .create(false) + .create_new(false) + .append(false) + .write(false) + .read(true) + .open(&patch) + .await + .with_context(|| anyhow!("Getting patch file: {}", patch.display()))? 
+ .read_to_end(&mut buf) + .await + .with_context(|| anyhow!("Reading file {}", patch.display()))?; + + let _ = container.copy_file_into(destination, &buf).await?; + Ok(()) + }) + .collect::<futures::stream::FuturesUnordered<_>>() + .collect::<Result<()>>() + .await + .with_context(|| anyhow!("Copying patches to container {}", container.id())) + .map_err(Error::from) + } + async fn copy_artifacts_to_container<'ca>( container: &Container<'ca>, job: &RunnableJob, diff --git a/src/repository/repository.rs b/src/repository/repository.rs index b30998f..f853f00 100644 --- a/src/repository/repository.rs +++ b/src/repository/repository.rs @@ -13,9 +13,11 @@ use std::path::Path; use std::path::PathBuf; use anyhow::Context; +use anyhow::Error; use anyhow::Result; use log::trace; use resiter::Map; +use resiter::FilterMap; use crate::package::Package; use crate::package::PackageName; @@ -57,6 +59,7 @@ impl Repository { } fn load_recursive( + root: &Path, path: &Path, mut config: config::Config, progress: &indicatif::ProgressBar, @@ -67,9 +70,93 @@ impl Repository { let buf = std::fs::read_to_string(&pkg_file) .with_context(|| format!("Reading {}", pkg_file.display()))?; + // This function has an issue: It loads packages recursively, but if there are + // patches set for a package, these patches are set _relative_ to the current + // pkg.toml file. + // + // E.G.: + // (1) /pkg.toml + // (2) /a/pkg.toml + // (3) /a/1.0/pkg.toml + // (4) /a/2.0/pkg.toml + // + // If (2) defines a patches = ["./1.patch"], the patch exists at /a/1.patch. + // We can fix that by modifying the Config object after loading (2) and fixing the + // path of the patch to be relative to the repository root. + // + // But if we continue loading the /a/ subdirectory recursively, this value gets + // overwritten by Config::refresh(), which is called by Config::merge, for example. 
+ // + // The trick is, to get the list of patches _before_ the merge, and later + // re-setting them after the merge, if there were no new patches set (which itself + // is tricky to find out, because the `Config` object _looks like_ there is a new + // array set). + // + // If (3), for example, does set a new patches=[] array, the old array is + // invalidated and no longer relevant for that package! + // Thus, we can safely throw it away and continue with the new array, fixing the + // paths to be relative to repo root again. + // + // If (4) does _not_ set any patches, we must ensure that the patches from the + // loading of (2) are used and not overwritten by the Config::refresh() call + // happening during Config::merge(). + // + + // first of all, we get the patches array. + // This is either the patches array from the last recursion or the newly set one, + // that doesn't matter here. + let patches_before_merge = match config.get_array("patches") { + Ok(v) => v, + Err(config::ConfigError::NotFound(_)) => vec![], + Err(e) => return Err(e).map_err(Error::from), + }; + trace!("Patches before merging: {:?}", patches_before_merge); + + // Merge the new pkg.toml file over the already loaded configuration config .merge(config::File::from_str(&buf, config::FileFormat::Toml)) .with_context(|| format!("Loading contents of {}", pkg_file.display()))?; + + let path_relative_to_root = path.strip_prefix(root)?; + + // get the patches that are in the `config` object after the merge + let patches = match config.get_array("patches") { + Ok(v) => { + trace!("Patches after merging: {:?}", v); + v + }, + + // if there was none, we simply use an empty array + // This is cheap because Vec::with_capacity(0) does not allocate + Err(config::ConfigError::NotFound(_)) => Vec::with_capacity(0), + Err(e) => return Err(e).map_err(Error::from), + } + .into_iter() + + // Map all `Value`s to String and then join them on the path that is relative to + // the root directory of the 
repository. + .map(|patch| patch.into_str().map_err(Error::from)) + .map_ok(|patch| path_relative_to_root.join(patch)) + + // if the patch file exists, use it (as config::Value), otherwise ignore the + // element in the iterator + .filter_map_ok(|patch| if patch.exists() { + Some(config::Value::from(patch.display().to_string())) + } else { + None + }) + .collect::<Result<Vec<_>>>()?; + + // If we found any patches, use them. Otherwise use the array from before the merge + // (which already has the correct paths from the previous recursion). + let patches = if !patches.is_empty() { + patches + } else { + patches_before_merge + }; + + trace!("Patches after postprocessing merge: {:?}", patches); + config.set_once("patches", config::Value::from(patches))?; } let subdirs = all_subdirs(path) @@ -89,7 +176,8 @@ impl Repository { } else { subdirs.into_iter().fold(Ok(Vec::new()), |vec, dir| { vec.and_then(|mut v| { - let mut loaded = load_recursive(&dir, config.clone(), progress) + trace!("Recursing into {}", dir.display()); + let mut loaded = load_recursive(root, &dir, config.clone(), progress) .with_context(|| format!("Recursing for {}", pkg_file.display()))?; v.append(&mut loaded); @@ -99,7 +187,7 @@ impl Repository { } } - let inner = load_recursive(path, config::Config::default(), progress) + let inner = load_recursive(path, path, config::Config::default(), progress) .with_context(|| format!("Recursing for {}", path.display()))? .into_iter() .inspect(|p| trace!("Loading into repository: {:?}", p)) |