summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMatthias Beyer <matthias.beyer@atos.net>2021-08-18 10:11:00 +0200
committerMatthias Beyer <matthias.beyer@atos.net>2021-08-18 10:11:00 +0200
commitfcf7ea173bae57aea6509ff91164d968ccda8001 (patch)
tree31aa010af30faaa7836e529071b4b2b6031619f4
parentfb2019b80d4fd4f6a72e4a0d01602fbf3cd6be13 (diff)
parent0c310bab9e4df023acc0fe006d1e25ee07687c87 (diff)
Merge branch 'refactor'
-rw-r--r--src/endpoint/configured.rs62
-rw-r--r--src/filestore/staging.rs16
-rw-r--r--src/main.rs19
-rw-r--r--src/orchestrator/orchestrator.rs7
-rw-r--r--src/repository/repository.rs89
5 files changed, 86 insertions, 107 deletions
diff --git a/src/endpoint/configured.rs b/src/endpoint/configured.rs
index e38258c..e2dda2c 100644
--- a/src/endpoint/configured.rs
+++ b/src/endpoint/configured.rs
@@ -721,12 +721,12 @@ impl<'a> PreparedContainer<'a> {
}
pub async fn start(self) -> Result<StartedContainer<'a>> {
- let container = self.endpoint.docker.containers().get(&self.create_info.id);
- let _ = container
+ self.endpoint
+ .docker
+ .containers()
+ .get(&self.create_info.id)
.start()
- .inspect(|r| {
- trace!("Starting container {} -> {:?}", self.create_info.id, r);
- })
+ .inspect(|r| trace!("Starting container {} -> {:?}", self.create_info.id, r))
.map(|r| {
r.with_context(|| {
anyhow!(
@@ -766,13 +766,12 @@ impl<'a> StartedContainer<'a> {
.build();
trace!("Exec options = {:?}", exec_opts);
- let container = self.endpoint.docker.containers().get(&self.create_info.id);
-
- trace!(
- "Moving logs to log sink for container {}",
- self.create_info.id
- );
- let stream = container.exec(&exec_opts);
+ trace!("Moving logs to log sink for container {}", self.create_info.id);
+ let stream = self.endpoint
+ .docker
+ .containers()
+ .get(&self.create_info.id)
+ .exec(&exec_opts);
let exited_successfully: Option<(bool, Option<String>)> =
buffer_stream_to_line_stream(stream)
@@ -790,7 +789,6 @@ impl<'a> StartedContainer<'a> {
self.create_info.id
)
})
- .map_err(Error::from)
.and_then(|l| {
crate::log::parser()
.parse(l.as_bytes())
@@ -802,31 +800,21 @@ impl<'a> StartedContainer<'a> {
l
)
})
- .map_err(Error::from)
- .and_then(|item| {
- let mut exited_successfully = None;
- {
- match item {
- LogItem::State(Ok(_)) => {
- exited_successfully = Some((true, None))
- }
- LogItem::State(Err(ref msg)) => {
- exited_successfully = Some((false, Some(msg.clone())))
- }
- _ => {
- // Nothing
- }
- }
- }
-
- trace!("Log item: {}", item.display()?);
- logsink
- .send(item)
- .with_context(|| anyhow!("Sending log to log sink"))
- .map_err(Error::from)
- .map(|_| exited_successfully)
- })
})
+ .and_then(|item| {
+ let exited_successfully = match item {
+ LogItem::State(Ok(_)) => Some((true, None)),
+ LogItem::State(Err(ref msg)) => Some((false, Some(msg.clone()))),
+ _ => None, // Nothing
+ };
+
+ trace!("Log item: {}", item.display()?);
+ logsink
+ .send(item)
+ .with_context(|| anyhow!("Sending log to log sink"))
+ .map(|_| exited_successfully)
+ })
+ .map_err(Error::from)
})
.collect::<Result<Vec<_>>>()
.map(|r| {
diff --git a/src/filestore/staging.rs b/src/filestore/staging.rs
index af62980..ff4b17c 100644
--- a/src/filestore/staging.rs
+++ b/src/filestore/staging.rs
@@ -64,14 +64,14 @@ impl StagingStore {
if self.0.root_path().is_dir(&path) {
None
} else {
- Some({
- // Clippy doesn't detect this properly
- #[allow(clippy::redundant_clone)]
- ArtifactPath::new(path.to_path_buf())
- .inspect(|r| trace!("Loaded from path {} = {:?}", path.display(), r))
- .with_context(|| anyhow!("Loading from path: {}", path.display()))
- .map(|ap| self.0.load_from_path(&ap).clone())
- })
+ // Clippy doesn't detect this properly
+ #[allow(clippy::redundant_clone)]
+ ArtifactPath::new(path.to_path_buf())
+ .inspect(|r| trace!("Loaded from path {} = {:?}", path.display(), r))
+ .with_context(|| anyhow!("Loading from path: {}", path.display()))
+ .map(|ap| self.0.load_from_path(&ap).clone())
+ .map(Some)
+ .transpose()
}
})
.collect()
diff --git a/src/main.rs b/src/main.rs
index 3ddce14..1dc49f9 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -273,20 +273,11 @@ fn generate_completions(matches: &ArgMatches) {
use clap_generate::generators::{Bash, Elvish, Fish, Zsh};
let appname = "butido";
- match matches.value_of("shell").unwrap() {
- // unwrap safe by clap
- "bash" => {
- generate::<Bash, _>(&mut cli::cli(), appname, &mut std::io::stdout());
- }
- "elvish" => {
- generate::<Elvish, _>(&mut cli::cli(), appname, &mut std::io::stdout());
- }
- "fish" => {
- generate::<Fish, _>(&mut cli::cli(), appname, &mut std::io::stdout());
- }
- "zsh" => {
- generate::<Zsh, _>(&mut cli::cli(), appname, &mut std::io::stdout());
- }
+ match matches.value_of("shell").unwrap() { // unwrap safe by clap
+ "bash" => generate::<Bash, _>(&mut cli::cli(), appname, &mut std::io::stdout()),
+ "elvish" => generate::<Elvish, _>(&mut cli::cli(), appname, &mut std::io::stdout()),
+ "fish" => generate::<Fish, _>(&mut cli::cli(), appname, &mut std::io::stdout()),
+ "zsh" => generate::<Zsh, _>(&mut cli::cli(), appname, &mut std::io::stdout()),
_ => unreachable!(),
}
}
diff --git a/src/orchestrator/orchestrator.rs b/src/orchestrator/orchestrator.rs
index 0e548e3..fc15494 100644
--- a/src/orchestrator/orchestrator.rs
+++ b/src/orchestrator/orchestrator.rs
@@ -559,12 +559,13 @@ impl<'a> JobTask<'a> {
self.jobdef.dependencies.iter().map(|u| u.to_string()).collect::<Vec<String>>()
});
+ let dep_len = self.jobdef.dependencies.len();
// A list of job run results from dependencies that were received from the tasks for the
// dependencies
- let mut received_dependencies: HashMap<Uuid, Vec<ProducedArtifact>> = HashMap::new();
+ let mut received_dependencies: HashMap<Uuid, Vec<ProducedArtifact>> = HashMap::with_capacity(dep_len);
// A list of errors that were received from the tasks for the dependencies
- let mut received_errors: HashMap<Uuid, Error> = HashMap::with_capacity(self.jobdef.dependencies.len());
+ let mut received_errors: HashMap<Uuid, Error> = HashMap::with_capacity(dep_len);
// Helper function to check whether all UUIDs are in a list of UUIDs
let all_dependencies_are_in = |dependency_uuids: &[Uuid], list: &HashMap<Uuid, Vec<_>>| {
@@ -582,7 +583,7 @@ impl<'a> JobTask<'a> {
self.jobdef.job.package().name(),
self.jobdef.job.package().version(),
received_dependencies.iter().filter(|(rd_uuid, _)| self.jobdef.dependencies.contains(rd_uuid)).count(),
- self.jobdef.dependencies.len())
+ dep_len)
});
trace!("[{}]: Updated bar", self.jobdef.job.uuid());
diff --git a/src/repository/repository.rs b/src/repository/repository.rs
index 170c2b0..7b7b046 100644
--- a/src/repository/repository.rs
+++ b/src/repository/repository.rs
@@ -70,7 +70,7 @@ impl Repository {
if pkg_file.is_file() {
let buf = std::fs::read_to_string(&pkg_file)
- .with_context(|| format!("Reading {}", pkg_file.display()))?;
+ .with_context(|| anyhow!("Reading {}", pkg_file.display()))?;
// This function has an issue: It loads packages recursively, but if there are
// patches set for a package, these patches are set _relative_ to the current
@@ -110,9 +110,9 @@ impl Repository {
let patches_before_merge = match config.get_array("patches") {
Ok(v) => {
v.into_iter()
- .map(|p| {
- p.into_str()
- .map(PathBuf::from)
+ .map(config::Value::into_str)
+ .map(|r| {
+ r.map(PathBuf::from)
.with_context(|| anyhow!("patches must be strings"))
.map_err(Error::from)
})
@@ -126,48 +126,47 @@ impl Repository {
// Merge the new pkg.toml file over the already loaded configuration
config
.merge(config::File::from_str(&buf, config::FileFormat::Toml))
- .with_context(|| format!("Loading contents of {}", pkg_file.display()))?;
+ .with_context(|| anyhow!("Loading contents of {}", pkg_file.display()))?;
let path_relative_to_root = path.strip_prefix(root)?;
// get the patches that are in the `config` object after the merge
- let patches = match config.get_array("patches") {
- Ok(v) => {
- trace!("Patches after merging: {:?}", v);
- v
- },
-
- // if there was none, we simply use an empty array
- // This is cheap because Vec::with_capacity(0) does not allocate
- Err(config::ConfigError::NotFound(_)) => Vec::with_capacity(0),
- Err(e) => return Err(e).map_err(Error::from),
- }
- .into_iter()
-
- // Map all `Value`s to String and then join them on the path that is relative to
- // the root directory of the repository.
- .map(|patch| patch.into_str().map_err(Error::from))
- .map_ok(|patch| path_relative_to_root.join(patch))
- .inspect(|patch| trace!("Patch relative to root: {:?}", patch.as_ref().map(|p| p.display())))
+ let patches = config
+ .get_array("patches")
+ .or_else(|e| match e {
+
+ // if there was none, we simply use an empty array
+ // This is cheap because Vec::with_capacity(0) does not allocate
+ config::ConfigError::NotFound(_) => Ok(Vec::with_capacity(0)),
+ other => Err(other),
+ })?
+ .into_iter()
- // if the patch file exists, use it (as config::Value).
- //
- // Otherwise we have an error here, because we're refering to a non-existing file.
- .and_then_ok(|patch| if patch.exists() {
- trace!("Path to patch exists: {}", patch.display());
- Ok(Some(patch))
- } else if patches_before_merge.iter().any(|pb| pb.file_name() == patch.file_name()) {
- // We have a patch already in the array that is named equal to the patch
- // we have in the current recursion.
- // It seems like this patch was already in the list and we re-found it
- // because we loaded a deeper pkg.toml file.
- Ok(None)
- } else {
- trace!("Path to patch does not exist: {}", patch.display());
- Err(anyhow!("Patch does not exist: {}", patch.display()))
- })
- .filter_map_ok(|o| o)
- .collect::<Result<Vec<_>>>()?;
+ // Map all `Value`s to String and then join them on the path that is relative to
+ // the root directory of the repository.
+ .map(config::Value::into_str)
+ .map_err(Error::from)
+ .map_ok(|patch| path_relative_to_root.join(patch))
+ .inspect(|patch| trace!("Patch relative to root: {:?}", patch.as_ref().map(|p| p.display())))
+
+ // if the patch file exists, use it (as config::Value).
+ //
+                // Otherwise we have an error here, because we're referring to a non-existing file.
+ .and_then_ok(|patch| if patch.exists() {
+ trace!("Path to patch exists: {}", patch.display());
+ Ok(Some(patch))
+ } else if patches_before_merge.iter().any(|pb| pb.file_name() == patch.file_name()) {
+ // We have a patch already in the array that is named equal to the patch
+ // we have in the current recursion.
+ // It seems like this patch was already in the list and we re-found it
+ // because we loaded a deeper pkg.toml file.
+ Ok(None)
+ } else {
+ trace!("Path to patch does not exist: {}", patch.display());
+ Err(anyhow!("Patch does not exist: {}", patch.display()))
+ })
+ .filter_map_ok(|o| o)
+ .collect::<Result<Vec<_>>>()?;
// If we found any patches, use them. Otherwise use the array from before the merge
// (which already has the correct pathes from the previous recursion).
@@ -187,13 +186,13 @@ impl Repository {
}
let subdirs = all_subdirs(path)
- .with_context(|| format!("Finding subdirs for {}", pkg_file.display()))?;
+ .with_context(|| anyhow!("Finding subdirs for {}", pkg_file.display()))?;
if subdirs.is_empty() {
progress.tick();
if pkg_file.is_file() {
let package = config.try_into()
- .with_context(|| format!("Failed to parse {} into package", path.display()))
+ .with_context(|| anyhow!("Failed to parse {} into package", path.display()))
.and_then(|package: Package| {
if package.name().is_empty() {
Err(anyhow!("Package name cannot be empty: {}", pkg_file.display()))
@@ -213,7 +212,7 @@ impl Repository {
vec.and_then(|mut v| {
trace!("Recursing into {}", dir.display());
let mut loaded = load_recursive(root, &dir, config.clone(), progress)
- .with_context(|| format!("Reading package from {}", pkg_file.display()))?;
+ .with_context(|| anyhow!("Reading package from {}", pkg_file.display()))?;
v.append(&mut loaded);
Ok(v)
@@ -223,7 +222,7 @@ impl Repository {
}
let inner = load_recursive(path, path, config::Config::default(), progress)
- .with_context(|| format!("Recursing for {}", path.display()))?
+ .with_context(|| anyhow!("Recursing for {}", path.display()))?
.into_iter()
.inspect(|p| trace!("Loading into repository: {:?}", p))
.map_ok(|p| ((p.name().clone(), p.version().clone()), p))