summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--Cargo.toml6
-rw-r--r--src/cli.rs4
-rw-r--r--src/commands/build.rs6
-rw-r--r--src/commands/db.rs64
-rw-r--r--src/commands/release.rs14
-rw-r--r--src/commands/source.rs86
-rw-r--r--src/commands/tree_of.rs2
-rw-r--r--src/db/connection.rs15
-rw-r--r--src/db/find_artifacts.rs20
-rw-r--r--src/endpoint/configured.rs35
-rw-r--r--src/main.rs11
-rw-r--r--src/orchestrator/orchestrator.rs86
12 files changed, 220 insertions, 129 deletions
diff --git a/Cargo.toml b/Cargo.toml
index b982d11..6c29ecc 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -36,7 +36,7 @@ daggy = { version = "0.7", features = [ "serde" ] }
dialoguer = "0.8"
diesel = { version = ">=1.4.6", features = ["postgres", "chrono", "uuid", "serde_json"] }
diesel_migrations = "*"
-env_logger = "0.8"
+env_logger = "0.9"
filters = "0.4.0"
futures = "0.3"
getset = "0.1"
@@ -94,3 +94,7 @@ rand = "=0.4.3"
# See https://github.com/bitvecto-rs/bitvec/issues/105#issuecomment-778570981
funty = "=1.1.0"
+# Pin, because dialoguer pulls it in, but 1.4.x and newer has MSRV 1.51.0. With
+# the pin here, we ensure that the build does not use 1.4.0 or newer.
+zeroize = ">=1.3.0, <1.4.0"
+
diff --git a/src/cli.rs b/src/cli.rs
index a8b0cc1..512e35a 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -27,11 +27,11 @@ pub fn cli<'a>() -> App<'a> {
.version(crate_version!())
.about("Generic Build Orchestration System for building linux packages with docker")
- .after_help(r#"
+ .after_help(indoc::indoc!(r#"
The following environment variables can be passed to butido:
RUST_LOG - to enable logging, for exact usage see the rust cookbook
- "#)
+ "#))
.arg(Arg::new("hide_bars")
.required(false)
diff --git a/src/commands/build.rs b/src/commands/build.rs
index 8825750..bb0c2d9 100644
--- a/src/commands/build.rs
+++ b/src/commands/build.rs
@@ -33,22 +33,22 @@ use tokio_stream::StreamExt;
use uuid::Uuid;
use crate::config::*;
-use crate::filestore::path::StoreRoot;
use crate::filestore::ReleaseStore;
use crate::filestore::StagingStore;
+use crate::filestore::path::StoreRoot;
use crate::job::JobResource;
use crate::log::LogItem;
use crate::orchestrator::OrchestratorSetup;
+use crate::package::Dag;
use crate::package::PackageName;
use crate::package::PackageVersion;
use crate::package::Shebang;
-use crate::package::Dag;
use crate::repository::Repository;
use crate::schema;
use crate::source::SourceCache;
+use crate::util::EnvironmentVariableName;
use crate::util::docker::ImageName;
use crate::util::progress::ProgressBars;
-use crate::util::EnvironmentVariableName;
/// Implementation of the "build" subcommand
#[allow(clippy::too_many_arguments)]
diff --git a/src/commands/db.rs b/src/commands/db.rs
index e984848..672a0d6 100644
--- a/src/commands/db.rs
+++ b/src/commands/db.rs
@@ -33,11 +33,12 @@ use log::trace;
use crate::commands::util::get_date_filter;
use crate::config::Configuration;
-use crate::db::models;
use crate::db::DbConnectionConfig;
+use crate::db::models;
use crate::log::JobResult;
use crate::package::Script;
use crate::schema;
+use crate::util::progress::ProgressBars;
diesel_migrations::embed_migrations!("migrations");
@@ -46,19 +47,20 @@ pub fn db(
db_connection_config: DbConnectionConfig<'_>,
config: &Configuration,
matches: &ArgMatches,
+ progressbars: ProgressBars,
) -> Result<()> {
match matches.subcommand() {
Some(("cli", matches)) => cli(db_connection_config, matches),
- Some(("setup", _matches)) => setup(db_connection_config),
- Some(("artifacts", matches)) => artifacts(db_connection_config, matches),
- Some(("envvars", matches)) => envvars(db_connection_config, matches),
- Some(("images", matches)) => images(db_connection_config, matches),
- Some(("submit", matches)) => submit(db_connection_config, matches),
- Some(("submits", matches)) => submits(db_connection_config, matches),
- Some(("jobs", matches)) => jobs(db_connection_config, matches),
- Some(("job", matches)) => job(db_connection_config, config, matches),
- Some(("log-of", matches)) => log_of(db_connection_config, matches),
- Some(("releases", matches)) => releases(db_connection_config, config, matches),
+ Some(("setup", _matches)) => setup(db_connection_config, progressbars),
+ Some(("artifacts", matches)) => artifacts(db_connection_config, matches, progressbars),
+ Some(("envvars", matches)) => envvars(db_connection_config, matches, progressbars),
+ Some(("images", matches)) => images(db_connection_config, matches, progressbars),
+ Some(("submit", matches)) => submit(db_connection_config, matches, progressbars),
+ Some(("submits", matches)) => submits(db_connection_config, matches, progressbars),
+ Some(("jobs", matches)) => jobs(db_connection_config, matches, progressbars),
+ Some(("job", matches)) => job(db_connection_config, config, matches, progressbars),
+ Some(("log-of", matches)) => log_of(db_connection_config, matches, progressbars),
+ Some(("releases", matches)) => releases(db_connection_config, config, matches, progressbars),
Some((other, _)) => Err(anyhow!("Unknown subcommand: {}", other)),
None => Err(anyhow!("No subcommand")),
}
@@ -148,18 +150,18 @@ fn cli(db_connection_config: DbConnectionConfig<'_>, matches: &ArgMatches) -> Re
.run_for_uri(db_connection_config)
}
-fn setup(conn_cfg: DbConnectionConfig<'_>) -> Result<()> {
- let conn = conn_cfg.establish_connection()?;
+fn setup(conn_cfg: DbConnectionConfig<'_>, progressbars: ProgressBars) -> Result<()> {
+ let conn = conn_cfg.establish_connection(&progressbars)?;
embedded_migrations::run_with_output(&conn, &mut std::io::stdout()).map_err(Error::from)
}
/// Implementation of the "db artifacts" subcommand
-fn artifacts(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> {
+fn artifacts(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches, progressbars: ProgressBars) -> Result<()> {
use crate::schema::artifacts::dsl;
let csv = matches.is_present("csv");
let hdrs = crate::commands::util::mk_header(vec!["Path", "Released", "Job"]);
- let conn = conn_cfg.establish_connection()?;
+ let conn = conn_cfg.establish_connection(&progressbars)?;
let data = matches
.value_of("job_uuid")
.map(uuid::Uuid::parse_str)
@@ -203,12 +205,12 @@ fn artifacts(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<(
}
/// Implementation of the "db envvars" subcommand
-fn envvars(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> {
+fn envvars(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches, progressbars: ProgressBars) -> Result<()> {
use crate::schema::envvars::dsl;
let csv = matches.is_present("csv");
let hdrs = crate::commands::util::mk_header(vec!["Name", "Value"]);
- let conn = conn_cfg.establish_connection()?;
+ let conn = conn_cfg.establish_connection(&progressbars)?;
let data = dsl::envvars
.load::<models::EnvVar>(&conn)?
.into_iter()
@@ -225,12 +227,12 @@ fn envvars(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()>
}
/// Implementation of the "db images" subcommand
-fn images(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> {
+fn images(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches, progressbars: ProgressBars) -> Result<()> {
use crate::schema::images::dsl;
let csv = matches.is_present("csv");
let hdrs = crate::commands::util::mk_header(vec!["Name"]);
- let conn = conn_cfg.establish_connection()?;
+ let conn = conn_cfg.establish_connection(&progressbars)?;
let data = dsl::images
.load::<models::Image>(&conn)?
.into_iter()
@@ -247,8 +249,8 @@ fn images(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()>
}
/// Implementation of the "db submit" subcommand
-fn submit(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> {
- let conn = conn_cfg.establish_connection()?;
+fn submit(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches, progressbars: ProgressBars) -> Result<()> {
+ let conn = conn_cfg.establish_connection(&progressbars)?;
let submit_id = matches.value_of("submit")
.map(uuid::Uuid::from_str)
.transpose()
@@ -331,11 +333,11 @@ fn submit(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()>
}
/// Implementation of the "db submits" subcommand
-fn submits(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> {
+fn submits(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches, progressbars: ProgressBars) -> Result<()> {
let csv = matches.is_present("csv");
let limit = matches.value_of("limit").map(i64::from_str).transpose()?;
let hdrs = crate::commands::util::mk_header(vec!["Time", "UUID"]);
- let conn = conn_cfg.establish_connection()?;
+ let conn = conn_cfg.establish_connection(&progressbars)?;
let query = schema::submits::table
.order_by(schema::submits::id.desc()); // required for the --limit implementation
@@ -397,7 +399,7 @@ fn submits(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()>
}
/// Implementation of the "db jobs" subcommand
-fn jobs(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> {
+fn jobs(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches, progressbars: ProgressBars) -> Result<()> {
let csv = matches.is_present("csv");
let hdrs = crate::commands::util::mk_header(vec![
"Submit",
@@ -408,7 +410,7 @@ fn jobs(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> {
"Package",
"Version",
]);
- let conn = conn_cfg.establish_connection()?;
+ let conn = conn_cfg.establish_connection(&progressbars)?;
let older_than_filter = get_date_filter("older_than", matches)?;
let newer_than_filter = get_date_filter("newer_than", matches)?;
@@ -495,14 +497,14 @@ fn jobs(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> {
}
/// Implementation of the "db job" subcommand
-fn job(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgMatches) -> Result<()> {
+fn job(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgMatches, progressbars: ProgressBars) -> Result<()> {
let script_highlight = !matches.is_present("no_script_highlight");
let script_line_numbers = !matches.is_present("no_script_line_numbers");
let configured_theme = config.script_highlight_theme();
let show_log = matches.is_present("show_log");
let show_script = matches.is_present("show_script");
let csv = matches.is_present("csv");
- let conn = conn_cfg.establish_connection()?;
+ let conn = conn_cfg.establish_connection(&progressbars)?;
let job_uuid = matches
.value_of("job_uuid")
.map(uuid::Uuid::parse_str)
@@ -669,8 +671,8 @@ fn job(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgMa
}
/// Implementation of the subcommand "db log-of"
-fn log_of(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> {
- let conn = conn_cfg.establish_connection()?;
+fn log_of(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches, progressbars: ProgressBars) -> Result<()> {
+ let conn = conn_cfg.establish_connection(&progressbars)?;
let job_uuid = matches
.value_of("job_uuid")
.map(uuid::Uuid::parse_str)
@@ -692,9 +694,9 @@ fn log_of(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()>
}
/// Implementation of the "db releases" subcommand
-fn releases(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgMatches) -> Result<()> {
+fn releases(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgMatches, progressbars: ProgressBars) -> Result<()> {
let csv = matches.is_present("csv");
- let conn = conn_cfg.establish_connection()?;
+ let conn = conn_cfg.establish_connection(&progressbars)?;
let header = crate::commands::util::mk_header(["Package", "Version", "Date", "Path"].to_vec());
let mut query = schema::jobs::table
.inner_join(schema::packages::table)
diff --git a/src/commands/release.rs b/src/commands/release.rs
index 6f0232b..12decff 100644
--- a/src/commands/release.rs
+++ b/src/commands/release.rs
@@ -22,18 +22,20 @@ use log::{debug, info, trace};
use tokio_stream::StreamExt;
use crate::config::Configuration;
-use crate::db::models as dbmodels;
use crate::db::DbConnectionConfig;
+use crate::db::models as dbmodels;
+use crate::util::progress::ProgressBars;
/// Implementation of the "release" subcommand
pub async fn release(
db_connection_config: DbConnectionConfig<'_>,
config: &Configuration,
matches: &ArgMatches,
+ progressbars: ProgressBars,
) -> Result<()> {
match matches.subcommand() {
- Some(("new", matches)) => new_release(db_connection_config, config, matches).await,
- Some(("rm", matches)) => rm_release(db_connection_config, config, matches).await,
+ Some(("new", matches)) => new_release(db_connection_config, config, matches, progressbars).await,
+ Some(("rm", matches)) => rm_release(db_connection_config, config, matches, progressbars).await,
Some((other, _matches)) => Err(anyhow!("Unknown subcommand: {}", other)),
None => Err(anyhow!("Missing subcommand")),
}
@@ -44,6 +46,7 @@ async fn new_release(
db_connection_config: DbConnectionConfig<'_>,
config: &Configuration,
matches: &ArgMatches,
+ progressbars: ProgressBars,
) -> Result<()> {
let print_released_file_pathes = !matches.is_present("quiet");
let release_store_name = matches.value_of("release_store_name").unwrap(); // safe by clap
@@ -60,7 +63,7 @@ async fn new_release(
debug!("Release called for: {:?} {:?}", pname, pvers);
- let conn = db_connection_config.establish_connection()?;
+ let conn = db_connection_config.establish_connection(&progressbars)?;
let submit_uuid = matches
.value_of("submit_uuid")
.map(uuid::Uuid::parse_str)
@@ -193,6 +196,7 @@ pub async fn rm_release(
db_connection_config: DbConnectionConfig<'_>,
config: &Configuration,
matches: &ArgMatches,
+ progressbars: ProgressBars,
) -> Result<()> {
let release_store_name = matches.value_of("release_store_name").map(String::from).unwrap(); // safe by clap
if !(config.releases_directory().exists() && config.releases_directory().is_dir()) {
@@ -209,7 +213,7 @@ pub async fn rm_release(
let pvers = matches.value_of("package_version").map(String::from).unwrap(); // safe by clap
debug!("Remove Release called for: {:?} {:?}", pname, pvers);
- let conn = db_connection_config.establish_connection()?;
+ let conn = db_connection_config.establish_connection(&progressbars)?;
let (release, artifact) = crate::schema::jobs::table
.inner_join(crate::schema::packages::table)
diff --git a/src/commands/source.rs b/src/commands/source.rs
index 3e0167d..36cf352 100644
--- a/src/commands/source.rs
+++ b/src/commands/source.rs
@@ -214,6 +214,49 @@ pub async fn download(
repo: Repository,
progressbars: ProgressBars,
) -> Result<()> {
+ async fn perform_download(source: &SourceEntry, bar: &indicatif::ProgressBar) -> Result<()> {
+ trace!("Creating: {:?}", source);
+ let file = source.create().await.with_context(|| {
+ anyhow!(
+ "Creating source file destination: {}",
+ source.path().display()
+ )
+ })?;
+
+ let mut file = tokio::io::BufWriter::new(file);
+ let response = match reqwest::get(source.url().as_ref()).await {
+ Ok(resp) => resp,
+ Err(e) => {
+ bar.finish_with_message(format!("Failed: {}", source.url()));
+ return Err(e).with_context(|| anyhow!("Downloading '{}'", source.url()))
+ }
+ };
+
+ if let Some(len) = response.content_length() {
+ bar.set_length(len);
+ }
+
+ let mut stream = reqwest::get(source.url().as_ref()).await?.bytes_stream();
+ let mut bytes_written = 0;
+ while let Some(bytes) = stream.next().await {
+ let bytes = bytes?;
+ file.write_all(bytes.as_ref()).await?;
+ bytes_written += bytes.len();
+
+ bar.inc(bytes.len() as u64);
+ if let Some(len) = response.content_length() {
+ bar.set_message(format!("Downloading {} ({}/{} bytes)", source.url(), bytes_written, len));
+ } else {
+ bar.set_message(format!("Downloading {} ({} bytes)", source.url(), bytes_written));
+ }
+ }
+
+ file.flush()
+ .await
+ .map_err(Error::from)
+ .map(|_| ())
+ }
+
let force = matches.is_present("force");
let cache = PathBuf::from(config.source_cache_root());
let sc = SourceCache::new(cache);
@@ -260,49 +303,6 @@ pub async fn download(
if source_path_exists && !force {
Err(anyhow!("Source exists: {}", source.path().display()))
} else {
- async fn perform_download(source: &SourceEntry, bar: &indicatif::ProgressBar) -> Result<()> {
- trace!("Creating: {:?}", source);
- let file = source.create().await.with_context(|| {
- anyhow!(
- "Creating source file destination: {}",
- source.path().display()
- )
- })?;
-
- let mut file = tokio::io::BufWriter::new(file);
- let response = match reqwest::get(source.url().as_ref()).await {
- Ok(resp) => resp,
- Err(e) => {
- bar.finish_with_message(format!("Failed: {}", source.url()));
- return Err(e).with_context(|| anyhow!("Downloading '{}'", source.url()))
- }
- };
-
- if let Some(len) = response.content_length() {
- bar.set_length(len);
- }
-
- let mut stream = reqwest::get(source.url().as_ref()).await?.bytes_stream();
- let mut bytes_written = 0;
- while let Some(bytes) = stream.next().await {
- let bytes = bytes?;
- file.write_all(bytes.as_ref()).await?;
- bytes_written += bytes.len();
-
- bar.inc(bytes.len() as u64);
- if let Some(len) = response.content_length() {
- bar.set_message(format!("Downloading {} ({}/{} bytes)", source.url(), bytes_written, len));
- } else {
- bar.set_message(format!("Downloading {} ({} bytes)", source.url(), bytes_written));
- }
- }
-
- file.flush()
- .await
- .map_err(Error::from)
- .map(|_| ())
- }
-
if source_path_exists /* && force is implied by 'if' above*/ {
if let Err(e) = source.remove_file().await {
bar.finish_with_message(format!("Failed to remove existing file: {}", source.path().display()));
diff --git a/src/commands/tree_of.rs b/src/commands/tree_of.rs
index e632ed5..4f78c61 100644
--- a/src/commands/tree_of.rs
+++ b/src/commands/tree_of.rs
@@ -17,9 +17,9 @@ use anyhow::Result;
use clap::ArgMatches;
use resiter::AndThen;
+use crate::package::Dag;
use crate::package::PackageName;
use crate::package::PackageVersionConstraint;
-use crate::package::Dag;
use crate::repository::Repository;
use crate::util::progress::ProgressBars;
diff --git a/src/db/connection.rs b/src/db/connection.rs
index cf9a0c0..76d38c1 100644
--- a/src/db/connection.rs
+++ b/src/db/connection.rs
@@ -19,6 +19,7 @@ use getset::Getters;
use log::debug;
use crate::config::Configuration;
+use crate::util::progress::ProgressBars;
#[derive(Getters)]
pub struct DbConnectionConfig<'a> {
@@ -78,7 +79,7 @@ impl<'a> DbConnectionConfig<'a> {
})
}
- pub fn establish_connection(self) -> Result<PgConnection> {
+ pub fn establish_connection(self, progressbars: &ProgressBars) -> Result<PgConnection> {
debug!("Trying to connect to database: {:?}", self);
let database_uri: String = format!(
"postgres://{user}:{password}@{host}:{port}/{name}?connect_timeout={timeout}",
@@ -89,7 +90,17 @@ impl<'a> DbConnectionConfig<'a> {
name = self.database_name,
timeout = self.database_connection_timeout,
);
- PgConnection::establish(&database_uri).map_err(Error::from)
+
+ let bar = progressbars.spinner();
+ bar.set_message("Establishing database connection");
+
+ let conn = PgConnection::establish(&database_uri).map_err(Error::from);
+ if conn.is_err() {
+ bar.finish_with_message("Connection could not be established");
+ } else {
+ bar.finish_with_message("Connection established");
+ }
+ conn
}
}
diff --git a/src/db/find_artifacts.rs b/src/db/find_artifacts.rs
index 4f5401f..8434530 100644
--- a/src/db/find_artifacts.rs
+++ b/src/db/find_artifacts.rs
@@ -24,7 +24,6 @@ use diesel::RunQueryDsl;
use log::trace;
use resiter::AndThen;
use resiter::FilterMap;
-use resiter::Map;
use crate::config::Configuration;
use crate::db::models as dbmodels;
@@ -33,7 +32,6 @@ use crate::filestore::path::FullArtifactPath;
use crate::filestore::ReleaseStore;
use crate::filestore::StagingStore;
use crate::package::Package;
-use crate::package::ParseDependency;
use crate::package::ScriptBuilder;
use crate::package::Shebang;
use crate::schema;
@@ -73,24 +71,6 @@ pub fn find_artifacts<'a>(
};
let package_environment = pkg.environment();
- let build_dependencies_names = pkg
- .dependencies()
- .build()
- .iter()
- .map(|d| d.parse_as_name_and_version())
- .map_ok(|tpl| tpl.0) // TODO: We only filter by dependency NAME right now, not by version constraint
- .collect::<Result<Vec<_>>>()?;
-
- let runtime_dependencies_names = pkg
- .dependencies()
- .runtime()
- .iter()
- .map(|d| d.parse_as_name_and_version())
- .map_ok(|tpl| tpl.0) // TODO: We only filter by dependency NAME right now, not by version constraint
- .collect::<Result<Vec<_>>>()?;
-
- trace!("Build dependency names: {:?}", build_dependencies_names);
- trace!("Runtime dependency names: {:?}", runtime_dependencies_names);
let mut query = schema::packages::table
.filter({
// The package with pkg.name() and pkg.version()
diff --git a/src/endpoint/configured.rs b/src/endpoint/configured.rs
index aecb291..6a1e3be 100644
--- a/src/endpoint/configured.rs
+++ b/src/endpoint/configured.rs
@@ -13,18 +13,19 @@ use std::path::PathBuf;
use std::str::FromStr;
use std::sync::Arc;
-use anyhow::anyhow;
use anyhow::Context;
use anyhow::Error;
use anyhow::Result;
+use anyhow::anyhow;
use futures::FutureExt;
use getset::{CopyGetters, Getters};
use log::trace;
+use result_inspect::ResultInspect;
use shiplift::Container;
use shiplift::Docker;
use shiplift::ExecContainerOptions;
-use tokio::sync::mpsc::UnboundedSender;
use tokio::sync::RwLock;
+use tokio::sync::mpsc::UnboundedSender;
use tokio_stream::StreamExt;
use typed_builder::TypedBuilder;
@@ -478,6 +479,7 @@ impl<'a> PreparedContainer<'a> {
.containers()
.create(&builder_opts)
.await
+ .with_context(|| anyhow!("Creating container with builder options = {:?}", builder_opts))
.with_context(|| anyhow!("Creating container on '{}'", endpoint.name))?;
trace!("Create info = {:?}", create_info);
Ok(create_info)
@@ -522,12 +524,16 @@ impl<'a> PreparedContainer<'a> {
.with_context(|| anyhow!("Reading file {}", source_path.display()))?;
drop(entry);
- let _ = container.copy_file_into(destination, &buf).await?;
- Ok(())
+ container.copy_file_into(destination, &buf)
+ .await
+ .inspect(|_| trace!("Successfully copied source {} to container {}", source_path.display(), container.id()))
+ .with_context(|| anyhow!("Failed to copy source {} to container {}", source_path.display(), container.id()))
+ .map_err(Error::from)
})
.collect::<futures::stream::FuturesUnordered<_>>()
.collect::<Result<()>>()
.await
+ .inspect(|_| trace!("Successfully copied sources to container {}", container.id()))
.with_context(|| anyhow!("Copying sources to container {}", container.id()))
.map_err(Error::from)
}
@@ -544,7 +550,7 @@ impl<'a> PreparedContainer<'a> {
.iter()
.map(|patch| async move {
let destination = PathBuf::from(crate::consts::PATCH_DIR_PATH).join(patch);
- trace!("Copying patch {} to container at {}/{}", crate::consts::PATCH_DIR_PATH, patch.display(), destination.display());
+ trace!("Copying patch {} to container at {}", patch.display(), destination.display());
let mut buf = vec![];
tokio::fs::OpenOptions::new()
@@ -560,12 +566,18 @@ impl<'a> PreparedContainer<'a> {
.await
.with_context(|| anyhow!("Reading file {}", patch.display()))?;
- let _ = container.copy_file_into(destination, &buf).await?;
- Ok(())
+ container.copy_file_into(destination, &buf)
+ .await
+ .map_err(Error::from)
+ .inspect(|_| trace!("Copying patch {} successful", patch.display()))
+ .with_context(|| anyhow!("Copying patch {} to container {}", patch.display(), container.id()))
+ .map_err(Error::from)
})
.collect::<futures::stream::FuturesUnordered<_>>()
.collect::<Result<()>>()
.await
+ .map_err(Error::from)
+ .inspect(|_| trace!("Copied all patches"))
.with_context(|| anyhow!("Copying patches to container {}", container.id()))
.map_err(Error::from)
}
@@ -611,7 +623,10 @@ impl<'a> PreparedContainer<'a> {
found = Some(path);
break;
},
- Err(e) => return Err(e),
+ Err(e) => {
+ trace!("Failed to join '{:?}' + '{:?}'", release_store.root_path(), art.display());
+ return Err(e)
+ },
Ok(None) => continue,
}
}
@@ -626,10 +641,12 @@ impl<'a> PreparedContainer<'a> {
art.display()
)
})?;
+ trace!("Successfully read {} into buffer", art.display());
let r = container
.copy_file_into(&destination, &buf)
.await
+ .inspect(|_| trace!("Successfully copied {} to container", art.display()))
.with_context(|| {
anyhow!(
"Copying artifact {} to container {} at {}",
@@ -645,6 +662,7 @@ impl<'a> PreparedContainer<'a> {
.collect::<futures::stream::FuturesUnordered<_>>()
.collect::<Result<Vec<_>>>()
.await
+ .inspect(|_| trace!("Successfully copied all artifacts to the container {}", container.id()))
.with_context(|| anyhow!("Copying artifacts to container {}", container.id()))
.map_err(Error::from)
.map(|_| ())
@@ -658,6 +676,7 @@ impl<'a> PreparedContainer<'a> {
container
.copy_file_into(script_path, script.as_ref().as_bytes())
.await
+ .inspect(|_| trace!("Successfully copied script to container {}", container.id()))
.with_context(|| anyhow!("Copying the script into container {}", container.id()))
.map_err(Error::from)
}
diff --git a/src/main.rs b/src/main.rs
index 59dd559..4c5ce5d 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -62,6 +62,7 @@ use logcrate::error;
use rand as _; // Required to make lints happy
use aquamarine as _; // doc-helper crate
use funty as _; // doc-helper crate
+use zeroize as _; // Required to make lints happy
mod cli;
mod commands;
@@ -151,9 +152,9 @@ async fn main() -> Result<()> {
let db_connection_config = crate::db::DbConnectionConfig::parse(&config, &cli)?;
match cli.subcommand() {
Some(("generate-completions", matches)) => generate_completions(matches),
- Some(("db", matches)) => crate::commands::db(db_connection_config, &config, matches)?,
+ Some(("db", matches)) => crate::commands::db(db_connection_config, &config, matches, progressbars)?,
Some(("build", matches)) => {
- let conn = db_connection_config.establish_connection()?;
+ let conn = db_connection_config.establish_connection(&progressbars)?;
let repo = load_repo()?;
@@ -199,7 +200,7 @@ async fn main() -> Result<()> {
Some(("find-artifact", matches)) => {
let repo = load_repo()?;
- let conn = db_connection_config.establish_connection()?;
+ let conn = db_connection_config.establish_connection(&progressbars)?;
crate::commands::find_artifact(matches, &config, progressbars, repo, conn)
.await
.context("find-artifact command failed")?
@@ -220,7 +221,7 @@ async fn main() -> Result<()> {
}
Some(("release", matches)) => {
- crate::commands::release(db_connection_config, &config, matches)
+ crate::commands::release(db_connection_config, &config, matches, progressbars)
.await
.context("release command failed")?
}
@@ -241,7 +242,7 @@ async fn main() -> Result<()> {
Some(("metrics", _)) => {
let repo = load_repo()?;
- let conn = db_connection_config.establish_connection()?;
+ let conn = db_connection_config.establish_connection(&progressbars)?;
crate::commands::metrics(&repo_path, &config, repo, conn)
.await
.context("metrics command failed")?
diff --git a/src/orchestrator/orchestrator.rs b/src/orchestrator/orchestrator.rs
index dd1322d..7e2799d 100644
--- a/src/orchestrator/orchestrator.rs
+++ b/src/orchestrator/orchestrator.rs
@@ -214,7 +214,47 @@ impl<'a> OrchestratorSetup<'a> {
/// It is either a list of artifacts with the UUID of the job they were produced by,
/// or a UUID and an Error object, where the UUID is the job UUID and the error is the
/// anyhow::Error that was issued.
-type JobResult = std::result::Result<HashMap<Uuid, Vec<ArtifactPath>>, HashMap<Uuid, Error>>;
+///
+/// The artifacts are encapsulated into a `ProducedArtifact`, see the documentation of the type for
+/// why.
+type JobResult = std::result::Result<HashMap<Uuid, Vec<ProducedArtifact>>, HashMap<Uuid, Error>>;
+
+/// A type that represents whether an artifact was built or reused from an old job
+///
+/// This is necessary to decide in dependent jobs whether a package needs to be rebuilt even though
+/// the script and environment did not change.
+///
+/// E.g.: if libA depends on libB, and libB changed and needs to be rebuilt, we need to rebuild
+/// all packages that depend (directly or indirectly) on that library.
+#[derive(Clone, Debug)]
+enum ProducedArtifact {
+ Built(ArtifactPath),
+ Reused(ArtifactPath),
+}
+
+impl ProducedArtifact {
+ /// Get whether the ProducedArtifact was built or reused from another job
+ fn was_build(&self) -> bool {
+ std::matches!(self, ProducedArtifact::Built(_))
+ }
+
+ /// Unpack the ProducedArtifact object into the ArtifactPath object it contains
+ fn unpack(self) -> ArtifactPath {
+ match self {
+ ProducedArtifact::Built(a) => a,
+ ProducedArtifact::Reused(a) => a,
+ }
+ }
+}
+
+impl Borrow<ArtifactPath> for ProducedArtifact {
+ fn borrow(&self) -> &ArtifactPath {
+ match self {
+ ProducedArtifact::Built(a) => a,
+ ProducedArtifact::Reused(a) => a,
+ }
+ }
+}
impl<'a> Orchestrator<'a> {
pub async fn run(self, output: &mut Vec<ArtifactPath>) -> Result<HashMap<Uuid, Error>> {
@@ -375,6 +415,7 @@ impl<'a> Orchestrator<'a> {
let sender = prep.3.into_inner().unwrap_or_else(|| vec![root_sender.clone()]);
JobTask::new(prep.0, prep.1, sender)
})
+ .inspect(|task| trace!("Running: {}", task.jobdef.job.uuid()))
.map(|task| task.run())
.collect::<futures::stream::FuturesUn