diff options
author | Matthias Beyer <matthias.beyer@atos.net> | 2021-01-18 14:48:59 +0100 |
---|---|---|
committer | Matthias Beyer <matthias.beyer@atos.net> | 2021-01-18 14:48:59 +0100 |
commit | 0295809436d8e178a7d0528b47b9d4313b292eef (patch) | |
tree | 55671566fb700328c81a34b322cfa55309e098f8 /src/commands/build.rs | |
parent | 5bee5329b823431fd3c971f75281084617766edd (diff) |
Run `cargo fmt`
Signed-off-by: Matthias Beyer <matthias.beyer@atos.net>
Diffstat (limited to 'src/commands/build.rs')
-rw-r--r-- | src/commands/build.rs | 202 |
1 file changed, 126 insertions, 76 deletions
diff --git a/src/commands/build.rs b/src/commands/build.rs index d0c8150..2af3588 100644 --- a/src/commands/build.rs +++ b/src/commands/build.rs @@ -13,10 +13,10 @@ use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use anyhow::anyhow; use anyhow::Context; use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; use clap::ArgMatches; use colored::Colorize; use diesel::ExpressionMethods; @@ -24,14 +24,14 @@ use diesel::PgConnection; use diesel::QueryDsl; use diesel::RunQueryDsl; use itertools::Itertools; -use log::{debug, info, warn, trace}; +use log::{debug, info, trace, warn}; use tokio::stream::StreamExt; use tokio::sync::RwLock; use crate::config::*; +use crate::filestore::path::StoreRoot; use crate::filestore::ReleaseStore; use crate::filestore::StagingStore; -use crate::filestore::path::StoreRoot; use crate::job::JobResource; use crate::job::JobSet; use crate::log::LogItem; @@ -43,55 +43,63 @@ use crate::package::Tree; use crate::repository::Repository; use crate::schema; use crate::source::SourceCache; -use crate::util::EnvironmentVariableName; use crate::util::docker::ImageName; use crate::util::progress::ProgressBars; +use crate::util::EnvironmentVariableName; /// Implementation of the "build" subcommand #[allow(clippy::too_many_arguments)] -pub async fn build(repo_root: &Path, - matches: &ArgMatches, - progressbars: ProgressBars, - database_connection: PgConnection, - config: &Configuration, - repo: Repository, - repo_path: &Path, - max_packages: u64) - -> Result<()> -{ - use crate::db::models::{ - EnvVar, - Package, - Job, - GitHash, - Image, - Submit, - }; +pub async fn build( + repo_root: &Path, + matches: &ArgMatches, + progressbars: ProgressBars, + database_connection: PgConnection, + config: &Configuration, + repo: Repository, + repo_path: &Path, + max_packages: u64, +) -> Result<()> { + use crate::db::models::{EnvVar, GitHash, Image, Job, Package, Submit}; let now = chrono::offset::Local::now().naive_local(); let submit_id = 
uuid::Uuid::new_v4(); println!("Submit {}, started {}", submit_id, now); let shebang = Shebang::from({ - matches.value_of("shebang") + matches + .value_of("shebang") .map(String::from) .unwrap_or_else(|| config.shebang().clone()) }); - let image_name = matches.value_of("image").map(String::from).map(ImageName::from).unwrap(); // safe by clap - if config.docker().verify_images_present() && !config.docker().images().iter().any(|img| image_name == *img) { - return Err(anyhow!("Requested build image {} is not in the configured images")) - .with_context(|| anyhow!("Available images: {:?}", config.docker().images())) - .with_context(|| anyhow!("Image present verification failed")) - .map_err(Error::from) + let image_name = matches + .value_of("image") + .map(String::from) + .map(ImageName::from) + .unwrap(); // safe by clap + if config.docker().verify_images_present() + && !config + .docker() + .images() + .iter() + .any(|img| image_name == *img) + { + return Err(anyhow!( + "Requested build image {} is not in the configured images" + )) + .with_context(|| anyhow!("Available images: {:?}", config.docker().images())) + .with_context(|| anyhow!("Image present verification failed")) + .map_err(Error::from); } debug!("Getting repository HEAD"); - let hash_str = crate::util::git::get_repo_head_commit_hash(repo_path)?; + let hash_str = crate::util::git::get_repo_head_commit_hash(repo_path)?; trace!("Repository HEAD = {}", hash_str); let phases = config.available_phases(); - let endpoint_configurations = config.docker().endpoints() + let endpoint_configurations = config + .docker() + .endpoints() .iter() .cloned() .map(|ep_cfg| { @@ -105,17 +113,20 @@ pub async fn build(repo_root: &Path, .collect(); info!("Endpoint config build"); - let pname = matches.value_of("package_name") + let pname = matches + .value_of("package_name") .map(String::from) .map(PackageName::from) .unwrap(); // safe by clap - let pvers = matches.value_of("package_version") + let pvers = matches + 
.value_of("package_version") .map(String::from) .map(PackageVersion::from); info!("We want {} ({:?})", pname, pvers); - let additional_env = matches.values_of("env") + let additional_env = matches + .values_of("env") .unwrap_or_default() .map(crate::util::env::parse_to_env) .collect::<Result<Vec<(EnvironmentVariableName, String)>>>()?; @@ -130,11 +141,16 @@ pub async fn build(repo_root: &Path, // We only support building one package per call. // Everything else is invalid if packages.len() > 1 { - return Err(anyhow!("Found multiple packages ({}). Cannot decide which one to build", packages.len())) + return Err(anyhow!( + "Found multiple packages ({}). Cannot decide which one to build", + packages.len() + )); } - let package = *packages.get(0).ok_or_else(|| anyhow!("Found no package."))?; + let package = *packages + .get(0) + .ok_or_else(|| anyhow!("Found no package."))?; - let release_dir = { + let release_dir = { let bar_release_loading = progressbars.bar(); bar_release_loading.set_length(max_packages); @@ -154,10 +170,15 @@ pub async fn build(repo_root: &Path, bar_staging_loading.set_length(max_packages); let p = if let Some(staging_dir) = matches.value_of("staging_dir").map(PathBuf::from) { - info!("Setting staging dir to {} for this run", staging_dir.display()); + info!( + "Setting staging dir to {} for this run", + staging_dir.display() + ); staging_dir } else { - config.staging_directory().join(submit_id.hyphenated().to_string()) + config + .staging_directory() + .join(submit_id.hyphenated().to_string()) }; if !p.is_dir() { @@ -171,9 +192,7 @@ pub async fn build(repo_root: &Path, } else { bar_staging_loading.finish_with_message("Failed to load staging"); } - r.map(RwLock::new) - .map(Arc::new) - .map(|store| (store, p))? + r.map(RwLock::new).map(Arc::new).map(|store| (store, p))? 
}; let tree = { @@ -192,8 +211,12 @@ pub async fn build(repo_root: &Path, if matches.is_present("no_verification") { warn!("No hash verification will be performed"); } else { - crate::commands::source::verify_impl(tree.all_packages().into_iter(), &source_cache, &progressbars) - .await?; + crate::commands::source::verify_impl( + tree.all_packages().into_iter(), + &source_cache, + &progressbars, + ) + .await?; } // linting the package scripts @@ -216,13 +239,23 @@ pub async fn build(repo_root: &Path, .map(|pkg| { if let Some(allowlist) = pkg.allowed_images() { if !allowlist.contains(&image_name) { - return Err(anyhow!("Package {} {} is only allowed on: {}", pkg.name(), pkg.version(), allowlist.iter().join(", "))) + return Err(anyhow!( + "Package {} {} is only allowed on: {}", + pkg.name(), + pkg.version(), + allowlist.iter().join(", ") + )); } } if let Some(deniedlist) = pkg.denied_images() { if deniedlist.iter().any(|denied| image_name == *denied) { - return Err(anyhow!("Package {} {} is not allowed to be built on {}", pkg.name(), pkg.version(), image_name)) + return Err(anyhow!( + "Package {} {} is not allowed to be built on {}", + pkg.name(), + pkg.version(), + image_name + )); } } @@ -233,9 +266,10 @@ pub async fn build(repo_root: &Path, trace!("Setting up database jobs for Package, GitHash, Image"); let db_package = async { Package::create_or_fetch(&database_connection, &package) }; let db_githash = async { GitHash::create_or_fetch(&database_connection, &hash_str) }; - let db_image = async { Image::create_or_fetch(&database_connection, &image_name) }; - let db_envs = async { - additional_env.clone() + let db_image = async { Image::create_or_fetch(&database_connection, &image_name) }; + let db_envs = async { + additional_env + .clone() .into_iter() .map(|(k, v)| async { let k: EnvironmentVariableName = k; // hack to work around move semantics @@ -248,25 +282,26 @@ pub async fn build(repo_root: &Path, }; trace!("Running database jobs for Package, GitHash, Image"); 
- let (db_package, db_githash, db_image, db_envs) = tokio::join!( - db_package, - db_githash, - db_image, - db_envs - ); + let (db_package, db_githash, db_image, db_envs) = + tokio::join!(db_package, db_githash, db_image, db_envs); let (db_package, db_githash, db_image, _) = (db_package?, db_githash?, db_image?, db_envs?); trace!("Database jobs for Package, GitHash, Image finished successfully"); trace!("Creating Submit in database"); - let submit = Submit::create(&database_connection, + let submit = Submit::create( + &database_connection, &tree, &now, &submit_id, &db_image, &db_package, - &db_githash)?; - trace!("Creating Submit in database finished successfully: {:?}", submit); + &db_githash, + )?; + trace!( + "Creating Submit in database finished successfully: {:?}", + submit + ); trace!("Setting up job sets"); let resources: Vec<JobResource> = additional_env.into_iter().map(JobResource::from).collect(); @@ -283,7 +318,11 @@ pub async fn build(repo_root: &Path, .database(database_connection.clone()) .source_cache(source_cache) .submit(submit) - .log_dir(if matches.is_present("write-log-file") { Some(config.log_dir().clone()) } else { None }) + .log_dir(if matches.is_present("write-log-file") { + Some(config.log_dir().clone()) + } else { + None + }) .jobsets(jobsets) .config(config) .build() @@ -292,18 +331,16 @@ pub async fn build(repo_root: &Path, info!("Running orchestrator..."); let mut artifacts = vec![]; - let errors = orch.run(&mut artifacts).await?; - let out = std::io::stdout(); + let errors = orch.run(&mut artifacts).await?; + let out = std::io::stdout(); let mut outlock = out.lock(); if !artifacts.is_empty() { writeln!(outlock, "Packages created:")?; } - artifacts.into_iter() - .try_for_each(|artifact| { - writeln!(outlock, "-> {}", staging_dir.join(artifact.path).display()) - .map_err(Error::from) - })?; + artifacts.into_iter().try_for_each(|artifact| { + writeln!(outlock, "-> {}", staging_dir.join(artifact.path).display()).map_err(Error::from) + })?; 
let mut had_error = false; for (job_uuid, error) in errors { @@ -318,27 +355,36 @@ pub async fn build(repo_root: &Path, .first::<(Job, Package)>(database_connection.as_ref())?; let number_log_lines = *config.build_error_lines(); - writeln!(outlock, "Last {} lines of Job {}", number_log_lines, job_uuid)?; - writeln!(outlock, "for package {} {}\n\n", data.1.name, data.1.version)?; + writeln!( + outlock, + "Last {} lines of Job {}", + number_log_lines, job_uuid + )?; + writeln!( + outlock, + "for package {} {}\n\n", + data.1.name, data.1.version + )?; let parsed_log = crate::log::ParsedLog::build_from(&data.0.log_text)?; let mut last_phase = None; let mut error_catched = false; - let lines = parsed_log.iter() + let lines = parsed_log + .iter() .map(|line_item| match line_item { - LogItem::Line(s) => Ok(String::from_utf8(s.to_vec())?.normal()), - LogItem::Progress(u) => Ok(format!("#BUTIDO:PROGRESS:{}", u).bright_black()), + LogItem::Line(s) => Ok(String::from_utf8(s.to_vec())?.normal()), + LogItem::Progress(u) => Ok(format!("#BUTIDO:PROGRESS:{}", u).bright_black()), LogItem::CurrentPhase(p) => { if !error_catched { last_phase = Some(p.clone()); } Ok(format!("#BUTIDO:PHASE:{}", p).bright_black()) - }, - LogItem::State(Ok(())) => Ok("#BUTIDO:STATE:OK".to_string().green()), - LogItem::State(Err(s)) => { + } + LogItem::State(Ok(())) => Ok("#BUTIDO:STATE:OK".to_string().green()), + LogItem::State(Err(s)) => { error_catched = true; Ok(format!("#BUTIDO:STATE:ERR:{}", s).red()) - }, + } }) .collect::<Result<Vec<_>>>()?; @@ -363,7 +409,11 @@ pub async fn build(repo_root: &Path, } writeln!(outlock, "\n\n")?; } else { - writeln!(outlock, "{}", "Error seems not to be caused by packaging script.".red())?; + writeln!( + outlock, + "{}", + "Error seems not to be caused by packaging script.".red() + )?; } } |