author     Matthias Beyer <matthias.beyer@atos.net>  2021-09-09 08:38:59 +0200
committer  Matthias Beyer <matthias.beyer@atos.net>  2021-09-09 08:38:59 +0200
commit     7fa55210e54e49e419d9e0bcff3ca7f144aac597 (patch)
tree       a6d7eb2430461ebf23832828cbdb9a2f059f896f
parent     9f021c020ada7403bec2a0dbff0f1b052b916b70 (diff)
parent     ed9d61a07ac69b9bdaf649356622e2553d0ef72c (diff)
Merge branch 'rewrite-find-artifact'
-rw-r--r--  src/cli.rs                          9
-rw-r--r--  src/commands/find_artifact.rs      17
-rw-r--r--  src/db/find_artifacts.rs          305
-rw-r--r--  src/db/mod.rs                       2
-rw-r--r--  src/orchestrator/orchestrator.rs   20
5 files changed, 201 insertions, 152 deletions
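
This merge replaces the free function crate::db::find_artifacts with a FindArtifacts struct deriving typed_builder::TypedBuilder, and adds an --image/-I flag to the find-artifact subcommand so results can be restricted to artifacts built on a given image. For orientation, a minimal sketch of the new call shape, mirroring the patched call site in src/commands/find_artifact.rs (the surrounding variables are placeholders taken from that hunk):

    // Sketch of the builder-style API introduced by this merge; values
    // such as `config`, `database`, and `pkg` stand in for the locals
    // that exist at the real call site below.
    let found = crate::db::FindArtifacts::builder()
        .config(config)
        .database_connection(database.clone())
        .release_stores(&release_stores)
        .staging_store(staging_store.as_ref())
        .env_filter(&env_filter)
        .script_filter(script_filter)
        .image_name(image_name.as_ref())
        .package(pkg)
        .build()
        .run()?;
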
diff --git a/src/cli.rs b/src/cli.rs
index 5c4b399..dbafd91 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -662,6 +662,15 @@ pub fn cli<'a>() -> App<'a> {
.validator(env_pass_validator)
.about("Filter for this \"key=value\" environment variable")
)
+ .arg(Arg::new("image")
+ .required(false)
+ .multiple(false)
+ .long("image")
+ .short('I')
+ .takes_value(true)
+ .value_name("IMAGE")
+ .about("Only list artifacts that were built on IMAGE")
+ )
)
.subcommand(App::new("find-pkg")
diff --git a/src/commands/find_artifact.rs b/src/commands/find_artifact.rs
index 0fc9b8c..6904cf4 100644
--- a/src/commands/find_artifact.rs
+++ b/src/commands/find_artifact.rs
@@ -31,6 +31,7 @@ use crate::filestore::path::StoreRoot;
use crate::package::PackageVersionConstraint;
use crate::repository::Repository;
use crate::util::progress::ProgressBars;
+use crate::util::docker::ImageName;
/// Implementation of the "find_artifact" subcommand
pub async fn find_artifact(matches: &ArgMatches, config: &Configuration, progressbars: ProgressBars, repo: Repository, database_connection: PgConnection) -> Result<()> {
@@ -50,6 +51,10 @@ pub async fn find_artifact(matches: &ArgMatches, config: &Configuration, progres
.transpose()?
.unwrap_or_default();
+ let image_name = matches.value_of("image")
+ .map(String::from)
+ .map(ImageName::from);
+
log::debug!("Finding artifacts for '{:?}' '{:?}'", package_name_regex, package_version_constraint);
let release_stores = config
@@ -102,7 +107,17 @@ pub async fn find_artifact(matches: &ArgMatches, config: &Configuration, progres
.inspect(|pkg| trace!("Found package: {:?}", pkg))
.map(|pkg| {
let script_filter = !matches.is_present("no_script_filter");
- let pathes = crate::db::find_artifacts(database.clone(), config, pkg, &release_stores, staging_store.as_ref(), &env_filter, script_filter)?;
+ let pathes = crate::db::FindArtifacts::builder()
+ .config(config)
+ .release_stores(&release_stores)
+ .staging_store(staging_store.as_ref())
+ .database_connection(database.clone())
+ .env_filter(&env_filter)
+ .script_filter(script_filter)
+ .image_name(image_name.as_ref())
+ .package(pkg)
+ .build()
+ .run()?;
pathes.iter()
.map(|tpl| (tpl.0.joined(), tpl.1))
diff --git a/src/db/find_artifacts.rs b/src/db/find_artifacts.rs
index 8434530..8974973 100644
--- a/src/db/find_artifacts.rs
+++ b/src/db/find_artifacts.rs
@@ -12,7 +12,6 @@ use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
-use anyhow::Error;
use anyhow::Result;
use chrono::NaiveDateTime;
use diesel::BoolExpressionMethods;
@@ -36,6 +35,7 @@ use crate::package::ScriptBuilder;
use crate::package::Shebang;
use crate::schema;
use crate::util::EnvironmentVariableName;
+use crate::util::docker::ImageName;
/// Find an artifact by a job description
///
@@ -49,159 +49,182 @@ use crate::util::EnvironmentVariableName;
/// If the artifact was released, the return value contains a Some(NaiveDateTime), marking the date
/// of the release.
/// Releases are returned preferentially if multiple equal paths for an artifact are found.
-pub fn find_artifacts<'a>(
+#[derive(typed_builder::TypedBuilder)]
+pub struct FindArtifacts<'a> {
+ config: &'a Configuration,
database_connection: Arc<PgConnection>,
- config: &Configuration,
- pkg: &Package,
+
+ /// The release stores to search in
release_stores: &'a [Arc<ReleaseStore>],
+
+ /// The staging store to search in, if any
+ #[builder(default)]
staging_store: Option<&'a StagingStore>,
- additional_env: &[(EnvironmentVariableName, String)],
+
+ /// Whether to apply a filter that matches for equal script
+ ///
+    /// If a job can be found, but the script is not equal to the script of the found job, the job is
+    /// not returned
script_filter: bool,
-) -> Result<Vec<(FullArtifactPath<'a>, Option<NaiveDateTime>)>> {
- let shebang = Shebang::from(config.shebang().clone());
- let script = if script_filter {
- let script = ScriptBuilder::new(&shebang).build(
- pkg,
- config.available_phases(),
- *config.strict_script_interpolation(),
- )?;
- Some(script)
- } else {
- None
- };
-
- let package_environment = pkg.environment();
- let mut query = schema::packages::table
- .filter({
- // The package with pkg.name() and pkg.version()
- let package_name_filter = schema::packages::name.eq(pkg.name().as_ref() as &str);
- let package_version_filter =
- schema::packages::version.eq(pkg.version().as_ref() as &str);
-
- package_name_filter.and(package_version_filter)
- })
-
- // TODO: Only select from submits where the submit contained jobs that are in the
- // dependencies of `pkg`.
- .inner_join(schema::jobs::table.inner_join(schema::submits::table))
- .inner_join(schema::artifacts::table.on(schema::jobs::id.eq(schema::artifacts::job_id)))
-
- // TODO: We do not yet have a method to "left join" properly, because diesel only has
- // left_outer_join (left_join is an alias)
- // So do not include release dates here, for now
- //.left_outer_join(schema::releases::table.on(schema::releases::artifact_id.eq(schema::artifacts::id)))
- .inner_join(schema::images::table.on(schema::submits::requested_image_id.eq(schema::images::id)))
- .into_boxed();
-
- if let Some(allowed_images) = pkg.allowed_images() {
- trace!("Filtering with allowed_images = {:?}", allowed_images);
- let imgs = allowed_images
- .iter()
- .map(AsRef::<str>::as_ref)
- .collect::<Vec<_>>();
- query = query.filter(schema::images::name.eq_any(imgs));
- }
- if let Some(denied_images) = pkg.denied_images() {
- trace!("Filtering with denied_images = {:?}", denied_images);
- let imgs = denied_images
- .iter()
- .map(AsRef::<str>::as_ref)
- .collect::<Vec<_>>();
- query = query.filter(schema::images::name.ne_all(imgs));
- }
+ /// Filter for these environment variables
+ env_filter: &'a [(EnvironmentVariableName, String)],
- if let Some(script_text) = script.as_ref() {
- query = query.filter(schema::jobs::script_text.eq(script_text.as_ref()));
- }
+ /// Filter for image name
+ #[builder(default)]
+ image_name: Option<&'a ImageName>,
- trace!("Query = {}", diesel::debug_query(&query));
-
- query
- .select({
- let arts = schema::artifacts::all_columns;
- let jobs = schema::jobs::all_columns;
- //let rels = schema::releases::release_date.nullable();
-
- (arts, jobs)
- })
- .load::<(dbmodels::Artifact, dbmodels::Job)>(
- &*database_connection,
- )
- .map_err(Error::from)
- .and_then(|results: Vec<_>| {
- results
- .into_iter()
- .inspect(|(art, job)| log::debug!("Filtering further: {:?}, job {:?}", art, job.id))
- //
- // Filter by environment variables
- // All environment variables of the package must be present in the loaded
- // package, so that we can be sure that the loaded package was built with
- // the same ENV.
- //
- // TODO:
- // Doing this in the database query would be way nicer, but I was not able
- // to implement it.
- //
- .map(|tpl| -> Result<(_, _)> {
-               // This is an Iterator::filter() but because our condition here might fail, we
- // map() and do the actual filtering later.
-
- let job = tpl.1;
- let job_env: Vec<(String, String)> = job
- .env(&*database_connection)?
- .into_iter()
- .map(|var: dbmodels::EnvVar| (var.name, var.value))
- .collect();
-
- trace!("The job we found had env: {:?}", job_env);
- let envs_equal = environments_equal(&job_env, package_environment.as_ref(), additional_env);
-               trace!("environments were equal = {}", envs_equal);
- Ok((tpl.0, envs_equal))
- })
- .filter(|r| match r { // the actual filtering from above
- Err(_) => true,
- Ok((_, bl)) => *bl,
- })
- .and_then_ok(|(art, _)| {
- if let Some(release) = art.get_release(&*database_connection)? {
- Ok((art, Some(release.release_date)))
- } else {
- Ok((art, None))
- }
- })
- .and_then_ok(|(p, ndt)| ArtifactPath::new(PathBuf::from(p.path)).map(|a| (a, ndt)))
- .and_then_ok(|(artpath, ndt)| {
- if let Some(staging) = staging_store.as_ref() {
- trace!(
- "Searching in staging: {:?} for {:?}",
- staging.root_path(),
- artpath
- );
- if let Some(art) = staging.get(&artpath) {
- trace!("Found in staging: {:?}", art);
- return staging.root_path().join(art).map(|p| p.map(|p| (p, ndt)))
- }
+ /// Search for this package
+ package: &'a Package,
+}
+
+impl<'a> FindArtifacts<'a> {
+ /// Run the FindArtifact as configured
+ pub fn run(self) -> Result<Vec<(FullArtifactPath<'a>, Option<NaiveDateTime>)>> {
+ let shebang = Shebang::from(self.config.shebang().clone());
+ let script = if self.script_filter {
+ let script = ScriptBuilder::new(&shebang).build(
+ self.package,
+ self.config.available_phases(),
+ *self.config.strict_script_interpolation(),
+ )?;
+ Some(script)
+ } else {
+ None
+ };
+
+ let package_environment = self.package.environment();
+ let mut query = schema::packages::table
+ .filter({
+ // The package with pkg.name() and pkg.version()
+ let package_name_filter = schema::packages::name.eq(self.package.name().as_ref() as &str);
+ let package_version_filter =
+ schema::packages::version.eq(self.package.version().as_ref() as &str);
+
+ package_name_filter.and(package_version_filter)
+ })
+
+ // TODO: Only select from submits where the submit contained jobs that are in the
+ // dependencies of `pkg`.
+ .inner_join(schema::jobs::table.inner_join(schema::submits::table))
+ .inner_join(schema::artifacts::table.on(schema::jobs::id.eq(schema::artifacts::job_id)))
+
+ // TODO: We do not yet have a method to "left join" properly, because diesel only has
+ // left_outer_join (left_join is an alias)
+ // So do not include release dates here, for now
+ //.left_outer_join(schema::releases::table.on(schema::releases::artifact_id.eq(schema::artifacts::id)))
+ .inner_join(schema::images::table.on(schema::submits::requested_image_id.eq(schema::images::id)))
+ .into_boxed();
+
+ if let Some(allowed_images) = self.package.allowed_images() {
+ trace!("Filtering with allowed_images = {:?}", allowed_images);
+ let imgs = allowed_images
+ .iter()
+ .map(AsRef::<str>::as_ref)
+ .collect::<Vec<_>>();
+ query = query.filter(schema::images::name.eq_any(imgs));
+ }
+
+ if let Some(denied_images) = self.package.denied_images() {
+ trace!("Filtering with denied_images = {:?}", denied_images);
+ let imgs = denied_images
+ .iter()
+ .map(AsRef::<str>::as_ref)
+ .collect::<Vec<_>>();
+ query = query.filter(schema::images::name.ne_all(imgs));
+ }
+
+ if let Some(script_text) = script.as_ref() {
+ query = query.filter(schema::jobs::script_text.eq(script_text.as_ref()));
+ }
+
+ if let Some(image_name) = self.image_name.as_ref() {
+ query = query.filter(schema::images::name.eq(image_name.as_ref()));
+ }
+
+ trace!("Query = {}", diesel::debug_query(&query));
+
+ query
+ .select({
+ let arts = schema::artifacts::all_columns;
+ let jobs = schema::jobs::all_columns;
+ //let rels = schema::releases::release_date.nullable();
+
+ (arts, jobs)
+ })
+ .load::<(dbmodels::Artifact, dbmodels::Job)>(&*self.database_connection)?
+ .into_iter()
+ .inspect(|(art, job)| log::debug!("Filtering further: {:?}, job {:?}", art, job.id))
+ //
+ // Filter by environment variables
+ // All environment variables of the package must be present in the loaded
+ // package, so that we can be sure that the loaded package was built with
+ // the same ENV.
+ //
+ // TODO:
+ // Doing this in the database query would be way nicer, but I was not able
+ // to implement it.
+ //
+ .map(|tpl| -> Result<(_, _)> {
+               // This is an Iterator::filter() but because our condition here might fail, we
+ // map() and do the actual filtering later.
+
+ let job = tpl.1;
+ let job_env: Vec<(String, String)> = job
+ .env(&*self.database_connection)?
+ .into_iter()
+ .map(|var: dbmodels::EnvVar| (var.name, var.value))
+ .collect();
+
+ trace!("The job we found had env: {:?}", job_env);
+ let envs_equal = environments_equal(&job_env, package_environment.as_ref(), self.env_filter);
+               trace!("environments were equal = {}", envs_equal);
+ Ok((tpl.0, envs_equal))
+ })
+ .filter(|r| match r { // the actual filtering from above
+ Err(_) => true,
+ Ok((_, bl)) => *bl,
+ })
+ .and_then_ok(|(art, _)| {
+ if let Some(release) = art.get_release(&*self.database_connection)? {
+ Ok((art, Some(release.release_date)))
+ } else {
+ Ok((art, None))
+ }
+ })
+ .and_then_ok(|(p, ndt)| ArtifactPath::new(PathBuf::from(p.path)).map(|a| (a, ndt)))
+ .and_then_ok(|(artpath, ndt)| {
+ if let Some(staging) = self.staging_store.as_ref() {
+ trace!(
+ "Searching in staging: {:?} for {:?}",
+ staging.root_path(),
+ artpath
+ );
+ if let Some(art) = staging.get(&artpath) {
+ trace!("Found in staging: {:?}", art);
+ return staging.root_path().join(art).map(|p| p.map(|p| (p, ndt)))
}
+ }
- // If we cannot find the artifact in the release store either, we return None.
- // This is the case if there indeed was a release, but it was removed from the
- // filesystem.
- for release_store in release_stores {
- if let Some(art) = release_store.get(&artpath) {
- trace!("Found in release: {:?}", art);
- return release_store.root_path().join(art).map(|p| p.map(|p| (p, ndt)))
- }
+ // If we cannot find the artifact in the release store either, we return None.
+ // This is the case if there indeed was a release, but it was removed from the
+ // filesystem.
+ for release_store in self.release_stores {
+ if let Some(art) = release_store.get(&artpath) {
+ trace!("Found in release: {:?}", art);
+ return release_store.root_path().join(art).map(|p| p.map(|p| (p, ndt)))
}
+ }
- trace!("Found no release for artifact {:?} in any release store", artpath.display());
- Ok(None)
- })
- .filter_map_ok(|opt| opt)
- .collect::<Result<Vec<(FullArtifactPath<'a>, Option<NaiveDateTime>)>>>()
- })
+ trace!("Found no release for artifact {:?} in any release store", artpath.display());
+ Ok(None)
+ })
+ .filter_map_ok(|opt| opt)
+ .collect::<Result<Vec<(FullArtifactPath<'a>, Option<NaiveDateTime>)>>>()
+ }
}
+
fn environments_equal(job_env: &[(String, String)], pkg_env: Option<&HashMap<EnvironmentVariableName, String>>, add_env: &[(EnvironmentVariableName, String)]) -> bool {
use std::ops::Deref;
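
The setters used above come from the typed_builder derive: it generates FindArtifacts::builder(), one setter per field, and a build() that only compiles once every field without #[builder(default)] has been set (here staging_store and image_name carry that default). A minimal standalone sketch of that pattern, with illustrative names that are not taken from this repository:

    use typed_builder::TypedBuilder;

    #[derive(TypedBuilder)]
    struct Query<'a> {
        name: &'a str,
        #[builder(default)]
        image: Option<&'a str>,
    }

    fn main() {
        // `image` is omitted and falls back to None; leaving out `name`
        // would be a compile-time error rather than a runtime failure.
        let q = Query::builder().name("hello").build();
        assert!(q.image.is_none());
    }
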
diff --git a/src/db/mod.rs b/src/db/mod.rs
index 8f29c0e..93a3216 100644
--- a/src/db/mod.rs
+++ b/src/db/mod.rs
@@ -12,6 +12,6 @@ mod connection;
pub use connection::*;
mod find_artifacts;
-pub use find_artifacts::find_artifacts;
+pub use find_artifacts::FindArtifacts;
pub mod models;
diff --git a/src/orchestrator/orchestrator.rs b/src/orchestrator/orchestrator.rs
index fc15494..0f2e528 100644
--- a/src/orchestrator/orchestrator.rs
+++ b/src/orchestrator/orchestrator.rs
@@ -640,11 +640,12 @@ impl<'a> JobTask<'a> {
.chain(self.git_commit_env.cloned().into_iter())
.collect::<Vec<_>>();
- let replacement_artifacts = crate::db::find_artifacts(
- self.database.clone(),
- self.config,
- self.jobdef.job.package(),
- &self.release_stores,
+ let replacement_artifacts = crate::db::FindArtifacts::builder()
+ .database_connection(self.database.clone())
+ .config(self.config)
+ .package(self.jobdef.job.package())
+ .release_stores(&self.release_stores)
+ .image_name(Some(self.jobdef.job.image()))
// We can simply pass the staging store here, because it doesn't hurt. There are
// two scenarios:
@@ -659,10 +660,11 @@ impl<'a> JobTask<'a> {
// The fact that released artifacts are returned preferentially from this function
// call does not change anything, because if there is an artifact that's a released
// one that matches this job, we should use it anyways.
- Some(&staging_store),
- &additional_env,
- true
- )?;
+ .staging_store(Some(&staging_store))
+ .env_filter(&additional_env)
+ .script_filter(true)
+ .build()
+ .run()?;
debug!("[{}]: Found {} replacement artifacts", self.jobdef.job.uuid(), replacement_artifacts.len());
trace!("[{}]: Found replacement artifacts: {:?}", self.jobdef.job.uuid(), replacement_artifacts);