From c0aac9131ec0b29302973d81fad66c2656a02639 Mon Sep 17 00:00:00 2001 From: Ryan Gonzalez Date: Thu, 17 Jul 2025 15:27:36 -0500 Subject: [PATCH 1/3] Refactor out the OBS interaction code into a new 'obo-core' crate This is largely just a rename, as well as the slightly-silly `obo-test-support` crate that exists to be depended on by `obo-core` *and* `obs-gitlab-runner`. --- Cargo.lock | 46 ++++++++++++-- Cargo.toml | 50 +++++---------- obo-core/Cargo.toml | 37 +++++++++++ {src => obo-core/src}/actions.rs | 52 ++++++++++++++++ {src => obo-core/src}/artifacts.rs | 0 {src => obo-core/src}/binaries.rs | 3 +- {src => obo-core/src}/build_meta.rs | 3 +- {src => obo-core/src}/dsc.rs | 0 obo-core/src/lib.rs | 10 +++ obo-core/src/logging.rs | 57 +++++++++++++++++ {src => obo-core/src}/monitor.rs | 3 +- {src => obo-core/src}/prune.rs | 3 +- {src => obo-core/src}/retry.rs | 3 +- {src => obo-core/src}/upload.rs | 3 +- obo-test-support/Cargo.toml | 9 +++ .../src/lib.rs | 0 obs-gitlab-runner/Cargo.toml | 42 +++++++++++++ .../chart}/.helmignore | 0 {chart => obs-gitlab-runner/chart}/Chart.yaml | 0 .../chart}/templates/_helpers.tpl | 0 .../chart}/templates/deployment.yaml | 0 .../chart}/templates/gitlab-token-secret.yaml | 0 .../chart}/templates/hpa.yaml | 0 .../chart}/values.yaml | 0 {src => obs-gitlab-runner/src}/handler.rs | 30 ++++----- {src => obs-gitlab-runner/src}/logging.rs | 51 +++------------ {src => obs-gitlab-runner/src}/main.rs | 12 ---- {src => obs-gitlab-runner/src}/pipeline.rs | 62 ++++++++----------- 28 files changed, 322 insertions(+), 154 deletions(-) create mode 100644 obo-core/Cargo.toml rename {src => obo-core/src}/actions.rs (89%) rename {src => obo-core/src}/artifacts.rs (100%) rename {src => obo-core/src}/binaries.rs (97%) rename {src => obo-core/src}/build_meta.rs (99%) rename {src => obo-core/src}/dsc.rs (100%) create mode 100644 obo-core/src/lib.rs create mode 100644 obo-core/src/logging.rs rename {src => obo-core/src}/monitor.rs (99%) rename {src => obo-core/src}/prune.rs (99%) rename {src => obo-core/src}/retry.rs (99%) rename {src => obo-core/src}/upload.rs (99%) create mode 100644 obo-test-support/Cargo.toml rename src/test_support.rs => obo-test-support/src/lib.rs (100%) create mode 100644 obs-gitlab-runner/Cargo.toml rename {chart => obs-gitlab-runner/chart}/.helmignore (100%) rename {chart => obs-gitlab-runner/chart}/Chart.yaml (100%) rename {chart => obs-gitlab-runner/chart}/templates/_helpers.tpl (100%) rename {chart => obs-gitlab-runner/chart}/templates/deployment.yaml (100%) rename {chart => obs-gitlab-runner/chart}/templates/gitlab-token-secret.yaml (100%) rename {chart => obs-gitlab-runner/chart}/templates/hpa.yaml (100%) rename {chart => obs-gitlab-runner/chart}/values.yaml (100%) rename {src => obs-gitlab-runner/src}/handler.rs (99%) rename {src => obs-gitlab-runner/src}/logging.rs (77%) rename {src => obs-gitlab-runner/src}/main.rs (96%) rename {src => obs-gitlab-runner/src}/pipeline.rs (68%) diff --git a/Cargo.lock b/Cargo.lock index 6971eb0..97810fc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1494,8 +1494,8 @@ dependencies = [ ] [[package]] -name = "obs-gitlab-runner" -version = "0.1.8" +name = "obo-core" +version = "0.1.0" dependencies = [ "async-trait", "base16ct", @@ -1506,9 +1506,8 @@ dependencies = [ "color-eyre", "derivative", "futures-util", - "gitlab-runner", - "gitlab-runner-mock", "md-5", + "obo-test-support", "open-build-service-api", "open-build-service-mock", "reqwest", @@ -1517,12 +1516,49 @@ dependencies = [ "serde", "serde_yaml", "shell-words", 
+ "tempfile", + "thiserror 2.0.16", + "tokio", + "tokio-retry2 0.6.0", + "tokio-util", + "tracing", + "wiremock", +] + +[[package]] +name = "obo-test-support" +version = "0.1.0" +dependencies = [ + "open-build-service-api", + "open-build-service-mock", +] + +[[package]] +name = "obs-gitlab-runner" +version = "0.1.8" +dependencies = [ + "async-trait", + "camino", + "claims", + "clap", + "color-eyre", + "derivative", + "futures-util", + "gitlab-runner", + "gitlab-runner-mock", + "obo-core", + "obo-test-support", + "open-build-service-api", + "open-build-service-mock", + "rstest", + "serde", + "serde_yaml", + "shell-words", "shellexpand", "strum", "tempfile", "thiserror 2.0.16", "tokio", - "tokio-retry2 0.6.0", "tokio-util", "tracing", "tracing-error", diff --git a/Cargo.toml b/Cargo.toml index 5a66c3e..6f8e8bd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,50 +1,30 @@ -[package] -name = "obs-gitlab-runner" -version = "0.1.8" -edition = "2024" -license = "MIT OR Apache-2.0" +[workspace] +resolver = "3" +members = [ + "obo-core", + "obo-test-support", + "obs-gitlab-runner" +] -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] +[workspace.dependencies] async-trait = "0.1" -base16ct = { version = "0.2", features = ["std"] } -bytes = "1.10" camino = "1.1" +claims = "0.8" clap = { version = "4.5", features = ["default", "derive", "env"] } color-eyre = "0.6" derivative = "2.2" futures-util = "0.3" -md-5 = "0.10" -reqwest = "0.12" -rfc822-like = "0.2" +open-build-service-api = { git = "https://github.com/collabora/open-build-service-rs" } +# open-build-service-api = { path = "../open-build-service-rs/open-build-service-api" } +open-build-service-mock = { git = "https://github.com/collabora/open-build-service-rs" } +# open-build-service-mock = { path = "../open-build-service-rs/open-build-service-api" } +rstest = "0.26" serde = "1.0" serde_yaml = "0.9" -shellexpand = "3.1" shell-words = "1.1" -strum = { version = "0.27", features = ["derive"] } tempfile = "3.20" +thiserror = "2.0" tokio = { version = "1.45", features = ["full"] } -tokio-retry2 = { version = "0.6.0", features = ["jitter"] } tokio-util = { version = "0.7", features = ["full"] } tracing = "0.1" -tracing-error = "0.2" -tracing-subscriber = { version = "0.3", features = ["default", "json"] } -url = "2.5" - -gitlab-runner = "0.3.0-rc1" -# gitlab-runner = { path = "../gitlab-runner-rs/gitlab-runner" } -open-build-service-api = { git = "https://github.com/collabora/open-build-service-rs" } -thiserror = "2.0.12" -# open-build-service-api = { path = "../open-build-service-rs/open-build-service-api" } - -[dev-dependencies] -claims = "0.8" -rstest = "0.26" wiremock = "0.6" -zip = "5.1" - -gitlab-runner-mock = "0.2.1" -# gitlab-runner-mock = { path = "../gitlab-runner-rs/gitlab-runner-mock" } -open-build-service-mock = { git = "https://github.com/collabora/open-build-service-rs" } -# open-build-service-mock = { path = "../open-build-service-rs/open-build-service-mock" } diff --git a/obo-core/Cargo.toml b/obo-core/Cargo.toml new file mode 100644 index 0000000..146640a --- /dev/null +++ b/obo-core/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "obo-core" +description = "OBS Build Orchestrator — core" +version = "0.1.0" +edition = "2024" +license = "MIT OR Apache-2.0" + +[dependencies] +async-trait.workspace = true +base16ct = { version = "0.2", features = ["std"] } +bytes = "1.10" +camino.workspace = true +clap.workspace = true +color-eyre.workspace = true +derivative.workspace = true 
+futures-util.workspace = true
+md-5 = "0.10"
+obo-test-support = { path = "../obo-test-support" }
+open-build-service-api.workspace = true
+reqwest = "0.12"
+rfc822-like = "0.2"
+serde.workspace = true
+serde_yaml.workspace = true
+shell-words.workspace = true
+tempfile.workspace = true
+thiserror.workspace = true
+tokio.workspace = true
+tokio-retry2 = { version = "0.6.0", features = ["jitter"] }
+tokio-util.workspace = true
+tracing.workspace = true
+
+[dev-dependencies]
+claims.workspace = true
+rstest.workspace = true
+open-build-service-mock.workspace = true
+wiremock.workspace = true
+
diff --git a/src/actions.rs b/obo-core/src/actions.rs
similarity index 89%
rename from src/actions.rs
rename to obo-core/src/actions.rs
index 140f8e0..1dfee2c 100644
--- a/src/actions.rs
+++ b/obo-core/src/actions.rs
@@ -42,6 +42,27 @@ impl FlagSupportingExplicitValue for clap::Arg {
     }
 }
 
+#[derive(Debug)]
+struct CommandBuilder {
+    args: Vec<String>,
+}
+
+impl CommandBuilder {
+    fn new(name: String) -> Self {
+        Self { args: vec![name] }
+    }
+
+    fn add(&mut self, arg: &str, value: &str) -> &mut Self {
+        self.args
+            .push(format!("--{arg}={}", shell_words::quote(value)));
+        self
+    }
+
+    fn build(self) -> String {
+        self.args.join(" ")
+    }
+}
+
 #[derive(Parser, Debug)]
 pub struct DputAction {
     pub project: String,
@@ -74,6 +95,24 @@ pub struct MonitorAction {
     pub build_log_out: String,
 }
 
+impl MonitorAction {
+    pub fn generate_command(&self) -> String {
+        let mut builder = CommandBuilder::new("monitor".to_owned());
+        builder
+            .add("project", &self.project)
+            .add("package", &self.package)
+            .add("rev", &self.rev)
+            .add("srcmd5", &self.srcmd5)
+            .add("repository", &self.repository)
+            .add("arch", &self.arch)
+            .add("build-log-out", &self.build_log_out);
+        if let Some(endtime) = &self.prev_endtime_for_commit {
+            builder.add("prev-endtime-for-commit", &endtime.to_string());
+        }
+        builder.build()
+    }
+}
+
 #[derive(Parser, Debug)]
 pub struct DownloadBinariesAction {
     #[clap(long)]
@@ -88,6 +127,19 @@ pub struct DownloadBinariesAction {
     pub build_results_dir: Utf8PathBuf,
 }
 
+impl DownloadBinariesAction {
+    pub fn generate_command(&self) -> String {
+        let mut builder = CommandBuilder::new("download-binaries".to_owned());
+        builder
+            .add("project", &self.project)
+            .add("package", &self.package)
+            .add("repository", &self.repository)
+            .add("arch", &self.arch)
+            .add("build-results-dir", self.build_results_dir.as_str());
+        builder.build()
+    }
+}
+
 #[derive(Parser, Debug)]
 pub struct PruneAction {
     #[clap(long, default_value_t = DEFAULT_BUILD_INFO.to_owned())]
diff --git a/src/artifacts.rs b/obo-core/src/artifacts.rs
similarity index 100%
rename from src/artifacts.rs
rename to obo-core/src/artifacts.rs
diff --git a/src/binaries.rs b/obo-core/src/binaries.rs
similarity index 97%
rename from src/binaries.rs
rename to obo-core/src/binaries.rs
index a010537..86b0cb5 100644
--- a/src/binaries.rs
+++ b/obo-core/src/binaries.rs
@@ -80,9 +80,10 @@ mod tests {
     use std::time::SystemTime;
 
     use claims::*;
+    use obo_test_support::*;
     use open_build_service_mock::*;
 
-    use crate::{artifacts::test_support::MockArtifactDirectory, test_support::*};
+    use crate::artifacts::test_support::MockArtifactDirectory;
 
     use super::*;
 
diff --git a/src/build_meta.rs b/obo-core/src/build_meta.rs
similarity index 99%
rename from src/build_meta.rs
rename to obo-core/src/build_meta.rs
index 3588110..c2330b6 100644
--- a/src/build_meta.rs
+++ b/obo-core/src/build_meta.rs
@@ -265,11 +265,10 @@ mod tests {
     use std::time::{Duration, SystemTime};
 
     use claims::*;
+    use obo_test_support::*;
     use open_build_service_mock::*;
     use rstest::rstest;
 
-    use crate::test_support::*;
-
     use super::*;
 
     #[tokio::test]
diff --git a/src/dsc.rs b/obo-core/src/dsc.rs
similarity index 100%
rename from src/dsc.rs
rename to obo-core/src/dsc.rs
diff --git a/obo-core/src/lib.rs b/obo-core/src/lib.rs
new file mode 100644
index 0000000..9e5a517
--- /dev/null
+++ b/obo-core/src/lib.rs
@@ -0,0 +1,10 @@
+pub mod actions;
+pub mod artifacts;
+pub mod binaries;
+pub mod build_meta;
+pub mod dsc;
+pub mod logging;
+pub mod monitor;
+pub mod prune;
+pub mod retry;
+pub mod upload;
diff --git a/obo-core/src/logging.rs b/obo-core/src/logging.rs
new file mode 100644
index 0000000..ae94cfc
--- /dev/null
+++ b/obo-core/src/logging.rs
@@ -0,0 +1,57 @@
+use tracing::{
+    Event, Metadata,
+    field::{self, Field},
+};
+
+pub const TRACING_FIELD: &str = "obo_core.output";
+
+#[macro_export]
+macro_rules! outputln {
+    ($($args:tt)*) => {
+        ::tracing::trace!(obo_core.output = true, $($args)*)
+    };
+}
+
+struct OutputTester(bool);
+
+impl field::Visit for OutputTester {
+    fn record_bool(&mut self, field: &Field, value: bool) {
+        if field.name() == TRACING_FIELD {
+            self.0 = value;
+        }
+    }
+
+    fn record_debug(&mut self, _field: &Field, _value: &dyn std::fmt::Debug) {}
+}
+
+pub fn is_output_field_set_in_event(event: &Event<'_>) -> bool {
+    let mut visitor = OutputTester(false);
+    event.record(&mut visitor);
+    visitor.0
+}
+
+pub fn is_output_field_in_metadata(metadata: &Metadata<'_>) -> bool {
+    metadata.fields().iter().any(|f| f.name() == TRACING_FIELD)
+}
+
+struct MessageExtractor(Option<String>);
+
+impl field::Visit for MessageExtractor {
+    fn record_str(&mut self, field: &Field, value: &str) {
+        if field.name() == "message" {
+            self.0 = Some(value.to_owned());
+        }
+    }
+
+    fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
+        if field.name() == "message" {
+            self.0 = Some(format!("{value:?}"));
+        }
+    }
+}
+
+pub fn get_event_message(event: &Event) -> Option<String> {
+    let mut visitor = MessageExtractor(None);
+    event.record(&mut visitor);
+    visitor.0
+}
diff --git a/src/monitor.rs b/obo-core/src/monitor.rs
similarity index 99%
rename from src/monitor.rs
rename to obo-core/src/monitor.rs
index 95c6c7c..ecf8b81 100644
--- a/src/monitor.rs
+++ b/obo-core/src/monitor.rs
@@ -287,10 +287,11 @@ mod tests {
    use std::{collections::HashMap, time::SystemTime};
 
    use claims::*;
+    use obo_test_support::*;
    use obs::PackageCode;
    use open_build_service_mock::*;
 
-    use crate::{artifacts::test_support::MockArtifactDirectory, test_support::*};
+    use crate::artifacts::test_support::MockArtifactDirectory;
 
    use super::*;
 
diff --git a/src/prune.rs b/obo-core/src/prune.rs
similarity index 99%
rename from src/prune.rs
rename to obo-core/src/prune.rs
index 088f871..9178328 100644
--- a/src/prune.rs
+++ b/obo-core/src/prune.rs
@@ -95,10 +95,9 @@ mod tests {
    use std::time::SystemTime;
 
    use claims::*;
+    use obo_test_support::*;
    use open_build_service_mock::*;
 
-    use crate::test_support::*;
-
    use super::*;
 
    #[tokio::test]
diff --git a/src/retry.rs b/obo-core/src/retry.rs
similarity index 99%
rename from src/retry.rs
rename to obo-core/src/retry.rs
index 857a6d8..6b90f39 100644
--- a/src/retry.rs
+++ b/obo-core/src/retry.rs
@@ -79,6 +79,7 @@ macro_rules!
retry_request { mod tests { use claims::*; use futures_util::Future; + use obo_test_support::*; use open_build_service_api as obs; use rstest::*; use wiremock::{ @@ -86,8 +87,6 @@ mod tests { matchers::{method, path_regex}, }; - use crate::test_support::*; - use super::*; #[fixture] diff --git a/src/upload.rs b/obo-core/src/upload.rs similarity index 99% rename from src/upload.rs rename to obo-core/src/upload.rs index 844698b..a2dbd17 100644 --- a/src/upload.rs +++ b/obo-core/src/upload.rs @@ -328,9 +328,10 @@ mod tests { use std::time::SystemTime; use claims::*; + use obo_test_support::*; use open_build_service_mock::*; - use crate::{artifacts::test_support::MockArtifactDirectory, test_support::*}; + use crate::artifacts::test_support::MockArtifactDirectory; use super::*; diff --git a/obo-test-support/Cargo.toml b/obo-test-support/Cargo.toml new file mode 100644 index 0000000..e6c281c --- /dev/null +++ b/obo-test-support/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "obo-test-support" +version = "0.1.0" +edition = "2024" +license = "MIT OR Apache-2.0" + +[dependencies] +open-build-service-api.workspace = true +open-build-service-mock.workspace = true diff --git a/src/test_support.rs b/obo-test-support/src/lib.rs similarity index 100% rename from src/test_support.rs rename to obo-test-support/src/lib.rs diff --git a/obs-gitlab-runner/Cargo.toml b/obs-gitlab-runner/Cargo.toml new file mode 100644 index 0000000..4e0c2cb --- /dev/null +++ b/obs-gitlab-runner/Cargo.toml @@ -0,0 +1,42 @@ +[package] +name = "obs-gitlab-runner" +version = "0.1.8" +edition = "2024" +license = "MIT OR Apache-2.0" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +async-trait.workspace = true +camino.workspace = true +clap.workspace = true +color-eyre.workspace = true +derivative.workspace = true +futures-util.workspace = true +gitlab-runner = "0.3.0-rc1" +# gitlab-runner = { path = "../gitlab-runner-rs/gitlab-runner" } +obo-core = { path = "../obo-core" } +obo-test-support = { path = "../obo-test-support" } +open-build-service-api.workspace = true +serde.workspace = true +serde_yaml.workspace = true +shellexpand = "3.1" +shell-words.workspace = true +strum = { version = "0.27", features = ["derive"] } +tempfile.workspace = true +thiserror.workspace = true +tokio.workspace = true +tokio-util.workspace = true +tracing.workspace = true +tracing-error = "0.2" +tracing-subscriber = { version = "0.3", features = ["default", "json"] } +url = "2.5" + +[dev-dependencies] +claims.workspace = true +gitlab-runner-mock = "0.2.1" +# gitlab-runner-mock = { path = "../gitlab-runner-rs/gitlab-runner-mock" } +open-build-service-mock.workspace = true +rstest.workspace = true +wiremock.workspace = true +zip = "5.1" diff --git a/chart/.helmignore b/obs-gitlab-runner/chart/.helmignore similarity index 100% rename from chart/.helmignore rename to obs-gitlab-runner/chart/.helmignore diff --git a/chart/Chart.yaml b/obs-gitlab-runner/chart/Chart.yaml similarity index 100% rename from chart/Chart.yaml rename to obs-gitlab-runner/chart/Chart.yaml diff --git a/chart/templates/_helpers.tpl b/obs-gitlab-runner/chart/templates/_helpers.tpl similarity index 100% rename from chart/templates/_helpers.tpl rename to obs-gitlab-runner/chart/templates/_helpers.tpl diff --git a/chart/templates/deployment.yaml b/obs-gitlab-runner/chart/templates/deployment.yaml similarity index 100% rename from chart/templates/deployment.yaml rename to obs-gitlab-runner/chart/templates/deployment.yaml 
diff --git a/chart/templates/gitlab-token-secret.yaml b/obs-gitlab-runner/chart/templates/gitlab-token-secret.yaml similarity index 100% rename from chart/templates/gitlab-token-secret.yaml rename to obs-gitlab-runner/chart/templates/gitlab-token-secret.yaml diff --git a/chart/templates/hpa.yaml b/obs-gitlab-runner/chart/templates/hpa.yaml similarity index 100% rename from chart/templates/hpa.yaml rename to obs-gitlab-runner/chart/templates/hpa.yaml diff --git a/chart/values.yaml b/obs-gitlab-runner/chart/values.yaml similarity index 100% rename from chart/values.yaml rename to obs-gitlab-runner/chart/values.yaml diff --git a/src/handler.rs b/obs-gitlab-runner/src/handler.rs similarity index 99% rename from src/handler.rs rename to obs-gitlab-runner/src/handler.rs index 6b761e6..8b7751e 100644 --- a/src/handler.rs +++ b/obs-gitlab-runner/src/handler.rs @@ -14,15 +14,7 @@ use gitlab_runner::{ JobHandler, JobResult, Phase, UploadableFile, job::{Dependency, Job, Variable}, }; -use open_build_service_api as obs; -use tokio::fs::File as AsyncFile; -use tokio_util::{ - compat::{Compat, TokioAsyncReadCompatExt}, - io::ReaderStream, -}; -use tracing::{error, instrument, warn}; - -use crate::{ +use obo_core::{ actions::{ Actions, DEFAULT_BUILD_INFO, DEFAULT_BUILD_LOG, DownloadBinariesAction, DputAction, FailedBuild, FlagSupportingExplicitValue, LOG_TAIL_2MB, MonitorAction, ObsBuildInfo, @@ -31,7 +23,17 @@ use crate::{ artifacts::{ArtifactDirectory, ArtifactReader, ArtifactWriter, MissingArtifact, SaveCallback}, monitor::PackageMonitoringOptions, outputln, - pipeline::{GeneratePipelineOptions, PipelineDownloadBinaries, generate_monitor_pipeline}, +}; +use open_build_service_api as obs; +use tokio::fs::File as AsyncFile; +use tokio_util::{ + compat::{Compat, TokioAsyncReadCompatExt}, + io::ReaderStream, +}; +use tracing::{error, instrument, warn}; + +use crate::pipeline::{ + GeneratePipelineOptions, PipelineDownloadBinaries, generate_monitor_pipeline, }; const DEFAULT_MONITOR_PIPELINE: &str = "obs.yml"; @@ -447,6 +449,8 @@ mod tests { use claims::*; use gitlab_runner::{GitlabLayer, Runner, RunnerBuilder}; use gitlab_runner_mock::*; + use obo_core::{build_meta::RepoArch, upload::compute_md5}; + use obo_test_support::*; use open_build_service_mock::*; use rstest::rstest; use tempfile::TempDir; @@ -454,9 +458,7 @@ mod tests { use tracing_subscriber::{Layer, Registry, filter::Targets, prelude::*}; use zip::ZipArchive; - use crate::{ - build_meta::RepoArch, logging::GitLabForwarder, test_support::*, upload::compute_md5, - }; + use crate::logging::GitLabForwarder; use super::*; @@ -1567,7 +1569,7 @@ mod tests { )] test: Option, ) { - use crate::build_meta::CommitBuildInfo; + use obo_core::build_meta::CommitBuildInfo; const TEST_MONITOR_TIMEOUT: &str = "10 minutes"; diff --git a/src/logging.rs b/obs-gitlab-runner/src/logging.rs similarity index 77% rename from src/logging.rs rename to obs-gitlab-runner/src/logging.rs index 138c9ab..41e11b9 100644 --- a/src/logging.rs +++ b/obs-gitlab-runner/src/logging.rs @@ -1,7 +1,8 @@ use gitlab_runner::GitlabLayer; +use obo_core::logging::{get_event_message, is_output_field_set_in_event}; use tracing::{ Event, Level, Metadata, Subscriber, - field::{self, Field, FieldSet}, + field::FieldSet, span::{Attributes, Id}, subscriber::Interest, }; @@ -12,37 +13,9 @@ use tracing_subscriber::{ registry::LookupSpan, }; -struct OutputTester(bool); - -impl field::Visit for OutputTester { - fn record_bool(&mut self, field: &field::Field, value: bool) { - if field.name() == 
"obs_gitlab_runner.output" { - self.0 = value - } - } - - fn record_debug(&mut self, _field: &field::Field, _value: &dyn std::fmt::Debug) {} -} - -struct MessageExtractor(Option); - -impl field::Visit for MessageExtractor { - fn record_str(&mut self, field: &Field, value: &str) { - if field.name() == "message" { - self.0 = Some(value.to_owned()); - } - } - - fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) { - if field.name() == "message" { - self.0 = Some(format!("{value:?}")); - } - } -} - // This mostly wraps a standard GitlabLayer, but it bypasses the filter to pass -// through any events with `obs_gitlab_runner.output` set, rewriting them to -// instead use `gitlab.output`. +// through any events with TRACING_FIELD set set, rewriting them to instead use +// `gitlab.output`. pub struct GitLabForwarder>(Filtered); impl LookupSpan<'span>, F: Filter + 'static> @@ -50,6 +23,7 @@ impl LookupSpan<'span>, F: Fi { pub fn new(inner: Filtered) -> Filtered { GitLabForwarder(inner).with_filter(Targets::new().with_targets([ + ("obo_core", Level::TRACE), ("obs_gitlab_runner", Level::TRACE), // This target is used to inject the current job ID, which // gitlab-runner needs to actually send the logs out. @@ -99,17 +73,13 @@ impl LookupSpan<'span>, F: Fi } fn on_event(&self, event: &Event<'_>, ctx: Context<'_, S>) { - let mut visitor = OutputTester(false); - event.record(&mut visitor); - if !visitor.0 { + if !is_output_field_set_in_event(event) { // No special behavior needed, so just forward it as-is. self.0.on_event(event, ctx); return; } - let mut visitor = MessageExtractor(None); - event.record(&mut visitor); - let Some(message) = visitor.0 else { + let Some(message) = get_event_message(event) else { return; }; @@ -157,10 +127,3 @@ impl LookupSpan<'span>, F: Fi self.0.on_id_change(old, new, ctx); } } - -#[macro_export] -macro_rules! 
outputln { - ($($args:tt)*) => { - ::tracing::trace!(obs_gitlab_runner.output = true, $($args)*) - }; -} diff --git a/src/main.rs b/obs-gitlab-runner/src/main.rs similarity index 96% rename from src/main.rs rename to obs-gitlab-runner/src/main.rs index 5e03b55..60a79db 100644 --- a/src/main.rs +++ b/obs-gitlab-runner/src/main.rs @@ -18,21 +18,9 @@ use url::Url; use crate::handler::{HandlerOptions, ObsJobHandler}; -mod actions; -mod artifacts; -mod binaries; -mod build_meta; -mod dsc; mod handler; mod logging; -mod monitor; mod pipeline; -mod prune; -mod retry; -mod upload; - -#[cfg(test)] -mod test_support; #[derive(Debug, Clone)] struct TargetsArg { diff --git a/src/pipeline.rs b/obs-gitlab-runner/src/pipeline.rs similarity index 68% rename from src/pipeline.rs rename to obs-gitlab-runner/src/pipeline.rs index 6936603..375bb19 100644 --- a/src/pipeline.rs +++ b/obs-gitlab-runner/src/pipeline.rs @@ -1,11 +1,13 @@ use std::collections::HashMap; use color_eyre::eyre::{Context, Result}; +use obo_core::{ + actions::{DownloadBinariesAction, MonitorAction}, + build_meta::{CommitBuildInfo, RepoArch}, +}; use serde::Serialize; use tracing::instrument; -use crate::build_meta::{CommitBuildInfo, RepoArch}; - #[derive(Clone, Debug, PartialEq, Eq)] pub enum PipelineDownloadBinaries { OnSuccess { build_results_dir: String }, @@ -43,16 +45,6 @@ struct JobSpec { rules: Option, } -fn generate_command(command_name: String, args: &[(&str, String)]) -> String { - let mut command = vec![command_name]; - - for (arg, value) in args { - command.extend_from_slice(&[format!("--{arg}"), shell_words::quote(value).into_owned()]); - } - - command.join(" ") -} - #[instrument] pub fn generate_monitor_pipeline( project: &str, @@ -74,34 +66,34 @@ pub fn generate_monitor_pipeline( let mut script = vec![]; let mut artifact_paths = vec![]; - let common_args = vec![ - ("project", project.to_owned()), - ("package", package.to_owned()), - ("repository", repo.to_owned()), - ("arch", arch.to_owned()), - ]; - - let mut monitor_args = vec![ - ("rev", rev.to_owned()), - ("srcmd5", srcmd5.to_owned()), - ("build-log-out", options.build_log_out.clone()), - ]; - if let Some(endtime) = &info.prev_endtime_for_commit { - monitor_args.push(("prev-endtime-for-commit", endtime.to_string())); - } - monitor_args.extend_from_slice(&common_args); - script.push(generate_command("monitor".to_owned(), &monitor_args)); + script.push( + MonitorAction { + project: project.to_owned(), + package: package.to_owned(), + repository: repo.to_owned(), + arch: arch.to_owned(), + rev: rev.to_owned(), + srcmd5: srcmd5.to_owned(), + build_log_out: options.build_log_out.clone(), + prev_endtime_for_commit: info.prev_endtime_for_commit, + } + .generate_command(), + ); artifact_paths.push(options.build_log_out.clone()); if let PipelineDownloadBinaries::OnSuccess { build_results_dir } = &options.download_binaries { - let mut download_args = vec![("build-results-dir", build_results_dir.clone())]; - download_args.extend_from_slice(&common_args); - script.push(generate_command( - "download-binaries".to_owned(), - &download_args, - )); + script.push( + DownloadBinariesAction { + project: project.to_owned(), + package: package.to_owned(), + repository: repo.to_owned(), + arch: arch.to_owned(), + build_results_dir: build_results_dir.into(), + } + .generate_command(), + ); artifact_paths.push(build_results_dir.clone()); } From 9d78d16c6bb0c8f5f404e20b8dfd84bfe54d297f Mon Sep 17 00:00:00 2001 From: Ryan Gonzalez Date: Fri, 25 Jul 2025 17:13:38 -0500 Subject: [PATCH 2/3] 
Refactor integration test core into a new 'obo-tests' crate

Most of this logic is rather intricate and shareable between multiple
implementations, so it makes sense for it to live in a separate crate.

This necessitates quite a bit of refactoring of the test structure so that the
shared test code can actually run commands and inspect the logs + artifacts.
Since there are various somewhat strange requirements around command execution
(e.g. needing to use timeouts but still being able to get the result), that API
was redone around a builder trait, which also makes it easy for
implementation-specific tests to have their own methods on it.
---
 Cargo.lock                       |   16 +
 Cargo.toml                       |    1 +
 obo-tests/Cargo.toml             |   17 +
 obo-tests/src/lib.rs             |  598 +++++++++++++++
 obs-gitlab-runner/Cargo.toml     |    1 +
 obs-gitlab-runner/src/handler.rs | 1229 ++++++++++--------------------
 6 files changed, 1018 insertions(+), 844 deletions(-)
 create mode 100644 obo-tests/Cargo.toml
 create mode 100644 obo-tests/src/lib.rs

diff --git a/Cargo.lock b/Cargo.lock
index 97810fc..04f3085 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1533,6 +1533,21 @@ dependencies = [
  "open-build-service-mock",
 ]
 
+[[package]]
+name = "obo-tests"
+version = "0.1.0"
+dependencies = [
+ "async-trait",
+ "camino",
+ "claims",
+ "obo-core",
+ "obo-test-support",
+ "open-build-service-api",
+ "open-build-service-mock",
+ "serde_yaml",
+ "tokio",
+]
+
 [[package]]
 name = "obs-gitlab-runner"
 version = "0.1.8"
@@ -1548,6 +1563,7 @@ dependencies = [
  "gitlab-runner-mock",
  "obo-core",
  "obo-test-support",
+ "obo-tests",
  "open-build-service-api",
  "open-build-service-mock",
  "rstest",
diff --git a/Cargo.toml b/Cargo.toml
index 6f8e8bd..7116534 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -2,6 +2,7 @@
 resolver = "3"
 members = [
     "obo-core",
+    "obo-tests",
     "obo-test-support",
     "obs-gitlab-runner"
 ]
diff --git a/obo-tests/Cargo.toml b/obo-tests/Cargo.toml
new file mode 100644
index 0000000..4b938f7
--- /dev/null
+++ b/obo-tests/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "obo-tests"
+description = "OBS Build Orchestrator — shared tests for different frontends"
+version = "0.1.0"
+edition = "2024"
+license = "MIT OR Apache-2.0"
+
+[dependencies]
+async-trait.workspace = true
+camino.workspace = true
+claims.workspace = true
+obo-core = { path = "../obo-core" }
+obo-test-support = { path = "../obo-test-support" }
+open-build-service-api.workspace = true
+open-build-service-mock.workspace = true
+serde_yaml.workspace = true
+tokio.workspace = true
diff --git a/obo-tests/src/lib.rs b/obo-tests/src/lib.rs
new file mode 100644
index 0000000..0dfe12c
--- /dev/null
+++ b/obo-tests/src/lib.rs
@@ -0,0 +1,598 @@
+use std::{
+    cmp::Ordering,
+    collections::HashMap,
+    fmt,
+    time::{Duration, SystemTime},
+};
+
+use async_trait::async_trait;
+use camino::Utf8Path;
+use claims::*;
+use obo_core::{
+    actions::{DEFAULT_BUILD_INFO, DEFAULT_BUILD_LOG, ObsBuildInfo},
+    build_meta::RepoArch,
+    upload::compute_md5,
+};
+use obo_test_support::*;
+use open_build_service_api as obs;
+use open_build_service_mock::*;
+
+#[derive(Clone)]
+pub struct ObsContext {
+    pub client: obs::Client,
+    pub mock: ObsMock,
+}
+
+// A handle to some set of artifacts, either artificially "injected" (i.e.
+// custom artifacts manually set up to be read by some command) or the outputs
+// of executing a command.
+pub trait ArtifactsHandle: Send + Sync + Clone + fmt::Debug {}
+
+// The result of a command, letting you easily determine if it succeeded and
+// access the output logs / artifacts.
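+// (See `GitLabExecutionResult` in obs-gitlab-runner's handler tests for a
+// concrete implementation, which simply wraps the mock GitLab job.)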
+pub trait ExecutionResult: Send + Sync + Clone + fmt::Debug {
+    type Artifacts: ArtifactsHandle;
+
+    fn ok(&self) -> bool;
+    fn log(&self) -> String;
+    fn artifacts(&self) -> Self::Artifacts;
+}
+
+pub const EXECUTION_DEFAULT_TIMEOUT: Duration = Duration::from_secs(30);
+
+// Builds a set of commands to run, along with the ability to determine its
+// input artifacts and timeout.
+#[async_trait]
+pub trait RunBuilder<'context>: Send + Sync + Sized {
+    type ArtifactsHandle: ArtifactsHandle;
+    type ExecutionResult: ExecutionResult;
+
+    fn command(self, cmd: impl Into<String>) -> Self {
+        self.script(&[cmd.into()])
+    }
+
+    fn script(self, cmd: &[String]) -> Self;
+    fn artifacts(self, artifacts: Self::ArtifactsHandle) -> Self;
+    fn timeout(self, timeout: Duration) -> Self;
+    async fn go(self) -> Self::ExecutionResult;
+}
+
+// A generic "context", implemented by specific obo frontends, that allows the
+// tests to manipulate artifacts and run commands in a generalized way.
+#[async_trait]
+pub trait TestContext: Send + Sync {
+    type ArtifactsHandle: ArtifactsHandle;
+    type ExecutionResult: ExecutionResult;
+    type RunBuilder<'context>: RunBuilder<
+            'context,
+            ArtifactsHandle = Self::ArtifactsHandle,
+            ExecutionResult = Self::ExecutionResult,
+        > + 'context
+    where
+        Self: 'context;
+
+    fn obs(&self) -> &ObsContext;
+
+    async fn inject_artifacts(
+        &mut self,
+        artifacts: HashMap<String, Vec<u8>>,
+    ) -> Self::ArtifactsHandle;
+
+    async fn fetch_artifacts(&self, handle: &Self::ArtifactsHandle) -> HashMap<String, Vec<u8>>;
+
+    fn run(&mut self) -> Self::RunBuilder<'_>;
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum DputTest {
+    Basic,
+    Rebuild,
+    ReusePreviousBuild,
+    Branch,
+}
+
+pub async fn test_dput<C: TestContext>(
+    context: &mut C,
+    test: DputTest,
+) -> (C::ArtifactsHandle, ObsBuildInfo) {
+    let test1_file = "test1";
+    let test1_contents = b"123";
+    let test1_md5 = compute_md5(test1_contents);
+
+    let dsc1_file = "test1.dsc";
+    let dsc1_contents = format!(
+        "Source: {}\nFiles:\n {} {} {}",
+        TEST_PACKAGE_1,
+        test1_md5.clone(),
+        test1_contents.len(),
+        test1_file
+    );
+    let dsc1_md5 = compute_md5(dsc1_contents.as_bytes());
+
+    let dsc1_bad_file = "test1-bad.dsc";
+    let dsc1_bad_contents =
+        dsc1_contents.replace(test1_file, &(test1_file.to_owned() + ".missing"));
+
+    context.obs().mock.add_project(TEST_PROJECT.to_owned());
+
+    context.obs().mock.add_or_update_repository(
+        TEST_PROJECT,
+        TEST_REPO.to_owned(),
+        TEST_ARCH_1.to_owned(),
+        MockRepositoryCode::Finished,
+    );
+    context.obs().mock.add_or_update_repository(
+        TEST_PROJECT,
+        TEST_REPO.to_owned(),
+        TEST_ARCH_2.to_owned(),
+        MockRepositoryCode::Finished,
+    );
+
+    if test == DputTest::Rebuild {
+        // We also test excluded repos on rebuilds; this test makes it easier,
+        // because it's not testing creating a new package, so we can create it
+        // ourselves first with the desired metadata.
+ context.obs().mock.add_new_package( + TEST_PROJECT, + TEST_PACKAGE_1.to_owned(), + MockPackageOptions::default(), + ); + context.obs().mock.set_package_build_status( + TEST_PROJECT, + TEST_REPO, + TEST_ARCH_2, + TEST_PACKAGE_1.to_owned(), + MockBuildStatus::new(MockPackageCode::Disabled), + ); + } + + let artifacts = context + .inject_artifacts( + [ + (dsc1_file.to_owned(), dsc1_contents.as_bytes().to_vec()), + ( + dsc1_bad_file.to_owned(), + dsc1_bad_contents.as_bytes().to_vec(), + ), + (test1_file.to_owned(), test1_contents.to_vec()), + ] + .into(), + ) + .await; + + let mut dput_command = format!("dput {TEST_PROJECT} {dsc1_file}"); + let mut created_project = TEST_PROJECT.to_owned(); + + if test == DputTest::Branch { + created_project += ":branched"; + dput_command += &format!(" --branch-to {created_project}"); + } + + let dput = context + .run() + .command(dput_command.replace(dsc1_file, dsc1_bad_file)) + .artifacts(artifacts.clone()) + .go() + .await; + + assert!(!dput.ok()); + + let results = context.fetch_artifacts(&dput.artifacts()).await; + let build_info: ObsBuildInfo = + serde_yaml::from_slice(results.get(DEFAULT_BUILD_INFO).unwrap()).unwrap(); + + assert_eq!(build_info.project, created_project); + assert_eq!(build_info.package, TEST_PACKAGE_1); + assert_none!(build_info.rev); + assert_eq!(build_info.is_branched, test == DputTest::Branch); + + let mut dput = context + .run() + .command(&dput_command) + .artifacts(artifacts.clone()) + .go() + .await; + assert!(dput.ok()); + + if test == DputTest::Rebuild || test == DputTest::ReusePreviousBuild { + context.obs().mock.add_or_update_repository( + &created_project, + TEST_REPO.to_owned(), + TEST_ARCH_1.to_owned(), + MockRepositoryCode::Building, + ); + // Also test endtimes, since we now have an existing package to modify + // the metadata of. + let dir = assert_ok!( + context + .obs() + .client + .project(TEST_PROJECT.to_owned()) + .package(TEST_PACKAGE_1.to_owned()) + .list(None) + .await + ); + // Testing of reused builds never had the second arch disabled, so also + // add that build history. 
+ if test == DputTest::ReusePreviousBuild { + context.obs().mock.add_job_history( + TEST_PROJECT, + TEST_REPO, + TEST_ARCH_2, + MockJobHistoryEntry { + package: TEST_PACKAGE_1.to_owned(), + rev: dir.rev.clone().unwrap(), + srcmd5: dir.srcmd5.clone(), + ..Default::default() + }, + ); + } + context.obs().mock.add_job_history( + TEST_PROJECT, + TEST_REPO, + TEST_ARCH_1, + MockJobHistoryEntry { + package: TEST_PACKAGE_1.to_owned(), + rev: dir.rev.unwrap(), + srcmd5: dir.srcmd5, + ..Default::default() + }, + ); + + context.obs().mock.set_package_build_status_for_rebuilds( + &created_project, + MockBuildStatus::new(MockPackageCode::Broken), + ); + context.obs().mock.set_package_build_status( + &created_project, + TEST_REPO, + TEST_ARCH_1, + TEST_PACKAGE_1.to_owned(), + MockBuildStatus::new(MockPackageCode::Failed), + ); + + let status = assert_ok!( + context + .obs() + .client + .project(created_project.clone()) + .package(TEST_PACKAGE_1.to_owned()) + .status(TEST_REPO, TEST_ARCH_1) + .await + ); + assert_eq!(status.code, obs::PackageCode::Failed); + + dput = context + .run() + .command(&dput_command) + .artifacts(artifacts.clone()) + .go() + .await; + assert!(dput.ok()); + + assert!(dput.log().contains("unchanged")); + + let status = assert_ok!( + context + .obs() + .client + .project(created_project.clone()) + .package(TEST_PACKAGE_1.to_owned()) + .status(TEST_REPO, TEST_ARCH_1) + .await + ); + assert_eq!(status.code, obs::PackageCode::Failed); + + if test == DputTest::Rebuild { + dput = context + .run() + .command(format!("{dput_command} --rebuild-if-unchanged")) + .artifacts(artifacts.clone()) + .go() + .await; + assert!(dput.ok()); + + let status = assert_ok!( + context + .obs() + .client + .project(created_project.clone()) + .package(TEST_PACKAGE_1.to_owned()) + .status(TEST_REPO, TEST_ARCH_1) + .await + ); + assert_eq!(status.code, obs::PackageCode::Broken); + + assert!(dput.log().contains("unchanged")); + } + } + + let results = context.fetch_artifacts(&dput.artifacts()).await; + let build_info: ObsBuildInfo = + serde_yaml::from_slice(results.get(DEFAULT_BUILD_INFO).unwrap()).unwrap(); + + assert_eq!(build_info.project, created_project); + assert_eq!(build_info.package, TEST_PACKAGE_1); + assert_some!(build_info.rev.as_deref()); + assert_eq!(build_info.is_branched, test == DputTest::Branch); + + assert_eq!( + build_info.enabled_repos.len(), + if test == DputTest::Rebuild { 1 } else { 2 } + ); + + let arch_1 = build_info + .enabled_repos + .get(&RepoArch { + repo: TEST_REPO.to_owned(), + arch: TEST_ARCH_1.to_owned(), + }) + .unwrap(); + + if test == DputTest::Rebuild { + assert_some!(arch_1.prev_endtime_for_commit); + } else { + assert_none!(arch_1.prev_endtime_for_commit); + + let arch_2 = build_info + .enabled_repos + .get(&RepoArch { + repo: TEST_REPO.to_owned(), + arch: TEST_ARCH_2.to_owned(), + }) + .unwrap(); + assert_none!(arch_2.prev_endtime_for_commit); + } + + let mut dir = assert_ok!( + context + .obs() + .client + .project(created_project.clone()) + .package(TEST_PACKAGE_1.to_owned()) + .list(None) + .await + ); + + assert_eq!(dir.entries.len(), 3); + dir.entries.sort_by(|a, b| a.name.cmp(&b.name)); + assert_eq!(dir.entries[0].name, "_meta"); + assert_eq!(dir.entries[1].name, test1_file); + assert_eq!(dir.entries[1].size, test1_contents.len() as u64); + assert_eq!(dir.entries[1].md5, test1_md5); + assert_eq!(dir.entries[2].name, dsc1_file); + assert_eq!(dir.entries[2].size, dsc1_contents.len() as u64); + assert_eq!(dir.entries[2].md5, dsc1_md5); + + (dput.artifacts(), 
build_info)
+}
+
+pub const MONITOR_TEST_BUILD_RESULTS_DIR: &str = "results";
+pub const MONITOR_TEST_BUILD_RESULT: &str = "test-build-result";
+pub const MONITOR_TEST_BUILD_RESULT_CONTENTS: &[u8] = b"abcdef";
+pub const MONITOR_TEST_LOG_TAIL: u64 = 50;
+pub const MONITOR_TEST_OLD_STATUS_SLEEP_DURATION: Duration = Duration::from_millis(100);
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum MonitorLogTest {
+    // Test that long logs are truncated.
+    Long,
+    // Test that short logs are fully shown.
+    Short,
+    // Test that revision mismatches result in unavailable logs.
+    Unavailable,
+}
+
+#[allow(clippy::too_many_arguments)]
+pub async fn test_monitoring<C: TestContext>(
+    context: &mut C,
+    dput: C::ArtifactsHandle,
+    build_info: &ObsBuildInfo,
+    repo: &RepoArch,
+    script: &[String],
+    success: bool,
+    dput_test: DputTest,
+    log_test: MonitorLogTest,
+    download_binaries: bool,
+) {
+    let srcmd5_prefix = format!(
+        "srcmd5 '{}' ",
+        if log_test == MonitorLogTest::Unavailable {
+            ZERO_REV_SRCMD5.to_owned()
+        } else {
+            let dir = assert_ok!(
+                context
+                    .obs()
+                    .client
+                    .project(build_info.project.to_owned())
+                    .package(TEST_PACKAGE_1.to_owned())
+                    .list(None)
+                    .await
+            );
+
+            dir.srcmd5
+        }
+    );
+
+    let (log_contents, log_vs_limit) = if log_test == MonitorLogTest::Short {
+        (srcmd5_prefix + "short", Ordering::Less)
+    } else {
+        (
+            srcmd5_prefix + "this is a long log that will need to be trimmed when printed",
+            Ordering::Greater,
+        )
+    };
+
+    assert_eq!(
+        log_contents.len().cmp(&(MONITOR_TEST_LOG_TAIL as usize)),
+        log_vs_limit
+    );
+
+    // Sanity check this, even though test_dput should have already checked it.
+    assert_eq!(repo.repo, TEST_REPO);
+    assert!(
+        repo.arch == TEST_ARCH_1 || repo.arch == TEST_ARCH_2,
+        "unexpected arch '{}'",
+        repo.arch
+    );
+
+    context.obs().mock.set_package_build_status(
+        &build_info.project,
+        &repo.repo,
+        &repo.arch,
+        TEST_PACKAGE_1.to_owned(),
+        MockBuildStatus::new(if success {
+            MockPackageCode::Succeeded
+        } else {
+            MockPackageCode::Failed
+        }),
+    );
+    context.obs().mock.add_completed_build_log(
+        &build_info.project,
+        TEST_REPO,
+        &repo.arch,
+        TEST_PACKAGE_1.to_owned(),
+        MockBuildLog::new(log_contents.to_owned()),
+        success,
+    );
+    context.obs().mock.set_package_binaries(
+        &build_info.project,
+        TEST_REPO,
+        &repo.arch,
+        TEST_PACKAGE_1.to_owned(),
+        [(
+            MONITOR_TEST_BUILD_RESULT.to_owned(),
+            MockBinary {
+                contents: MONITOR_TEST_BUILD_RESULT_CONTENTS.to_vec(),
+                mtime: SystemTime::now(),
+            },
+        )]
+        .into(),
+    );
+
+    if dput_test != DputTest::ReusePreviousBuild {
+        // Update the endtime in the background, otherwise the monitor will hang
+        // forever waiting.
+        let mock = context.obs().mock.clone();
+        let build_info_2 = build_info.clone();
+        let repo_2 = repo.clone();
+        tokio::spawn(async move {
+            tokio::time::sleep(MONITOR_TEST_OLD_STATUS_SLEEP_DURATION * 10).await;
+            mock.add_job_history(
+                &build_info_2.project,
+                &repo_2.repo,
+                &repo_2.arch,
+                MockJobHistoryEntry {
+                    package: build_info_2.package,
+                    endtime: SystemTime::UNIX_EPOCH + Duration::from_secs(999),
+                    srcmd5: build_info_2.srcmd5.unwrap(),
+                    ..Default::default()
+                },
+            );
+        });
+    }
+
+    let monitor = context
+        .run()
+        .script(script)
+        .artifacts(dput.clone())
+        .timeout(MONITOR_TEST_OLD_STATUS_SLEEP_DURATION * 20)
+        .go()
+        .await;
+    assert_eq!(
+        monitor.ok(),
+        success && log_test != MonitorLogTest::Unavailable
+    );
+
+    let log = monitor.log();
+
+    assert_eq!(
+        log.contains("unavailable"),
+        log_test == MonitorLogTest::Unavailable
+    );
+
+    // If we reused a previous build, we're not waiting for a new build, so
+    // don't check for an old build status.
+    let build_actually_occurred = dput_test != DputTest::ReusePreviousBuild;
+    assert_eq!(
+        log.contains("Waiting for build status"),
+        build_actually_occurred
+    );
+
+    assert_eq!(
+        log.contains(&log_contents),
+        !success && log_test == MonitorLogTest::Short
+    );
+
+    if !success && log_test == MonitorLogTest::Long {
+        let log_bytes = log_contents.as_bytes();
+        let truncated_log_bytes = &log_bytes[log_bytes.len() - (MONITOR_TEST_LOG_TAIL as usize)..];
+        assert!(log.contains(String::from_utf8_lossy(truncated_log_bytes).as_ref()));
+    }
+
+    let results = context.fetch_artifacts(&monitor.artifacts()).await;
+    let build_result_path = Utf8Path::new(MONITOR_TEST_BUILD_RESULTS_DIR)
+        .join(MONITOR_TEST_BUILD_RESULT)
+        .into_string();
+    let mut has_built_result = false;
+
+    if log_test != MonitorLogTest::Unavailable {
+        let full_log = results.get(DEFAULT_BUILD_LOG).unwrap();
+        assert_eq!(log_contents, String::from_utf8_lossy(full_log));
+
+        if success && download_binaries {
+            let build_result = results.get(&build_result_path).unwrap();
+            assert_eq!(MONITOR_TEST_BUILD_RESULT_CONTENTS, &build_result[..]);
+
+            has_built_result = true;
+        }
+    }
+
+    if !has_built_result {
+        assert_none!(results.get(&build_result_path));
+    }
+}
+
+pub async fn test_prune_missing_build_info<C: TestContext>(context: &mut C) {
+    let prune = context.run().command("prune").go().await;
+    assert!(!prune.ok());
+
+    let prune = context
+        .run()
+        .command("prune --ignore-missing-build-info")
+        .go()
+        .await;
+    assert!(prune.ok());
+
+    assert!(prune.log().contains("Skipping prune"));
+}
+
+pub async fn test_prune_deleted_package_1_if_branched<C: TestContext>(
+    context: &C,
+    build_info: &ObsBuildInfo,
+    prune: &C::ExecutionResult,
+) {
+    if build_info.is_branched {
+        assert_err!(
+            context
+                .obs()
+                .client
+                .project(build_info.project.clone())
+                .package(TEST_PACKAGE_1.to_owned())
+                .list(None)
+                .await
+        );
+    } else {
+        assert!(prune.log().contains("package was not branched"));
+
+        assert_ok!(
+            context
+                .obs()
+                .client
+                .project(build_info.project.clone())
+                .package(TEST_PACKAGE_1.to_owned())
+                .list(None)
+                .await
+        );
+    }
+}
diff --git a/obs-gitlab-runner/Cargo.toml b/obs-gitlab-runner/Cargo.toml
index 4e0c2cb..ac14003 100644
--- a/obs-gitlab-runner/Cargo.toml
+++ b/obs-gitlab-runner/Cargo.toml
@@ -36,6 +36,7 @@ url = "2.5"
 claims.workspace = true
 gitlab-runner-mock = "0.2.1"
 # gitlab-runner-mock = { path = "../gitlab-runner-rs/gitlab-runner-mock" }
+obo-tests = { path = "../obo-tests" }
 open-build-service-mock.workspace = true
 rstest.workspace = true
 wiremock.workspace = true
 zip = "5.1"
diff --git
a/obs-gitlab-runner/src/handler.rs b/obs-gitlab-runner/src/handler.rs index 8b7751e..7276334 100644 --- a/obs-gitlab-runner/src/handler.rs +++ b/obs-gitlab-runner/src/handler.rs @@ -439,19 +439,18 @@ async fn check_for_artifact( #[cfg(test)] mod tests { use std::{ - cmp::Ordering, io::{Cursor, Read}, + marker::PhantomData, sync::{Arc, Once}, - time::{Duration, SystemTime}, + time::Duration, }; - use camino::Utf8Path; use claims::*; use gitlab_runner::{GitlabLayer, Runner, RunnerBuilder}; use gitlab_runner_mock::*; - use obo_core::{build_meta::RepoArch, upload::compute_md5}; + use obo_core::build_meta::{CommitBuildInfo, RepoArch}; use obo_test_support::*; - use open_build_service_mock::*; + use obo_tests::*; use rstest::rstest; use tempfile::TempDir; use tracing::{Level, instrument::WithSubscriber}; @@ -462,151 +461,6 @@ mod tests { use super::*; - const JOB_TIMEOUT: u64 = 3600; - const TEST_LOG_TAIL: u64 = 50; - const OLD_STATUS_SLEEP_DURATION: Duration = Duration::from_millis(100); - - const DEFAULT_HANDLER_OPTIONS: HandlerOptions = HandlerOptions { - default_monitor_job_timeout: None, - log_tail: TEST_LOG_TAIL, - monitor: PackageMonitoringOptions { - sleep_on_building: Duration::ZERO, - sleep_on_old_status: OLD_STATUS_SLEEP_DURATION, - // High limit, since we don't really test that - // functionality in the handler tests. - max_old_status_retries: 99, - }, - }; - - static COLOR_EYRE_INSTALL: Once = Once::new(); - - struct TestContext { - _runner_dir: TempDir, - gitlab_mock: GitlabRunnerMock, - runner: Runner, - - obs_mock: ObsMock, - obs_client: obs::Client, - } - - async fn with_context(func: impl AsyncFnOnce(TestContext) -> T) -> T { - COLOR_EYRE_INSTALL.call_once(|| color_eyre::install().unwrap()); - - let runner_dir = tempfile::tempdir().unwrap(); - let gitlab_mock = GitlabRunnerMock::start().await; - let (layer, jobs) = GitlabLayer::new(); - let runner = RunnerBuilder::new( - gitlab_mock.uri(), - gitlab_mock.runner_token().to_owned(), - runner_dir.path().to_owned(), - jobs, - ) - .build() - .await; - - let obs_mock = create_default_mock().await; - let obs_client = create_default_client(&obs_mock); - - let ctx = TestContext { - _runner_dir: runner_dir, - gitlab_mock, - runner, - obs_mock, - obs_client, - }; - - func(ctx) - .with_subscriber( - Registry::default() - .with( - tracing_subscriber::fmt::layer() - .with_test_writer() - .with_filter( - Targets::new().with_target("obs_gitlab_runner", Level::TRACE), - ), - ) - .with(tracing_error::ErrorLayer::default()) - .with(GitLabForwarder::new(layer)), - ) - .await - } - - #[derive(Default)] - struct JobSpec { - name: String, - dependencies: Vec, - variables: HashMap, - script: Vec, - after_script: Vec, - } - - fn enqueue_job(context: &TestContext, spec: JobSpec) -> MockJob { - let mut builder = context.gitlab_mock.job_builder(spec.name); - - builder.add_step( - MockJobStepName::Script, - spec.script, - JOB_TIMEOUT, - MockJobStepWhen::OnSuccess, - false, - ); - - if !spec.after_script.is_empty() { - builder.add_step( - MockJobStepName::AfterScript, - spec.after_script, - JOB_TIMEOUT, - MockJobStepWhen::OnSuccess, - false, - ); - } - - builder.add_artifact( - None, - false, - vec!["*".to_owned()], - Some(MockJobArtifactWhen::Always), - "archive".to_owned(), - Some("zip".to_owned()), - None, - ); - - for dependency in spec.dependencies { - builder.dependency(dependency); - } - for (key, value) in spec.variables { - builder.add_variable(key, value, true, false); - } - - builder.add_variable( - "OBS_SERVER".to_owned(), - 
context.obs_client.url().to_string(), - false, - true, - ); - builder.add_variable("OBS_USER".to_owned(), TEST_USER.to_owned(), false, true); - builder.add_variable("OBS_PASSWORD".to_owned(), TEST_PASS.to_owned(), false, true); - - let job = builder.build(); - context.gitlab_mock.enqueue_job(job.clone()); - job - } - - async fn run_handler(context: &mut TestContext, handler_func: Func) - where - U: UploadableFile + Send + 'static, - H: JobHandler + Send + 'static, - Func: (FnOnce(Job) -> H) + Send + Sync + 'static, - { - let got_job = context - .runner - .request_job(move |job| futures_util::future::ready(Ok(handler_func(job)))) - .await - .unwrap(); - assert!(got_job); - context.runner.wait_for_space(1).await; - } - struct PutArtifactsHandler { artifacts: Arc>>, } @@ -651,369 +505,334 @@ mod tests { } } - async fn put_artifacts( - context: &mut TestContext, - artifacts: HashMap>, - ) -> MockJob { - let artifacts_job = enqueue_job( - context, - JobSpec { - name: "artifacts".to_owned(), - script: vec!["dummy".to_owned()], - ..Default::default() - }, - ); - run_handler(context, |_| PutArtifactsHandler { - artifacts: Arc::new(artifacts), - }) - .await; - artifacts_job - } + #[derive(Clone, Debug)] + struct GitLabArtifactsHandle(MockJob); - fn get_job_artifacts(job: &MockJob) -> HashMap> { - let Some(artifact) = job.uploaded_artifacts().next() else { - return Default::default(); - }; + impl ArtifactsHandle for GitLabArtifactsHandle {} - let data = (*artifact.data).clone(); - assert!(!data.is_empty()); + #[derive(Clone, Debug)] + struct GitLabExecutionResult(MockJob); - let cursor = Cursor::new(data); - let mut zip = ZipArchive::new(cursor).unwrap(); + impl ExecutionResult for GitLabExecutionResult { + type Artifacts = GitLabArtifactsHandle; - (0..zip.len()) - .map(|i| { - let mut file = zip.by_index(i).unwrap(); + fn ok(&self) -> bool { + self.0.state() == MockJobState::Success + } - let mut contents = vec![]; - file.read_to_end(&mut contents).unwrap(); + fn log(&self) -> String { + String::from_utf8_lossy(&self.0.log()).into_owned() + } - (file.name().to_owned(), contents) - }) - .collect() + fn artifacts(&self) -> Self::Artifacts { + GitLabArtifactsHandle(self.0.clone()) + } } - async fn run_obs_handler_with_options(context: &mut TestContext, options: HandlerOptions) { - run_handler(context, move |job| { - assert_ok!(ObsJobHandler::from_obs_config_in_job(job, options)) - }) - .await; + // All the GitLabRunHandlerWrapper* boilerplate is basically a trick to be + // able to type-erase the generics of Runner::request_job(), so that you can + // pass a job handler to the RunBuilder without having to then propagate the + // handler's type everywhere. Ideally we could just use a lambda, but that + // causes a variety of lifetime issues due to the various constraints around + // async function types. 
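+    // The wrapper is stored as a boxed trait object on GitLabRunBuilder, which
+    // keeps the builder itself non-generic over the job handler's type.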
+ #[async_trait] + trait GitLabRunHandlerWrapper: Send + Sync { + async fn request_job(&mut self, runner: &mut Runner); } - async fn run_obs_handler(context: &mut TestContext) { - run_obs_handler_with_options(context, DEFAULT_HANDLER_OPTIONS).await; + struct GitLabRunHandlerWrapperImpl< + U: UploadableFile + Send + Sync + 'static, + H: JobHandler + Send + Sync + 'static, + Func: (FnOnce(Job) -> H) + Send + Sync + 'static, + > { + func: Option, + _phantom: PhantomData<(U, H)>, } - #[derive(Debug, PartialEq, Eq, Clone, Copy)] - enum DputTest { - Basic, - Rebuild, - ReusePreviousBuild, - Branch, + #[async_trait] + impl< + U: UploadableFile + Send + Sync + 'static, + H: JobHandler + Send + Sync + 'static, + Func: (FnOnce(Job) -> H) + Send + Sync + 'static, + > GitLabRunHandlerWrapper for GitLabRunHandlerWrapperImpl + { + async fn request_job(&mut self, runner: &mut Runner) { + let handler = self + .func + .take() + .expect("request_job can only be called once"); + let got_job = runner + .request_job(move |job| futures_util::future::ready(Ok(handler(job)))) + .await + .unwrap(); + assert!(got_job); + } } - async fn test_dput(context: &mut TestContext, test: DputTest) -> (MockJob, ObsBuildInfo) { - let test1_file = "test1"; - let test1_contents = b"123"; - let test1_md5 = compute_md5(test1_contents); - - let dsc1_file = "test1.dsc"; - let dsc1_contents = format!( - "Source: {}\nFiles:\n {} {} {}", - TEST_PACKAGE_1, - test1_md5.clone(), - test1_contents.len(), - test1_file - ); - let dsc1_md5 = compute_md5(dsc1_contents.as_bytes()); + struct GitLabRunBuilder<'context> { + context: &'context mut GitLabTestContext, + script: Vec, + after_script: Vec, + dependencies: Vec, + variables: HashMap, + handler: Box, + timeout: Duration, + } - let dsc1_bad_file = "test1-bad.dsc"; - let dsc1_bad_contents = - dsc1_contents.replace(test1_file, &(test1_file.to_owned() + ".missing")); + fn create_obs_job_handler_factory( + options: HandlerOptions, + ) -> impl FnOnce(Job) -> ObsJobHandler { + move |job| assert_ok!(ObsJobHandler::from_obs_config_in_job(job, options)) + } - context.obs_mock.add_project(TEST_PROJECT.to_owned()); + impl GitLabRunBuilder<'_> { + fn after_command(self, cmd: impl Into) -> Self { + self.after_script(&[cmd.into()]) + } - context.obs_mock.add_or_update_repository( - TEST_PROJECT, - TEST_REPO.to_owned(), - TEST_ARCH_1.to_owned(), - MockRepositoryCode::Finished, - ); - context.obs_mock.add_or_update_repository( - TEST_PROJECT, - TEST_REPO.to_owned(), - TEST_ARCH_2.to_owned(), - MockRepositoryCode::Finished, - ); + fn after_script(mut self, script: &[String]) -> Self { + self.after_script.extend_from_slice(script); + self + } - if test == DputTest::Rebuild { - // We also test excluded repos on rebuilds; this test makes it - // easier, because it's not testing creating a new package, so we - // can create it ourselves first with the desired metadata. 
- context.obs_mock.add_new_package( - TEST_PROJECT, - TEST_PACKAGE_1.to_owned(), - MockPackageOptions::default(), - ); - context.obs_mock.set_package_build_status( - TEST_PROJECT, - TEST_REPO, - TEST_ARCH_2, - TEST_PACKAGE_1.to_owned(), - MockBuildStatus::new(MockPackageCode::Disabled), - ); + fn variable(mut self, name: impl Into, value: impl Into) -> Self { + self.variables.insert(name.into(), value.into()); + self } - let artifacts = put_artifacts( - context, - [ - (dsc1_file.to_owned(), dsc1_contents.as_bytes().to_vec()), - ( - dsc1_bad_file.to_owned(), - dsc1_bad_contents.as_bytes().to_vec(), - ), - (test1_file.to_owned(), test1_contents.to_vec()), - ] - .into(), - ) - .await; + fn job_handler_factory< + U: UploadableFile + Send + Sync + 'static, + H: JobHandler + Send + Sync + 'static, + Func: (FnOnce(Job) -> H) + Send + Sync + 'static, + >( + mut self, + handler_func: Func, + ) -> Self { + self.handler = Box::new(GitLabRunHandlerWrapperImpl { + func: Some(handler_func), + _phantom: PhantomData, + }); + self + } - let mut dput_command = format!("dput {TEST_PROJECT} {dsc1_file}"); - let mut created_project = TEST_PROJECT.to_owned(); + fn obs_job_handler(self, options: HandlerOptions) -> Self { + self.job_handler_factory(create_obs_job_handler_factory(options)) + } + } - if test == DputTest::Branch { - created_project += ":branched"; - dput_command += &format!(" --branch-to {created_project}"); + #[async_trait] + impl<'context> RunBuilder<'context> for GitLabRunBuilder<'context> { + type ArtifactsHandle = GitLabArtifactsHandle; + type ExecutionResult = GitLabExecutionResult; + + fn script(mut self, script: &[String]) -> Self { + self.script.extend_from_slice(script); + self } - let dput = enqueue_job( - context, - JobSpec { - name: "dput".to_owned(), - dependencies: vec![artifacts.clone()], - script: vec![dput_command.replace(dsc1_file, dsc1_bad_file)], - ..Default::default() - }, - ); + fn artifacts(mut self, artifacts: Self::ArtifactsHandle) -> Self { + self.dependencies.push(artifacts.0); + self + } - run_obs_handler(context).await; - assert_eq!(MockJobState::Failed, dput.state()); - - let results = get_job_artifacts(&dput); - let build_info: ObsBuildInfo = - serde_yaml::from_slice(results.get(DEFAULT_BUILD_INFO).unwrap()).unwrap(); - - assert_eq!(build_info.project, created_project); - assert_eq!(build_info.package, TEST_PACKAGE_1); - assert_none!(build_info.rev); - assert_eq!(build_info.is_branched, test == DputTest::Branch); - - let mut dput = enqueue_job( - context, - JobSpec { - name: "dput".to_owned(), - dependencies: vec![artifacts.clone()], - script: vec![dput_command.clone()], - ..Default::default() - }, - ); + fn timeout(mut self, timeout: Duration) -> Self { + self.timeout = timeout; + self + } - run_obs_handler(context).await; - assert_eq!(MockJobState::Success, dput.state()); + async fn go(mut self) -> Self::ExecutionResult { + const JOB_TIMEOUT: u64 = 3600; - if test == DputTest::Rebuild || test == DputTest::ReusePreviousBuild { - context.obs_mock.add_or_update_repository( - &created_project, - TEST_REPO.to_owned(), - TEST_ARCH_1.to_owned(), - MockRepositoryCode::Building, - ); - // Also test endtimes, since we now have an existing package to - // modify the metadata of. 
- let dir = assert_ok!( - context - .obs_client - .project(TEST_PROJECT.to_owned()) - .package(TEST_PACKAGE_1.to_owned()) - .list(None) - .await + let mut builder = self.context.gitlab_mock.job_builder("test".to_owned()); + + builder.add_step( + MockJobStepName::Script, + self.script, + JOB_TIMEOUT, + MockJobStepWhen::OnSuccess, + false, ); - // Testing of reused builds never had the second arch disabled, so - // also add that build history. - if test == DputTest::ReusePreviousBuild { - context.obs_mock.add_job_history( - TEST_PROJECT, - TEST_REPO, - TEST_ARCH_2, - MockJobHistoryEntry { - package: TEST_PACKAGE_1.to_owned(), - rev: dir.rev.clone().unwrap(), - srcmd5: dir.srcmd5.clone(), - ..Default::default() - }, + + if !self.after_script.is_empty() { + builder.add_step( + MockJobStepName::AfterScript, + self.after_script, + JOB_TIMEOUT, + MockJobStepWhen::OnSuccess, + false, ); } - context.obs_mock.add_job_history( - TEST_PROJECT, - TEST_REPO, - TEST_ARCH_1, - MockJobHistoryEntry { - package: TEST_PACKAGE_1.to_owned(), - rev: dir.rev.unwrap(), - srcmd5: dir.srcmd5, - ..Default::default() - }, - ); - context.obs_mock.set_package_build_status_for_rebuilds( - &created_project, - MockBuildStatus::new(MockPackageCode::Broken), - ); - context.obs_mock.set_package_build_status( - &created_project, - TEST_REPO, - TEST_ARCH_1, - TEST_PACKAGE_1.to_owned(), - MockBuildStatus::new(MockPackageCode::Failed), + builder.add_artifact( + None, + false, + vec!["*".to_owned()], + Some(MockJobArtifactWhen::Always), + "archive".to_owned(), + Some("zip".to_owned()), + None, ); - let status = assert_ok!( - context - .obs_client - .project(created_project.clone()) - .package(TEST_PACKAGE_1.to_owned()) - .status(TEST_REPO, TEST_ARCH_1) - .await - ); - assert_eq!(status.code, obs::PackageCode::Failed); + for dependency in self.dependencies { + builder.dependency(dependency); + } + for (key, value) in self.variables { + builder.add_variable(key, value, true, false); + } - dput = enqueue_job( - context, - JobSpec { - name: "dput".to_owned(), - dependencies: vec![artifacts.clone()], - script: vec![dput_command.clone()], - ..Default::default() - }, + builder.add_variable( + "OBS_SERVER".to_owned(), + self.context.obs.client.url().to_string(), + false, + true, ); + builder.add_variable("OBS_USER".to_owned(), TEST_USER.to_owned(), false, true); + builder.add_variable("OBS_PASSWORD".to_owned(), TEST_PASS.to_owned(), false, true); - run_obs_handler(context).await; - assert_eq!(MockJobState::Success, dput.state()); + let job = builder.build(); + self.context.gitlab_mock.enqueue_job(job.clone()); - let job_log = String::from_utf8_lossy(&dput.log()).into_owned(); - assert!(job_log.contains("unchanged")); + self.handler.request_job(&mut self.context.runner).await; + self.context.runner.wait_for_space(1).await; - let status = assert_ok!( - context - .obs_client - .project(created_project.clone()) - .package(TEST_PACKAGE_1.to_owned()) - .status(TEST_REPO, TEST_ARCH_1) - .await - ); - assert_eq!(status.code, obs::PackageCode::Failed); - - if test == DputTest::Rebuild { - dput = enqueue_job( - context, - JobSpec { - name: "dput".to_owned(), - dependencies: vec![artifacts.clone()], - script: vec![format!("{} --rebuild-if-unchanged", dput_command)], - ..Default::default() - }, - ); + GitLabExecutionResult(job) + } + } + + const DEFAULT_HANDLER_OPTIONS: HandlerOptions = HandlerOptions { + default_monitor_job_timeout: None, + log_tail: MONITOR_TEST_LOG_TAIL, + monitor: PackageMonitoringOptions { + sleep_on_building: Duration::ZERO, + 
sleep_on_old_status: MONITOR_TEST_OLD_STATUS_SLEEP_DURATION,
+            // High limit, since we don't really test that
+            // functionality in the handler tests.
+            max_old_status_retries: 99,
+        },
+    };
 
-        run_obs_handler(context).await;
-        assert_eq!(MockJobState::Success, dput.state());
+    struct GitLabTestContext {
+        _runner_dir: TempDir,
+        gitlab_mock: GitlabRunnerMock,
+        runner: Runner,
+        obs: ObsContext,
+    }
 
-        let status = assert_ok!(
-            context
-                .obs_client
-                .project(created_project.clone())
-                .package(TEST_PACKAGE_1.to_owned())
-                .status(TEST_REPO, TEST_ARCH_1)
-                .await
-        );
-        assert_eq!(status.code, obs::PackageCode::Broken);
+    #[async_trait]
+    impl TestContext for GitLabTestContext {
+        type ArtifactsHandle = GitLabArtifactsHandle;
+        type ExecutionResult = GitLabExecutionResult;
+        type RunBuilder<'context> = GitLabRunBuilder<'context>;
 
-        let job_log = String::from_utf8_lossy(&dput.log()).into_owned();
-        assert!(job_log.contains("unchanged"));
-        }
+        fn obs(&self) -> &ObsContext {
+            &self.obs
         }
 
-        let results = get_job_artifacts(&dput);
-        let build_info: ObsBuildInfo =
-            serde_yaml::from_slice(results.get(DEFAULT_BUILD_INFO).unwrap()).unwrap();
+        async fn inject_artifacts(
+            &mut self,
+            artifacts: HashMap<String, Vec<u8>>,
+        ) -> Self::ArtifactsHandle {
+            self.run()
+                .job_handler_factory(|_| PutArtifactsHandler {
+                    artifacts: Arc::new(artifacts),
+                })
+                .command("dummy")
+                .go()
+                .await
+                .artifacts()
+        }
 
-        assert_eq!(build_info.project, created_project);
-        assert_eq!(build_info.package, TEST_PACKAGE_1);
-        assert_some!(build_info.rev.as_deref());
-        assert_eq!(build_info.is_branched, test == DputTest::Branch);
+        async fn fetch_artifacts(
+            &self,
+            handle: &Self::ArtifactsHandle,
+        ) -> HashMap<String, Vec<u8>> {
+            let Some(artifact) = handle.0.uploaded_artifacts().next() else {
+                return Default::default();
+            };
 
-        assert_eq!(
-            build_info.enabled_repos.len(),
-            if test == DputTest::Rebuild { 1 } else { 2 }
-        );
+            let data = (*artifact.data).clone();
+            assert!(!data.is_empty());
 
-        let arch_1 = build_info
-            .enabled_repos
-            .get(&RepoArch {
-                repo: TEST_REPO.to_owned(),
-                arch: TEST_ARCH_1.to_owned(),
-            })
-            .unwrap();
+            let cursor = Cursor::new(data);
+            let mut zip = ZipArchive::new(cursor).unwrap();
 
-        if test == DputTest::Rebuild {
-            assert_some!(arch_1.prev_endtime_for_commit);
-        } else {
-            assert_none!(arch_1.prev_endtime_for_commit);
+            (0..zip.len())
+                .map(|i| {
+                    let mut file = zip.by_index(i).unwrap();
+
+                    let mut contents = vec![];
+                    file.read_to_end(&mut contents).unwrap();
 
-        let arch_2 = build_info
-            .enabled_repos
-            .get(&RepoArch {
-                repo: TEST_REPO.to_owned(),
-                arch: TEST_ARCH_2.to_owned(),
+                    (file.name().to_owned(), contents)
                 })
-            .unwrap();
-        assert_none!(arch_2.prev_endtime_for_commit);
+                .collect()
         }
 
-        let mut dir = assert_ok!(
-            context
-                .obs_client
-                .project(created_project.clone())
-                .package(TEST_PACKAGE_1.to_owned())
-                .list(None)
-                .await
-        );
-
-        assert_eq!(dir.entries.len(), 3);
-        dir.entries.sort_by(|a, b| a.name.cmp(&b.name));
-        assert_eq!(dir.entries[0].name, "_meta");
-        assert_eq!(dir.entries[1].name, test1_file);
-        assert_eq!(dir.entries[1].size, test1_contents.len() as u64);
-        assert_eq!(dir.entries[1].md5, test1_md5);
-        assert_eq!(dir.entries[2].name, dsc1_file);
-        assert_eq!(dir.entries[2].size, dsc1_contents.len() as u64);
-        assert_eq!(dir.entries[2].md5, dsc1_md5);
-
-        (dput, build_info)
+        fn run(&mut self) -> Self::RunBuilder<'_> {
+            GitLabRunBuilder {
+                context: self,
+                script: vec![],
+                after_script: vec![],
+                dependencies: vec![],
+                variables: Default::default(),
+                handler: Box::new(GitLabRunHandlerWrapperImpl {
+                    func: Some(create_obs_job_handler_factory(DEFAULT_HANDLER_OPTIONS)),
+                    _phantom: PhantomData,
+                }),
+                timeout: EXECUTION_DEFAULT_TIMEOUT,
+            }
+        }
     }
 
-    #[derive(Debug, PartialEq, Eq)]
-    enum MonitorLogTest {
-        // Test that long logs are truncated.
-        Long,
-        // Test that short logs are fully shown.
-        Short,
-        // Test that revision mismatches result in unavailable logs.
-        Unavailable,
+    static COLOR_EYRE_INSTALL: Once = Once::new();
+
+    async fn with_context<T>(func: impl AsyncFnOnce(GitLabTestContext) -> T) -> T {
+        COLOR_EYRE_INSTALL.call_once(|| color_eyre::install().unwrap());
+
+        let runner_dir = tempfile::tempdir().unwrap();
+        let gitlab_mock = GitlabRunnerMock::start().await;
+        let (layer, jobs) = GitlabLayer::new();
+        let runner = RunnerBuilder::new(
+            gitlab_mock.uri(),
+            gitlab_mock.runner_token().to_owned(),
+            runner_dir.path().to_owned(),
+            jobs,
+        )
+        .build()
+        .await;
+
+        let obs_mock = create_default_mock().await;
+        let obs_client = create_default_client(&obs_mock);
+
+        let ctx = GitLabTestContext {
+            _runner_dir: runner_dir,
+            gitlab_mock,
+            runner,
+            obs: ObsContext {
+                client: obs_client,
+                mock: obs_mock,
+            },
+        };
+
+        func(ctx)
+            .with_subscriber(
+                Registry::default()
+                    .with(
+                        tracing_subscriber::fmt::layer()
+                            .with_test_writer()
+                            .with_filter(
+                                Targets::new().with_target("obs_gitlab_runner", Level::TRACE),
+                            ),
+                    )
+                    .with(tracing_error::ErrorLayer::default())
+                    .with(GitLabForwarder::new(layer)),
+            )
+            .await
     }
 
-    async fn test_monitoring(
-        context: &mut TestContext,
-        dput: MockJob,
+    async fn test_monitoring_with_generation(
+        context: &mut GitLabTestContext,
+        dput: GitLabArtifactsHandle,
         build_info: &ObsBuildInfo,
         success: bool,
         dput_test: DputTest,
@@ -1022,41 +841,6 @@ mod tests {
     ) {
         const TEST_JOB_RUNNER_TAG: &str = "test-tag";
         const TEST_MONITOR_TIMEOUT: &str = "1 day";
-        const TEST_BUILD_RESULTS_DIR: &str = "results";
-        const TEST_BUILD_RESULT: &str = "test-build-result";
-        const TEST_BUILD_RESULT_CONTENTS: &[u8] = b"abcdef";
-
-        let srcmd5_prefix = format!(
-            "srcmd5 '{}' ",
-            if log_test == MonitorLogTest::Unavailable {
-                ZERO_REV_SRCMD5.to_owned()
-            } else {
-                let dir = assert_ok!(
-                    context
-                        .obs_client
-                        .project(build_info.project.to_owned())
-                        .package(TEST_PACKAGE_1.to_owned())
-                        .list(None)
-                        .await
-                );
-
-                dir.srcmd5
-            }
-        );
-
-        let (log_contents, log_vs_limit) = if log_test == MonitorLogTest::Short {
-            (srcmd5_prefix + "short", Ordering::Less)
-        } else {
-            (
-                srcmd5_prefix + "this is a long log that will need to be trimmed when printed",
-                Ordering::Greater,
-            )
-        };
-
-        assert_eq!(
-            log_contents.len().cmp(&(TEST_LOG_TAIL as usize)),
-            log_vs_limit
-        );
 
         let mut generate_command = format!(
             "generate-monitor {TEST_JOB_RUNNER_TAG} \
@@ -1064,22 +848,19 @@ mod tests {
             --rules '[{{a: 1}}, {{b: 2}}]'"
         );
         if download_binaries {
-            generate_command += &format!(" --download-build-results-to {TEST_BUILD_RESULTS_DIR}");
+            generate_command +=
+                &format!(" --download-build-results-to {MONITOR_TEST_BUILD_RESULTS_DIR}");
        }
 
-        let generate = enqueue_job(
-            context,
-            JobSpec {
-                name: "generate".to_owned(),
-                dependencies: vec![dput.clone()],
-                script: vec![generate_command],
-                ..Default::default()
-            },
-        );
-        run_obs_handler(context).await;
-        assert_eq!(generate.state(), MockJobState::Success);
+        let generate = context
+            .run()
+            .command(generate_command)
+            .artifacts(dput.clone())
+            .go()
+            .await;
+        assert!(generate.ok());
 
-        let results = get_job_artifacts(&generate);
+        let results = context.fetch_artifacts(&generate.artifacts()).await;
         let pipeline_yaml: serde_yaml::Value = assert_ok!(serde_yaml::from_slice(
             results.get(DEFAULT_MONITOR_PIPELINE).unwrap()
         ));
@@ -1088,49 +869,6 @@ mod tests {
         assert_eq!(pipeline_map.len(), build_info.enabled_repos.len());
 
         for repo in build_info.enabled_repos.keys() {
-            // Sanity check this, even though test_dput should have already
-            // checked it.
-            assert_eq!(repo.repo, TEST_REPO);
-            assert!(
-                repo.arch == TEST_ARCH_1 || repo.arch == TEST_ARCH_2,
-                "unexpected arch '{}'",
-                repo.arch
-            );
-
-            context.obs_mock.set_package_build_status(
-                &build_info.project,
-                &repo.repo,
-                &repo.arch,
-                TEST_PACKAGE_1.to_owned(),
-                MockBuildStatus::new(if success {
-                    MockPackageCode::Succeeded
-                } else {
-                    MockPackageCode::Failed
-                }),
-            );
-            context.obs_mock.add_completed_build_log(
-                &build_info.project,
-                TEST_REPO,
-                &repo.arch,
-                TEST_PACKAGE_1.to_owned(),
-                MockBuildLog::new(log_contents.to_owned()),
-                success,
-            );
-            context.obs_mock.set_package_binaries(
-                &build_info.project,
-                TEST_REPO,
-                &repo.arch,
-                TEST_PACKAGE_1.to_owned(),
-                [(
-                    TEST_BUILD_RESULT.to_owned(),
-                    MockBinary {
-                        contents: TEST_BUILD_RESULT_CONTENTS.to_vec(),
-                        mtime: SystemTime::now(),
-                    },
-                )]
-                .into(),
-            );
-
             let monitor_job_name = format!(
                 "{}-{}-{}",
                 DEFAULT_PIPELINE_JOB_PREFIX, TEST_REPO, &repo.arch
@@ -1163,7 +901,7 @@ mod tests {
             if download_binaries {
                 assert_eq!(
                     &artifact_paths,
-                    &[DEFAULT_BUILD_LOG, TEST_BUILD_RESULTS_DIR]
+                    &[DEFAULT_BUILD_LOG, MONITOR_TEST_BUILD_RESULTS_DIR]
                 );
             } else {
                 assert_eq!(&artifact_paths, &[DEFAULT_BUILD_LOG]);
@@ -1203,214 +941,68 @@ mod tests {
                 .map(|v| v.as_str().unwrap().to_owned())
                 .collect::<Vec<_>>();
 
-            let monitor = enqueue_job(
+            test_monitoring(
                 context,
-                JobSpec {
-                    name: monitor_job_name.clone(),
-                    dependencies: vec![dput.clone()],
-                    script: script.clone(),
-                    ..Default::default()
-                },
-            );
-
-            if dput_test != DputTest::ReusePreviousBuild {
-                // Update the endtime in the background, otherwise the monitor
-                // will hang forever waiting.
-                let mock = context.obs_mock.clone();
-                let build_info_2 = build_info.clone();
-                let repo_2 = repo.clone();
-                tokio::spawn(async move {
-                    tokio::time::sleep(OLD_STATUS_SLEEP_DURATION * 10).await;
-                    mock.add_job_history(
-                        &build_info_2.project,
-                        &repo_2.repo,
-                        &repo_2.arch,
-                        MockJobHistoryEntry {
-                            package: build_info_2.package,
-                            endtime: SystemTime::UNIX_EPOCH + Duration::from_secs(999),
-                            srcmd5: build_info_2.srcmd5.unwrap(),
-                            ..Default::default()
-                        },
-                    );
-                });
-            }
-
-            assert_ok!(
-                tokio::time::timeout(OLD_STATUS_SLEEP_DURATION * 20, run_obs_handler(context))
-                    .await
-            );
-            assert_eq!(
-                monitor.state(),
-                if success && log_test != MonitorLogTest::Unavailable {
-                    MockJobState::Success
-                } else {
-                    MockJobState::Failed
-                }
-            );
-
-            let job_log = String::from_utf8_lossy(&monitor.log()).into_owned();
-
-            assert_eq!(
-                job_log.contains("unavailable"),
-                log_test == MonitorLogTest::Unavailable
-            );
-
-            // If we reused a previous build, we're not waiting for a new build,
-            // so don't check for an old build status.
- let build_actually_occurred = dput_test != DputTest::ReusePreviousBuild; - assert_eq!( - job_log.contains("Waiting for build status"), - build_actually_occurred - ); - - assert_eq!( - job_log.contains(&log_contents), - !success && log_test == MonitorLogTest::Short - ); - - if !success && log_test == MonitorLogTest::Long { - let log_bytes = log_contents.as_bytes(); - let truncated_log_bytes = &log_bytes[log_bytes.len() - (TEST_LOG_TAIL as usize)..]; - assert!(job_log.contains(String::from_utf8_lossy(truncated_log_bytes).as_ref())); - } - - let results = get_job_artifacts(&monitor); - let build_result_path = Utf8Path::new(TEST_BUILD_RESULTS_DIR) - .join(TEST_BUILD_RESULT) - .into_string(); - let mut has_built_result = false; - - if log_test != MonitorLogTest::Unavailable { - let full_log = results.get(DEFAULT_BUILD_LOG).unwrap(); - assert_eq!(log_contents, String::from_utf8_lossy(full_log)); - - if success && download_binaries { - let build_result = results.get(&build_result_path).unwrap(); - assert_eq!(TEST_BUILD_RESULT_CONTENTS, &build_result[..]); - - has_built_result = true; - } - } - - if !has_built_result { - assert_none!(results.get(&build_result_path)); - } + dput.clone(), + build_info, + repo, + &script, + success, + dput_test, + log_test, + download_binaries, + ) + .await; } } async fn test_prune( - context: &mut TestContext, - dput: MockJob, + context: &mut GitLabTestContext, + dput: GitLabArtifactsHandle, build_info: &ObsBuildInfo, only_if_job_unsuccessful: bool, ) { - let prune = enqueue_job( - context, - JobSpec { - name: "prune".to_owned(), - script: vec!["prune".to_owned()], - ..Default::default() - }, - ); - - run_obs_handler(context).await; - assert_eq!(MockJobState::Failed, prune.state()); - - let prune = enqueue_job( - context, - JobSpec { - name: "prune".to_owned(), - script: vec!["prune --ignore-missing-build-info".to_owned()], - ..Default::default() - }, - ); - - run_obs_handler(context).await; - assert_eq!(MockJobState::Success, prune.state()); - - assert!(String::from_utf8_lossy(&prune.log()).contains("Skipping prune")); + test_prune_missing_build_info(context).await; let prune = if only_if_job_unsuccessful { - let prune = enqueue_job( - context, - JobSpec { - name: "prune".to_owned(), - dependencies: vec![dput.clone()], - script: vec!["echo".to_owned()], - after_script: vec!["prune --only-if-job-unsuccessful".to_owned()], - ..Default::default() - }, - ); - - run_obs_handler(context).await; - assert_eq!(MockJobState::Success, prune.state()); + let prune = context + .run() + .command("echo") + .after_command("prune --only-if-job-unsuccessful") + .artifacts(dput.clone()) + .go() + .await; - assert!(String::from_utf8_lossy(&prune.log()).contains("Skipping prune")); + assert!(prune.ok()); + assert!(prune.log().contains("Skipping prune")); assert_ok!( context - .obs_client + .obs() + .client .project(build_info.project.clone()) .package(TEST_PACKAGE_1.to_owned()) .list(None) .await ); - enqueue_job( - context, - JobSpec { - name: "prune".to_owned(), - - dependencies: vec![dput.clone()], - script: vec!["echo --fail".to_owned()], - after_script: vec!["prune --only-if-job-unsuccessful".to_owned()], - ..Default::default() - }, - ) + context + .run() + .command("echo --fail") + .after_command("prune --only-if-job-unsuccessful") + .artifacts(dput.clone()) + .go() + .await } else { - enqueue_job( - context, - JobSpec { - name: "prune".to_owned(), - - dependencies: vec![dput.clone()], - script: vec!["prune".to_owned()], - ..Default::default() - }, - ) + context + .run() + 
.command("prune") + .artifacts(dput.clone()) + .go() + .await }; - run_obs_handler(context).await; - assert_eq!( - prune.state(), - if only_if_job_unsuccessful { - MockJobState::Failed - } else { - MockJobState::Success - } - ); - - if build_info.is_branched { - assert_err!( - context - .obs_client - .project(build_info.project.clone()) - .package(TEST_PACKAGE_1.to_owned()) - .list(None) - .await - ); - } else { - assert!(String::from_utf8_lossy(&prune.log()).contains("package was not branched")); - - assert_ok!( - context - .obs_client - .project(build_info.project.clone()) - .package(TEST_PACKAGE_1.to_owned()) - .list(None) - .await - ); - } + test_prune_deleted_package_1_if_branched(context, build_info, &prune).await; } #[rstest] @@ -1436,7 +1028,7 @@ mod tests { with_context(async |mut context| { let (dput, build_info) = test_dput(&mut context, dput_test).await; - test_monitoring( + test_monitoring_with_generation( &mut context, dput.clone(), &build_info, @@ -1449,7 +1041,7 @@ mod tests { test_prune( &mut context, - dput, + dput.clone(), &build_info, prune_only_if_job_unsuccessful, ) @@ -1462,27 +1054,18 @@ mod tests { #[tokio::test] async fn test_variable_expansion() { with_context(async |mut context| { - let job = enqueue_job( - &context, - JobSpec { - name: "expansion".to_owned(), - variables: [ - ("ESCAPED".to_owned(), "this should not appear".to_owned()), - ("QUOTED".to_owned(), "spaces should be preserved".to_owned()), - ("RECURSIVE".to_owned(), "recursion($RECURSIVE)".to_owned()), - ] - .into(), - script: vec!["echo --sep ; $MISSING $$ESCAPED $QUOTED $RECURSIVE".to_owned()], - ..Default::default() - }, - ); - - run_obs_handler(&mut context).await; - assert_eq!(job.state(), MockJobState::Success); + let expansion = context + .run() + .variable("ESCAPED", "this should not appear") + .variable("QUOTED", "spaces should be preserved") + .variable("RECURSIVE", "recursion($RECURSIVE)") + .command("echo --sep ; $MISSING $$ESCAPED $QUOTED $RECURSIVE") + .go() + .await; + assert!(expansion.ok()); - let job_log = String::from_utf8_lossy(&job.log()).into_owned(); assert_eq!( - job_log.lines().last().unwrap(), + expansion.log().lines().last().unwrap(), ";$ESCAPED;spaces should be preserved;recursion()" ); }) @@ -1493,62 +1076,31 @@ mod tests { #[tokio::test] async fn test_flag_parsing() { with_context(async |mut context| { - let job = enqueue_job( - &context, - JobSpec { - name: "flag".to_owned(), - script: vec!["echo --uppercase false".to_owned()], - ..Default::default() - }, - ); - - run_obs_handler(&mut context).await; - assert_eq!(job.state(), MockJobState::Success); - - let job_log = String::from_utf8_lossy(&job.log()).into_owned(); - assert_eq!(job_log.lines().last().unwrap(), "FALSE"); - - let job = enqueue_job( - &context, - JobSpec { - name: "flag".to_owned(), - script: vec!["echo --uppercase=false true".to_owned()], - ..Default::default() - }, - ); + let echo = context.run().command("echo --uppercase false").go().await; + assert!(echo.ok()); - run_obs_handler(&mut context).await; - assert_eq!(job.state(), MockJobState::Success); + assert_eq!(echo.log().lines().last().unwrap(), "FALSE"); - let job_log = String::from_utf8_lossy(&job.log()).into_owned(); - assert_eq!(job_log.lines().last().unwrap(), "true"); + let echo = context + .run() + .command("echo --uppercase=false true") + .go() + .await; + assert!(echo.ok()); - let job = enqueue_job( - &context, - JobSpec { - name: "flag".to_owned(), - script: vec!["echo --uppercase=true false".to_owned()], - ..Default::default() - }, - ); + 
assert_eq!(echo.log().lines().last().unwrap(), "true");
 
-            run_obs_handler(&mut context).await;
-            assert_eq!(job.state(), MockJobState::Success);
+            let echo = context
+                .run()
+                .command("echo --uppercase=true false")
+                .go()
+                .await;
+            assert!(echo.ok());
 
-            let job_log = String::from_utf8_lossy(&job.log()).into_owned();
-            assert_eq!(job_log.lines().last().unwrap(), "FALSE");
+            assert_eq!(echo.log().lines().last().unwrap(), "FALSE");
 
-            let job = enqueue_job(
-                &context,
-                JobSpec {
-                    name: "flag".to_owned(),
-                    script: vec!["echo --uppercase=X false".to_owned()],
-                    ..Default::default()
-                },
-            );
-
-            run_obs_handler(&mut context).await;
-            assert_eq!(job.state(), MockJobState::Failed);
+            let echo = context.run().command("echo --uppercase=X false").go().await;
+            assert!(!echo.ok());
         })
         .await;
     }
@@ -1569,8 +1121,6 @@
         )]
         test: Option<GenerateMonitorTimeoutLocation>,
     ) {
-        use obo_core::build_meta::CommitBuildInfo;
-
         const TEST_MONITOR_TIMEOUT: &str = "10 minutes";
 
         with_context(async |mut context| {
@@ -1592,49 +1142,40 @@
                 .into(),
             };
 
-            let build_info = put_artifacts(
-                &mut context,
-                [(
-                    DEFAULT_BUILD_INFO.to_owned(),
-                    serde_yaml::to_string(&build_info).unwrap().into_bytes(),
-                )]
-                .into(),
-            )
-            .await;
-
-            let mut generate_spec = JobSpec {
-                name: "generate".to_owned(),
-                script: vec!["generate-monitor tag".to_owned()],
-                dependencies: vec![build_info],
-                ..Default::default()
-            };
-
-            if test == Some(GenerateMonitorTimeoutLocation::Argument) {
-                use std::fmt::Write;
-                write!(
-                    &mut generate_spec.script[0],
-                    " --job-timeout '{TEST_MONITOR_TIMEOUT}'"
+            let build_info = context
+                .inject_artifacts(
+                    [(
+                        DEFAULT_BUILD_INFO.to_owned(),
+                        serde_yaml::to_string(&build_info).unwrap().into_bytes(),
+                    )]
+                    .into(),
                 )
-                .unwrap();
-            }
+                .await;
 
-            let generate = enqueue_job(&context, generate_spec);
+            let generate_builder = if test == Some(GenerateMonitorTimeoutLocation::Argument) {
+                context.run().command(format!(
+                    "generate-monitor tag --job-timeout '{TEST_MONITOR_TIMEOUT}'"
+                ))
+            } else {
+                context.run().command("generate-monitor tag")
+            }
+            .artifacts(build_info);
 
-            if test == Some(GenerateMonitorTimeoutLocation::HandlerOption) {
-                run_obs_handler_with_options(
-                    &mut context,
-                    HandlerOptions {
+            let generate = if test == Some(GenerateMonitorTimeoutLocation::HandlerOption) {
+                generate_builder
+                    .obs_job_handler(HandlerOptions {
                         default_monitor_job_timeout: Some(TEST_MONITOR_TIMEOUT.to_owned()),
                         ..DEFAULT_HANDLER_OPTIONS
-                    },
-                )
-                .await;
+                    })
+                    .go()
+                    .await
             } else {
-                run_obs_handler(&mut context).await;
-            }
-            assert_eq!(generate.state(), MockJobState::Success);
+                generate_builder.go().await
+            };
+
+            assert!(generate.ok());
 
-            let results = get_job_artifacts(&generate);
+            let results = context.fetch_artifacts(&generate.artifacts()).await;
             let pipeline_yaml: serde_yaml::Value = assert_ok!(serde_yaml::from_slice(
                 results.get(DEFAULT_MONITOR_PIPELINE).unwrap()
             ));

From 1b177cb6142f8f37bb24dce8459b318d5c0482e7 Mon Sep 17 00:00:00 2001
From: Ryan Gonzalez
Date: Wed, 6 Aug 2025 15:48:45 -0500
Subject: [PATCH 3/3] Rework the build-info format to be JSON

External services like GH Actions can read JSON but *not* YAML, and the
previous format couldn't simply be written "as JSON" because it relied
on having structs as keys.

This tweaks the format to be JSON-friendly and uses actual JSON files.
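Concretely (the repo/arch values below are illustrative), the old serde_yaml
output needed YAML's complex-key syntax for the struct-keyed `enabled_repos`
map, which has no JSON equivalent:

```yaml
enabled_repos:
  ? repo: default
    arch: x86_64
  : prev_endtime_for_commit: null
```

whereas the new format is a plain array of objects, with the repo/arch pair
flattened into each entry:

```json
{"enabled_repos": [{"repo": "default", "arch": "x86_64", "prev_endtime_for_commit": null}]}
```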
--- Cargo.lock | 3 +- Cargo.toml | 1 + README.md | 14 ++++----- obo-core/Cargo.toml | 2 +- obo-core/src/actions.rs | 17 +++++------ obo-core/src/build_meta.rs | 51 ++++++++++++++----------------- obo-tests/src/lib.rs | 12 +++----- obs-gitlab-runner/Cargo.toml | 1 + obs-gitlab-runner/src/handler.rs | 24 +++++++-------- obs-gitlab-runner/src/pipeline.rs | 10 +++--- 10 files changed, 63 insertions(+), 72 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 04f3085..8308f3e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1514,7 +1514,7 @@ dependencies = [ "rfc822-like", "rstest", "serde", - "serde_yaml", + "serde_json", "shell-words", "tempfile", "thiserror 2.0.16", @@ -1568,6 +1568,7 @@ dependencies = [ "open-build-service-mock", "rstest", "serde", + "serde_json", "serde_yaml", "shell-words", "shellexpand", diff --git a/Cargo.toml b/Cargo.toml index 7116534..176b3e2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,6 +21,7 @@ open-build-service-mock = { git = "https://github.com/collabora/open-build-servi # open-build-service-mock = { path = "../open-build-service-rs/open-build-service-api" } rstest = "0.26" serde = "1.0" +serde_json = "1.0.140" serde_yaml = "0.9" shell-words = "1.1" tempfile = "3.20" diff --git a/README.md b/README.md index 6ff07e5..cb35248 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,7 @@ In order to connect to OBS, three variables must be set (generally within the ```bash dput PROJECT DSC_FILE [--branch-to BRANCHED_PROJECT] - [--build-info-out BUILD_INFO_FILE=build-info.yml] + [--build-info-out BUILD_INFO_FILE=build-info.json] [--rebuild-if-unchanged] ``` @@ -92,7 +92,7 @@ within, will be removed. Metadata information on the uploaded revision, such as the revision number, project name, and package name, will be saved into the file specified by -`--build-info-out` (default is `build-info.yml`). This file is **required** by +`--build-info-out` (default is `build-info.json`). This file is **required** by the `generate-monitor` and `prune` steps. Do note that, if `--branch-to` is given, the file will be written *immediately* after the branch takes place (i.e. before the upload); that way, if the upload fails, the branched project can still @@ -109,7 +109,7 @@ testing builds on MRs; you can create an OBS branch named after the MR's Git branch, and then builds can take place there without interfering with your main projects. -##### `--build-info-out BUILD_INFO_FILE=build-info.yml` +##### `--build-info-out BUILD_INFO_FILE=build-info.json` Changes the filename that the build info will be written to. @@ -130,7 +130,7 @@ operation, there will *always* be a change to upload. generate-monitor RUNNER_TAG [--rules RULES] [--download-build-results-to BUILD_RESULTS_DIR] - [--build-info BUILD_INFO_FILE=build-info.yml] + [--build-info BUILD_INFO_FILE=build-info.json] [--pipeline-out PIPELINE_FILE=obs.yml] [--job-prefix MONITOR_JOB_PREFIX=obs] [--job-timeout MONITOR_JOB_TIMEOUT] @@ -199,7 +199,7 @@ dput-and-generate: After a monitoring job completes, download the build results from OBS to the given `BUILD_RESULTS_DIR`, and upload it as a GitLab build artifact.. -##### `--build-info BUILD_INFO_FILE=build-info.yml` +##### `--build-info BUILD_INFO_FILE=build-info.json` Specifies the name of the build info file to read. In particular, if a different build info filename was used with `dput` via @@ -233,7 +233,7 @@ Changes the filename each monitoring job will save the build log into. 
```bash
 prune
-    [--build-info BUILD_INFO_FILE=build-info.yml]
+    [--build-info BUILD_INFO_FILE=build-info.json]
     [--ignore-missing-build-info]
     [--only-if-job-unsuccessful]
 ```
@@ -242,7 +242,7 @@
 If a branch occurred, deletes the branched package and, if now empty, project,
 using the information from the build info file. (If no branching occurred, this
 does nothing.)
 
-##### `--build-info BUILD_INFO_FILE=build-info.yml`
+##### `--build-info BUILD_INFO_FILE=build-info.json`
 
 Specifies the name of the build info file to read. In particular, if a
 different build info filename was used with `dput` via
diff --git a/obo-core/Cargo.toml b/obo-core/Cargo.toml
index 146640a..da839db 100644
--- a/obo-core/Cargo.toml
+++ b/obo-core/Cargo.toml
@@ -20,7 +20,7 @@ open-build-service-api.workspace = true
 reqwest = "0.12"
 rfc822-like = "0.2"
 serde.workspace = true
-serde_yaml.workspace = true
+serde_json.workspace = true
 shell-words.workspace = true
 tempfile.workspace = true
 thiserror.workspace = true
diff --git a/obo-core/src/actions.rs b/obo-core/src/actions.rs
index 1dfee2c..11244d3 100644
--- a/obo-core/src/actions.rs
+++ b/obo-core/src/actions.rs
@@ -1,4 +1,4 @@
-use std::{collections::HashMap, io::SeekFrom};
+use std::io::SeekFrom;
 
 use camino::{Utf8Path, Utf8PathBuf};
 use clap::{ArgAction, Parser};
@@ -11,10 +11,7 @@ use tracing::{debug, instrument};
 use crate::{
     artifacts::{ArtifactDirectory, ArtifactReader, ArtifactWriter, MissingArtifactToNone},
     binaries::download_binaries,
-    build_meta::{
-        BuildHistoryRetrieval, BuildMeta, BuildMetaOptions, CommitBuildInfo, DisabledRepos,
-        RepoArch,
-    },
+    build_meta::{BuildHistoryRetrieval, BuildMeta, BuildMetaOptions, DisabledRepos, EnabledRepo},
     monitor::{MonitoredPackage, ObsMonitor, PackageCompletion, PackageMonitoringOptions},
     outputln,
     prune::prune_branch,
@@ -22,7 +19,7 @@ use crate::{
     upload::ObsDscUploader,
 };
 
-pub const DEFAULT_BUILD_INFO: &str = "build-info.yml";
+pub const DEFAULT_BUILD_INFO: &str = "build-info.json";
 pub const DEFAULT_BUILD_LOG: &str = "build.log";
 
 // Our flags can all take explicit values, because it makes it easier to
@@ -155,7 +152,7 @@ pub struct ObsBuildInfo {
     pub rev: Option<String>,
     pub srcmd5: Option<String>,
     pub is_branched: bool,
-    pub enabled_repos: HashMap<RepoArch, CommitBuildInfo>,
+    pub enabled_repos: Vec<EnabledRepo>,
 }
 
 impl ObsBuildInfo {
@@ -164,7 +161,7 @@ impl ObsBuildInfo {
         artifacts
             .save_with(path, async |file: &mut ArtifactWriter| {
                 let data =
-                    serde_yaml::to_string(&self).wrap_err("Failed to serialize build info")?;
+                    serde_json::to_string(&self).wrap_err("Failed to serialize build info")?;
                 file.write_all(data.as_bytes())
                     .await
                     .wrap_err("Failed to write build info file")?;
@@ -217,7 +214,7 @@ impl Actions {
             rev: None,
             srcmd5: None,
             is_branched,
-            enabled_repos: HashMap::new(),
+            enabled_repos: vec![],
         };
         debug!("Saving initial build info: {:?}", build_info);
         build_info
@@ -414,7 +411,7 @@ impl Actions {
             artifacts.read_string(&args.build_info).await?
         };
 
-        let build_info: ObsBuildInfo = serde_yaml::from_str(&build_info_data)
+        let build_info: ObsBuildInfo = serde_json::from_str(&build_info_data)
             .wrap_err("Failed to parse provided build info file")?;
 
         if build_info.is_branched {
diff --git a/obo-core/src/build_meta.rs b/obo-core/src/build_meta.rs
index c2330b6..41ebad7 100644
--- a/obo-core/src/build_meta.rs
+++ b/obo-core/src/build_meta.rs
@@ -14,7 +14,9 @@ pub struct RepoArch {
 }
 
 #[derive(Deserialize, Serialize, Debug, Clone)]
-pub struct CommitBuildInfo {
+pub struct EnabledRepo {
+    #[serde(flatten)]
+    pub repo_arch: RepoArch,
     pub prev_endtime_for_commit: Option<SystemTime>,
 }
 
@@ -237,26 +239,19 @@ impl BuildMeta {
         Ok(())
     }
 
-    pub fn get_commit_build_info(&self, srcmd5: &str) -> HashMap<RepoArch, CommitBuildInfo> {
-        let mut repos = HashMap::new();
-
-        for (repo, jobhist) in &self.repos {
-            let prev_endtime_for_commit = jobhist
-                .jobhist
-                .iter()
-                .filter(|e| e.srcmd5 == srcmd5)
-                .next_back()
-                .map(|e| e.endtime);
-
-            repos.insert(
-                repo.clone(),
-                CommitBuildInfo {
-                    prev_endtime_for_commit,
-                },
-            );
-        }
-
-        repos
+    pub fn get_commit_build_info(&self, srcmd5: &str) -> Vec<EnabledRepo> {
+        self.repos
+            .iter()
+            .map(|(repo, jobhist)| EnabledRepo {
+                repo_arch: repo.clone(),
+                prev_endtime_for_commit: jobhist
+                    .jobhist
+                    .iter()
+                    .filter(|e| e.srcmd5 == srcmd5)
+                    .next_back()
+                    .map(|e| e.endtime),
+            })
+            .collect()
     }
 }
 
@@ -360,10 +355,10 @@ mod tests {
 
         let build_info = meta.get_commit_build_info(&srcmd5_1);
 
-        let arch_1 = assert_some!(build_info.get(&repo_arch_1));
+        let arch_1 = assert_some!(build_info.iter().find(|e| e.repo_arch == repo_arch_1));
         assert_some_eq!(arch_1.prev_endtime_for_commit, endtime_1);
 
-        let arch_2 = assert_some!(build_info.get(&repo_arch_2));
+        let arch_2 = assert_some!(build_info.iter().find(|e| e.repo_arch == repo_arch_2));
         assert_none!(arch_2.prev_endtime_for_commit);
 
         let meta = assert_ok!(
@@ -385,9 +380,9 @@ mod tests {
 
         let build_info = meta.get_commit_build_info(&srcmd5_1);
 
-        let arch_1 = assert_some!(build_info.get(&repo_arch_1));
+        let arch_1 = assert_some!(build_info.iter().find(|e| e.repo_arch == repo_arch_1));
         assert_none!(arch_1.prev_endtime_for_commit);
-        let arch_2 = assert_some!(build_info.get(&repo_arch_2));
+        let arch_2 = assert_some!(build_info.iter().find(|e| e.repo_arch == repo_arch_2));
         assert_none!(arch_2.prev_endtime_for_commit);
 
         assert!(meta.repos.contains_key(&repo_arch_2));
@@ -431,7 +426,7 @@ mod tests {
 
         let build_info = meta.get_commit_build_info(&srcmd5_2);
 
-        let arch_1 = assert_some!(build_info.get(&repo_arch_2));
+        let arch_1 = assert_some!(build_info.iter().find(|e| e.repo_arch == repo_arch_2));
         assert_some_eq!(arch_1.prev_endtime_for_commit, endtime_2);
 
         mock.add_job_history(
@@ -465,13 +460,13 @@ mod tests {
 
         let build_info = meta.get_commit_build_info(&srcmd5_1);
 
-        let arch_2 = assert_some!(build_info.get(&repo_arch_2));
+        let arch_2 = assert_some!(build_info.iter().find(|e| e.repo_arch == repo_arch_2));
         assert_some_eq!(arch_2.prev_endtime_for_commit, endtime_1);
 
         meta.clear_stored_history();
 
         let build_info = meta.get_commit_build_info(&srcmd5_1);
 
-        let arch_2 = assert_some!(build_info.get(&repo_arch_2));
+        let arch_2 = assert_some!(build_info.iter().find(|e| e.repo_arch == repo_arch_2));
         assert_none!(arch_2.prev_endtime_for_commit);
 
         mock.set_package_build_status(
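Because `RepoArch` is `#[serde(flatten)]`ed into `EnabledRepo`, each entry
serializes as a single flat JSON object. A minimal sketch of the resulting
shape, using standalone mirror structs and a timestamp simplified to epoch
seconds purely for illustration (the real field wraps the OBS job-history
endtime):

```rust
use serde::{Deserialize, Serialize};

// Trimmed-down mirrors of the structures in build_meta.rs.
#[derive(Serialize, Deserialize)]
struct RepoArch {
    repo: String,
    arch: String,
}

#[derive(Serialize, Deserialize)]
struct EnabledRepo {
    #[serde(flatten)]
    repo_arch: RepoArch,
    // Simplified to epoch seconds here for the example.
    prev_endtime_for_commit: Option<u64>,
}

fn main() {
    let entry = EnabledRepo {
        repo_arch: RepoArch {
            repo: "default".to_owned(),
            arch: "x86_64".to_owned(),
        },
        prev_endtime_for_commit: None,
    };
    // flatten hoists repo/arch to the top level, so this prints:
    // {"repo":"default","arch":"x86_64","prev_endtime_for_commit":null}
    println!("{}", serde_json::to_string(&entry).unwrap());
}
```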
diff --git a/obo-tests/src/lib.rs b/obo-tests/src/lib.rs
index 0dfe12c..0561dc9 100644
--- a/obo-tests/src/lib.rs
+++ b/obo-tests/src/lib.rs
@@ -323,10 +323,8 @@ pub async fn test_dput(
 
     let arch_1 = build_info
         .enabled_repos
-        .get(&RepoArch {
-            repo: TEST_REPO.to_owned(),
-            arch: TEST_ARCH_1.to_owned(),
-        })
+        .iter()
+        .find(|e| e.repo_arch.repo == TEST_REPO && e.repo_arch.arch == TEST_ARCH_1)
         .unwrap();
 
     if test == DputTest::Rebuild {
@@ -336,10 +334,8 @@ pub async fn test_dput(
 
     let arch_2 = build_info
         .enabled_repos
-        .get(&RepoArch {
-            repo: TEST_REPO.to_owned(),
-            arch: TEST_ARCH_2.to_owned(),
-        })
+        .iter()
+        .find(|e| e.repo_arch.repo == TEST_REPO && e.repo_arch.arch == TEST_ARCH_2)
         .unwrap();
     assert_none!(arch_2.prev_endtime_for_commit);
 }
diff --git a/obs-gitlab-runner/Cargo.toml b/obs-gitlab-runner/Cargo.toml
index ac14003..7b3e203 100644
--- a/obs-gitlab-runner/Cargo.toml
+++ b/obs-gitlab-runner/Cargo.toml
@@ -19,6 +19,7 @@ obo-core = { path = "../obo-core" }
 obo-test-support = { path = "../obo-test-support" }
 open-build-service-api.workspace = true
 serde.workspace = true
+serde_json.workspace = true
 serde_yaml.workspace = true
 shellexpand = "3.1"
 shell-words.workspace = true
diff --git a/obs-gitlab-runner/src/handler.rs b/obs-gitlab-runner/src/handler.rs
index 7276334..ae825c0 100644
--- a/obs-gitlab-runner/src/handler.rs
+++ b/obs-gitlab-runner/src/handler.rs
@@ -228,7 +228,7 @@ impl ObsJobHandler {
         };
 
         let build_info_data = artifacts.read_string(&args.build_info).await?;
-        let build_info: ObsBuildInfo = serde_yaml::from_str(&build_info_data)
+        let build_info: ObsBuildInfo = serde_json::from_str(&build_info_data)
             .wrap_err("Failed to parse provided build info file")?;
 
         let pipeline = generate_monitor_pipeline(
@@ -448,7 +448,7 @@ mod tests {
     use claims::*;
     use gitlab_runner::{GitlabLayer, Runner, RunnerBuilder};
     use gitlab_runner_mock::*;
-    use obo_core::build_meta::{CommitBuildInfo, RepoArch};
+    use obo_core::build_meta::{EnabledRepo, RepoArch};
     use obo_test_support::*;
     use obo_tests::*;
     use rstest::rstest;
@@ -868,10 +868,10 @@ mod tests {
 
         assert_eq!(pipeline_map.len(), build_info.enabled_repos.len());
 
-        for repo in build_info.enabled_repos.keys() {
+        for enabled in &build_info.enabled_repos {
             let monitor_job_name = format!(
                 "{}-{}-{}",
-                DEFAULT_PIPELINE_JOB_PREFIX, TEST_REPO, &repo.arch
+                DEFAULT_PIPELINE_JOB_PREFIX, TEST_REPO, &enabled.repo_arch.arch
             );
 
             let monitor_map = pipeline_yaml
@@ -945,7 +945,7 @@ mod tests {
                 context,
                 dput.clone(),
                 build_info,
-                repo,
+                &enabled.repo_arch,
                 &script,
                 success,
                 dput_test,
@@ -1130,23 +1130,21 @@ mod tests {
             rev: Some("1".to_owned()),
             srcmd5: Some("abc".to_owned()),
             is_branched: false,
-            enabled_repos: [(
-                RepoArch {
+            enabled_repos: vec![EnabledRepo {
+                repo_arch: RepoArch {
                     repo: TEST_REPO.to_owned(),
                     arch: TEST_ARCH_1.to_owned(),
                 },
-                CommitBuildInfo {
-                    prev_endtime_for_commit: None,
-                },
-            )]
-            .into(),
+
+                prev_endtime_for_commit: None,
+            }],
         };
 
         let build_info = context
             .inject_artifacts(
                 [(
                     DEFAULT_BUILD_INFO.to_owned(),
-                    serde_yaml::to_string(&build_info).unwrap().into_bytes(),
+                    serde_json::to_string(&build_info).unwrap().into_bytes(),
                 )]
                 .into(),
             )
diff --git a/obs-gitlab-runner/src/pipeline.rs b/obs-gitlab-runner/src/pipeline.rs
index 375bb19..a0295fa 100644
--- a/obs-gitlab-runner/src/pipeline.rs
+++ b/obs-gitlab-runner/src/pipeline.rs
@@ -3,7 +3,7 @@
 use std::collections::HashMap;
 
 use color_eyre::eyre::{Context, Result};
 use obo_core::{
     actions::{DownloadBinariesAction, MonitorAction},
-    build_meta::{CommitBuildInfo, RepoArch},
+    build_meta::{EnabledRepo, RepoArch},
 };
 use serde::Serialize;
 use tracing::instrument;
@@ -51,7 +51,7 @@ pub fn generate_monitor_pipeline(
     package: &str,
     rev: &str,
     srcmd5: &str,
-    enabled_repos: &HashMap<RepoArch, CommitBuildInfo>,
+    enabled_repos: &[EnabledRepo],
options: GeneratePipelineOptions, ) -> Result { let rules: Option = options @@ -62,7 +62,9 @@ pub fn generate_monitor_pipeline( .wrap_err("Failed to parse provided rules")?; let mut jobs = HashMap::new(); - for (RepoArch { repo, arch }, info) in enabled_repos { + for enabled in enabled_repos { + let RepoArch { repo, arch } = &enabled.repo_arch; + let mut script = vec![]; let mut artifact_paths = vec![]; @@ -75,7 +77,7 @@ pub fn generate_monitor_pipeline( rev: rev.to_owned(), srcmd5: srcmd5.to_owned(), build_log_out: options.build_log_out.clone(), - prev_endtime_for_commit: info.prev_endtime_for_commit, + prev_endtime_for_commit: enabled.prev_endtime_for_commit, } .generate_command(), );
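Since the stated motivation is consumption by non-Rust tooling, the new file
can be picked apart with stock JSON tools. For example (hypothetical usage,
assuming `jq` is available in the consuming job):

```bash
# List the repo/arch pairs recorded in the new JSON build info, e.g.
# from a GitHub Actions step that wants to fan out per repository.
jq -r '.enabled_repos[] | "\(.repo)/\(.arch)"' build-info.json
```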