Changes from all commits (35)
3a064d5  initial work (Oct 6, 2025)
b215a0c  fixes (Oct 6, 2025)
92d4904  Add support for force published artifacts (Oct 6, 2025)
233acfa  test out new force publish artifact step (Oct 6, 2025)
ccbe79d  regen yaml (Oct 6, 2025)
33381cf  Try new job (Oct 6, 2025)
c46f87f  condense (Oct 6, 2025)
e7826ba  Merge branch 'main' into add-verify-tests-step (Oct 6, 2025)
693630a  Try a successful vmm_tests run (Oct 6, 2025)
2c2795d  update pipeline files (Oct 7, 2025)
6be7aa0  . (Oct 7, 2025)
689dd30  fix typo (Oct 7, 2025)
922d587  Prepare for review (Oct 7, 2025)
51f8bb8  print debug logs (Oct 7, 2025)
cb736c0  Handle both dirs and files gracefully (Oct 7, 2025)
d1348e7  Remove logging statements (Oct 7, 2025)
4c0d383  . (Oct 7, 2025)
2962333  Proper refactor of publish_test_result.rs (Oct 7, 2025)
3fa2e2e  More cleanup (Oct 7, 2025)
044179e  Merge branch 'main' into add-verify-tests-step (Oct 8, 2025)
e944546  Add file extension (Oct 8, 2025)
2cd9f9d  Fix local vmm_tests (Oct 8, 2025)
fcb1ed5  Scope changes in YAML (Oct 9, 2025)
d522230  Address feedback (Oct 10, 2025)
b4595ac  . (Oct 10, 2025)
717272c  . (Oct 13, 2025)
de175cb  Trim unnecessary optionals (Oct 13, 2025)
2185870  Merge branch 'main' into add-verify-tests-step (Oct 14, 2025)
4b8f415  Feedback (Oct 15, 2025)
130e31e  Update comments and make the run_cargo_nextest_list command configurable (Oct 15, 2025)
eea6cc1  Revert this change (Oct 15, 2025)
2e1d8e2  Revert "Revert this change" (Oct 15, 2025)
20e6604  . (Oct 15, 2025)
98bfa23  Revert "." (Oct 17, 2025)
2c1eccb  Merge branch 'main' into add-verify-tests-step (Oct 17, 2025)
1,132 changes: 751 additions & 381 deletions .github/workflows/openvmm-ci.yaml

Large diffs are not rendered by default.

1,132 changes: 751 additions & 381 deletions .github/workflows/openvmm-pr-release.yaml

Large diffs are not rendered by default.

1,149 changes: 760 additions & 389 deletions .github/workflows/openvmm-pr.yaml

Large diffs are not rendered by default.

10 changes: 10 additions & 0 deletions Cargo.lock
@@ -1950,6 +1950,7 @@ dependencies = [
"igvmfilegen_config",
"log",
"powershell_builder",
"quick-xml",
"serde",
"serde_json",
"target-lexicon",
@@ -6050,6 +6051,15 @@ dependencies = [
"syn 2.0.106",
]

+[[package]]
+name = "quick-xml"
+version = "0.38.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89"
+dependencies = [
+ "memchr",
+]

[[package]]
name = "quote"
version = "1.0.40"
1 change: 1 addition & 0 deletions Cargo.toml
@@ -501,6 +501,7 @@ proc-macro2 = "1.0"
prost = "0.11"
prost-build = "0.11"
prost-types = "0.11"
quick-xml = "0.38.3"
quote = "1.0"
range_map_vec = "0.2.0"
rayon = "1.5"
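The only functional change above is the new quick-xml workspace dependency; this diff doesn't show its call sites. Judging by the commit history ("Proper refactor of publish_test_result.rs", the verify-tests step), it is presumably used to read or write JUnit-style XML test results. A minimal, hypothetical sketch of that kind of parsing with quick-xml 0.38; the helper and the element name are assumptions, not code from this PR:

```rust
use quick_xml::Reader;
use quick_xml::events::Event;

/// Hypothetical helper: count the `<testcase>` elements in a JUnit-style
/// results document. Illustrative only.
fn count_testcases(xml: &str) -> anyhow::Result<usize> {
    let mut reader = Reader::from_str(xml);
    let mut count = 0;
    loop {
        match reader.read_event()? {
            // Matches both `<testcase ...>` and self-closing `<testcase .../>`.
            Event::Start(e) | Event::Empty(e) if e.name().as_ref() == b"testcase" => count += 1,
            Event::Eof => break,
            _ => {}
        }
    }
    Ok(count)
}
```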
23 changes: 17 additions & 6 deletions flowey/flowey_cli/src/pipeline_resolver/ado_yaml.rs
@@ -6,7 +6,6 @@ use super::common_yaml::FloweySource;
use super::common_yaml::check_generated_yaml_and_json;
use super::common_yaml::job_flowey_bootstrap_source;
use super::common_yaml::write_generated_yaml_and_json;
-use super::generic::ResolvedJobArtifact;
use super::generic::ResolvedJobUseParameter;
use crate::cli::exec_snippet::FloweyPipelineStaticDb;
use crate::cli::exec_snippet::VAR_DB_SEEDVAR_FLOWEY_WORKING_DIR;
@@ -16,6 +15,8 @@ use crate::flow_resolver::stage1_dag::OutputGraphEntry;
use crate::flow_resolver::stage1_dag::Step;
use crate::pipeline_resolver::generic::ResolvedPipeline;
use crate::pipeline_resolver::generic::ResolvedPipelineJob;
+use crate::pipeline_resolver::generic::ResolvedPublishedArtifact;
+use crate::pipeline_resolver::generic::ResolvedUsedArtifact;
use anyhow::Context;
use flowey_core::node::FlowArch;
use flowey_core::node::FlowBackend;
@@ -191,7 +192,7 @@ pub fn ado_yaml(
}

// also download any artifacts that'll be used
-for ResolvedJobArtifact {
+for ResolvedUsedArtifact {
flowey_var: _,
name,
} in artifacts_used
@@ -311,7 +312,10 @@
EOF

// next, emit ado steps to create dirs for artifacts which will be
// published
-for ResolvedJobArtifact { flowey_var, name } in artifacts_published {
+for ResolvedPublishedArtifact {
+flowey_var, name, ..
+} in artifacts_published
+{
writeln!(
flowey_bootstrap_bash,
r#"mkdir -p "$(AgentTempDirNormal)/publish_artifacts/{name}""#
@@ -325,7 +329,7 @@

// lastly, emit ado steps that report the dirs for any artifacts which
// are used by this job
-for ResolvedJobArtifact { flowey_var, name } in artifacts_used {
+for ResolvedUsedArtifact { flowey_var, name } in artifacts_used {
// do NOT use ADO macro syntax $(...), since this is in the same
// bootstrap block as where those ADO vars get defined, meaning it's
// not available yet!
@@ -423,20 +427,27 @@

// ..and once that's done, the last order of business is to emit some
// ado steps to publish the various artifacts created by this job
-for ResolvedJobArtifact {
+for ResolvedPublishedArtifact {
flowey_var: _,
name,
+force_upload,
} in artifacts_published
{
ado_steps.push({
-let map: serde_yaml::Mapping = serde_yaml::from_str(&format!(
+let mut map: serde_yaml::Mapping = serde_yaml::from_str(&format!(
r#"
publish: $(FLOWEY_TEMP_DIR)/publish_artifacts/{name}
displayName: '🌼📦 Publish {name}'
artifact: {name}
"#
))
.unwrap();
+if *force_upload {
+map.insert(
+"condition".into(),
+serde_yaml::Value::String("always()".into()),
+);
+}
map.into()
});
}
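Net effect on the generated ADO YAML: publish steps for force-published artifacts gain a `condition: always()` key, so the upload runs even when an earlier step failed. A sketch of the emitted step, assuming a hypothetical artifact named `test-results` (layout taken from the format string above):

```yaml
# Sketch only; "test-results" is an assumed artifact name.
- publish: $(FLOWEY_TEMP_DIR)/publish_artifacts/test-results
  displayName: '🌼📦 Publish test-results'
  artifact: test-results
  condition: always() # inserted only when force_upload is set
```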
10 changes: 7 additions & 3 deletions flowey/flowey_cli/src/pipeline_resolver/direct_run.rs
@@ -4,10 +4,11 @@
use crate::cli::exec_snippet::VAR_DB_SEEDVAR_FLOWEY_PERSISTENT_STORAGE_DIR;
use crate::flow_resolver::stage1_dag::OutputGraphEntry;
use crate::flow_resolver::stage1_dag::Step;
-use crate::pipeline_resolver::generic::ResolvedJobArtifact;
use crate::pipeline_resolver::generic::ResolvedJobUseParameter;
use crate::pipeline_resolver::generic::ResolvedPipeline;
use crate::pipeline_resolver::generic::ResolvedPipelineJob;
+use crate::pipeline_resolver::generic::ResolvedPublishedArtifact;
+use crate::pipeline_resolver::generic::ResolvedUsedArtifact;
use flowey_core::node::FlowArch;
use flowey_core::node::FlowBackend;
use flowey_core::node::FlowPlatform;
@@ -276,7 +277,10 @@ fn direct_run_do_work(
serde_json::to_string(&persist_dir).unwrap().into(),
);

-for ResolvedJobArtifact { flowey_var, name } in artifacts_published {
+for ResolvedPublishedArtifact {
+flowey_var, name, ..
+} in artifacts_published
+{
let path = out_dir.join("artifacts").join(name);
fs_err::create_dir_all(&path)?;

@@ -292,7 +296,7 @@
}
fs_err::create_dir_all(out_dir.join(".job_artifacts"))?;

-for ResolvedJobArtifact { flowey_var, name } in artifacts_used {
+for ResolvedUsedArtifact { flowey_var, name } in artifacts_used {
let path = out_dir.join(".job_artifacts").join(name);
fs_err::create_dir_all(&path)?;
copy_dir_all(out_dir.join("artifacts").join(name), &path)?;
32 changes: 23 additions & 9 deletions flowey/flowey_cli/src/pipeline_resolver/generic.rs
@@ -47,7 +47,14 @@ pub struct ResolvedPipeline {
}

#[derive(Debug, Clone)]
-pub struct ResolvedJobArtifact {
+pub struct ResolvedPublishedArtifact {
+pub flowey_var: String,
+pub name: String,
+pub force_upload: bool,
+}
+
+#[derive(Debug, Clone)]
+pub struct ResolvedUsedArtifact {
pub flowey_var: String,
pub name: String,
}
@@ -77,9 +84,9 @@ pub struct ResolvedPipelineJob {

pub parameters_used: Vec<ResolvedJobUseParameter>,
// correspond to injected download nodes at the start of the job
-pub artifacts_used: Vec<ResolvedJobArtifact>,
+pub artifacts_used: Vec<ResolvedUsedArtifact>,
// correspond to injected publish nodes at the end of the job
-pub artifacts_published: Vec<ResolvedJobArtifact>,
+pub artifacts_published: Vec<ResolvedPublishedArtifact>,
}

pub fn resolve_pipeline(pipeline: Pipeline) -> anyhow::Result<ResolvedPipeline> {
Expand Down Expand Up @@ -113,6 +120,7 @@ pub fn resolve_pipeline(pipeline: Pipeline) -> anyhow::Result<ResolvedPipeline>
name,
published_by_job,
used_by_jobs,
+force_published: _,
} in &artifacts
{
let no_existing = m
@@ -185,16 +193,21 @@

let artifacts_published: Vec<_> = artifacts_published
.into_iter()
-.map(|a| ResolvedJobArtifact {
-flowey_var: flowey_core::pipeline::internal::consistent_artifact_runtime_var_name(
-&a, false,
-),
-name: a,
+.map(|a| {
+let artifact_meta = artifacts.iter().find(|meta| meta.name == a).unwrap();
+ResolvedPublishedArtifact {
+flowey_var:
+flowey_core::pipeline::internal::consistent_artifact_runtime_var_name(
+&a, false,
+),
+name: a,
+force_upload: artifact_meta.force_published,
+}
})
.collect();
let artifacts_used: Vec<_> = artifacts_used
.into_iter()
-.map(|a| ResolvedJobArtifact {
+.map(|a| ResolvedUsedArtifact {
flowey_var: flowey_core::pipeline::internal::consistent_artifact_runtime_var_name(
&a, true,
),
@@ -248,6 +261,7 @@
name: _,
published_by_job,
used_by_jobs,
+force_published: _,
} in artifacts
{
let published_idx = job_graph_idx[published_by_job.expect("checked in loop above")];
18 changes: 13 additions & 5 deletions flowey/flowey_cli/src/pipeline_resolver/github_yaml/mod.rs
@@ -6,8 +6,9 @@
use super::common_yaml::BashCommands;
use super::common_yaml::check_generated_yaml_and_json;
use super::common_yaml::write_generated_yaml_and_json;
-use super::generic::ResolvedJobArtifact;
use super::generic::ResolvedJobUseParameter;
+use super::generic::ResolvedPublishedArtifact;
+use super::generic::ResolvedUsedArtifact;
use crate::cli::exec_snippet::FloweyPipelineStaticDb;
use crate::cli::exec_snippet::VAR_DB_SEEDVAR_FLOWEY_WORKING_DIR;
use crate::cli::pipeline::CheckMode;
@@ -369,7 +370,10 @@
EOF

// next, emit GitHub steps to create dirs for artifacts which will be
// published
-for ResolvedJobArtifact { flowey_var, name } in artifacts_published {
+for ResolvedPublishedArtifact {
+flowey_var, name, ..
+} in artifacts_published
+{
writeln!(
flowey_bootstrap_bash,
r#"mkdir -p "$AgentTempDirNormal/publish_artifacts/{name}""#
@@ -393,7 +397,7 @@

// lastly, emit GitHub steps that report the dirs for any artifacts which
// are used by this job
-for ResolvedJobArtifact { flowey_var, name } in artifacts_used {
+for ResolvedUsedArtifact { flowey_var, name } in artifacts_used {
let var_db_inject_cmd = bootstrap_bash_var_db_inject(flowey_var, true);
match platform.kind() {
FlowPlatformKind::Windows => {
@@ -428,13 +432,14 @@

// ..and once that's done, the last order of business is to emit some
// GitHub steps to publish the various artifacts created by this job
-for ResolvedJobArtifact {
+for ResolvedPublishedArtifact {
flowey_var: _,
name,
+force_upload,
} in artifacts_published
{
gh_steps.push({
-let map: serde_yaml::Mapping = serde_yaml::from_str(&format!(
+let mut map: serde_yaml::Mapping = serde_yaml::from_str(&format!(
r#"
name: 🌼📦 Publish {name}
uses: actions/upload-artifact@v4
@@ -445,6 +450,9 @@
"#
))
.unwrap();
+if *force_upload {
+map.insert("if".into(), serde_yaml::Value::String("always()".into()));
+}
map.into()
});
}
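The GitHub backend does the same thing with an `if: always()` expression on the upload step. A sketch of the emitted step for the same hypothetical `test-results` artifact. The `with:` block is collapsed in this diff, so the `name`/`path` keys below are assumed from actions/upload-artifact's usual inputs:

```yaml
# Sketch only; the exact path expression is an assumption.
- name: 🌼📦 Publish test-results
  uses: actions/upload-artifact@v4
  with:
    name: test-results
    path: ${{ runner.temp }}/publish_artifacts/test-results
  if: always() # inserted only when force_upload is set
```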
26 changes: 26 additions & 0 deletions flowey/flowey_core/src/pipeline.rs
@@ -642,6 +642,7 @@ impl Pipeline {
name: owned_name,
published_by_job: None,
used_by_jobs: BTreeSet::new(),
+force_published: false,
});

(PublishArtifact { idx }, UseArtifact { idx })
@@ -661,6 +662,30 @@
)
}

+/// Mark an artifact to be force published, meaning it will be published
+/// even if the job fails.
+///
+/// This is useful for artifacts that contain diagnostic information or logs
+/// that are needed to debug failures.
+#[track_caller]
+pub fn force_publish_artifact(&mut self, artifact: &PublishArtifact) -> &mut Self {
+self.artifacts[artifact.idx].force_published = true;
+self
+}
+
+/// Mark a typed artifact to be force published, meaning it will be published
+/// even if the job fails.
+///
+/// This is useful for artifacts that contain diagnostic information or logs
+/// that are needed to debug failures.
+#[track_caller]
+pub fn force_publish_typed_artifact<T: Artifact>(
+&mut self,
+artifact: &PublishTypedArtifact<T>,
+) -> &mut Self {
+self.force_publish_artifact(&artifact.0)
+}

/// (ADO only) Set the pipeline-level name.
///
/// <https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number?view=azure-devops&tabs=yaml>
Expand Down Expand Up @@ -1343,6 +1368,7 @@ pub mod internal {
pub name: String,
pub published_by_job: Option<usize>,
pub used_by_jobs: BTreeSet<usize>,
+pub force_published: bool,
}

#[derive(Debug)]
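For pipeline authors, opting in is one call per artifact. A minimal sketch of the intended usage, assuming `new_artifact` is the constructor whose tail appears in the first hunk (it returns the `(PublishArtifact, UseArtifact)` pair); the artifact name is illustrative:

```rust
// Hypothetical pipeline definition: always upload VMM test logs, even
// when the job that produces them fails.
let (publish_logs, use_logs) = pipeline.new_artifact("vmm-test-logs");
pipeline.force_publish_artifact(&publish_logs);
// `use_logs` would then be wired into whichever job consumes the logs;
// `force_publish_typed_artifact` is the equivalent for typed artifacts.
```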