Merged
382 changes: 232 additions & 150 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -710,7 +710,7 @@ slog-term = "2.9.1"
smf = "0.2"
socket2 = { version = "0.5", features = ["all"] }
sp-sim = { path = "sp-sim" }
sprockets-tls = { git = "https://github.com/oxidecomputer/sprockets.git", rev = "6d31fa63217c6a51061dc4afa1ebe175a0021981" }
sprockets-tls = { git = "https://github.com/oxidecomputer/sprockets.git", rev = "7b63ccb979288408fb772aa6d15f4a324497c754" }
sqlformat = "0.3.5"
sqlparser = { version = "0.45.0", features = [ "visitor" ] }
static_assertions = "1.1.0"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@ INFO added artifact, name: internal-dns, kind: zone, version: 1.0.0, hash: ffbf1
INFO added artifact, name: ntp, kind: zone, version: 1.0.0, hash: 67593d686ed04a1709f93972b71f4ebc148a9362120f65d239943e814a9a7439, length: 1681
INFO added artifact, name: nexus, kind: zone, version: 1.0.0, hash: 0e32b4a3e5d3668bb1d6a16fb06b74dc60b973fa479dcee0aae3adbb52bf1388, length: 1682
INFO added artifact, name: oximeter, kind: zone, version: 1.0.0, hash: 048d8fe8cdef5b175aad714d0f148aa80ce36c9114ac15ce9d02ed3d37877a77, length: 1682
INFO added artifact, name: sp_corpus, kind: measurement_corpus, version: 1.0.0, hash: ac95be27f1d2ef747290b76b83a1e6b162f344451387477b04a0e74bc7be17a6, length: 11913
INFO added artifact, name: rot_corpus, kind: measurement_corpus, version: 1.0.0, hash: 8a354560e17a992a46bc73a37556f0f9c090c570adaa85537ae5994b1986fd50, length: 11913
INFO added artifact, name: fake-psc-sp, kind: psc_sp, version: 1.0.0, hash: f896cf5b19ca85864d470ad8587f980218bff3954e7f52bbd999699cd0f9635b, length: 744
INFO added artifact, name: fake-psc-rot, kind: psc_rot_image_a, version: 1.0.0, hash: 179eb660ebc92e28b6748b6af03d9f998d6131319edd4654a1e948454c62551b, length: 750
INFO added artifact, name: fake-psc-rot, kind: psc_rot_image_b, version: 1.0.0, hash: 179eb660ebc92e28b6748b6af03d9f998d6131319edd4654a1e948454c62551b, length: 750
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,8 @@ INFO added artifact, name: internal-dns, kind: zone, version: 1.0.0, hash: ffbf1
INFO added artifact, name: ntp, kind: zone, version: 1.0.0, hash: 67593d686ed04a1709f93972b71f4ebc148a9362120f65d239943e814a9a7439, length: 1681
INFO added artifact, name: nexus, kind: zone, version: 1.0.0, hash: 0e32b4a3e5d3668bb1d6a16fb06b74dc60b973fa479dcee0aae3adbb52bf1388, length: 1682
INFO added artifact, name: oximeter, kind: zone, version: 1.0.0, hash: 048d8fe8cdef5b175aad714d0f148aa80ce36c9114ac15ce9d02ed3d37877a77, length: 1682
INFO added artifact, name: sp_corpus, kind: measurement_corpus, version: 1.0.0, hash: ac95be27f1d2ef747290b76b83a1e6b162f344451387477b04a0e74bc7be17a6, length: 11913
INFO added artifact, name: rot_corpus, kind: measurement_corpus, version: 1.0.0, hash: 8a354560e17a992a46bc73a37556f0f9c090c570adaa85537ae5994b1986fd50, length: 11913
INFO added artifact, name: fake-psc-sp, kind: psc_sp, version: 1.0.0, hash: f896cf5b19ca85864d470ad8587f980218bff3954e7f52bbd999699cd0f9635b, length: 744
INFO added artifact, name: fake-psc-rot, kind: psc_rot_image_a, version: 1.0.0, hash: 179eb660ebc92e28b6748b6af03d9f998d6131319edd4654a1e948454c62551b, length: 750
INFO added artifact, name: fake-psc-rot, kind: psc_rot_image_b, version: 1.0.0, hash: 179eb660ebc92e28b6748b6af03d9f998d6131319edd4654a1e948454c62551b, length: 750
Expand Down Expand Up @@ -120,6 +122,8 @@ INFO added artifact, name: internal-dns, kind: zone, version: 2.0.0, hash: de306
INFO added artifact, name: ntp, kind: zone, version: 2.0.0, hash: d76e26198daed69cdae04490d7477f8c842e0dbe37d463eac0d0a8d3fb803095, length: 1682
INFO added artifact, name: nexus, kind: zone, version: 2.0.0, hash: e9b7035f41848a987a798c15ac424cc91dd662b1af0920d58d8aa1ebad7467b6, length: 1683
INFO added artifact, name: oximeter, kind: zone, version: 2.0.0, hash: 9f4bc56a15d5fd943fdac94309994b8fd73aa2be1ec61faf44bfcf2356c9dc23, length: 1683
INFO added artifact, name: sp_corpus, kind: measurement_corpus, version: 2.0.0, hash: aa21cbeece8fa0097024edc347bc85bf2bc4b6cf0cc83eefd1a269856b3e10d1, length: 11914
INFO added artifact, name: rot_corpus, kind: measurement_corpus, version: 2.0.0, hash: 79b9888b01b0cd1110f8733fc85f389c7cf64805c1342b960eb977747c21e016, length: 11914
INFO added artifact, name: fake-psc-sp, kind: psc_sp, version: 2.0.0, hash: 7adf04de523865003dbf120cebddd5fcf5bad650640281b294197e6ca7016e47, length: 748
INFO added artifact, name: fake-psc-rot, kind: psc_rot_image_a, version: 2.0.0, hash: 6d1c432647e9b9e4cf846ff5d17932d75cba49c0d3f23d24243238bc40bcfef5, length: 746
INFO added artifact, name: fake-psc-rot, kind: psc_rot_image_b, version: 2.0.0, hash: 6d1c432647e9b9e4cf846ff5d17932d75cba49c0d3f23d24243238bc40bcfef5, length: 746
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,8 @@ INFO added artifact, name: internal-dns, kind: zone, version: 1.0.0, hash: ffbf1
INFO added artifact, name: ntp, kind: zone, version: 1.0.0, hash: 67593d686ed04a1709f93972b71f4ebc148a9362120f65d239943e814a9a7439, length: 1681
INFO added artifact, name: nexus, kind: zone, version: 1.0.0, hash: 0e32b4a3e5d3668bb1d6a16fb06b74dc60b973fa479dcee0aae3adbb52bf1388, length: 1682
INFO added artifact, name: oximeter, kind: zone, version: 1.0.0, hash: 048d8fe8cdef5b175aad714d0f148aa80ce36c9114ac15ce9d02ed3d37877a77, length: 1682
INFO added artifact, name: sp_corpus, kind: measurement_corpus, version: 1.0.0, hash: ac95be27f1d2ef747290b76b83a1e6b162f344451387477b04a0e74bc7be17a6, length: 11913
INFO added artifact, name: rot_corpus, kind: measurement_corpus, version: 1.0.0, hash: 8a354560e17a992a46bc73a37556f0f9c090c570adaa85537ae5994b1986fd50, length: 11913
INFO added artifact, name: fake-psc-sp, kind: psc_sp, version: 1.0.0, hash: f896cf5b19ca85864d470ad8587f980218bff3954e7f52bbd999699cd0f9635b, length: 744
INFO added artifact, name: fake-psc-rot, kind: psc_rot_image_a, version: 1.0.0, hash: 179eb660ebc92e28b6748b6af03d9f998d6131319edd4654a1e948454c62551b, length: 750
INFO added artifact, name: fake-psc-rot, kind: psc_rot_image_b, version: 1.0.0, hash: 179eb660ebc92e28b6748b6af03d9f998d6131319edd4654a1e948454c62551b, length: 750
Expand Down Expand Up @@ -87,6 +89,8 @@ target release (generation 2): 1.0.0 (system-update-v1.0.0.zip)
artifact: 67593d686ed04a1709f93972b71f4ebc148a9362120f65d239943e814a9a7439 zone (ntp version 1.0.0)
artifact: 0e32b4a3e5d3668bb1d6a16fb06b74dc60b973fa479dcee0aae3adbb52bf1388 zone (nexus version 1.0.0)
artifact: 048d8fe8cdef5b175aad714d0f148aa80ce36c9114ac15ce9d02ed3d37877a77 zone (oximeter version 1.0.0)
artifact: ac95be27f1d2ef747290b76b83a1e6b162f344451387477b04a0e74bc7be17a6 measurement_corpus (sp_corpus version 1.0.0)
artifact: 8a354560e17a992a46bc73a37556f0f9c090c570adaa85537ae5994b1986fd50 measurement_corpus (rot_corpus version 1.0.0)
artifact: f896cf5b19ca85864d470ad8587f980218bff3954e7f52bbd999699cd0f9635b psc_sp (fake-psc-sp version 1.0.0)
artifact: 179eb660ebc92e28b6748b6af03d9f998d6131319edd4654a1e948454c62551b psc_rot_image_a (fake-psc-rot version 1.0.0)
artifact: 179eb660ebc92e28b6748b6af03d9f998d6131319edd4654a1e948454c62551b psc_rot_image_b (fake-psc-rot version 1.0.0)
Expand Down Expand Up @@ -160,6 +164,8 @@ target release (generation 2): 1.0.0 (system-update-v1.0.0.zip)
artifact: 67593d686ed04a1709f93972b71f4ebc148a9362120f65d239943e814a9a7439 zone (ntp version 1.0.0)
artifact: 0e32b4a3e5d3668bb1d6a16fb06b74dc60b973fa479dcee0aae3adbb52bf1388 zone (nexus version 1.0.0)
artifact: 048d8fe8cdef5b175aad714d0f148aa80ce36c9114ac15ce9d02ed3d37877a77 zone (oximeter version 1.0.0)
artifact: ac95be27f1d2ef747290b76b83a1e6b162f344451387477b04a0e74bc7be17a6 measurement_corpus (sp_corpus version 1.0.0)
artifact: 8a354560e17a992a46bc73a37556f0f9c090c570adaa85537ae5994b1986fd50 measurement_corpus (rot_corpus version 1.0.0)
artifact: f896cf5b19ca85864d470ad8587f980218bff3954e7f52bbd999699cd0f9635b psc_sp (fake-psc-sp version 1.0.0)
artifact: 179eb660ebc92e28b6748b6af03d9f998d6131319edd4654a1e948454c62551b psc_rot_image_a (fake-psc-rot version 1.0.0)
artifact: 179eb660ebc92e28b6748b6af03d9f998d6131319edd4654a1e948454c62551b psc_rot_image_b (fake-psc-rot version 1.0.0)
Expand Down
32 changes: 32 additions & 0 deletions dev-tools/releng/src/hubris.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,19 @@ pub(crate) async fn fetch_hubris_artifacts(

fs::create_dir_all(&output_dir).await?;

// Remove any previously downloaded corpus so stale files don't get added
// to the repo unexpectedly. This should only really be an issue with
// local builds.
if std::fs::exists(&output_dir.join("measurement_corpus"))
.context("failed to check `measurement_corpus`")?
{
std::fs::remove_dir_all(&output_dir.join("measurement_corpus"))
.context("failed to remove `measurement_corpus`")?;
}
fs::create_dir_all(&output_dir.join("measurement_corpus"))
.await
.context("failed to create `measurement_corpus`")?;

// This could be parallelized with FuturesUnordered but in practice this
// takes less time than OS builds.

Expand Down Expand Up @@ -106,6 +119,22 @@ pub(crate) async fn fetch_hubris_artifacts(
}
}
}
if let Some(corpus) = hash_manifest.corpus {
let hash = match corpus {
Source::File(file) => file.hash,
_ => anyhow::bail!(
"Unexpected file type: should be a single file, not an RoT"
),
};
let data =
fetch_hash(&logger, base_url, &client, &hash).await?;
fs::write(
output_dir.join("measurement_corpus").join(&hash),
data,
)
.await
.with_context(|| format!("failed to write file {hash}"))?;
}
}
}

Expand Down Expand Up @@ -160,6 +189,9 @@ async fn fetch_hash(
struct Manifest {
#[serde(rename = "artifact")]
artifacts: HashMap<KnownArtifactKind, Vec<Artifact>>,
// Optional so older manifests without this key still deserialize
// (backwards compatibility).
#[serde(rename = "measurement_corpus")]
corpus: Option<Source>,
}

#[derive(Deserialize)]
Expand Down
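
A note on the `corpus: Option<Source>` field above: wrapping it in `Option` is what keeps older hash manifests, which have no `measurement_corpus` key, deserializing cleanly, since serde leaves a missing optional key as `None`. A minimal, self-contained sketch of that behavior, using a stand-in struct and `String` in place of the real `Source` type (the TOML snippets are illustrative, not the actual hubris manifest format):

use serde::Deserialize;

#[derive(Deserialize)]
struct ExampleManifest {
    // Stand-in for `corpus: Option<Source>`; `String` replaces `Source`
    // purely for illustration.
    #[serde(rename = "measurement_corpus")]
    corpus: Option<String>,
}

fn main() {
    // An older manifest with no `measurement_corpus` key still parses.
    let old: ExampleManifest = toml::from_str("").unwrap();
    assert!(old.corpus.is_none());

    // A newer manifest that names a corpus populates the field.
    let new: ExampleManifest =
        toml::from_str(r#"measurement_corpus = "abc123""#).unwrap();
    assert_eq!(new.corpus.as_deref(), Some("abc123"));
}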
38 changes: 37 additions & 1 deletion dev-tools/releng/src/tuf.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,38 @@ pub(crate) async fn build_tuf_repo(
}
}

let mut measurement_corpus = vec![];

for entry in std::fs::read_dir(
output_dir.join("hubris-staging").join("measurement_corpus"),
)
.context("failed to read `hubris-staging/measurement_corpus")?
{
let entry = entry?;
measurement_corpus.push(DeserializedControlPlaneZoneSource::File {
file_name: Some(format!(
"{}.cbor",
entry.file_name().into_string().unwrap()
)),
path: Utf8PathBuf::from_path_buf(entry.path()).unwrap(),
});
}

for entry in std::fs::read_dir(
output_dir.join("hubris-production").join("measurement_corpus"),
)
.context("failed to read `hubris-production/measurement_corpus")?
{
let entry = entry?;
measurement_corpus.push(DeserializedControlPlaneZoneSource::File {
file_name: Some(format!(
"{}.cbor",
entry.file_name().into_string().unwrap()
)),
path: Utf8PathBuf::from_path_buf(entry.path()).unwrap(),
});
}

// Add the OS images.
manifest.artifacts.insert(
KnownArtifactKind::Host,
Expand Down Expand Up @@ -111,12 +143,16 @@ pub(crate) async fn build_tuf_repo(
.join(format!("{}.tar.gz", package)),
});
}

manifest.artifacts.insert(
KnownArtifactKind::ControlPlane,
vec![DeserializedArtifactData {
name: "control-plane".to_string(),
version: artifact_version.clone(),
source: DeserializedArtifactSource::CompositeControlPlane { zones },
source: DeserializedArtifactSource::CompositeControlPlane {
zones,
measurement_corpus,
},
}],
);

Expand Down
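
The two `read_dir` loops above, over `hubris-staging/measurement_corpus` and `hubris-production/measurement_corpus`, differ only in the directory they walk. A possible follow-up, sketched here rather than part of this change, would be a shared helper (the name `collect_corpus_entries` is made up; it assumes the file's existing `anyhow::{Context, Result}` and camino imports):

fn collect_corpus_entries(
    dir: &Utf8Path,
    out: &mut Vec<DeserializedControlPlaneZoneSource>,
) -> Result<()> {
    for entry in std::fs::read_dir(dir)
        .with_context(|| format!("failed to read `{dir}`"))?
    {
        let entry = entry?;
        out.push(DeserializedControlPlaneZoneSource::File {
            // Same naming scheme as above: the on-disk file is named by
            // hash, and the repo entry gets a `.cbor` extension.
            file_name: Some(format!(
                "{}.cbor",
                entry.file_name().into_string().unwrap()
            )),
            path: Utf8PathBuf::from_path_buf(entry.path()).unwrap(),
        });
    }
    Ok(())
}

Each of the two loops would then collapse to a single call, e.g. `collect_corpus_entries(&output_dir.join("hubris-staging").join("measurement_corpus"), &mut measurement_corpus)?;`.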
8 changes: 8 additions & 0 deletions installinator-common/src/progress.rs
Original file line number Diff line number Diff line change
Expand Up @@ -251,6 +251,8 @@ pub enum WriteError {
#[source]
error: Box<NestedEngineError<ControlPlaneZonesSpec>>,
},
#[error("error creating directory: {error}")]
CreateDirError { error: std::io::Error },
}

impl From<NestedEngineError<ControlPlaneZonesSpec>> for WriteError {
Expand Down Expand Up @@ -297,6 +299,12 @@ pub enum ControlPlaneZonesStepId {
/// Writing the MUPdate override file.
MupdateOverride,

/// Creating the measurements directory.
CreateMeasurementDir,

/// Writing a measurement corpus
MeasurementCorpus { name: String },
Contributor

I don't think this structure precludes anything I'm about to say. I'm just writing a note as I think about this.

We've talked about stacking multiple versioned corpuses so each TUF repo doesn't need to include the measurements for all versions. This would allow us to check for version N and N-1 measurement values without having to put both of them in one TUF repo. However, for installinator, for a mupdate of a single sled on an existing rack, we'd probably want to be able to include the last few versions of measurements in a TUF repo or have some other way to stack when doing a fresh install. The problem with the former is that we won't always require strict release upgrading from version N-1 to N in the future. The problem with the latter is we now need to be able to source the different manifests from wicket.

Contributor Author

Including previous versions of measurements in a TUF repo would get a little tricky and require some manual hand holding unless our automation gets a lot smarter. What I was planning to do is rotate the measurements from INSTALL to CLUSTER after an initial boot/attestation and have a set of all available measurements. I started trying to do that here but I struggled with finding a good spot to do the rotation.

Contributor

Makes sense. As long as we can handle sleds running either old (for some specific set of old) or new software, this should work fine.


/// Writing the zone manifest.
ZoneManifest,

Expand Down
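
On the review thread above about stacking corpuses across releases: one shape that idea could take, sketched purely for illustration (none of these types exist in this PR or in omicron), is keeping corpora keyed by the release that delivered them, so an attestation check can accept measurements from version N or N-1 without both having to ship in a single TUF repo:

use std::collections::BTreeMap;

// Hypothetical stand-ins; real code would use proper version and digest types.
type ReleaseVersion = String;
type CorpusDigest = String;

#[derive(Default)]
struct StackedCorpus {
    // Corpora keyed by the release that delivered them, e.g. rotated from
    // INSTALL to CLUSTER after the first successful attestation, as the
    // discussion above suggests.
    by_release: BTreeMap<ReleaseVersion, Vec<CorpusDigest>>,
}

impl StackedCorpus {
    fn add_release(&mut self, release: ReleaseVersion, corpus: Vec<CorpusDigest>) {
        self.by_release.insert(release, corpus);
    }

    // A measurement is acceptable if any retained release's corpus lists it.
    fn contains(&self, digest: &str) -> bool {
        self.by_release
            .values()
            .flatten()
            .any(|d| d.as_str() == digest)
    }
}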
90 changes: 89 additions & 1 deletion installinator/src/write.rs
Original file line number Diff line number Diff line change
Expand Up @@ -777,6 +777,58 @@ impl ControlPlaneZoneWriteContext<'_> {
.register();
}

engine
.new_step(
WriteComponent::ControlPlane,
ControlPlaneZonesStepId::CreateMeasurementDir,
"Creating measurement directory".to_string(),
async move |_cx| {
if !std::fs::exists(
self.output_directory.join("measurements"),
)
.map_err(|error| WriteError::CreateDirError { error })?
{
std::fs::create_dir(
self.output_directory.join("measurements"),
)
.map_err(|error| {
WriteError::CreateDirError { error }
})?;
}
StepSuccess::new(()).into()
},
)
.register();

for (name, data) in &self.zones.measurement_corpus {
let out_path =
self.output_directory.join("measurements").join(name);
transport = engine
.new_step(
WriteComponent::ControlPlane,
ControlPlaneZonesStepId::MeasurementCorpus {
name: name.clone(),
},
format!("Writing measurement corpus {name}"),
async move |cx| {
let transport = transport.into_value(cx.token()).await;
write_artifact_impl(
WriteComponent::ControlPlane,
slot,
data.clone().into(),
&out_path,
transport,
&cx,
)
.await?;

StepSuccess::new(transport).into()
},
)
.register();
}

// `fsync()` the directory to ensure the directory entries for all the
// files we just created are written to disk.
let output_directory = self.output_directory.to_path_buf();
Expand Down Expand Up @@ -1127,10 +1179,12 @@ mod tests {
data1: Vec<Vec<u8>>,
#[strategy(prop::collection::vec(prop::collection::vec(any::<u8>(), 0..8192), 0..16))]
data2: Vec<Vec<u8>>,
#[strategy(prop::collection::vec(prop::collection::vec(any::<u8>(), 0..8192), 0..16))]
data3: Vec<Vec<u8>>,
#[strategy(WriteOps::strategy())] write_ops: WriteOps,
) {
with_test_runtime(async move {
proptest_write_artifact_impl(data1, data2, write_ops)
proptest_write_artifact_impl(data1, data2, data3, write_ops)
.await
.expect("test failed");
})
Expand Down Expand Up @@ -1209,6 +1263,7 @@ mod tests {
async fn proptest_write_artifact_impl(
data1: Vec<Vec<u8>>,
data2: Vec<Vec<u8>>,
data3: Vec<Vec<u8>>,
write_ops: WriteOps,
) -> Result<()> {
let logctx = test_setup_log("test_write_artifact");
Expand All @@ -1219,10 +1274,15 @@ mod tests {
let destination_control_plane =
tempdir_path.join("test-control-plane.bin");

let destination_corpus =
tempdir_path.join("measurements").join("test-corpus.bin");

let mut artifact_host: BufList =
data1.into_iter().map(Bytes::from).collect();
let mut artifact_control_plane: BufList =
data2.into_iter().map(Bytes::from).collect();
let mut artifact_corpus: BufList =
data3.into_iter().map(Bytes::from).collect();

let host_id = ArtifactHashId {
kind: ArtifactKind::HOST_PHASE_2,
Expand Down Expand Up @@ -1287,6 +1347,10 @@ mod tests {
destination_control_plane.file_name().unwrap().to_string(),
artifact_control_plane.iter().flatten().copied().collect(),
)],
measurement_corpus: vec![(
destination_corpus.file_name().unwrap().to_string(),
artifact_corpus.iter().flatten().copied().collect(),
)],
};

let mut writer = ArtifactWriter::new(
Expand Down Expand Up @@ -1418,6 +1482,30 @@ mod tests {
.copy_to_bytes(artifact_control_plane.num_bytes());
assert_eq!(buf, bytes, "bytes written to disk match");

// Read the corpus artifact from disk and ensure it is correct.
let mut file = tokio::fs::File::open(&destination_corpus)
.await
.with_context(|| {
format!(
"failed to open {destination_corpus} to verify contents"
)
})?;
let mut buf = Vec::with_capacity(artifact_corpus.num_bytes());
let read_num_bytes =
file.read_to_end(&mut buf).await.with_context(|| {
format!(
"failed to read {destination_control_plane} into memory"
)
})?;
assert_eq!(
read_num_bytes,
artifact_corpus.num_bytes(),
"read num_bytes matches"
);

let bytes = artifact_corpus.copy_to_bytes(artifact_corpus.num_bytes());
assert_eq!(buf, bytes, "bytes written to disk match");

logctx.cleanup_successful();
Ok(())
}
Expand Down
3 changes: 2 additions & 1 deletion nexus/reconfigurator/planning/src/mgs_updates/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -418,7 +418,8 @@ fn try_make_update_sp(
| KnownArtifactKind::SwitchRot
| KnownArtifactKind::GimletRotBootloader
| KnownArtifactKind::PscRotBootloader
| KnownArtifactKind::SwitchRotBootloader,
| KnownArtifactKind::SwitchRotBootloader
| KnownArtifactKind::MeasurementCorpus,
) => false,
}
})
Expand Down
7 changes: 7 additions & 0 deletions sled-agent/config-reconciler/src/internal_disks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -337,6 +337,13 @@ impl InternalDisks {
})
}

/// Returns all `INSTALL_DATASET` paths within available M.2 disks.
pub fn all_install_datasets(
&self,
) -> impl ExactSizeIterator<Item = Utf8PathBuf> + '_ {
self.all_datasets(INSTALL_DATASET)
}

/// Returns all `CONFIG_DATASET` paths within available M.2 disks.
pub fn all_config_datasets(
&self,
Expand Down
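
The new `all_install_datasets()` accessor lines up with the `measurements` directory that installinator now writes into the install dataset (see `installinator/src/write.rs` above) and with the `corpus: Vec<Utf8PathBuf>` threaded into the bootstrap client below. The glue between them is not part of this diff; a hedged sketch of how a caller might assemble those paths (the helper name and the assumption that the corpus is passed as directory paths are both guesses):

use camino::Utf8PathBuf;

// Hypothetical helper; only `all_install_datasets()` and the "measurements"
// directory name come from this PR.
fn measurement_corpus_paths(internal_disks: &InternalDisks) -> Vec<Utf8PathBuf> {
    internal_disks
        .all_install_datasets()
        .map(|dataset| dataset.join("measurements"))
        .filter(|dir| dir.exists())
        .collect()
}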
6 changes: 5 additions & 1 deletion sled-agent/src/bootstrap/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ use super::params::version;
use super::views::SledAgentResponse;
use crate::bootstrap::views::Response;
use crate::bootstrap::views::ResponseEnvelope;
use camino::Utf8PathBuf;
use sled_agent_types::sled::StartSledAgentRequest;
use slog::Logger;
use sprockets_tls::client::Client as SprocketsClient;
Expand Down Expand Up @@ -72,15 +73,17 @@ pub(crate) struct Client {
addr: SocketAddrV6,
log: Logger,
sprockets_conf: SprocketsConfig,
corpus: Vec<Utf8PathBuf>,
}

impl Client {
pub(crate) fn new(
addr: SocketAddrV6,
sprockets_conf: SprocketsConfig,
corpus: Vec<Utf8PathBuf>,
log: Logger,
) -> Self {
Self { addr, sprockets_conf, log }
Self { addr, sprockets_conf, log, corpus }
}

/// Start sled agent by sending an initialization request determined from
Expand Down Expand Up @@ -114,6 +117,7 @@ impl Client {
let stream = SprocketsClient::connect(
self.sprockets_conf.clone(),
self.addr,
self.corpus.clone(),
log.clone(),
)
.await
Expand Down