wip: Install with fsverity
Signed-off-by: Colin Walters <[email protected]>
cgwalters committed Nov 11, 2024
1 parent c534a1e commit 1636572
Showing 14 changed files with 298 additions and 186 deletions.
10 changes: 9 additions & 1 deletion .github/workflows/ci.yml
@@ -71,10 +71,17 @@ jobs:
run: sudo rm -f /bin/skopeo /usr/bin/skopeo
- name: Free up disk space on runner
run: sudo ./ci/clean-gha-runner.sh
- name: Enable fsverity for /
run: sudo tune2fs -O verity $(findmnt -vno SOURCE /)
- name: Install utils
run: sudo apt -y install fsverity
- name: Integration tests
run: |
set -xeu
# Build images to test; TODO investigate doing single container builds
# via GHA and pushing to a temporary registry to share among workflows?
sudo podman build -t localhost/bootc -f hack/Containerfile .
sudo podman build -t localhost/bootc-fsverity -f ci/Containerfile.install-fsverity
export CARGO_INCREMENTAL=0 # because we aren't caching the test runner bits
cargo build --release -p tests-integration
df -h /
@@ -83,8 +90,9 @@
df -h /
# Nondestructive but privileged tests
sudo bootc-integration-tests host-privileged localhost/bootc
# Finally the install-alongside suite
# Install tests
sudo bootc-integration-tests install-alongside localhost/bootc
sudo bootc-integration-tests install-fsverity localhost/bootc-fsverity
docs:
if: ${{ contains(github.event.pull_request.labels.*.name, 'documentation') }}
runs-on: ubuntu-latest
10 changes: 10 additions & 0 deletions ci/Containerfile.install-fsverity
@@ -0,0 +1,10 @@
# Enable fsverity at install time
FROM localhost/bootc
RUN <<EORUN
set -xeuo pipefail
mkdir -p /usr/lib/bootc/install
cat > /usr/lib/bootc/install/30-fsverity.toml <<EOF
[install]
fsverity = "enabled"
EOF
EORUN
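
The drop-in above is the whole mechanism: bootc reads install configuration fragments from /usr/lib/bootc/install, so layering this one file onto the base image is enough to request fsverity at install time. As a minimal spot-check after installing from this image (assuming the fsverity CLI from fsverity-utils is available, as installed in the CI step above, and the standard /ostree/repo layout), one could ask the kernel for the verity digest of a committed object:

    # Pick one committed object; `fsverity measure` prints its digest
    # and fails for any file that does not have fsverity enabled.
    obj=$(find /ostree/repo/objects -name '*.file' | head -n 1)
    sudo fsverity measure "$obj"
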
26 changes: 26 additions & 0 deletions lib/src/cli.rs
@@ -280,6 +280,16 @@ pub(crate) enum ImageOpts {
Cmd(ImageCmdOpts),
}

/// Options for consistency checking
#[derive(Debug, clap::Subcommand, PartialEq, Eq)]
pub(crate) enum FsckOpts {
/// Check the state of fsverity on the ostree objects. Possible output:
/// "enabled" => All .file objects have fsverity
/// "disabled" => No .file objects have fsverity
/// "inconsistent" => Mixed state
OstreeVerity,
}

/// Hidden, internal only options
#[derive(Debug, clap::Subcommand, PartialEq, Eq)]
pub(crate) enum InternalsOpts {
@@ -293,6 +303,8 @@ pub(crate) enum InternalsOpts {
FixupEtcFstab,
/// Should only be used by `make update-generated`
PrintJsonSchema,
/// Perform consistency checking.
Fsck,
/// Perform cleanup actions
Cleanup,
/// Proxy frontend for the `ostree-ext` CLI.
@@ -952,6 +964,20 @@ async fn run_from_opt(opt: Opt) -> Result<()> {
)
.await
}
InternalsOpts::Fsck => {
let storage = get_storage().await?;
let r = crate::fsck::fsck(&storage).await?;
match r.errors.as_slice() {
[] => {}
errs => {
for err in errs {
eprintln!("error: {err}");
}
anyhow::bail!("fsck found errors");
}
}
Ok(())
}
InternalsOpts::FixupEtcFstab => crate::deploy::fixup_etc_fstab(&root),
InternalsOpts::PrintJsonSchema => {
let schema = schema_for!(crate::spec::Host);
122 changes: 122 additions & 0 deletions lib/src/fsck.rs
@@ -0,0 +1,122 @@
//! # Consistency checking
//!
//! Verify the state of the bootc storage; currently this checks the
//! fsverity status of the objects in the ostree repository.

use std::os::fd::AsFd;
use std::str::FromStr as _;

use anyhow::Ok;
use anyhow::{Context, Result};
use camino::Utf8PathBuf;
use cap_std::fs::Dir;
use cap_std_ext::cap_std;
use fn_error_context::context;
use ostree_ext::keyfileext::KeyFileExt;
use ostree_ext::ostree;
use serde::{Deserialize, Serialize};

use crate::install::config::Tristate;
use crate::store::{self, Storage};

#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub(crate) enum VerityState {
Enabled,
Disabled,
Inconsistent,
}

#[derive(Default, Serialize, Deserialize, Debug, PartialEq, Eq)]
pub(crate) struct FsckResult {
pub(crate) errors: Vec<String>,
pub(crate) verity: Option<VerityState>,
}

/// Check the fsverity state of all regular files in this object directory.
#[context("Computing verity state")]
fn verity_state_of_objects(d: &Dir) -> Result<(u64, u64)> {
let mut enabled = 0;
let mut disabled = 0;
for ent in d.entries()? {
let ent = ent?;
if !ent.file_type()?.is_file() {
continue;
}
let name = ent.file_name();
let name = name
.into_string()
.map(Utf8PathBuf::from)
.map_err(|_| anyhow::anyhow!("Invalid UTF-8"))?;
let Some("file") = name.extension() else {
continue;
};
let f = d
.open(&name)
.with_context(|| format!("Failed to open {name}"))?;
let r: Option<crate::fsverity::Sha256HashValue> =
crate::fsverity::ioctl::fs_ioc_measure_verity_optional(f.as_fd())?;
drop(f);
if r.is_some() {
enabled += 1;
} else {
disabled += 1;
}
}
Ok((enabled, disabled))
}

async fn verity_state_of_all_objects(repo: &ostree::Repo) -> Result<(u64, u64)> {
const MAX_CONCURRENT: usize = 3;

let repo_config = repo.config();
let verity_state = {
let (k, v) = store::REPO_VERITY_CONFIG.split_once('.').unwrap();
repo_config
.optional_string(k, v)?
.map(|v| Tristate::from_str(&v))
.transpose()?
.unwrap_or_default()
};

let repodir = Dir::reopen_dir(&repo.dfd_borrow())?;

let mut joinset = tokio::task::JoinSet::new();
let mut results = Vec::new();

for ent in repodir.read_dir("objects")? {
while joinset.len() >= MAX_CONCURRENT {
results.push(joinset.join_next().await.unwrap()??);
}
let ent = ent?;
if !ent.file_type()?.is_dir() {
continue;
}
let objdir = ent.open_dir()?;
joinset.spawn_blocking(move || verity_state_of_objects(&objdir));
}

while let Some(output) = joinset.join_next().await {
results.push(output??);
}
let r = results.into_iter().fold((0, 0), |mut acc, v| {
acc.0 += v.0;
acc.1 += v.1;
acc
});
Ok(r)
}

pub(crate) async fn fsck(storage: &Storage) -> Result<FsckResult> {
let mut r = FsckResult::default();
r.verity = match verity_state_of_all_objects(&storage.repo()).await? {
(0, 0) => None,
(_, 0) => Some(VerityState::Enabled),
(0, _) => Some(VerityState::Disabled),
_ => Some(VerityState::Inconsistent),
};
if matches!(&r.verity, &Some(VerityState::Inconsistent)) {
r.errors.push("Inconsistent fsverity state".into());
}
serde_json::to_writer(std::io::stdout().lock(), &r)?;
Ok(r)
}
169 changes: 0 additions & 169 deletions lib/src/fsverity/digest.rs
@@ -1,169 +0,0 @@
use std::cmp::min;

use sha2::{Digest, Sha256};

use super::Sha256HashValue;

// TODO: support Sha512

struct FsVerityLayer {
context: Sha256,
remaining: usize,
}

impl FsVerityLayer {
fn new() -> FsVerityLayer {
FsVerityLayer {
context: Sha256::new(),
remaining: 4096,
}
}

fn add_data(&mut self, data: &[u8]) {
self.context.update(data);
self.remaining -= data.len();
}

fn complete(&mut self) -> Sha256HashValue {
self.context.update(&[0u8; 4096][..self.remaining]);
self.remaining = 4096;
self.context.finalize_reset().into()
}
}

pub struct FsVerityHasher {
layers: Vec<FsVerityLayer>,
value: Option<Sha256HashValue>,
n_bytes: u64,
}

impl FsVerityHasher {
pub fn hash(buffer: &[u8]) -> Sha256HashValue {
let mut hasher = FsVerityHasher::new();

let mut start = 0;
while start < buffer.len() {
let end = min(start + 4096, buffer.len());
hasher.add_data(&buffer[start..end]);
start = end;
}

hasher.digest()
}

pub fn new() -> FsVerityHasher {
FsVerityHasher {
layers: vec![],
value: None,
n_bytes: 0,
}
}

pub fn add_data(&mut self, data: &[u8]) {
if let Some(value) = self.value {
// We had a complete value, but now we're adding new data.
// This means that we need to add a new hash layer...
let mut new_layer = FsVerityLayer::new();
new_layer.add_data(&value);
self.layers.push(new_layer);
self.value = None;
}

// Get the value of this block
let mut context = FsVerityLayer::new();
context.add_data(data);
let mut value = context.complete();
self.n_bytes += data.len() as u64;

for layer in self.layers.iter_mut() {
// We have a layer we need to hash this value into
layer.add_data(&value);
if layer.remaining != 0 {
return;
}
// ...but now this layer itself is complete, so get the value of *it*.
value = layer.complete();
}

// If we made it this far, we completed the last layer and have a value. Store it.
self.value = Some(value);
}

pub fn root_hash(&mut self) -> Sha256HashValue {
if let Some(value) = self.value {
value
} else {
let mut value = [0u8; 32];

for layer in self.layers.iter_mut() {
// We have a layer we need to hash this value into
if value != [0u8; 32] {
layer.add_data(&value);
}
if layer.remaining != 4096 {
// ...but now this layer itself is complete, so get the value of *it*.
value = layer.complete();
} else {
value = [0u8; 32];
}
}

self.value = Some(value);

value
}
}

pub fn digest(&mut self) -> Sha256HashValue {
/*
let descriptor = FsVerityDescriptor {
version: 1,
hash_algorithm: 1,
log_blocksize: 12,
salt_size: 0,
reserved_0x04: 0,
data_size: self.n_bytes,
root_hash: (self.root_hash(), [0; 32]),
salt: [0; 32],
reserved: [0; 144],
};
let mut context = Sha256::new();
context.update(descriptor);
return context.finalize().into();
*/

let mut context = Sha256::new();
context.update(1u8.to_le_bytes()); /* version */
context.update(1u8.to_le_bytes()); /* hash_algorithm */
context.update(12u8.to_le_bytes()); /* log_blocksize */
context.update(0u8.to_le_bytes()); /* salt_size */
context.update([0; 4]); /* reserved */
context.update(self.n_bytes.to_le_bytes());
context.update(self.root_hash());
context.update([0; 32]);
context.update([0; 32]); /* salt */
context.update([0; 144]); /* reserved */
context.finalize().into()
}
}

#[cfg(test)]
mod tests {
use anyhow::Result;

use super::*;

#[test]
fn test_digest() -> Result<()> {
let digest = FsVerityHasher::hash(b"hello world");
assert_eq!(
digest,
[
30, 46, 170, 66, 2, 215, 80, 164, 17, 116, 238, 69, 73, 112, 185, 44, 27, 194, 249,
37, 177, 227, 80, 118, 216, 199, 213, 245, 99, 98, 186, 100
]
);
Ok(())
}
}