diff --git a/.dockerignore b/.dockerignore index 5f13236dd..c03d1cb22 100644 --- a/.dockerignore +++ b/.dockerignore @@ -8,6 +8,8 @@ # Toplevel build bits !Makefile !Cargo.* +# cargo alias config (defines `cargo xtask` shorthand used in Dockerfile stages) +!.cargo/config.toml # License and doc files needed for RPM !LICENSE-* !README.md diff --git a/.gitignore b/.gitignore index 81d5d39b5..e2ada6385 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,6 @@ bootc.tar.zst # Added by cargo /target + +# Git worktrees +.worktrees/ diff --git a/Dockerfile b/Dockerfile index 86eb4c2de..f08f35900 100644 --- a/Dockerfile +++ b/Dockerfile @@ -185,6 +185,24 @@ RUN --network=none --mount=type=tmpfs,target=/run --mount=type=tmpfs,target=/tmp FROM buildroot as validate RUN --network=none --mount=type=tmpfs,target=/run --mount=type=tmpfs,target=/tmp --mount=type=cache,target=/src/target --mount=type=cache,target=/var/roothome make validate +FROM validate as validate-post-build +RUN --network=none --mount=type=tmpfs,target=/run --mount=type=tmpfs,target=/tmp --mount=type=cache,target=/src/target --mount=type=cache,target=/var/roothome \ + cargo xtask update-generated from-code --check + +# Stage for updating generated docs files (man pages, JSON schemas) on the host. +# Usage: podman build --target update-generated-from-code --output type=local,dest=. . +# This runs `from-code` inside the container (where ostree is available) and +# exports only the generated files back to the host working directory. +FROM buildroot as update-generated-from-code +RUN --network=none --mount=type=tmpfs,target=/run --mount=type=tmpfs,target=/tmp --mount=type=cache,target=/src/target --mount=type=cache,target=/var/roothome \ + cargo xtask update-generated from-code +# Export only the generated docs — not the entire container filesystem. +# Glob patterns here automatically pick up any new schemas added to JSON_SCHEMAS +# in crates/xtask/src/xtask.rs without requiring Dockerfile changes. 
+FROM scratch as update-generated-from-code-output +COPY --from=update-generated-from-code /src/docs/src/man/ /docs/src/man/ +COPY --from=update-generated-from-code /src/docs/src/*.schema.json /docs/src/ + # ---- # Stages for the final image # ---- diff --git a/Justfile b/Justfile index 089a36411..60372d7fa 100644 --- a/Justfile +++ b/Justfile @@ -169,10 +169,11 @@ test-upgrade *ARGS: build _build-upgrade-source-image "${composefs_args[@]}" \ {{upgrade_source_img}} {{ARGS}} readonly -# Run cargo fmt and clippy checks in container +# Run all validation checks: tmt plan staleness (local), then fmt/clippy/man/schema (container) [group('core')] validate: - podman build {{base_buildargs}} --target validate . + cargo xtask update-generated direct --check + podman build {{base_buildargs}} --target validate-post-build . # Test container export via Anaconda liveimg install in a QEMU VM [group('testing')] @@ -291,9 +292,12 @@ pullspec-for-os TYPE NAME: # ============================================================================ # Update generated files (man pages, JSON schemas) +# tmt plans are updated directly; man pages + JSON schemas are regenerated +# inside a container (so ostree is available) and written back via --output. [group('maintenance')] update-generated: - cargo run -p xtask update-generated + cargo xtask update-generated direct + podman build {{base_buildargs}} --target update-generated-from-code-output --output type=local,dest=. . 
# Remove all locally-built test container images [group('maintenance')] diff --git a/Makefile b/Makefile index 44f6807f3..e57b2ba74 100644 --- a/Makefile +++ b/Makefile @@ -129,9 +129,6 @@ fix-rust: cargo clippy --fix --allow-dirty -- $(CLIPPY_CONFIG) .PHONY: fix-rust -update-generated: - cargo xtask update-generated -.PHONY: update-generated vendor: cargo xtask $@ diff --git a/crates/xtask/src/man.rs b/crates/xtask/src/man.rs index 149750d4a..a1b080518 100644 --- a/crates/xtask/src/man.rs +++ b/crates/xtask/src/man.rs @@ -174,20 +174,19 @@ fn format_options_as_markdown(options: &[CliOption], positionals: &[CliPositiona result } -/// Update markdown file with generated subcommands -pub fn update_markdown_with_subcommands( +/// Compute what `docs/src/man/` should look like after regenerating its subcommands section. +/// Returns `None` if the file has no subcommands marker (nothing to do). +fn compute_markdown_with_subcommands( markdown_path: &Utf8Path, + content: &str, subcommands: &[CliCommand], parent_path: &[&str], -) -> Result<()> { - let content = - fs::read_to_string(markdown_path).with_context(|| format!("Reading {}", markdown_path))?; - +) -> Result<Option<String>> { let begin_marker = "<!-- BEGIN GENERATED SUBCOMMANDS -->"; let end_marker = "<!-- END GENERATED SUBCOMMANDS -->"; let Some((before, rest)) = content.split_once(begin_marker) else { - return Ok(()); // Skip files without markers + return Ok(None); // Skip files without markers }; let Some((_, after)) = rest.split_once(end_marker) else { @@ -202,34 +201,25 @@ pub fn update_markdown_with_subcommands( // Trim trailing whitespace from before section and ensure exactly one blank line let before = before.trim_end(); - let new_content = format!( + Ok(Some(format!( "{}\n\n{}\n{}{}{}", before, begin_marker, generated_subcommands, end_marker, after - ); - - // Only write if content has changed to avoid updating mtime unnecessarily - if new_content != content { - fs::write(markdown_path, new_content) - .with_context(|| format!("Writing to {}", markdown_path))?; - println!("Updated 
subcommands in {}", markdown_path); - } - Ok(()) + ))) } -/// Update markdown file with generated options -pub fn update_markdown_with_options( +/// Compute what `docs/src/man/` should look like after regenerating its options section. +/// Returns `None` if the file has no options marker (nothing to do). +fn compute_markdown_with_options( markdown_path: &Utf8Path, + content: &str, options: &[CliOption], positionals: &[CliPositional], -) -> Result<()> { - let content = - fs::read_to_string(markdown_path).with_context(|| format!("Reading {}", markdown_path))?; - +) -> Result<Option<String>> { let begin_marker = "<!-- BEGIN GENERATED OPTIONS -->"; let end_marker = "<!-- END GENERATED OPTIONS -->"; let Some((before, rest)) = content.split_once(begin_marker) else { - return Ok(()); // Skip files without markers + return Ok(None); // Skip files without markers }; let Some((_, after)) = rest.split_once(end_marker) else { @@ -256,6 +246,48 @@ pub fn update_markdown_with_options( format!("{}\n\n{}\n{}{}", before, begin_marker, end_marker, after) }; + Ok(Some(new_content)) +} + +/// Update markdown file with generated subcommands +pub fn update_markdown_with_subcommands( + markdown_path: &Utf8Path, + subcommands: &[CliCommand], + parent_path: &[&str], +) -> Result<()> { + let content = + fs::read_to_string(markdown_path).with_context(|| format!("Reading {}", markdown_path))?; + + let Some(new_content) = + compute_markdown_with_subcommands(markdown_path, &content, subcommands, parent_path)? 
+ else { + return Ok(()); + }; + + // Only write if content has changed to avoid updating mtime unnecessarily + if new_content != content { + fs::write(markdown_path, new_content) + .with_context(|| format!("Writing to {}", markdown_path))?; + println!("Updated subcommands in {}", markdown_path); + } + Ok(()) +} + +/// Update markdown file with generated options +pub fn update_markdown_with_options( + markdown_path: &Utf8Path, + options: &[CliOption], + positionals: &[CliPositional], +) -> Result<()> { + let content = + fs::read_to_string(markdown_path).with_context(|| format!("Reading {}", markdown_path))?; + + let Some(new_content) = + compute_markdown_with_options(markdown_path, &content, options, positionals)? + else { + return Ok(()); + }; + // Only write if content has changed to avoid updating mtime unnecessarily if new_content != content { fs::write(markdown_path, new_content) @@ -611,6 +643,121 @@ TODO: Add practical examples showing how to use this command. Ok(()) } +/// Check that all man page markdown files are up to date. +/// Fails with an error if any file would change, similar to `cargo fmt --check`. 
+#[context("Checking man pages")] +pub fn check_manpages(sh: &Shell) -> Result<()> { + let cli_structure = extract_cli_json(sh)?; + + // First: check no man pages are missing + fn collect_commands(cmd: &CliCommand, path: Vec<String>, acc: &mut Vec<Vec<String>>) { + for sub in &cmd.subcommands { + let mut sub_path = path.clone(); + sub_path.push(sub.name.clone()); + acc.push(sub_path.clone()); + collect_commands(sub, sub_path, acc); + } + } + let mut commands_to_check = Vec::new(); + collect_commands(&cli_structure, Vec::new(), &mut commands_to_check); + for command_parts in &commands_to_check { + let filename = format!("bootc-{}.8.md", command_parts.join("-")); + let filepath = Utf8Path::new("docs/src/man").join(&filename); + if !filepath.exists() { + anyhow::bail!( + "{} is missing; run `cargo xtask update-generated` to create it", + filepath + ); + } + } + + let mappings = discover_man_page_mappings(&cli_structure)?; + + for (filename, subcommand_path) in mappings { + let markdown_path = Utf8Path::new("docs/src/man").join(&filename); + if !markdown_path.exists() { + continue; + } + + let target_cmd = if let Some(ref path) = subcommand_path { + let path_refs: Vec<&str> = path.iter().map(|s| s.as_str()).collect(); + find_subcommand(&cli_structure, &path_refs) + .ok_or_else(|| anyhow::anyhow!("Subcommand {:?} not found", path))? + } else { + &cli_structure + }; + + let content = fs::read_to_string(&markdown_path) + .with_context(|| format!("Reading {}", markdown_path))?; + + if content.contains("<!-- BEGIN GENERATED OPTIONS -->") { + check_markdown_options( + &markdown_path, + &content, + &target_cmd.options, + &target_cmd.positionals, + )?; + } + if content.contains("<!-- BEGIN GENERATED SUBCOMMANDS -->") { + let parent_path: Vec<&str> = if let Some(path) = &subcommand_path { + path.iter().map(|s| s.as_str()).collect() + } else { + vec![] + }; + check_markdown_subcommands( + &markdown_path, + &content, + &target_cmd.subcommands, + &parent_path, + )?; + } + } + + Ok(()) +} + +/// Compare-only variant of `update_markdown_with_options`. 
+fn check_markdown_options( + markdown_path: &Utf8Path, + content: &str, + options: &[CliOption], + positionals: &[CliPositional], +) -> Result<()> { + let Some(new_content) = + compute_markdown_with_options(markdown_path, content, options, positionals)? + else { + return Ok(()); + }; + if new_content != content { + anyhow::bail!( + "{} is out of date; run `cargo xtask update-generated` to update it", + markdown_path + ); + } + Ok(()) +} + +/// Compare-only variant of `update_markdown_with_subcommands`. +fn check_markdown_subcommands( + markdown_path: &Utf8Path, + content: &str, + subcommands: &[CliCommand], + parent_path: &[&str], +) -> Result<()> { + let Some(new_content) = + compute_markdown_with_subcommands(markdown_path, content, subcommands, parent_path)? + else { + return Ok(()); + }; + if new_content != content { + anyhow::bail!( + "{} is out of date; run `cargo xtask update-generated` to update it", + markdown_path + ); + } + Ok(()) +} + /// Apply post-processing fixes to generated man pages #[context("Fixing man pages")] fn apply_man_page_fixes(sh: &Shell, dir: &Utf8Path) -> Result<()> { diff --git a/crates/xtask/src/tmt.rs b/crates/xtask/src/tmt.rs index 8d0152466..f93deddc6 100644 --- a/crates/xtask/src/tmt.rs +++ b/crates/xtask/src/tmt.rs @@ -964,16 +964,84 @@ impl PartialOrd for TestDef { } } +/// Check that tmt generated files are up to date. +/// Fails with an error if any file would change, similar to `cargo fmt --check`. 
+#[context("Checking TMT generated files")] +pub(crate) fn check_integration() -> Result<()> { + let tests_fmf_path = Utf8Path::new("tmt/tests/tests.fmf"); + let integration_fmf_path = Utf8Path::new("tmt/plans/integration.fmf"); + + let (tests_generated, integration_generated) = generate_integration()?; + + let tests_on_disk = std::fs::read_to_string(tests_fmf_path) + .with_context(|| format!("Reading {}", tests_fmf_path))?; + let integration_on_disk = std::fs::read_to_string(integration_fmf_path) + .with_context(|| format!("Reading {}", integration_fmf_path))?; + + if tests_generated != tests_on_disk { + anyhow::bail!( + "{} is out of date; run `cargo xtask update-generated` to update it", + tests_fmf_path + ); + } + if integration_generated != integration_on_disk { + anyhow::bail!( + "{} is out of date; run `cargo xtask update-generated` to update it", + integration_fmf_path + ); + } + + Ok(()) +} + /// Generate tmt/plans/integration.fmf from test definitions #[context("Updating TMT integration.fmf")] pub(crate) fn update_integration() -> Result<()> { + let tests_fmf_path = Utf8Path::new("tmt/tests/tests.fmf"); + let integration_fmf_path = Utf8Path::new("tmt/plans/integration.fmf"); + + let (tests_content, integration_content) = generate_integration()?; + + let needs_update_tests = match std::fs::read_to_string(tests_fmf_path) { + Ok(existing) => existing != tests_content, + Err(_) => true, + }; + if needs_update_tests { + std::fs::write(tests_fmf_path, &tests_content).context("Writing tests.fmf")?; + println!("Generated {}", tests_fmf_path); + } else { + println!("Unchanged: {}", tests_fmf_path); + } + + let needs_update_integration = match std::fs::read_to_string(integration_fmf_path) { + Ok(existing) => existing != integration_content, + Err(_) => true, + }; + if needs_update_integration { + std::fs::write(integration_fmf_path, &integration_content) + .context("Writing integration.fmf")?; + println!("Generated {}", integration_fmf_path); + } else { + 
println!("Unchanged: {}", integration_fmf_path); + } + + Ok(()) +} + +/// Pure function: compute the content of tests.fmf and integration.fmf from +/// the test file metadata in tmt/tests/booted/, without writing to disk. +/// Returns (tests_fmf_content, integration_fmf_content). +#[context("Generating TMT integration content")] +fn generate_integration() -> Result<(String, String)> { // Define tests in order let mut tests = vec![]; // Scan for test-*.nu, test-*.sh, and test-*.py files in tmt/tests/booted/ let booted_dir = Utf8Path::new("tmt/tests/booted"); - for entry in std::fs::read_dir(booted_dir)? { + for entry in std::fs::read_dir(booted_dir) + .with_context(|| format!("Reading directory {}", booted_dir))? + { let entry = entry?; let path = entry.path(); let Some(filename) = path.file_name().and_then(|n| n.to_str()) else { @@ -1072,8 +1140,6 @@ pub(crate) fn update_integration() -> Result<()> { tests.sort(); // Generate single tests.fmf file using structured YAML - let tests_dir = Utf8Path::new("tmt/tests"); - let tests_fmf_path = tests_dir.join("tests.fmf"); // Build YAML structure let mut tests_mapping = serde_yaml::Mapping::new(); @@ -1132,19 +1198,6 @@ pub(crate) fn update_integration() -> Result<()> { tests_content.push_str("\n"); tests_content.push_str(&tests_yaml_formatted); - // Only write if content changed - let needs_update = match std::fs::read_to_string(&tests_fmf_path) { - Ok(existing) => existing != tests_content, - Err(_) => true, - }; - - if needs_update { - std::fs::write(&tests_fmf_path, tests_content).context("Writing tests.fmf")?; - println!("Generated {}", tests_fmf_path); - } else { - println!("Unchanged: {}", tests_fmf_path); - } - // Generate plans section using structured YAML let mut plans_mapping = serde_yaml::Mapping::new(); for test in &tests { @@ -1237,12 +1290,12 @@ pub(crate) fn update_integration() -> Result<()> { plans_section.push('\n'); } - // Update integration.fmf with generated plans - let output_path = 
Utf8Path::new("tmt/plans/integration.fmf"); + // Build integration.fmf content by splicing the generated plans section + // between the existing marker lines, preserving hand-written content outside them. + let integration_fmf_path = Utf8Path::new("tmt/plans/integration.fmf"); let existing_content = - std::fs::read_to_string(output_path).context("Reading integration.fmf")?; + std::fs::read_to_string(integration_fmf_path).context("Reading integration.fmf")?; - // Replace plans section let (before_plans, rest) = existing_content .split_once(PLAN_MARKER_BEGIN) .context("Missing # BEGIN GENERATED PLANS marker in integration.fmf")?; @@ -1250,25 +1303,12 @@ pub(crate) fn update_integration() -> Result<()> { .split_once(PLAN_MARKER_END) .context("Missing # END GENERATED PLANS marker in integration.fmf")?; - let new_content = format!( + let integration_content = format!( "{}{}{}{}{}", before_plans, PLAN_MARKER_BEGIN, plans_section, PLAN_MARKER_END, after_plans ); - // Only write if content changed - let needs_update = match std::fs::read_to_string(output_path) { - Ok(existing) => existing != new_content, - Err(_) => true, - }; - - if needs_update { - std::fs::write(output_path, new_content)?; - println!("Generated {}", output_path); - } else { - println!("Unchanged: {}", output_path); - } - - Ok(()) + Ok((tests_content, integration_content)) } #[cfg(test)] diff --git a/crates/xtask/src/xtask.rs b/crates/xtask/src/xtask.rs index 0332dba5d..36770a4c3 100644 --- a/crates/xtask/src/xtask.rs +++ b/crates/xtask/src/xtask.rs @@ -22,6 +22,13 @@ mod sysext; mod tmt; const NAME: &str = "bootc"; +/// JSON schemas generated from the bootc CLI: (schema-name, output-path) pairs. +/// All output paths must be under `docs/src/` and match the `*.schema.json` naming +/// convention so the Dockerfile glob picks them up automatically. 
+const JSON_SCHEMAS: &[(&str, &str)] = &[ + ("host", "docs/src/host-v1.schema.json"), + ("progress", "docs/src/progress-v0.schema.json"), +]; /// File used to identify the bootc source tree toplevel. const TOPLEVEL_MARKER: &str = "ADOPTERS.md"; const TAR_REPRODUCIBLE_OPTS: &[&str] = &[ @@ -45,8 +52,11 @@ struct Cli { enum Commands { /// Generate man pages Manpages, - /// Update generated files (man pages, JSON schemas) - UpdateGenerated, + /// Update or check generated files + UpdateGenerated { + #[command(subcommand)] + command: UpdateGeneratedCommands, + }, /// Package the source code Package, /// Package source RPM @@ -70,6 +80,27 @@ enum Commands { }, } +/// Subcommands for `update-generated` +#[derive(Debug, Subcommand)] +enum UpdateGeneratedCommands { + /// Update/check files derived directly from source (tmt plans). + /// No binary build required; safe to run in any environment with the full source tree. + Direct { + /// Check that files are up to date instead of updating them. + /// Exits non-zero if any file needs regeneration, similar to `cargo fmt --check`. + #[arg(long)] + check: bool, + }, + /// Update/check files derived from the built binary (man pages, JSON schemas). + /// Requires `cargo run --features=docgen` to extract the current CLI structure. + FromCode { + /// Check that files are up to date instead of updating them. + /// Exits non-zero if any file needs regeneration, similar to `cargo fmt --check`. 
+    #[arg(long)] + check: bool, + }, +} + /// Subcommands for development VM management #[derive(Debug, Subcommand)] enum BcvkCommands { @@ -270,7 +301,24 @@ fn try_main() -> Result<()> { match cli.command { Commands::Manpages => man::generate_man_pages(&sh), - Commands::UpdateGenerated => update_generated(&sh), + Commands::UpdateGenerated { command } => match command { + UpdateGeneratedCommands::Direct { check } => { + if check { + tmt::check_integration() + } else { + tmt::update_integration() + } + } + UpdateGeneratedCommands::FromCode { check } => { + if check { + man::check_manpages(&sh)?; + check_json_schemas(&sh) + } else { + man::update_manpages(&sh)?; + update_json_schemas(&sh) + } + } + }, Commands::Package => package(&sh), Commands::PackageSrpm => package_srpm(&sh), Commands::Spec => spec(&sh), @@ -522,37 +570,42 @@ fn package_srpm(sh: &Shell) -> Result<()> { Ok(()) } +/// Generate and normalize a JSON schema from the binary. +/// Ensures a consistent trailing newline so files are stable across editors. +fn generate_normalized_json_schema(sh: &Shell, of: &str) -> Result<String> { + let schema = cmd!(sh, "cargo run -q -- internals print-json-schema --of={of}").read()?; + Ok(if schema.ends_with('\n') { + schema + } else { + format!("{schema}\n") + }) +} + /// Update JSON schema files #[context("Updating JSON schemas")] fn update_json_schemas(sh: &Shell) -> Result<()> { - for (of, target) in [ - ("host", "docs/src/host-v1.schema.json"), - ("progress", "docs/src/progress-v0.schema.json"), - ] { - let schema = cmd!(sh, "cargo run -q -- internals print-json-schema --of={of}").read()?; + for (of, target) in JSON_SCHEMAS { + let schema = generate_normalized_json_schema(sh, of)?; std::fs::write(target, &schema)?; println!("Updated {target}"); } Ok(()) } -/// Update all generated files -/// This is the main command developers should use to update generated content. 
-/// It handles: -/// - Creating new man page templates for new commands -/// - Syncing CLI options to existing man pages -/// - Updating JSON schema files -#[context("Updating generated files")] -fn update_generated(sh: &Shell) -> Result<()> { - // Update man pages (create new templates + sync options) - man::update_manpages(sh)?; - - // Update JSON schemas - update_json_schemas(sh)?; - - // Update TMT integration.fmf - tmt::update_integration()?; - +/// Check that JSON schema files are up to date. +/// Fails with an error if any file would change, similar to `cargo fmt --check`. +#[context("Checking JSON schemas")] +fn check_json_schemas(sh: &Shell) -> Result<()> { + for (of, target) in JSON_SCHEMAS { + let generated = generate_normalized_json_schema(sh, of)?; + let on_disk = + std::fs::read_to_string(target).with_context(|| format!("Reading {target}"))?; + if generated != on_disk { + anyhow::bail!( + "{target} is out of date; run `cargo xtask update-generated` to update it" + ); + } + } Ok(()) } diff --git a/docs/src/host-v1.schema.json b/docs/src/host-v1.schema.json index eee565acd..db716c3f1 100644 --- a/docs/src/host-v1.schema.json +++ b/docs/src/host-v1.schema.json @@ -520,4 +520,4 @@ ] } } -} \ No newline at end of file +} diff --git a/docs/src/progress-v0.schema.json b/docs/src/progress-v0.schema.json index 956f3ef16..dc5fef217 100644 --- a/docs/src/progress-v0.schema.json +++ b/docs/src/progress-v0.schema.json @@ -230,4 +230,4 @@ ] } } -} \ No newline at end of file +}