diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a9a176b..ea240ed 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -57,6 +57,9 @@ jobs: with: key: partition-${{ matrix.partition }} - uses: taiki-e/install-action@nextest + - uses: taiki-e/install-action@v2 + with: + tool: jj-cli - name: Build run: cargo build - name: Run tests diff --git a/crates/dropshot-api-manager/src/cmd/debug.rs b/crates/dropshot-api-manager/src/cmd/debug.rs index 1270f45..ddba2ce 100644 --- a/crates/dropshot-api-manager/src/cmd/debug.rs +++ b/crates/dropshot-api-manager/src/cmd/debug.rs @@ -113,10 +113,14 @@ fn dump_structure( for (version, files) in info.versions() { println!(" version {}:", version); for api_spec in files.as_raw_files() { + let version_str = api_spec + .version() + .map(|v| v.to_string()) + .unwrap_or_else(|| "unparseable".to_string()); println!( " file {} (v{})", api_spec.spec_file_name().path(), - api_spec.version() + version_str ); } } diff --git a/crates/dropshot-api-manager/src/git.rs b/crates/dropshot-api-manager/src/git.rs index 0912ea9..ef04ed7 100644 --- a/crates/dropshot-api-manager/src/git.rs +++ b/crates/dropshot-api-manager/src/git.rs @@ -90,13 +90,23 @@ pub enum CommitHashParseError { InvalidHex(hex::FromHexError), } -/// Given a revision, return its merge base with HEAD +/// Given a revision, return its merge base with the current working state. +/// +/// If we're in the middle of a merge (MERGE_HEAD exists), we use MERGE_HEAD +/// instead of HEAD for the merge base calculation. This is important because +/// during a merge conflict, HEAD points to our branch while MERGE_HEAD points +/// to the branch being merged in. Using MERGE_HEAD ensures we see blessed +/// documents from the incoming branch. pub fn git_merge_base_head( repo_root: &Utf8Path, revision: &GitRevision, ) -> anyhow::Result { + // Check if we're in a merge state. If so, use MERGE_HEAD instead of HEAD. + let base_ref = + if git_merge_head_exists(repo_root) { "MERGE_HEAD" } else { "HEAD" }; + let mut cmd = git_start(repo_root); - cmd.arg("merge-base").arg("--all").arg("HEAD").arg(revision.as_str()); + cmd.arg("merge-base").arg("--all").arg(base_ref).arg(revision.as_str()); let label = cmd_label(&cmd); let stdout = do_run(&mut cmd)?; let stdout = stdout.trim(); @@ -110,6 +120,14 @@ pub fn git_merge_base_head( Ok(GitRevision::from(stdout.to_owned())) } +/// Returns true if MERGE_HEAD exists, indicating we're in the middle of a +/// merge. +fn git_merge_head_exists(repo_root: &Utf8Path) -> bool { + let mut cmd = git_start(repo_root); + cmd.args(["rev-parse", "--verify", "--quiet", "MERGE_HEAD"]); + matches!(cmd.status(), Ok(status) if status.success()) +} + /// List files recursively under some path `path` in Git revision `revision`. 
pub fn git_ls_tree( repo_root: &Utf8Path, diff --git a/crates/dropshot-api-manager/src/resolved.rs b/crates/dropshot-api-manager/src/resolved.rs index c32155d..8a1c9bd 100644 --- a/crates/dropshot-api-manager/src/resolved.rs +++ b/crates/dropshot-api-manager/src/resolved.rs @@ -11,7 +11,7 @@ use crate::{ output::{InlineErrorChain, plural}, spec_files_blessed::{BlessedApiSpecFile, BlessedFiles, BlessedGitRef}, spec_files_generated::{GeneratedApiSpecFile, GeneratedFiles}, - spec_files_generic::ApiFiles, + spec_files_generic::{ApiFiles, UnparseableFile}, spec_files_local::{LocalApiSpecFile, LocalFiles}, validation::{ CheckStale, CheckStatus, DynValidationFn, overwrite_file, validate, @@ -21,7 +21,7 @@ use anyhow::{Context, anyhow}; use camino::{Utf8Path, Utf8PathBuf}; use dropshot_api_manager_types::{ApiIdent, ApiSpecFileName}; use std::{ - collections::{BTreeMap, BTreeSet}, + collections::{BTreeMap, BTreeSet, HashSet}, fmt::{Debug, Display}, }; use thiserror::Error; @@ -143,11 +143,10 @@ impl Display for ResolutionKind { } /// Describes a problem resolving the blessed spec(s), generated spec(s), and -/// local spec files for a particular API +/// local spec files for a particular API. #[derive(Debug, Error)] pub enum Problem<'a> { - // This kind of problem is not associated with any *supported* version of an - // API. (All the others are.) + // These problems are not associated with any *supported* version of an API. #[error( "A local OpenAPI document was found that does not correspond to a \ supported version of this API: {spec_file_name}. This is unusual, \ @@ -158,6 +157,15 @@ pub enum Problem<'a> { )] LocalSpecFileOrphaned { spec_file_name: ApiSpecFileName }, + #[error( + "A local OpenAPI document could not be parsed: {}. \ + This may happen if the file has merge conflict markers or is \ + otherwise corrupted. This tool can delete this file and regenerate \ + the correct one for you.", + unparseable_file.path, + )] + UnparseableLocalFile { unparseable_file: UnparseableFile }, + // All other problems are associated with specific supported versions of an // API. #[error( @@ -305,7 +313,22 @@ pub enum Problem<'a> { "Blessed version is stored as a git ref file, but should be stored as \ JSON. This tool can perform the conversion for you." )] - GitRefShouldBeJson { local_file: &'a LocalApiSpecFile }, + GitRefShouldBeJson { + local_file: &'a LocalApiSpecFile, + blessed: &'a BlessedApiSpecFile, + }, + + #[error( + "Local file for this blessed version is corrupted (possibly due to \ + merge conflict markers). This tool can regenerate the file from the \ + blessed version for you." + )] + BlessedVersionCorruptedLocal { + local_file: &'a LocalApiSpecFile, + blessed: &'a BlessedApiSpecFile, + /// If Some, regenerate as a git ref instead of JSON. 
+ git_ref: Option, + }, #[error( "Duplicate local file found: both JSON and git ref versions exist for \ @@ -380,9 +403,18 @@ impl<'a> Problem<'a> { Problem::BlessedVersionShouldBeGitRef { local_file, git_ref } => { Some(Fix::ConvertToGitRef { local_file, git_ref }) } - Problem::GitRefShouldBeJson { local_file } => { - Some(Fix::ConvertToJson { local_file }) + Problem::GitRefShouldBeJson { local_file, blessed } => { + Some(Fix::ConvertToJson { local_file, blessed }) } + Problem::BlessedVersionCorruptedLocal { + local_file, + blessed, + git_ref, + } => Some(Fix::RegenerateFromBlessed { + local_file, + blessed, + git_ref: git_ref.as_ref(), + }), Problem::DuplicateLocalFile { local_file } => { Some(Fix::DeleteFiles { files: DisplayableVec(vec![ @@ -391,6 +423,11 @@ impl<'a> Problem<'a> { }) } Problem::GitRefFirstCommitUnknown { .. } => None, + Problem::UnparseableLocalFile { unparseable_file } => { + Some(Fix::DeleteUnparseableFile { + path: unparseable_file.path.clone(), + }) + } } } } @@ -422,6 +459,18 @@ pub enum Fix<'a> { /// Convert a git ref file back to a full JSON file. ConvertToJson { local_file: &'a LocalApiSpecFile, + blessed: &'a BlessedApiSpecFile, + }, + /// Regenerate a corrupted local file from the blessed content. + RegenerateFromBlessed { + local_file: &'a LocalApiSpecFile, + blessed: &'a BlessedApiSpecFile, + /// If Some, regenerate as a git ref instead of JSON. + git_ref: Option<&'a GitRef>, + }, + /// Delete an unparseable file (e.g., one with merge conflict markers). + DeleteUnparseableFile { + path: Utf8PathBuf, }, } @@ -477,19 +526,87 @@ impl Display for Fix<'_> { local_file.spec_file_name().path() )?; } - Fix::ConvertToJson { local_file } => { + Fix::ConvertToJson { local_file, .. } => { writeln!( f, "convert {} from git ref to JSON", local_file.spec_file_name().path() )?; } + Fix::RegenerateFromBlessed { local_file, git_ref, .. } => { + if git_ref.is_some() { + writeln!( + f, + "regenerate {} from blessed content as git ref", + local_file.spec_file_name().path() + )?; + } else { + writeln!( + f, + "regenerate {} from blessed content", + local_file.spec_file_name().path() + )?; + } + } + Fix::DeleteUnparseableFile { path } => { + writeln!(f, "delete unparseable file {path}")?; + } }; Ok(()) } } impl Fix<'_> { + /// Adds the paths (relative to the OpenAPI documents directory) that this + /// fix will write to. Used to determine if an unparseable file will be + /// overwritten. + pub fn add_paths_written(&self, paths: &mut HashSet) { + match self { + Fix::DeleteFiles { .. } => {} + Fix::UpdateLockstepFile { generated } => { + paths.insert(generated.spec_file_name().path().to_owned()); + } + Fix::UpdateVersionedFiles { generated, .. } => { + paths.insert(generated.spec_file_name().path().to_owned()); + } + Fix::UpdateExtraFile { path, .. } => { + paths.insert((*path).to_owned()); + } + Fix::UpdateSymlink { .. } => {} + Fix::ConvertToGitRef { local_file, .. } => { + // Writes to the .gitref path, not the JSON path. + let json_path = local_file.spec_file_name().path(); + paths + .insert(Utf8PathBuf::from(format!("{}.gitref", json_path))); + } + Fix::ConvertToJson { local_file, .. } => { + // Writes to the JSON path (removing .gitref suffix). + let git_ref_path = local_file.spec_file_name().path(); + if let Some(json_path) = + git_ref_path.as_str().strip_suffix(".gitref") + { + paths.insert(Utf8PathBuf::from(json_path)); + } + } + Fix::RegenerateFromBlessed { local_file, git_ref, .. } => { + if git_ref.is_some() { + // When regenerating as git ref, writes to a .gitref file. 
+ let json_path = local_file.spec_file_name().path(); + paths.insert(Utf8PathBuf::from(format!( + "{}.gitref", + json_path + ))); + } else { + // Overwrites the corrupted local file. + paths.insert(local_file.spec_file_name().path().to_owned()); + } + } + Fix::DeleteUnparseableFile { .. } => {} + } + // No wildcard match: adding a new Fix variant should cause a compile + // error here, forcing consideration of what paths it writes. + } + pub fn execute(&self, env: &ResolvedEnv) -> anyhow::Result> { let root = env.openapi_abs_dir(); match self { @@ -584,14 +701,13 @@ impl Fix<'_> { format!("created {}", git_ref_path), ]) } - Fix::ConvertToJson { local_file } => { + Fix::ConvertToJson { local_file, blessed } => { let git_ref_path = root.join(local_file.spec_file_name().path()); - // The local_file already has the contents loaded from git (git - // ref files are dereferenced when loaded). We just need to - // write those contents to a new JSON file. - let contents = local_file.contents(); + // Use the blessed file's contents since it's guaranteed to be + // valid. + let contents = blessed.contents(); // Compute the JSON file path by removing the .gitref suffix. let git_ref_basename = local_file.spec_file_name().basename(); @@ -618,6 +734,48 @@ impl Fix<'_> { format!("created {}", json_path), ]) } + Fix::RegenerateFromBlessed { local_file, blessed, git_ref } => { + let local_path = root.join(local_file.spec_file_name().path()); + + // Remove the corrupted file. + match fs_err::remove_file(&local_path) { + Ok(()) => {} + Err(e) if e.kind() == std::io::ErrorKind::NotFound => {} + Err(e) => return Err(e.into()), + } + + if let Some(git_ref) = git_ref { + // Write as a git ref file. + let git_ref_basename = format!( + "{}.gitref", + local_file.spec_file_name().basename() + ); + let git_ref_path = local_path + .parent() + .ok_or_else(|| anyhow!("cannot get parent directory"))? + .join(&git_ref_basename); + + // Add a trailing newline for clean diffs. + fs_err::write(&git_ref_path, format!("{}\n", git_ref))?; + + Ok(vec![ + format!("removed corrupted file {}", local_path), + format!("created git ref {}", git_ref_path), + ]) + } else { + // Write the JSON content directly. + fs_err::write(&local_path, blessed.contents())?; + Ok(vec![format!( + "regenerated {} from blessed content", + local_path + )]) + } + } + Fix::DeleteUnparseableFile { path } => { + let full_path = root.join(path); + fs_err::remove_file(&full_path)?; + Ok(vec![format!("removed unparseable file {}", full_path)]) + } } } } @@ -690,15 +848,16 @@ impl<'a> Resolved<'a> { // Get the other easy case out of the way: if there are any local spec // files for APIs or API versions that aren't supported any more, that's // a (fixable) problem. - let non_version_problems = + let mut non_version_problems: Vec> = resolve_orphaned_local_specs(&supported_versions_by_api, local) .map(|spec_file_name| Problem::LocalSpecFileOrphaned { spec_file_name: spec_file_name.clone(), }) .collect(); - // Now resolve each of the supported API versions. - let api_results = apis + // Resolve each of the supported API versions first, so we know what + // paths will be written. + let api_results: BTreeMap> = apis .iter_apis() .map(|api| { let ident = api.ident().clone(); @@ -722,6 +881,34 @@ impl<'a> Resolved<'a> { }) .collect(); + // Now collect any unparseable files. These are local files that exist + // but couldn't be parsed (e.g., due to merge conflict markers). + // + // Only report unparseable files whose paths won't be overwritten by a + // fix. 
We check the actual fixes (not just generated paths) because + // some fixes write git refs instead of JSON files. + let mut paths_written: HashSet = HashSet::new(); + for api_resolved in api_results.values() { + for resolution in api_resolved.by_version.values() { + for problem in &resolution.problems { + if let Some(fix) = problem.fix() { + fix.add_paths_written(&mut paths_written); + } + } + } + } + + for (_ident, api_files) in local.iter() { + for unparseable in api_files.unparseable_files() { + // Only report if no fix will overwrite this path. + if !paths_written.contains(&unparseable.path) { + non_version_problems.push(Problem::UnparseableLocalFile { + unparseable_file: unparseable.clone(), + }); + } + } + } + Resolved { notes, non_version_problems, @@ -1245,36 +1432,98 @@ fn resolve_api_version_blessed<'a>( // Now, there should be at least one local spec that exactly matches the // blessed one. - let (matching, non_matching): (Vec<_>, Vec<_>) = - local.iter().partition(|local| { - // It should be enough to compare the hashes, since we should have - // already validated that the hashes are correct for the contents. - // But while it's cheap enough to do, we may as well compare the - // contents, too, and make sure we haven't messed something up. - let contents_match = local.contents() == blessed.contents(); - let local_hash = local.spec_file_name().hash().expect( - "this should be a versioned file so it should have a hash", - ); - let blessed_hash = blessed.spec_file_name().hash().expect( - "this should be a versioned file so it should have a hash", + // + // We partition local files into three categories: + // 1. Valid files with matching hash/contents -> matching + // 2. Unparseable files with matching hash -> corrupted (need regeneration) + // 3. Everything else -> non-matching + let blessed_hash = blessed + .spec_file_name() + .hash() + .expect("this should be a versioned file so it should have a hash"); + + let mut matching = Vec::new(); + let mut corrupted = Vec::new(); + let mut non_matching = Vec::new(); + + for local_file in local { + let local_hash = local_file + .spec_file_name() + .hash() + .expect("this should be a versioned file so it should have a hash"); + let hashes_match = local_hash == blessed_hash; + + if local_file.is_unparseable() { + // Unparseable files can't have their contents compared, so we rely + // solely on the hash. If the hash matches, the file is corrupted + // and needs regeneration. + if hashes_match { + corrupted.push(local_file); + } else { + non_matching.push(local_file); + } + } else { + // For valid files, verify that hash matching implies content + // matching (and vice versa). + let contents_match = local_file.contents() == blessed.contents(); + assert_eq!( + hashes_match, contents_match, + "hash and contents should match for valid files" ); - let hashes_match = local_hash == blessed_hash; - // If the hashes are equal, the contents should be equal, and vice - // versa. - assert_eq!(hashes_match, contents_match); - hashes_match - }); - if matching.is_empty() { + if hashes_match { + matching.push(local_file); + } else { + non_matching.push(local_file); + } + } + } + + // Local function to compute the git ref for this version. This is + // expensive because it may need to resolve a git revision to a commit + // hash. 
+ let compute_should_be_git_ref = + |problems: &mut Vec>| match git_ref { + Some(r) => match r.to_git_ref(&env.repo_root) { + Ok(current) => should_convert_to_git_ref( + latest_first_commit, + current.commit, + ) + .then_some(current), + Err(error) => { + problems.push(Problem::GitRefFirstCommitUnknown { + spec_file_name: blessed.spec_file_name().clone(), + source: error, + }); + None + } + }, + None => None, + }; + + if matching.is_empty() && corrupted.is_empty() { + // No valid or corrupted local files match the blessed version. problems.push(Problem::BlessedVersionMissingLocal { spec_file_name: blessed.spec_file_name().clone(), }); } else if !use_git_ref_storage || is_latest { // Fast path: git ref storage disabled or this is the latest version. - // Computing first commits is slow, and we know we always want JSON in - // this case, so we can avoid computing them here. + // We know we always want JSON in this case, so we can avoid computing + // git refs here. + + // Report corrupted local files that need regeneration from blessed. + for local_file in &corrupted { + problems.push(Problem::BlessedVersionCorruptedLocal { + local_file, + blessed, + git_ref: None, + }); + } - if matching.len() > 1 { + if matching.is_empty() { + // Only corrupted files match - they'll be regenerated. Still need + // to mark non-matching files as extra. + } else if matching.len() > 1 { // We might have both api.json and api.json.gitref for the same // version. Mark the redundant file (always the gitref file in this // case) for deletion. @@ -1286,7 +1535,8 @@ fn resolve_api_version_blessed<'a>( } else { let local_file = matching[0]; if local_file.spec_file_name().is_git_ref() { - problems.push(Problem::GitRefShouldBeJson { local_file }); + problems + .push(Problem::GitRefShouldBeJson { local_file, blessed }); } } @@ -1296,32 +1546,23 @@ fn resolve_api_version_blessed<'a>( } })); } else { - // Slow path: git ref storage enabled and not latest; need to check the - // respective first commits to determine if this version should be a git - // ref. - // - // A version should be stored as a git ref if it was introduced in a - // different commit from the latest (see RFD 634). If we can't determine - // the first commit, report an error. - let should_be_git_ref = match git_ref { - Some(r) => match r.to_git_ref(&env.repo_root) { - Ok(current) => should_convert_to_git_ref( - latest_first_commit, - current.commit, - ) - .then_some(current), - Err(error) => { - problems.push(Problem::GitRefFirstCommitUnknown { - spec_file_name: blessed.spec_file_name().clone(), - source: error, - }); - None - } - }, - None => None, - }; + // Slow path: git ref storage enabled and not latest. Compute whether + // this version should be stored as a git ref. + let should_be_git_ref = compute_should_be_git_ref(&mut problems); + + // Report corrupted local files that need regeneration from blessed. + for local_file in &corrupted { + problems.push(Problem::BlessedVersionCorruptedLocal { + local_file, + blessed, + git_ref: should_be_git_ref.clone(), + }); + } - if matching.len() > 1 { + if matching.is_empty() { + // Only corrupted files match - they'll be regenerated. Still need + // to mark non-matching files as extra. + } else if matching.len() > 1 { // We might have both api.json and api.json.gitref for the same // version. Mark the redundant file for deletion. for local_file in matching { @@ -1350,7 +1591,10 @@ fn resolve_api_version_blessed<'a>( } (None, true) => { // Should be JSON but is git ref: convert to JSON. 
- problems.push(Problem::GitRefShouldBeJson { local_file }); + problems.push(Problem::GitRefShouldBeJson { + local_file, + blessed, + }); } (Some(_), true) | (None, false) => { // Format matches preference: no conversion needed. diff --git a/crates/dropshot-api-manager/src/spec_files_blessed.rs b/crates/dropshot-api-manager/src/spec_files_blessed.rs index d6ac0c5..aef6466 100644 --- a/crates/dropshot-api-manager/src/spec_files_blessed.rs +++ b/crates/dropshot-api-manager/src/spec_files_blessed.rs @@ -12,11 +12,12 @@ use crate::{ }, spec_files_generic::{ ApiFiles, ApiLoad, ApiSpecFile, ApiSpecFilesBuilder, AsRawFiles, + SpecFileInfo, }, }; use anyhow::{anyhow, bail}; use camino::{Utf8Path, Utf8PathBuf}; -use dropshot_api_manager_types::ApiIdent; +use dropshot_api_manager_types::{ApiIdent, ApiSpecFileName}; use std::{collections::BTreeMap, ops::Deref}; /// Newtype wrapper around [`ApiSpecFile`] to describe OpenAPI documents from @@ -41,6 +42,7 @@ NewtypeFrom! { () pub struct BlessedApiSpecFile(ApiSpecFile); } impl ApiLoad for BlessedApiSpecFile { const MISCONFIGURATIONS_ALLOWED: bool = true; + type Unparseable = std::convert::Infallible; fn make_item(raw: ApiSpecFile) -> Self { BlessedApiSpecFile(raw) @@ -55,13 +57,28 @@ impl ApiLoad for BlessedApiSpecFile { item.spec_file_name() ); } + + fn make_unparseable( + _name: ApiSpecFileName, + _contents: Vec, + ) -> Option { + None + } + + fn unparseable_into_self(unparseable: Self::Unparseable) -> Self { + match unparseable {} + } + + fn extend_unparseable(&mut self, unparseable: Self::Unparseable) { + match unparseable {} + } } impl AsRawFiles for BlessedApiSpecFile { fn as_raw_files<'a>( &'a self, - ) -> Box + 'a> { - Box::new(std::iter::once(self.deref())) + ) -> Box + 'a> { + Box::new(std::iter::once(self.deref() as &dyn SpecFileInfo)) } } @@ -362,7 +379,8 @@ impl BlessedFiles { } } - Ok(BlessedFiles { files: api_files.into_map(), git_refs }) + let files = api_files.into_map(); + Ok(BlessedFiles { files, git_refs }) } } diff --git a/crates/dropshot-api-manager/src/spec_files_generated.rs b/crates/dropshot-api-manager/src/spec_files_generated.rs index f6ab59e..e976c00 100644 --- a/crates/dropshot-api-manager/src/spec_files_generated.rs +++ b/crates/dropshot-api-manager/src/spec_files_generated.rs @@ -8,7 +8,7 @@ use crate::{ environment::ErrorAccumulator, spec_files_generic::{ ApiFiles, ApiLoad, ApiSpecFile, ApiSpecFilesBuilder, AsRawFiles, - hash_contents, + SpecFileInfo, hash_contents, }, }; use anyhow::{anyhow, bail}; @@ -35,6 +35,7 @@ NewtypeFrom! 
{ () pub struct GeneratedApiSpecFile(ApiSpecFile); } impl ApiLoad for GeneratedApiSpecFile { const MISCONFIGURATIONS_ALLOWED: bool = false; + type Unparseable = std::convert::Infallible; fn make_item(raw: ApiSpecFile) -> Self { GeneratedApiSpecFile(raw) @@ -49,13 +50,28 @@ impl ApiLoad for GeneratedApiSpecFile { item.spec_file_name() ); } + + fn make_unparseable( + _name: ApiSpecFileName, + _contents: Vec, + ) -> Option { + None + } + + fn unparseable_into_self(unparseable: Self::Unparseable) -> Self { + match unparseable {} + } + + fn extend_unparseable(&mut self, unparseable: Self::Unparseable) { + match unparseable {} + } } impl AsRawFiles for GeneratedApiSpecFile { fn as_raw_files<'a>( &'a self, - ) -> Box + 'a> { - Box::new(std::iter::once(self.deref())) + ) -> Box + 'a> { + Box::new(std::iter::once(self.deref() as &dyn SpecFileInfo)) } } diff --git a/crates/dropshot-api-manager/src/spec_files_generic.rs b/crates/dropshot-api-manager/src/spec_files_generic.rs index d6a518e..7dc8d3f 100644 --- a/crates/dropshot-api-manager/src/spec_files_generic.rs +++ b/crates/dropshot-api-manager/src/spec_files_generic.rs @@ -4,8 +4,8 @@ //! repository use crate::{apis::ManagedApis, environment::ErrorAccumulator}; -use anyhow::{Context, anyhow, bail}; -use camino::Utf8Path; +use anyhow::anyhow; +use camino::{Utf8Path, Utf8PathBuf}; use debug_ignore::DebugIgnore; use dropshot_api_manager_types::{ ApiIdent, ApiSpecFileName, ApiSpecFileNameKind, @@ -18,6 +18,18 @@ use std::{ }; use thiserror::Error; +/// Represents a local file that exists on disk but couldn't be parsed. +/// +/// Used to track files that need to be deleted/overwritten during generate. +/// This allows the tool to clean up corrupted files (e.g., files with merge +/// conflict markers) rather than leaving them orphaned. +#[derive(Clone, Debug)] +pub struct UnparseableFile { + /// The path to the file on disk, relative to the OpenAPI documents + /// directory. + pub path: Utf8PathBuf, +} + /// Attempts to parse the given file basename as an ApiSpecFileName of kind /// `Versioned` /// @@ -188,6 +200,27 @@ enum BadVersionedFileName { UnexpectedName { ident: ApiIdent, source: anyhow::Error }, } +/// Errors that can occur when parsing an API spec file. +#[derive(Debug, Error)] +enum ApiSpecFileParseError { + #[error("file {path:?}: parsing as JSON")] + JsonParse { path: Utf8PathBuf, source: serde_json::Error }, + #[error("file {path:?}: parsing OpenAPI document")] + OpenApiParse { path: Utf8PathBuf, source: serde_json::Error }, + #[error("file {path:?}: parsing version from generated spec")] + VersionParse { path: Utf8PathBuf, source: semver::Error }, + #[error( + "file {path:?}: version in the file ({file_version}) differs from \ + the one in the filename" + )] + VersionMismatch { path: Utf8PathBuf, file_version: semver::Version }, + #[error( + "file {path:?}: computed hash {expected:?}, but file name has \ + different hash {actual:?}" + )] + HashMismatch { path: Utf8PathBuf, expected: String, actual: String }, +} + /// Describes an OpenAPI document #[derive(Debug)] pub struct ApiSpecFile { @@ -204,53 +237,89 @@ pub struct ApiSpecFile { } impl ApiSpecFile { + /// Parse an OpenAPI document from raw contents. + /// + /// On error, returns both the error and the original contents buffer so + /// that callers can still use the contents (e.g., for unparseable file + /// tracking). 
pub fn for_contents( spec_file_name: ApiSpecFileName, contents_buf: Vec, - ) -> anyhow::Result { + ) -> Result)> { + Self::for_contents_inner(spec_file_name, contents_buf) + .map_err(|(e, buf)| (e.into(), buf)) + } + + fn for_contents_inner( + spec_file_name: ApiSpecFileName, + contents_buf: Vec, + ) -> Result)> { // Parse a serde_json::Value from the contents buffer. - let value: serde_json::Value = serde_json::from_slice(&contents_buf) - .with_context(|| { - format!("file {:?}: parsing as JSON", spec_file_name.path()) - })?; + let value: serde_json::Value = + match serde_json::from_slice(&contents_buf) { + Ok(v) => v, + Err(e) => { + return Err(( + ApiSpecFileParseError::JsonParse { + path: spec_file_name.path().to_owned(), + source: e, + }, + contents_buf, + )); + } + }; + // Parse the OpenAPI document from the contents buffer rather than the // value for better error messages. - let openapi: OpenAPI = serde_json::from_slice(&contents_buf) - .with_context(|| { - format!( - "file {:?}: parsing OpenAPI document", - spec_file_name.path() - ) - })?; - - let parsed_version: semver::Version = - openapi.info.version.parse().with_context(|| { - format!( - "file {:?}: parsing version from generated spec", - spec_file_name.path() - ) - })?; + let openapi: OpenAPI = match serde_json::from_slice(&contents_buf) { + Ok(o) => o, + Err(e) => { + return Err(( + ApiSpecFileParseError::OpenApiParse { + path: spec_file_name.path().to_owned(), + source: e, + }, + contents_buf, + )); + } + }; + + let parsed_version: semver::Version = match openapi.info.version.parse() + { + Ok(v) => v, + Err(e) => { + return Err(( + ApiSpecFileParseError::VersionParse { + path: spec_file_name.path().to_owned(), + source: e, + }, + contents_buf, + )); + } + }; match spec_file_name.kind() { ApiSpecFileNameKind::Versioned { version, hash } => { if *version != parsed_version { - bail!( - "file {:?}: version in the file ({}) differs from \ - the one in the filename", - spec_file_name.path(), - parsed_version - ); + return Err(( + ApiSpecFileParseError::VersionMismatch { + path: spec_file_name.path().to_owned(), + file_version: parsed_version, + }, + contents_buf, + )); } let expected_hash = hash_contents(&contents_buf); if expected_hash != *hash { - bail!( - "file {:?}: computed hash {:?}, but file name has \ - different hash {:?}", - spec_file_name.path(), - expected_hash, - hash - ); + return Err(( + ApiSpecFileParseError::HashMismatch { + path: spec_file_name.path().to_owned(), + expected: expected_hash, + actual: hash.clone(), + }, + contents_buf, + )); } } ApiSpecFileNameKind::VersionedGitRef { version, .. } => { @@ -258,12 +327,13 @@ impl ApiSpecFile { // hash check. The content came from git, so the git ref itself // is the source of truth. if *version != parsed_version { - bail!( - "file {:?}: version in the file ({}) differs from \ - the one in the filename", - spec_file_name.path(), - parsed_version - ); + return Err(( + ApiSpecFileParseError::VersionMismatch { + path: spec_file_name.path().to_owned(), + file_version: parsed_version, + }, + contents_buf, + )); } } ApiSpecFileNameKind::Lockstep => {} @@ -605,15 +675,17 @@ impl<'a, T: ApiLoad + AsRawFiles> ApiSpecFilesBuilder<'a, T> { } } - /// Load an API document + /// Load an API document. /// - /// On failure, records errors or warnings. + /// On failure, records errors or warnings. For local files (where + /// `T::UNPARSEABLE_FILES_ALLOWED` is true), unparseable files are recorded + /// as warnings and tracked so they can be cleaned up during generate. 
pub fn load_contents( &mut self, file_name: ApiSpecFileName, contents: Vec, ) { - let maybe_file = ApiSpecFile::for_contents(file_name, contents); + let maybe_file = ApiSpecFile::for_contents(file_name.clone(), contents); match maybe_file { Ok(file) => { let ident = file.spec_file_name().ident(); @@ -637,12 +709,69 @@ impl<'a, T: ApiLoad + AsRawFiles> ApiSpecFilesBuilder<'a, T> { } }; } - Err(error) => { - self.load_error(error); + Err((error, contents)) => { + match T::make_unparseable(file_name.clone(), contents) { + Some(unparseable) => { + // For local files, track the unparseable file so it + // can be cleaned up during generate. Record a warning + // so the user knows about it. + self.load_warning( + error.context("skipping unparseable file"), + ); + + // Can the file be associated with a version? + if let Some(version) = file_name.version() { + let ident = file_name.ident().clone(); + let entry = self + .spec_files + .entry(ident) + .or_insert_with(ApiFiles::new) + .spec_files + .entry(version.clone()); + + match entry { + Entry::Vacant(vacant_entry) => { + vacant_entry.insert( + T::unparseable_into_self(unparseable), + ); + } + Entry::Occupied(mut occupied_entry) => { + occupied_entry + .get_mut() + .extend_unparseable(unparseable); + } + } + } else { + // No version info, fall back to old behavior. + self.record_unparseable_file( + file_name.ident().clone(), + UnparseableFile { + path: file_name.path().to_owned(), + }, + ); + } + } + None => { + self.load_error(error); + } + } } } } + /// Record an unparseable file for later cleanup. + fn record_unparseable_file( + &mut self, + ident: ApiIdent, + unparseable: UnparseableFile, + ) { + self.spec_files + .entry(ident) + .or_insert_with(ApiFiles::new) + .unparseable_files + .push(unparseable); + } + /// Load the "latest" symlink for a versioned API /// /// On failure, warnings or errors are recorded. @@ -698,7 +827,7 @@ impl<'a, T: ApiLoad + AsRawFiles> ApiSpecFilesBuilder<'a, T> { } /// Describes a set of OpenAPI documents and associated "latest" symlink for a -/// given API +/// given API. /// /// Parametrized by `T` because callers use newtypes around `ApiSpecFile` to /// avoid confusing them. See the documentation on [`ApiSpecFilesBuilder`]. @@ -706,11 +835,18 @@ impl<'a, T: ApiLoad + AsRawFiles> ApiSpecFilesBuilder<'a, T> { pub struct ApiFiles { spec_files: BTreeMap, latest_link: Option, + /// Files that exist on disk but couldn't be parsed. These are tracked so + /// that generate can delete them and create correct files in their place. + unparseable_files: Vec, } impl ApiFiles { fn new() -> ApiFiles { - ApiFiles { spec_files: BTreeMap::new(), latest_link: None } + ApiFiles { + spec_files: BTreeMap::new(), + latest_link: None, + unparseable_files: Vec::new(), + } } pub fn versions(&self) -> &BTreeMap { @@ -720,25 +856,56 @@ impl ApiFiles { pub fn latest_link(&self) -> Option<&ApiSpecFileName> { self.latest_link.as_ref() } + + /// Returns files that couldn't be parsed but should be tracked for cleanup. + pub fn unparseable_files(&self) -> &[UnparseableFile] { + &self.unparseable_files + } +} + +/// Trait for types that provide spec file metadata. +/// +/// This allows iterating over both valid and unparseable files while still +/// being able to access their names. +pub trait SpecFileInfo { + /// Returns the spec file name. + fn spec_file_name(&self) -> &ApiSpecFileName; + + /// Returns the version from the parsed file, if available. + /// + /// For unparseable files, this returns `None`. 
Use + /// `spec_file_name().version()` to get the version from the filename + /// instead. + fn version(&self) -> Option<&semver::Version>; +} + +impl SpecFileInfo for ApiSpecFile { + fn spec_file_name(&self) -> &ApiSpecFileName { + &self.name + } + + fn version(&self) -> Option<&semver::Version> { + Some(&self.version) + } } /// Implemented by Newtype wrappers around `ApiSpecFile` to convert back to an -/// iterator of `&'a ApiSpecFile` for callers that do not care which Newtype -/// they're operating on. +/// iterator of `&'a dyn SpecFileInfo` for callers that do not care which +/// Newtype they're operating on. /// /// This is sort of like `Deref` except that some of the implementors are /// collections. See [`ApiSpecFilesBuilder`] for more on this. pub trait AsRawFiles: Debug { fn as_raw_files<'a>( &'a self, - ) -> Box + 'a>; + ) -> Box + 'a>; } impl AsRawFiles for Vec { fn as_raw_files<'a>( &'a self, - ) -> Box + 'a> { - Box::new(self.iter()) + ) -> Box + 'a> { + Box::new(self.iter().map(|f| f as &dyn SpecFileInfo)) } } @@ -750,7 +917,7 @@ impl AsRawFiles for Vec { /// in this package). pub trait ApiLoad { /// Determines whether it's allowed in this context to load the wrong kind - /// of file for an API + /// of file for an API. /// /// Recall that there are basically three implementors here: /// @@ -763,15 +930,42 @@ pub trait ApiLoad { /// necessary in order to convert an API from lockstep to versioned. const MISCONFIGURATIONS_ALLOWED: bool; - /// Record having loaded a single OpenAPI document for an API + /// The type representing unparseable file data. + /// + /// For contexts in which unparseable files are allowed (local files), this + /// is a concrete type holding the filename and contents. Otherwise, this is + /// `std::convert::Infallible`, making it impossible to construct. + type Unparseable; + + /// Record having loaded a single OpenAPI document for an API. fn make_item(raw: ApiSpecFile) -> Self; - /// Try to record additional OpenAPI documents for an API + /// Try to record additional OpenAPI documents for an API. /// /// (This trait API might seem a little strange. It looks this way because /// every implementor supports loading a single OpenAPI document, but only /// some allow more than one.) fn try_extend(&mut self, raw: ApiSpecFile) -> anyhow::Result<()>; + + /// Try to create unparseable file data. + /// + /// Returns `Some` with the unparseable data if unparseable files are + /// allowed in this context, `None` otherwise. When `Self::Unparseable` is + /// `Infallible`, this always returns `None`. + fn make_unparseable( + name: ApiSpecFileName, + contents: Vec, + ) -> Option; + + /// Convert unparseable file data into a `Self` for insertion. + /// + /// This is used when inserting into a vacant entry. + fn unparseable_into_self(unparseable: Self::Unparseable) -> Self + where + Self: Sized; + + /// Add unparseable file data to an existing entry. 
+ fn extend_unparseable(&mut self, unparseable: Self::Unparseable); } /// Return the hash of an OpenAPI document file for the purposes of this tool @@ -797,6 +991,7 @@ pub(crate) fn hash_contents(contents: &[u8]) -> String { mod test { use super::*; use crate::ManagedApiConfig; + use anyhow::Context; use assert_matches::assert_matches; use dropshot::{ApiDescription, ApiDescriptionBuildErrors, StubContext}; use dropshot_api_manager_types::{ diff --git a/crates/dropshot-api-manager/src/spec_files_local.rs b/crates/dropshot-api-manager/src/spec_files_local.rs index c811163..9c4339c 100644 --- a/crates/dropshot-api-manager/src/spec_files_local.rs +++ b/crates/dropshot-api-manager/src/spec_files_local.rs @@ -9,23 +9,83 @@ use crate::{ git::GitRef, spec_files_generic::{ ApiFiles, ApiLoad, ApiSpecFile, ApiSpecFilesBuilder, AsRawFiles, + SpecFileInfo, }, }; use anyhow::{Context, anyhow}; use camino::Utf8Path; -use dropshot_api_manager_types::ApiIdent; -use std::{collections::BTreeMap, ops::Deref}; +use dropshot_api_manager_types::{ApiIdent, ApiSpecFileName}; +use std::collections::BTreeMap; -/// Newtype wrapper around [`ApiSpecFile`] to describe OpenAPI documents found -/// in this working tree +/// A local file that exists but couldn't be parsed. +/// +/// This happens when a file has merge conflict markers or is otherwise +/// corrupted. The version is determined from the filename, allowing the +/// generate command to regenerate the correct contents. +/// +/// We still store the raw contents so they can be accessed if needed (e.g., +/// for diffing or debugging). +#[derive(Debug)] +pub struct LocalApiUnparseable { + /// The parsed filename (contains version and hash info). + pub name: ApiSpecFileName, + /// The raw file contents that couldn't be parsed. + pub contents: Vec, +} + +/// Represents an OpenAPI document found in this working tree. /// /// This includes documents for lockstep APIs and versioned APIs, for both /// blessed and locally-added versions. -pub struct LocalApiSpecFile(ApiSpecFile); -NewtypeDebug! { () pub struct LocalApiSpecFile(ApiSpecFile); } -NewtypeDeref! { () pub struct LocalApiSpecFile(ApiSpecFile); } -NewtypeDerefMut! { () pub struct LocalApiSpecFile(ApiSpecFile); } -NewtypeFrom! { () pub struct LocalApiSpecFile(ApiSpecFile); } +/// +/// Files may be either valid (successfully parsed) or unparseable (e.g., due +/// to merge conflict markers). Unparseable files are tracked so they can be +/// regenerated during the generate command. +#[derive(Debug)] +pub enum LocalApiSpecFile { + /// A valid, successfully parsed OpenAPI document. + Valid(Box), + /// A file that exists but couldn't be parsed. + Unparseable(LocalApiUnparseable), +} + +impl LocalApiSpecFile { + /// Returns the spec file name. + pub fn spec_file_name(&self) -> &ApiSpecFileName { + match self { + Self::Valid(spec) => spec.spec_file_name(), + Self::Unparseable(u) => &u.name, + } + } + + /// Returns the raw file contents. + /// + /// This works for both valid and unparseable files. + pub fn contents(&self) -> &[u8] { + match self { + Self::Valid(spec) => spec.contents(), + Self::Unparseable(u) => &u.contents, + } + } + + /// Returns true if this file is unparseable. 
+ pub fn is_unparseable(&self) -> bool { + matches!(self, Self::Unparseable(_)) + } +} + +impl SpecFileInfo for LocalApiSpecFile { + fn spec_file_name(&self) -> &ApiSpecFileName { + self.spec_file_name() + } + + fn version(&self) -> Option<&semver::Version> { + match self { + Self::Valid(spec) => Some(spec.version()), + Self::Unparseable(_) => None, + } + } +} // Trait impls that allow us to use `ApiFiles>` // @@ -34,22 +94,38 @@ NewtypeFrom! { () pub struct LocalApiSpecFile(ApiSpecFile); } impl ApiLoad for Vec { const MISCONFIGURATIONS_ALLOWED: bool = false; + type Unparseable = LocalApiUnparseable; fn try_extend(&mut self, item: ApiSpecFile) -> anyhow::Result<()> { - self.push(LocalApiSpecFile::from(item)); + self.push(LocalApiSpecFile::Valid(Box::new(item))); Ok(()) } fn make_item(raw: ApiSpecFile) -> Self { - vec![LocalApiSpecFile::from(raw)] + vec![LocalApiSpecFile::Valid(Box::new(raw))] + } + + fn make_unparseable( + name: ApiSpecFileName, + contents: Vec, + ) -> Option { + Some(LocalApiUnparseable { name, contents }) + } + + fn unparseable_into_self(unparseable: Self::Unparseable) -> Self { + vec![LocalApiSpecFile::Unparseable(unparseable)] + } + + fn extend_unparseable(&mut self, unparseable: Self::Unparseable) { + self.push(LocalApiSpecFile::Unparseable(unparseable)); } } impl AsRawFiles for Vec { fn as_raw_files<'a>( &'a self, - ) -> Box + 'a> { - Box::new(self.iter().map(|t| t.deref())) + ) -> Box + 'a> { + Box::new(self.iter().map(|f| f as &dyn SpecFileInfo)) } } @@ -207,7 +283,32 @@ fn load_versioned_directory( let file_name = entry.file_name(); if ident.versioned_api_is_latest_symlink(file_name) { - // We should be looking at a symlink. + // We should be looking at a symlink. However, VCS tools like jj + // can turn symlinks into regular files with conflict markers when + // there's a symlink conflict. In that case, we treat it as a + // missing/corrupted symlink and let generate recreate it. + let file_type = match entry.file_type() { + Ok(ft) => ft, + Err(error) => { + api_files.load_warning(anyhow!(error).context(format!( + "failed to get file type for {:?}", + entry.path() + ))); + continue; + } + }; + + if !file_type.is_symlink() { + // This is not a symlink (likely corrupted by a merge conflict). + // Skip it so generate will recreate it. + api_files.load_warning(anyhow!( + "expected symlink but found regular file {:?}; \ + will regenerate", + entry.path() + )); + continue; + } + let symlink = match entry.path().read_link_utf8() { Ok(s) => s, Err(error) => { diff --git a/crates/integration-tests/src/environment.rs b/crates/integration-tests/src/environment.rs index bd10dc7..8f03222 100644 --- a/crates/integration-tests/src/environment.rs +++ b/crates/integration-tests/src/environment.rs @@ -9,10 +9,75 @@ use camino_tempfile_ext::{fixture::ChildPath, prelude::*}; use clap::Parser; use dropshot_api_manager::{Environment, GitRef, ManagedApis}; use std::{ + collections::BTreeSet, fs, - process::{Command, ExitCode}, + process::{Command, ExitCode, ExitStatus}, }; +/// Result of attempting a git merge. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum MergeResult { + /// The merge completed successfully with no conflicts. + Clean, + /// The merge resulted in conflicts that need to be resolved. + /// + /// Contains the set of conflicted file paths. + Conflict(BTreeSet), +} + +/// Result of attempting a git rebase. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum RebaseResult { + /// The rebase completed successfully with no conflicts. 
+ Clean, + /// The rebase resulted in conflicts that need to be resolved. + /// + /// Contains the set of conflicted file paths. + Conflict(BTreeSet), +} + +/// Result of attempting a jj merge. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum JjMergeResult { + /// The merge completed successfully with no conflicts. + Clean, + /// The merge resulted in conflicts that need to be resolved. + /// + /// Contains the set of conflicted file paths. + Conflict(BTreeSet), +} + +/// Result of attempting a jj rebase. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum JjRebaseResult { + /// The rebase completed successfully with no conflicts. + Clean, + /// The rebase resulted in conflicts that need to be resolved. + /// + /// Contains the set of conflicted file paths. + Conflict(BTreeSet), +} + +/// Check if jj is available and the test should run. +/// +/// Returns `Ok(true)` if jj is available. +/// Returns `Ok(false)` if the `SKIP_JJ_TESTS` env var is set. +/// Returns `Err` with a helpful message if jj is not found. +pub fn check_jj_available() -> Result { + if std::env::var("SKIP_JJ_TESTS").is_ok() { + return Ok(false); + } + + let jj = std::env::var("JJ").unwrap_or_else(|_| "jj".to_string()); + match Command::new(&jj).arg("--version").output() { + Ok(o) if o.status.success() => Ok(true), + Ok(_) | Err(_) => Err(anyhow!( + "jj not found. Install jj (https://jj-vcs.dev/) or set \ + SKIP_JJ_TESTS=1 to skip these tests" + )), + } +} + /// A temporary test environment that manages directories and cleanup. pub struct TestEnvironment { /// Temporary directory that will be cleaned up automatically. @@ -471,6 +536,145 @@ impl TestEnvironment { Ok(()) } + /// Create a new branch at the current HEAD. + pub fn create_branch(&self, name: &str) -> Result<()> { + Self::run_git_command(&self.workspace_root, &["branch", name])?; + Ok(()) + } + + /// Checkout a branch. + pub fn checkout_branch(&self, name: &str) -> Result<()> { + Self::run_git_command(&self.workspace_root, &["checkout", name])?; + Ok(()) + } + + /// Merge a branch into the current branch. + /// + /// Returns `Ok(())` on a clean merge, `Err` if there's a conflict or other + /// error. + /// + /// Uses `-X no-renames` to disable rename detection during merge. This + /// tests the scenario without rename/rename conflicts. + pub fn merge_branch_without_renames(&self, source: &str) -> Result<()> { + let message = format!("Merge branch '{}'", source); + Self::run_git_command( + &self.workspace_root, + &["merge", "-m", &message, "-X", "no-renames", source], + )?; + Ok(()) + } + + /// Attempt to merge a branch, returning whether conflicts occurred. + /// + /// Unlike `merge_branch_without_renames`, this method does not use `-X + /// no-renames`, so Git's rename detection is active. This will cause + /// rename/rename conflicts when both branches convert the same file to a + /// git ref and add different new versions. + /// + /// Returns `MergeResult::Clean` if the merge completed cleanly, or + /// `MergeResult::Conflict` with the list of conflicted files if there were + /// conflicts (the working directory will be in a conflicted state). + pub fn try_merge_branch(&self, source: &str) -> Result { + let message = format!("Merge branch '{}'", source); + + // Run the merge command. + let status = Self::run_git_command_unchecked( + &self.workspace_root, + &["merge", "-m", &message, source], + )?; + + if status.success() { + return Ok(MergeResult::Clean); + } + + // Use git ls-files --unmerged to check for conflicts. 
Output format: + // \t + // Each conflicted file appears multiple times (once per stage), so we + // deduplicate. + let unmerged = Self::run_git_command( + &self.workspace_root, + &["ls-files", "--unmerged"], + )?; + + if unmerged.is_empty() { + Ok(MergeResult::Clean) + } else { + let conflicted_files: BTreeSet = unmerged + .lines() + .filter_map(|line| { + // Split on tab to get the file path (second part). + line.split_once('\t') + .map(|(_, path)| Utf8PathBuf::from(path)) + }) + .collect(); + Ok(MergeResult::Conflict(conflicted_files)) + } + } + + /// Abort an in-progress merge. + pub fn abort_merge(&self) -> Result<()> { + Self::run_git_command(&self.workspace_root, &["merge", "--abort"])?; + Ok(()) + } + + /// Add all changes and complete a merge after conflicts have been + /// resolved. + pub fn complete_merge(&self) -> Result<()> { + Self::run_git_command(&self.workspace_root, &["add", "-A"])?; + Self::run_git_command(&self.workspace_root, &["commit", "--no-edit"])?; + Ok(()) + } + + /// Attempt to rebase the current branch onto a target branch. + /// + /// Returns `RebaseResult::Clean` if the rebase completed cleanly, or + /// `RebaseResult::Conflict` with the list of conflicted files if there + /// were conflicts (the working directory will be in a conflicted state). + pub fn try_rebase_onto(&self, target: &str) -> Result { + // Run the rebase command. + let status = Self::run_git_command_unchecked( + &self.workspace_root, + &["rebase", target], + )?; + + if status.success() { + return Ok(RebaseResult::Clean); + } + + // Use git ls-files --unmerged to check for conflicts. + let unmerged = Self::run_git_command( + &self.workspace_root, + &["ls-files", "--unmerged"], + )?; + + if unmerged.is_empty() { + Ok(RebaseResult::Clean) + } else { + let conflicted_files: BTreeSet = unmerged + .lines() + .filter_map(|line| { + line.split_once('\t') + .map(|(_, path)| Utf8PathBuf::from(path)) + }) + .collect(); + Ok(RebaseResult::Conflict(conflicted_files)) + } + } + + /// Abort an in-progress rebase. + pub fn abort_rebase(&self) -> Result<()> { + Self::run_git_command(&self.workspace_root, &["rebase", "--abort"])?; + Ok(()) + } + + /// Add all changes and continue a rebase after conflicts have been + /// resolved. + pub fn continue_rebase(&self) -> Result<()> { + Self::run_git_command(&self.workspace_root, &["add", "-A"])?; + Self::run_git_command(&self.workspace_root, &["rebase", "--continue"])?; + Ok(()) + } + /// Helper to run git commands in the workspace root. fn run_git_command( workspace_root: &Utf8Path, @@ -480,15 +684,19 @@ impl TestEnvironment { std::env::var("GIT").ok().unwrap_or_else(|| String::from("git")); let output = Command::new(git) .current_dir(workspace_root) + // Prevent interactive prompts (e.g., during rebase --continue). + .env("EDITOR", "true") .args(args) .output() .context("failed to execute git command")?; if !output.status.success() { + let stdout = String::from_utf8_lossy(&output.stdout); let stderr = String::from_utf8_lossy(&output.stderr); return Err(anyhow!( - "git command failed: git {}\nstderr: {}", + "git command failed: git {}\nstdout: {}\nstderr: {}", args.join(" "), + stdout, stderr )); } @@ -497,6 +705,23 @@ impl TestEnvironment { .context("git command output was not valid UTF-8") } + /// Helper to run git commands that may fail, returning the exit status. 
+ fn run_git_command_unchecked( + workspace_root: &Utf8Path, + args: &[&str], + ) -> Result { + let git = + std::env::var("GIT").ok().unwrap_or_else(|| String::from("git")); + let output = Command::new(git) + .current_dir(workspace_root) + // Prevent interactive prompts (e.g., during rebase --continue). + .env("EDITOR", "true") + .args(args) + .output() + .context("failed to execute git command")?; + Ok(output.status) + } + /// List all files in the documents directory. pub fn list_document_files(&self) -> Result> { let mut files = Vec::new(); @@ -541,6 +766,149 @@ impl TestEnvironment { } Ok(()) } + + // ------------------------------------------------------------------------- + // jj (Jujutsu) methods + // ------------------------------------------------------------------------- + + /// Initialize jj in the existing git repository (colocated mode). + pub fn jj_init(&self) -> Result<()> { + self.run_jj_command(&["git", "init", "--colocate"])?; + Ok(()) + } + + /// Attempt to create a merge commit from two git branches using jj. + /// + /// Uses `jj new branch1 branch2 -m "message"` to create a merge commit. + /// Returns `JjMergeResult::Clean` if the merge has no conflicts, or + /// `JjMergeResult::Conflict` with the set of conflicted files. + pub fn jj_try_merge( + &self, + branch1: &str, + branch2: &str, + message: &str, + ) -> Result { + // Create a merge commit with jj new. + self.run_jj_command(&["new", branch1, branch2, "-m", message])?; + + // Check if the resulting commit has conflicts. + if self.jj_has_conflicts("@")? { + let conflicts = self.jj_get_conflicted_files(branch1, "@")?; + Ok(JjMergeResult::Conflict(conflicts)) + } else { + Ok(JjMergeResult::Clean) + } + } + + /// Attempt to rebase a git branch onto another using jj. + /// + /// Uses `jj rebase -s source -d dest` to rebase the source commit and its + /// descendants onto the destination. Returns `JjRebaseResult::Clean` if the + /// rebase has no conflicts, or `JjRebaseResult::Conflict` with the set of + /// conflicted files. + pub fn jj_try_rebase( + &self, + source: &str, + dest: &str, + ) -> Result { + // Rebase source and descendants onto dest. + self.run_jj_command(&["rebase", "-s", source, "-d", dest])?; + + // Check if the rebased commit has conflicts. + if self.jj_has_conflicts(source)? { + let conflicts = self.jj_get_conflicted_files(dest, source)?; + Ok(JjRebaseResult::Conflict(conflicts)) + } else { + Ok(JjRebaseResult::Clean) + } + } + + /// Resolve jj conflicts by running generate and committing the resolution. + /// + /// After a merge or rebase creates conflicts, this method: + /// 1. Runs generate to create the correct files + /// 2. Commits the working copy to complete the merge/rebase + /// 3. Verifies the result has no conflicts + pub fn jj_resolve_conflicts(&self, apis: &ManagedApis) -> Result<()> { + self.generate_documents(apis)?; + self.run_jj_command(&["commit", "-m", "resolve conflicts"])?; + + // Verify the parent commit (the resolved merge/rebase) has no conflicts. + if self.jj_has_conflicts("@-")? { + return Err(anyhow::anyhow!( + "jj conflict resolution failed: @- still has conflicts" + )); + } + + Ok(()) + } + + /// Check if a jj revision has conflicts. + fn jj_has_conflicts(&self, rev: &str) -> Result { + let output = self.run_jj_command(&[ + "log", + "-r", + rev, + "-T", + "conflict", + "--no-graph", + ])?; + Ok(output.trim() == "true") + } + + /// Get the set of conflicted files using `jj diff --types`. 
+ /// + /// Parses the output of `jj diff --from from --to to --types` and returns + /// files where the second character is 'C' (conflict). + fn jj_get_conflicted_files( + &self, + from: &str, + to: &str, + ) -> Result> { + let output = self + .run_jj_command(&["diff", "--from", from, "--to", to, "--types"])?; + + let mut conflicts = BTreeSet::new(); + for line in output.lines() { + // Format: "FC path/to/file" where F=file type, C=conflict. + let line = line.trim(); + if line.len() >= 2 { + let chars: Vec = line.chars().collect(); + // Second character is 'C' for conflict. + if chars.get(1) == Some(&'C') { + // Skip the type prefix and space to get the path. + if let Some(path) = line.get(3..) { + conflicts.insert(Utf8PathBuf::from(path)); + } + } + } + } + Ok(conflicts) + } + + /// Helper to run jj commands in the workspace root. + fn run_jj_command(&self, args: &[&str]) -> Result { + let jj = std::env::var("JJ").unwrap_or_else(|_| "jj".to_string()); + let output = Command::new(&jj) + .current_dir(&self.workspace_root) + .args(args) + .output() + .context("failed to execute jj command")?; + + if !output.status.success() { + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(anyhow!( + "jj command failed: jj {}\nstdout: {}\nstderr: {}", + args.join(" "), + stdout, + stderr + )); + } + + String::from_utf8(output.stdout) + .context("jj command output was not valid UTF-8") + } } #[cfg(windows)] diff --git a/crates/integration-tests/src/fixtures.rs b/crates/integration-tests/src/fixtures.rs index c9e09f5..581a978 100644 --- a/crates/integration-tests/src/fixtures.rs +++ b/crates/integration-tests/src/fixtures.rs @@ -527,6 +527,201 @@ pub mod versioned_health_v1_only { }; } +/// Versioned health API fixture with v1, v2, and v4 (skipping v3). +/// +/// Used to test merge scenarios where two branches add different new versions +/// while both converting the same older versions to git refs. +pub mod versioned_health_v1_v2_v4 { + use super::*; + use dropshot_api_manager_types::api_versions; + + api_versions!([ + (4, WITH_ENHANCED_METRICS), + (2, WITH_DETAILED_STATUS), + (1, INITIAL), + ]); + + #[dropshot::api_description { module = "api_mod" }] + pub trait VersionedHealthApi { + type Context; + + /// Check if the service is healthy (all versions). + #[endpoint { + method = GET, + path = "/health", + operation_id = "health_check", + versions = "1.0.0".. + }] + async fn health_check( + rqctx: RequestContext, + ) -> Result, HttpError>; + + /// Get detailed health status (v2+). + #[endpoint { + method = GET, + path = "/health/detailed", + operation_id = "detailed_health_check", + versions = "2.0.0".. + }] + async fn detailed_health_check( + rqctx: RequestContext, + ) -> Result, HttpError>; + } + + // Reuse the same response types from the main versioned_health module. + pub use super::versioned_health::{ + DependencyStatus, DetailedHealthStatus, HealthStatusV1, + }; +} + +/// Alternate v3 of the versioned health API with different content. +/// +/// This produces a different OpenAPI document (different hash) than the +/// standard v3, which is useful for testing merge conflict scenarios where two +/// branches both add "v3" but with different content. 
+pub mod versioned_health_v3_alternate { + use super::*; + use dropshot_api_manager_types::api_versions; + + api_versions!([(3, WITH_METRICS), (2, WITH_DETAILED_STATUS), (1, INITIAL)]); + + #[dropshot::api_description { module = "api_mod" }] + pub trait VersionedHealthApi { + type Context; + + /// Check if the service is healthy (all versions). + #[endpoint { + method = GET, + path = "/health", + operation_id = "health_check", + versions = "1.0.0".. + }] + async fn health_check( + rqctx: RequestContext, + ) -> Result, HttpError>; + + /// Get detailed health status (v2+). + #[endpoint { + method = GET, + path = "/health/detailed", + operation_id = "detailed_health_check", + versions = "2.0.0".. + }] + async fn detailed_health_check( + rqctx: RequestContext, + ) -> Result, HttpError>; + + /// Get service metrics (v3+) - alternate version with extended metrics. + #[endpoint { + method = GET, + path = "/metrics", + operation_id = "get_metrics", + versions = "3.0.0".. + }] + async fn get_metrics( + rqctx: RequestContext, + ) -> Result, HttpError>; + } + + /// Extended service metrics response (alternate v3). + /// + /// This has additional fields compared to the standard ServiceMetrics, + /// producing a different OpenAPI document. + #[derive(JsonSchema, Serialize)] + pub struct ServiceMetricsExtended { + pub requests_per_second: f64, + pub error_rate: f64, + pub avg_response_time_ms: f64, + pub active_connections: u32, + /// Additional field not in the standard v3. + pub peak_connections: u32, + /// Additional field not in the standard v3. + pub uptime_seconds: u64, + } + + // Reuse the same response types from the main versioned_health module. + pub use super::versioned_health::{ + DependencyStatus, DetailedHealthStatus, HealthStatusV1, + }; +} + +/// Versioned health API with v1, v2, v3, v4 where v4 uses the alternate metrics. +/// +/// Used to resolve merge conflicts by incorporating main's v3 and making the +/// branch's alternate v3 into v4. +pub mod versioned_health_v1_v2_v3_v4alt { + use super::*; + use dropshot_api_manager_types::api_versions; + + api_versions!([ + (4, WITH_EXTENDED_METRICS), + (3, WITH_METRICS), + (2, WITH_DETAILED_STATUS), + (1, INITIAL), + ]); + + #[dropshot::api_description { module = "api_mod" }] + pub trait VersionedHealthApi { + type Context; + + /// Check if the service is healthy (all versions). + #[endpoint { + method = GET, + path = "/health", + operation_id = "health_check", + versions = "1.0.0".. + }] + async fn health_check( + rqctx: RequestContext, + ) -> Result, HttpError>; + + /// Get detailed health status (v2+). + #[endpoint { + method = GET, + path = "/health/detailed", + operation_id = "detailed_health_check", + versions = "2.0.0".. + }] + async fn detailed_health_check( + rqctx: RequestContext, + ) -> Result, HttpError>; + + /// Get service metrics (v3+): combined version. + /// + /// This doc comment is deliberately different from the others to ensure + /// that we use the blessed version to resolve merge conflicts. + #[endpoint { + method = GET, + path = "/metrics", + operation_id = "get_metrics", + versions = "3.0.0"..VERSION_WITH_EXTENDED_METRICS + }] + async fn get_metrics( + rqctx: RequestContext, + ) -> Result, HttpError>; + + /// Get service metrics (v4+). + #[endpoint { + method = GET, + path = "/metrics", + operation_id = "get_metrics", + versions = "4.0.0".. + }] + async fn get_metrics_extended( + rqctx: RequestContext, + ) -> Result, HttpError>; + } + + // Reuse types from versioned_health and the alternate module. 
+    pub use super::{
+        versioned_health::{
+            DependencyStatus, DetailedHealthStatus, HealthStatusV1,
+            ServiceMetrics,
+        },
+        versioned_health_v3_alternate::ServiceMetricsExtended,
+    };
+}
+
 /// Versioned health API fixture that skips the middle version (2.0.0).
 /// This has versions 3.0.0 and 1.0.0 only, simulating retirement of an older
 /// blessed version.
@@ -1251,3 +1446,88 @@ pub fn versioned_health_reduced_git_ref_apis() -> Result<ManagedApis> {
         "failed to create reduced versioned health git ref ManagedApis",
     )
 }
+
+/// Create a versioned health API with v1, v2, v4 (skipping v3) and git ref
+/// storage enabled.
+///
+/// Used to test merge scenarios where two branches add different new versions
+/// while both converting the same older versions to git refs.
+pub fn versioned_health_v1_v2_v4_git_ref_apis() -> Result<ManagedApis> {
+    let config = ManagedApiConfig {
+        ident: "versioned-health",
+        versions: Versions::Versioned {
+            supported_versions: versioned_health_v1_v2_v4::supported_versions(),
+        },
+        title: "Versioned Health API",
+        metadata: ManagedApiMetadata {
+            description: Some(
+                "A versioned health API for testing version evolution",
+            ),
+            ..Default::default()
+        },
+        api_description:
+            versioned_health_v1_v2_v4::api_mod::stub_api_description,
+    };
+
+    ManagedApis::new(vec![ManagedApi::from(config).with_git_ref_storage()])
+        .context(
+            "failed to create v1,v2,v4 versioned health git ref ManagedApis",
+        )
+}
+
+/// Create a versioned health API with the alternate v3 (different content than
+/// standard v3) and git ref storage enabled.
+///
+/// Used to simulate a branch that adds v3 with different content than main.
+pub fn versioned_health_v3_alternate_git_ref_apis() -> Result<ManagedApis> {
+    let config = ManagedApiConfig {
+        ident: "versioned-health",
+        versions: Versions::Versioned {
+            supported_versions:
+                versioned_health_v3_alternate::supported_versions(),
+        },
+        title: "Versioned Health API",
+        metadata: ManagedApiMetadata {
+            description: Some(
+                "A versioned health API for testing version evolution",
+            ),
+            ..Default::default()
+        },
+        api_description:
+            versioned_health_v3_alternate::api_mod::stub_api_description,
+    };
+
+    ManagedApis::new(vec![ManagedApi::from(config).with_git_ref_storage()])
+        .context(
+            "failed to create v3-alternate versioned health git ref ManagedApis",
+        )
+}
+
+/// Create a versioned health API with v1, v2, v3, v4 where v4 uses the extended
+/// metrics (from the alternate v3), with git ref storage enabled.
+///
+/// Used to resolve merge conflicts by incorporating main's v3 and making the
+/// branch's alternate v3 into v4.
+pub fn versioned_health_v1_v2_v3_v4alt_git_ref_apis() -> Result<ManagedApis> {
+    let config = ManagedApiConfig {
+        ident: "versioned-health",
+        versions: Versions::Versioned {
+            supported_versions:
+                versioned_health_v1_v2_v3_v4alt::supported_versions(),
+        },
+        title: "Versioned Health API",
+        metadata: ManagedApiMetadata {
+            description: Some(
+                "A versioned health API for testing version evolution",
+            ),
+            ..Default::default()
+        },
+        api_description:
+            versioned_health_v1_v2_v3_v4alt::api_mod::stub_api_description,
+    };
+
+    ManagedApis::new(vec![ManagedApi::from(config).with_git_ref_storage()])
+        .context(
+            "failed to create v1,v2,v3,v4-alt versioned health git ref ManagedApis",
+        )
+}
diff --git a/crates/integration-tests/src/lib.rs b/crates/integration-tests/src/lib.rs
index 10aa75b..787cd0c 100644
--- a/crates/integration-tests/src/lib.rs
+++ b/crates/integration-tests/src/lib.rs
@@ -5,5 +5,8 @@
 mod environment;
 mod fixtures;
 
-pub use environment::{TestEnvironment, rel_path_forward_slashes};
+pub use environment::{
+    JjMergeResult, JjRebaseResult, MergeResult, RebaseResult, TestEnvironment,
+    check_jj_available, rel_path_forward_slashes,
+};
 pub use fixtures::*;
diff --git a/crates/integration-tests/tests/integration/git_ref.rs b/crates/integration-tests/tests/integration/git_ref.rs
index 9b1e622..130742c 100644
--- a/crates/integration-tests/tests/integration/git_ref.rs
+++ b/crates/integration-tests/tests/integration/git_ref.rs
@@ -8,11 +8,52 @@
 //! runtime.
 
 use anyhow::Result;
+use camino::Utf8PathBuf;
 use dropshot_api_manager::{
-    GitRef,
+    GitRef, ManagedApis,
     test_util::{CheckResult, check_apis_up_to_date},
 };
 use integration_tests::*;
+use std::collections::{BTreeMap, BTreeSet};
+
+/// The kind of conflict expected on a file during merge/rebase.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum ExpectedConflictKind {
+    /// A rename/rename conflict.
+    ///
+    /// Git's rename detection sees both branches "renaming" the same source
+    /// file to different destinations. jj does not have rename detection, so
+    /// this conflict kind only applies to git.
+    Rename,
+    /// A symlink conflict.
+    ///
+    /// Both branches update a symlink to point to different targets. Both git
+    /// and jj detect this as a conflict.
+    Symlink,
+}
+
+/// Extract all conflicted file paths from the expected conflicts map.
+///
+/// Used by git tests which detect all conflict types.
+fn all_conflict_paths(
+    conflicts: &BTreeMap<Utf8PathBuf, ExpectedConflictKind>,
+) -> BTreeSet<Utf8PathBuf> {
+    conflicts.keys().cloned().collect()
+}
+
+/// Extract only non-rename conflicted file paths from the expected conflicts
+/// map.
+///
+/// Used by jj tests since jj does not have rename detection.
+fn jj_conflict_paths(
+    conflicts: &BTreeMap<Utf8PathBuf, ExpectedConflictKind>,
+) -> BTreeSet<Utf8PathBuf> {
+    conflicts
+        .iter()
+        .filter(|(_, kind)| **kind != ExpectedConflictKind::Rename)
+        .map(|(path, _)| path.clone())
+        .collect()
+}
 
 /// Test that git ref conversion happens when adding a new version, and that
 /// the content is preserved correctly.
@@ -914,3 +955,699 @@ fn test_shallow_clone_creates_incorrect_git_refs() -> Result<()> {
 
     Ok(())
 }
+
+/// Test that git ref files don't cause merge conflicts when two branches with
+/// different merge bases both convert the same API version to a git ref.
+///
+/// See [`no_conflict_setup`] for the test scenario.
+#[test]
+fn test_no_merge_conflict() -> Result<()> {
+    let env = TestEnvironment::new()?;
+    let v1_v2_commit = no_conflict_setup(&env)?;
+
+    env.merge_branch_without_renames("branch_a")?;
+
+    no_conflict_verify(&env, &v1_v2_commit)
+}
+
+/// Test that rebase succeeds without conflict when both branches add the same
+/// version.
+///
+/// This is the rebase equivalent of [`test_no_merge_conflict`]. See
+/// [`no_conflict_setup`] for the test scenario.
+#[test]
+fn test_no_rebase_conflict() -> Result<()> {
+    let env = TestEnvironment::new()?;
+    let v1_v2_commit = no_conflict_setup(&env)?;
+
+    env.checkout_branch("branch_a")?;
+    let rebase_result = env.try_rebase_onto("branch_b")?;
+    assert_eq!(
+        rebase_result,
+        RebaseResult::Clean,
+        "rebase should succeed without conflict"
+    );
+
+    no_conflict_verify(&env, &v1_v2_commit)
+}
+
+/// Test that jj merge succeeds without conflict when both branches add the
+/// same version.
+///
+/// This is the jj equivalent of [`test_no_merge_conflict`]. Uses git for setup,
+/// jj for merge.
+#[test]
+fn test_jj_no_merge_conflict() -> Result<()> {
+    if !check_jj_available()? {
+        return Ok(());
+    }
+
+    let env = TestEnvironment::new()?;
+    let v1_v2_commit = no_conflict_setup(&env)?;
+    env.jj_init()?;
+
+    let merge_result = env.jj_try_merge("branch_a", "branch_b", "merge")?;
+    assert_eq!(
+        merge_result,
+        JjMergeResult::Clean,
+        "jj merge should succeed without conflict"
+    );
+
+    no_conflict_verify(&env, &v1_v2_commit)
+}
+
+/// Test that jj rebase succeeds without conflict when both branches add the
+/// same version.
+///
+/// This is the jj equivalent of [`test_no_rebase_conflict`]. Uses git for
+/// setup, jj for rebase.
+#[test]
+fn test_jj_no_rebase_conflict() -> Result<()> {
+    if !check_jj_available()? {
+        return Ok(());
+    }
+
+    let env = TestEnvironment::new()?;
+    let v1_v2_commit = no_conflict_setup(&env)?;
+    env.jj_init()?;
+
+    let rebase_result = env.jj_try_rebase("branch_a", "branch_b")?;
+    assert_eq!(
+        rebase_result,
+        JjRebaseResult::Clean,
+        "jj rebase should succeed without conflict"
+    );
+
+    no_conflict_verify(&env, &v1_v2_commit)
+}
+
+/// Setup for [`test_no_merge_conflict`] and [`test_no_rebase_conflict`].
+///
+/// Git ref files store `<commit>:<path>` where `<commit>` is "the first commit
+/// when the file was most recently introduced." This is a deterministic
+/// property of the file's history, independent of which branch you're on.
+///
+/// ```text
+/// History:
+/// main: [initial] -- [v1,v2 added] -- [unrelated A] -- [unrelated B]
+///                                           |                |
+///                                           |                +-- branch_b: [add v3]
+///                                           |                    (v1,v2 become git refs)
+///                                           |
+///                                           +-- branch_a: [add v3]
+///                                               (v1,v2 become git refs)
+/// ```
+///
+/// Both branches add the same v3, so:
+/// - The v1 and v2 git refs should have identical content on both branches.
+/// - The merge/rebase should succeed without conflict.
+///
+/// Returns the commit hash where v1/v2 were added. Leaves the environment on
+/// branch_b.
+fn no_conflict_setup(env: &TestEnvironment) -> Result<String> {
+    let v1_v2_apis = versioned_health_reduced_git_ref_apis()?;
+    env.generate_documents(&v1_v2_apis)?;
+    env.commit_documents()?;
+    let v1_v2_commit = env.get_current_commit_hash_full()?;
+
+    env.make_unrelated_commit("unrelated A")?;
+    env.create_branch("branch_a")?;
+
+    // Give branch_a and branch_b different merge bases.
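+    // branch_a was just created at [unrelated A]; branch_b (created below)
+    // will point at [unrelated B]. The two branches therefore have different
+    // merge bases, which is exactly the situation the git ref format is meant
+    // to keep conflict-free.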
+ env.make_unrelated_commit("unrelated B")?; + env.create_branch("branch_b")?; + + env.checkout_branch("branch_a")?; + let v1_v2_v3_apis = versioned_health_git_ref_apis()?; + env.generate_documents(&v1_v2_v3_apis)?; + env.commit_documents()?; + + assert!( + env.versioned_git_ref_exists("versioned-health", "1.0.0")?, + "v1 should be a git ref on branch_a" + ); + assert!( + env.versioned_git_ref_exists("versioned-health", "2.0.0")?, + "v2 should be a git ref on branch_a" + ); + + let v1_ref_branch_a = + env.read_versioned_git_ref("versioned-health", "1.0.0")?; + let v2_ref_branch_a = + env.read_versioned_git_ref("versioned-health", "2.0.0")?; + + env.checkout_branch("branch_b")?; + env.generate_documents(&v1_v2_v3_apis)?; + env.commit_documents()?; + + assert!( + env.versioned_git_ref_exists("versioned-health", "1.0.0")?, + "v1 should be a git ref on branch_b" + ); + assert!( + env.versioned_git_ref_exists("versioned-health", "2.0.0")?, + "v2 should be a git ref on branch_b" + ); + + let v1_ref_branch_b = + env.read_versioned_git_ref("versioned-health", "1.0.0")?; + let v2_ref_branch_b = + env.read_versioned_git_ref("versioned-health", "2.0.0")?; + + // Git refs should be identical: both point to v1_v2_commit. + assert_eq!( + v1_ref_branch_a, v1_ref_branch_b, + "v1 git refs should be identical on both branches" + ); + assert_eq!( + v2_ref_branch_a, v2_ref_branch_b, + "v2 git refs should be identical on both branches" + ); + + let v1_commit = v1_ref_branch_a.trim().split(':').next().unwrap(); + assert_eq!( + v1_commit, v1_v2_commit, + "git ref should point to the original commit" + ); + + Ok(v1_v2_commit) +} + +/// Verify the result of [`test_no_merge_conflict`] or +/// [`test_no_rebase_conflict`]. +fn no_conflict_verify(env: &TestEnvironment, v1_v2_commit: &str) -> Result<()> { + assert!( + env.versioned_git_ref_exists("versioned-health", "1.0.0")?, + "v1 git ref should exist" + ); + assert!( + env.versioned_git_ref_exists("versioned-health", "2.0.0")?, + "v2 git ref should exist" + ); + + let v1_ref = env.read_versioned_git_ref("versioned-health", "1.0.0")?; + let v1_ref_commit = v1_ref.trim().split(':').next().unwrap(); + assert_eq!( + v1_ref_commit, v1_v2_commit, + "v1 git ref should point to the original commit" + ); + + assert!( + env.versioned_local_document_exists("versioned-health", "3.0.0")?, + "v3 should exist" + ); + + Ok(()) +} + +/// Test that the generate command resolves rename/rename conflicts that occur +/// when two branches both add different new API versions. +/// +/// See [`rename_conflict_v3_v4_setup`] for the test scenario. +#[test] +fn test_rename_conflict_resolved_by_generate() -> Result<()> { + let env = TestEnvironment::new()?; + let (v1_v2_commit, expected_conflicts) = rename_conflict_v3_v4_setup(&env)?; + + let merge_result = env.try_merge_branch("branch_a")?; + let MergeResult::Conflict(conflicted_files) = merge_result else { + panic!("merge should have conflicts due to rename/rename detection"); + }; + assert_eq!( + conflicted_files, + all_conflict_paths(&expected_conflicts), + "conflicted files should match expected" + ); + + let v1_v2_v3_v4_apis = versioned_health_with_v4_git_ref_apis()?; + env.generate_documents(&v1_v2_v3_v4_apis)?; + env.complete_merge()?; + + rename_conflict_v3_v4_verify(&env, &v1_v2_commit, &v1_v2_v3_v4_apis) +} + +/// Test that the generate command resolves rename/rename conflicts during +/// rebase. +/// +/// This is the rebase equivalent of [`test_rename_conflict_resolved_by_generate`]. 
+/// See [`rename_conflict_v3_v4_setup`] for the test scenario. +#[test] +fn test_rebase_rename_conflict_resolved_by_generate() -> Result<()> { + let env = TestEnvironment::new()?; + let (v1_v2_commit, expected_conflicts) = rename_conflict_v3_v4_setup(&env)?; + + env.checkout_branch("branch_a")?; + let rebase_result = env.try_rebase_onto("branch_b")?; + let RebaseResult::Conflict(conflicted_files) = rebase_result else { + panic!("rebase should have conflicts due to rename/rename detection"); + }; + assert_eq!( + conflicted_files, + all_conflict_paths(&expected_conflicts), + "conflicted files should match expected" + ); + + let v1_v2_v3_v4_apis = versioned_health_with_v4_git_ref_apis()?; + env.generate_documents(&v1_v2_v3_v4_apis)?; + env.continue_rebase()?; + + rename_conflict_v3_v4_verify(&env, &v1_v2_commit, &v1_v2_v3_v4_apis) +} + +/// Test that jj merge only conflicts on the symlink when branches add +/// different versions (v3 vs v4). +/// +/// Unlike git, jj does NOT have rename detection. Git sees the deletion of +/// v2.json and creation of v3.json/v4.json as a rename conflict, but jj treats +/// them as independent operations. The only conflict is on the latest symlink +/// which both branches update to different targets. +/// +/// This is the jj equivalent of [`test_rename_conflict_resolved_by_generate`]. +#[test] +fn test_jj_symlink_conflict_v3_v4_merge() -> Result<()> { + if !check_jj_available()? { + return Ok(()); + } + + let env = TestEnvironment::new()?; + let (v1_v2_commit, expected_conflicts) = rename_conflict_v3_v4_setup(&env)?; + env.jj_init()?; + + let merge_result = env.jj_try_merge("branch_a", "branch_b", "merge")?; + let JjMergeResult::Conflict(conflicted_files) = merge_result else { + panic!("jj merge should have symlink conflict; got clean merge"); + }; + assert_eq!( + conflicted_files, + jj_conflict_paths(&expected_conflicts), + "jj should only conflict on symlink, not rename-related files" + ); + + let v1_v2_v3_v4_apis = versioned_health_with_v4_git_ref_apis()?; + env.jj_resolve_conflicts(&v1_v2_v3_v4_apis)?; + + rename_conflict_v3_v4_verify(&env, &v1_v2_commit, &v1_v2_v3_v4_apis) +} + +/// Test that jj rebase only conflicts on the symlink when branches add +/// different versions (v3 vs v4). +/// +/// This is the rebase equivalent of [`test_jj_symlink_conflict_v3_v4_merge`]. +#[test] +fn test_jj_symlink_conflict_v3_v4_rebase() -> Result<()> { + if !check_jj_available()? { + return Ok(()); + } + + let env = TestEnvironment::new()?; + let (v1_v2_commit, expected_conflicts) = rename_conflict_v3_v4_setup(&env)?; + env.jj_init()?; + + let rebase_result = env.jj_try_rebase("branch_a", "branch_b")?; + let JjRebaseResult::Conflict(conflicted_files) = rebase_result else { + panic!("jj rebase should have symlink conflict; got clean rebase"); + }; + assert_eq!( + conflicted_files, + jj_conflict_paths(&expected_conflicts), + "jj should only conflict on symlink, not rename-related files" + ); + + let v1_v2_v3_v4_apis = versioned_health_with_v4_git_ref_apis()?; + env.jj_resolve_conflicts(&v1_v2_v3_v4_apis)?; + + rename_conflict_v3_v4_verify(&env, &v1_v2_commit, &v1_v2_v3_v4_apis) +} + +/// Setup for [`test_rename_conflict_resolved_by_generate`] and +/// [`test_rebase_rename_conflict_resolved_by_generate`]. +/// +/// When Git's rename detection is active, it can misinterpret the deletion of +/// an old version (converted to git ref) and creation of a new version as a +/// "rename". 
If two branches both do this with different new versions, Git
+/// reports a rename/rename conflict.
+///
+/// ```text
+/// History:
+/// main: [initial] -- [v1,v2 added] -- [unrelated A] -- [unrelated B]
+///                                           |                |
+///                                           |                +-- branch_b: [add v4]
+///                                           |                    (v1,v2 become git refs)
+///                                           |
+///                                           +-- branch_a: [add v3]
+///                                               (v1,v2 become git refs)
+/// ```
+///
+/// Git sees both branches "renaming" v2.json to different files (v3.json vs
+/// v4.json), causing a rename/rename conflict.
+///
+/// Returns the commit hash where v1/v2 were added and the expected conflicted
+/// files with their conflict kinds. Leaves the environment on branch_b.
+fn rename_conflict_v3_v4_setup(
+    env: &TestEnvironment,
+) -> Result<(String, BTreeMap<Utf8PathBuf, ExpectedConflictKind>)> {
+    let v1_v2_apis = versioned_health_reduced_git_ref_apis()?;
+    env.generate_documents(&v1_v2_apis)?;
+    env.commit_documents()?;
+    let v1_v2_commit = env.get_current_commit_hash_full()?;
+
+    env.make_unrelated_commit("unrelated A")?;
+    env.create_branch("branch_a")?;
+
+    env.make_unrelated_commit("unrelated B")?;
+    env.create_branch("branch_b")?;
+
+    // Capture paths before git ref conversion for conflict tracking.
+    let v2_json_path = env
+        .find_versioned_document_path("versioned-health", "2.0.0")?
+        .expect("v2 should exist as JSON before branching");
+
+    env.checkout_branch("branch_a")?;
+    let v1_v2_v3_apis = versioned_health_git_ref_apis()?;
+    env.generate_documents(&v1_v2_v3_apis)?;
+    let v3_json_path = env
+        .find_versioned_document_path("versioned-health", "3.0.0")?
+        .expect("v3 should exist as JSON on branch_a");
+    env.commit_documents()?;
+
+    env.checkout_branch("branch_b")?;
+    let v1_v2_v4_apis = versioned_health_v1_v2_v4_git_ref_apis()?;
+    env.generate_documents(&v1_v2_v4_apis)?;
+    let v4_json_path = env
+        .find_versioned_document_path("versioned-health", "4.0.0")?
+        .expect("v4 should exist as JSON on branch_b");
+    env.commit_documents()?;
+
+    // Git's rename detection sees v2.json "renamed" to v3/v4 on different
+    // branches. jj has no rename detection, so only the symlink conflicts.
+    let latest_symlink: Utf8PathBuf =
+        "documents/versioned-health/versioned-health-latest.json".into();
+    let expected_conflicts: BTreeMap<Utf8PathBuf, ExpectedConflictKind> = [
+        (v2_json_path, ExpectedConflictKind::Rename),
+        (v3_json_path, ExpectedConflictKind::Rename),
+        (v4_json_path, ExpectedConflictKind::Rename),
+        (latest_symlink, ExpectedConflictKind::Symlink),
+    ]
+    .into_iter()
+    .collect();
+
+    Ok((v1_v2_commit, expected_conflicts))
+}
+
+/// Verify the result of [`test_rename_conflict_resolved_by_generate`] or
+/// [`test_rebase_rename_conflict_resolved_by_generate`].
+fn rename_conflict_v3_v4_verify(
+    env: &TestEnvironment,
+    v1_v2_commit: &str,
+    v1_v2_v3_v4_apis: &ManagedApis,
+) -> Result<()> {
+    assert!(
+        env.versioned_git_ref_exists("versioned-health", "1.0.0")?,
+        "v1 should be a git ref"
+    );
+    assert!(
+        env.versioned_git_ref_exists("versioned-health", "2.0.0")?,
+        "v2 should be a git ref"
+    );
+
+    let v1_ref = env.read_versioned_git_ref("versioned-health", "1.0.0")?;
+    let v1_ref_commit = v1_ref.trim().split(':').next().unwrap();
+    assert_eq!(
+        v1_ref_commit, v1_v2_commit,
+        "v1 git ref should point to the original commit"
+    );
+
+    // v3 is locally added (not blessed), so it should be JSON.
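+    // (Only blessed versions that are not the latest become git refs; a
+    // version that exists only locally, or the latest version, stays as a
+    // full JSON document.)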
+ assert!( + env.versioned_local_document_exists("versioned-health", "3.0.0")?, + "v3 should exist as JSON (locally added)" + ); + assert!( + !env.versioned_git_ref_exists("versioned-health", "3.0.0")?, + "v3 should not be a git ref (locally added, not blessed)" + ); + + assert!( + env.versioned_local_document_exists("versioned-health", "4.0.0")?, + "v4 should exist as JSON (latest)" + ); + assert!( + !env.versioned_git_ref_exists("versioned-health", "4.0.0")?, + "v4 should not be a git ref" + ); + + let result = check_apis_up_to_date(env.environment(), v1_v2_v3_v4_apis)?; + assert_eq!(result, CheckResult::Success); + + Ok(()) +} + +/// Test that the generate command resolves rename/rename conflicts when both +/// branches add v3 with different content. +/// +/// See [`rename_conflict_blessed_setup`] for the test scenario. +#[test] +fn test_rename_conflict_blessed_versions() -> Result<()> { + let env = TestEnvironment::new()?; + let (v1_v2_commit, expected_conflicts) = + rename_conflict_blessed_setup(&env)?; + + let merge_result = env.try_merge_branch("main")?; + let MergeResult::Conflict(conflicted_files) = merge_result else { + panic!( + "merge should have conflicts due to different v3 contents; \ + got clean merge" + ); + }; + assert_eq!( + conflicted_files, + all_conflict_paths(&expected_conflicts), + "conflicted files should match expected" + ); + + // Resolution: make branch's alternate v3 into v4, keep main's v3. + let v1_v2_v3_v4alt_apis = versioned_health_v1_v2_v3_v4alt_git_ref_apis()?; + env.generate_documents(&v1_v2_v3_v4alt_apis)?; + env.complete_merge()?; + + rename_conflict_blessed_verify(&env, &v1_v2_commit, &v1_v2_v3_v4alt_apis) +} + +/// Test that the generate command resolves rename/rename conflicts during +/// rebase when both branches add v3 with different content. +/// +/// This is the rebase equivalent of [`test_rename_conflict_blessed_versions`]. +/// See [`rename_conflict_blessed_setup`] for the test scenario. +#[test] +fn test_rebase_rename_conflict_blessed_versions() -> Result<()> { + let env = TestEnvironment::new()?; + let (v1_v2_commit, expected_conflicts) = + rename_conflict_blessed_setup(&env)?; + + let rebase_result = env.try_rebase_onto("main")?; + let RebaseResult::Conflict(conflicted_files) = rebase_result else { + panic!( + "rebase should have conflicts due to different v3 contents; \ + got clean rebase" + ); + }; + assert_eq!( + conflicted_files, + all_conflict_paths(&expected_conflicts), + "conflicted files should match expected" + ); + + // Resolution: make branch's alternate v3 into v4, keep main's v3. + let v1_v2_v3_v4alt_apis = versioned_health_v1_v2_v3_v4alt_git_ref_apis()?; + env.generate_documents(&v1_v2_v3_v4alt_apis)?; + env.continue_rebase()?; + + rename_conflict_blessed_verify(&env, &v1_v2_commit, &v1_v2_v3_v4alt_apis) +} + +/// Test that jj merge only conflicts on the symlink when both branches add v3 +/// with different content. +/// +/// Although both branches add v3 with different content, the v3 files have +/// different hashes (and thus different filenames), so jj treats them as +/// different files. The only conflict is on the latest symlink. +/// +/// This is the jj equivalent of [`test_rename_conflict_blessed_versions`]. +#[test] +fn test_jj_symlink_conflict_blessed_merge() -> Result<()> { + if !check_jj_available()? 
{
+        return Ok(());
+    }
+
+    let env = TestEnvironment::new()?;
+    let (v1_v2_commit, expected_conflicts) =
+        rename_conflict_blessed_setup(&env)?;
+    env.jj_init()?;
+
+    let merge_result = env.jj_try_merge("main", "branch_b", "merge")?;
+    let JjMergeResult::Conflict(conflicted_files) = merge_result else {
+        panic!("jj merge should have symlink conflict; got clean merge");
+    };
+    assert_eq!(
+        conflicted_files,
+        jj_conflict_paths(&expected_conflicts),
+        "jj should only conflict on symlink, not rename-related files"
+    );
+
+    // Resolution: make branch's alternate v3 into v4, keep main's v3.
+    let v1_v2_v3_v4alt_apis = versioned_health_v1_v2_v3_v4alt_git_ref_apis()?;
+    env.jj_resolve_conflicts(&v1_v2_v3_v4alt_apis)?;
+
+    rename_conflict_blessed_verify(&env, &v1_v2_commit, &v1_v2_v3_v4alt_apis)
+}
+
+/// Test that jj rebase only conflicts on the symlink when both branches add v3
+/// with different content.
+///
+/// This is the rebase equivalent of [`test_jj_symlink_conflict_blessed_merge`].
+#[test]
+fn test_jj_symlink_conflict_blessed_rebase() -> Result<()> {
+    if !check_jj_available()? {
+        return Ok(());
+    }
+
+    let env = TestEnvironment::new()?;
+    let (v1_v2_commit, expected_conflicts) =
+        rename_conflict_blessed_setup(&env)?;
+    env.jj_init()?;
+
+    let rebase_result = env.jj_try_rebase("branch_b", "main")?;
+    let JjRebaseResult::Conflict(conflicted_files) = rebase_result else {
+        panic!("jj rebase should have symlink conflict; got clean rebase");
+    };
+    assert_eq!(
+        conflicted_files,
+        jj_conflict_paths(&expected_conflicts),
+        "jj should only conflict on symlink, not rename-related files"
+    );
+
+    // Resolution: make branch's alternate v3 into v4, keep main's v3.
+    let v1_v2_v3_v4alt_apis = versioned_health_v1_v2_v3_v4alt_git_ref_apis()?;
+    env.jj_resolve_conflicts(&v1_v2_v3_v4alt_apis)?;
+
+    rename_conflict_blessed_verify(&env, &v1_v2_commit, &v1_v2_v3_v4alt_apis)
+}
+
+/// Setup for [`test_rename_conflict_blessed_versions`] and
+/// [`test_rebase_rename_conflict_blessed_versions`].
+///
+/// This simulates a realistic scenario where:
+/// 1. main adds v3 (standard version)
+/// 2. branch adds v3 with different content (alternate version)
+/// 3. Both branches convert v2 to git ref when adding v3
+/// 4. Merge/rebase causes a conflict on v3 files (different hashes due to
+///    different content)
+/// 5. Resolution: the branch's v3-alternate becomes v4, main's v3 stays as v3
+///
+/// ```text
+/// History:
+/// main: [v1,v2] -- [add v3 standard]
+///           |
+///           +-- branch_b: [add v3 alternate]
+/// ```
+///
+/// Both branches add v3, but with different content. Git sees:
+/// - main: v2.json deleted, v3-<hash1>.json created
+/// - branch_b: v2.json deleted, v3-<hash2>.json created
+///
+/// This causes a rename/rename conflict.
+///
+/// Returns the commit hash where v1/v2 were added and the expected conflicted
+/// files with their conflict kinds. Leaves the environment on branch_b.
+fn rename_conflict_blessed_setup(
+    env: &TestEnvironment,
+) -> Result<(String, BTreeMap<Utf8PathBuf, ExpectedConflictKind>)> {
+    let v1_v2_apis = versioned_health_reduced_git_ref_apis()?;
+    env.generate_documents(&v1_v2_apis)?;
+    env.commit_documents()?;
+    let v1_v2_commit = env.get_current_commit_hash_full()?;
+
+    env.create_branch("branch_b")?;
+
+    // Capture paths before git ref conversion for conflict tracking.
+    let v2_json_path = env
+        .find_versioned_document_path("versioned-health", "2.0.0")?
+        .expect("v2 should exist as JSON before generating v3");
+
+    // On main: add v3 (standard).
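+    // The generated document's file name embeds a content hash, so this
+    // standard v3 and the alternate v3 added on branch_b below land at
+    // different paths even though both claim version 3.0.0.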
+    let v1_v2_v3_apis = versioned_health_git_ref_apis()?;
+    env.generate_documents(&v1_v2_v3_apis)?;
+    let v3_json_path_main = env
+        .find_versioned_document_path("versioned-health", "3.0.0")?
+        .expect("v3 should exist as JSON on main");
+    env.commit_documents()?;
+
+    // On branch_b: add v3-alternate (different content, different hash).
+    env.checkout_branch("branch_b")?;
+    let v3_alt_apis = versioned_health_v3_alternate_git_ref_apis()?;
+    env.generate_documents(&v3_alt_apis)?;
+    let v3_json_path_b = env
+        .find_versioned_document_path("versioned-health", "3.0.0")?
+        .expect("v3 should exist as JSON on branch_b");
+    env.commit_documents()?;
+
+    // Git's rename detection sees v2.json "renamed" to v3 on both branches
+    // (different destinations). jj has no rename detection.
+    let latest_symlink: Utf8PathBuf =
+        "documents/versioned-health/versioned-health-latest.json".into();
+    let expected_conflicts: BTreeMap<Utf8PathBuf, ExpectedConflictKind> = [
+        (v2_json_path, ExpectedConflictKind::Rename),
+        (v3_json_path_main, ExpectedConflictKind::Rename),
+        (v3_json_path_b, ExpectedConflictKind::Rename),
+        (latest_symlink, ExpectedConflictKind::Symlink),
+    ]
+    .into_iter()
+    .collect();
+
+    Ok((v1_v2_commit, expected_conflicts))
+}
+
+/// Verify the result of [`test_rename_conflict_blessed_versions`] or
+/// [`test_rebase_rename_conflict_blessed_versions`].
+fn rename_conflict_blessed_verify(
+    env: &TestEnvironment,
+    v1_v2_commit: &str,
+    v1_v2_v3_v4alt_apis: &ManagedApis,
+) -> Result<()> {
+    assert!(
+        env.versioned_git_ref_exists("versioned-health", "1.0.0")?,
+        "v1 should be a git ref"
+    );
+    assert!(
+        env.versioned_git_ref_exists("versioned-health", "2.0.0")?,
+        "v2 should be a git ref"
+    );
+
+    let v1_ref = env.read_versioned_git_ref("versioned-health", "1.0.0")?;
+    let v1_ref_commit = v1_ref.trim().split(':').next().unwrap();
+    assert_eq!(
+        v1_ref_commit, v1_v2_commit,
+        "v1 git ref should point to the original commit"
+    );
+
+    // v3 is blessed (from main) and not latest.
+    assert!(
+        env.versioned_git_ref_exists("versioned-health", "3.0.0")?,
+        "v3 should be a git ref (blessed from main, not latest)"
+    );
+
+    assert!(
+        env.versioned_local_document_exists("versioned-health", "4.0.0")?,
+        "v4 should exist as JSON (latest)"
+    );
+    assert!(
+        !env.versioned_git_ref_exists("versioned-health", "4.0.0")?,
+        "v4 should not be a git ref"
+    );
+
+    let result = check_apis_up_to_date(env.environment(), v1_v2_v3_v4alt_apis)?;
+    assert_eq!(result, CheckResult::Success);
+
+    Ok(())
+}
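For reference, the `--types` parsing convention that `jj_get_conflicted_files` relies on
(the second character of the two-letter type prefix is 'C' for a conflicted entry) can be
illustrated with a small standalone sketch. The helper itself is private to the test
environment; the function name, sample lines, and file names below are hypothetical:

// Minimal sketch of the `jj diff --types` parsing rule assumed above; it does
// not call into the real helper.
fn conflicted_paths_from_types_output(output: &str) -> Vec<&str> {
    output
        .lines()
        .map(str::trim)
        // A line looks like "FC path/to/file"; the second type letter is 'C'
        // when the entry is conflicted.
        .filter(|line| line.chars().nth(1) == Some('C'))
        // Skip the two-letter prefix and the following space.
        .filter_map(|line| line.get(3..))
        .collect()
}

#[test]
fn sketch_types_parsing() {
    let sample = "FF documents/versioned-health/versioned-health-4.0.0-abc123.json\n\
                  FC documents/versioned-health/versioned-health-latest.json";
    assert_eq!(
        conflicted_paths_from_types_output(sample),
        vec!["documents/versioned-health/versioned-health-latest.json"]
    );
}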