ok/jj
forked from mirrors/jj

conflicts: move describe_conflict() etc. onto Conflict

Before we had `conflicts::Conflict`, most of these functions took a
`backend::Conflict`. I think I didn't want to pollute the `backend`
module with this kind of logic, trying to keep it focused on
storage. Now that we have the type in `conflicts`, however, I think it
makes sense to move these functions onto it.
Martin von Zweigbergk 2023-06-16 04:11:19 -07:00 committed by Martin von Zweigbergk
parent 7a8cabafc9
commit 82883e648d
6 changed files with 229 additions and 231 deletions
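
At the call sites, the move described above amounts to replacing the free functions in `jujutsu_lib::conflicts` with methods on the `Conflict` value. A rough before/after sketch, mirroring the test hunks further down (illustrative only; `store`, `path`, and `conflict` are assumed to be in scope, and `buf`/`maybe_new` are placeholder names):

// Before: free functions taking the conflict as an argument
let mut buf: Vec<u8> = vec![];
conflicts::materialize_conflict(store, &path, &conflict, &mut buf).unwrap();
let maybe_new = conflicts::update_conflict_from_content(store, &path, &conflict, &buf).unwrap();

// After: methods on conflicts::Conflict<Option<TreeValue>>
let mut buf: Vec<u8> = vec![];
conflict.materialize(store, &path, &mut buf).unwrap();
let maybe_new = conflict.update_from_content(store, &path, &buf).unwrap();

The other helpers (`describe`, `to_file_conflict`, `extract_as_single_hunk`) move the same way, as the `run_mergetool` hunks at the end of the diff show.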


@@ -128,6 +128,184 @@ impl Conflict<Option<TreeValue>> {
.collect();
backend::Conflict { removes, adds }
}
pub fn materialize(
&self,
store: &Store,
path: &RepoPath,
output: &mut dyn Write,
) -> std::io::Result<()> {
if let Some(file_conflict) = self.to_file_conflict() {
let content = file_conflict.extract_as_single_hunk(store, path);
materialize_merge_result(&content, output)
} else {
// Unless all terms are regular files, we can't do much better than to try to
// describe the conflict.
self.describe(output)
}
}
pub fn to_file_conflict(&self) -> Option<Conflict<Option<FileId>>> {
fn collect_file_terms(terms: &[Option<TreeValue>]) -> Option<Vec<Option<FileId>>> {
let mut file_terms = vec![];
for term in terms {
match term {
None => {
file_terms.push(None);
}
Some(TreeValue::File {
id,
executable: false,
}) => {
file_terms.push(Some(id.clone()));
}
_ => {
return None;
}
}
}
Some(file_terms)
}
let file_removes = collect_file_terms(self.removes())?;
let file_adds = collect_file_terms(self.adds())?;
Some(Conflict::new(file_removes, file_adds))
}
/// Give a summary description of the conflict's "removes" and "adds"
pub fn describe(&self, file: &mut dyn Write) -> std::io::Result<()> {
file.write_all(b"Conflict:\n")?;
for term in self.removes().iter().flatten() {
file.write_all(format!(" Removing {}\n", describe_conflict_term(term)).as_bytes())?;
}
for term in self.adds().iter().flatten() {
file.write_all(format!(" Adding {}\n", describe_conflict_term(term)).as_bytes())?;
}
Ok(())
}
/// Returns `None` if there are no conflict markers in `content`.
pub fn update_from_content(
&self,
store: &Store,
path: &RepoPath,
content: &[u8],
) -> BackendResult<Option<Conflict<Option<TreeValue>>>> {
// TODO: Check that the conflict only involves files and convert it to a
// `Conflict<Option<FileId>>` so we can remove the wildcard pattern in the loops
// further down.
// First check if the new content is unchanged compared to the old content. If
it is, we don't need to parse the content or write any new objects to the
// store. This is also a way of making sure that unchanged tree/file
// conflicts (for example) are not converted to regular files in the working
// copy.
let mut old_content = Vec::with_capacity(content.len());
self.materialize(store, path, &mut old_content).unwrap();
if content == old_content {
return Ok(Some(self.clone()));
}
let mut removed_content = vec![vec![]; self.removes().len()];
let mut added_content = vec![vec![]; self.adds().len()];
// TODO: Change to let-else once our MSRV is above 1.65
let hunks =
if let Some(hunks) = parse_conflict(content, self.removes().len(), self.adds().len()) {
hunks
} else {
// Either there are no conflict markers or they don't have the expected arity
return Ok(None);
};
for hunk in hunks {
match hunk {
MergeHunk::Resolved(slice) => {
for buf in &mut removed_content {
buf.extend_from_slice(&slice);
}
for buf in &mut added_content {
buf.extend_from_slice(&slice);
}
}
MergeHunk::Conflict(ConflictHunk { removes, adds }) => {
for (i, buf) in removes.iter().enumerate() {
removed_content[i].extend_from_slice(buf);
}
for (i, buf) in adds.iter().enumerate() {
added_content[i].extend_from_slice(buf);
}
}
}
}
// Now write the new file contents we found by parsing the file
// with conflict markers. Update the Conflict object with the new
// FileIds.
let mut new_removes = vec![];
for (i, buf) in removed_content.iter().enumerate() {
match &self.removes()[i] {
Some(TreeValue::File { id: _, executable }) => {
let file_id = store.write_file(path, &mut buf.as_slice())?;
let new_value = TreeValue::File {
id: file_id,
executable: *executable,
};
new_removes.push(Some(new_value));
}
None if buf.is_empty() => {
// The missing side of a conflict is still represented by
// the empty string we materialized it as
new_removes.push(None);
}
_ => {
// The user edited a non-file side. This should never happen. We consider the
// conflict resolved for now.
return Ok(None);
}
}
}
let mut new_adds = vec![];
for (i, buf) in added_content.iter().enumerate() {
match &self.adds()[i] {
Some(TreeValue::File { id: _, executable }) => {
let file_id = store.write_file(path, &mut buf.as_slice())?;
let new_value = TreeValue::File {
id: file_id,
executable: *executable,
};
new_adds.push(Some(new_value));
}
None if buf.is_empty() => {
// The missing side of a conflict is still represented by
// the empty string we materialized it as => nothing to do
new_adds.push(None);
}
_ => {
// The user edited a non-file side. This should never happen. We consider the
// conflict resolved for now.
return Ok(None);
}
}
}
Ok(Some(Conflict::new(new_removes, new_adds)))
}
}
impl Conflict<Option<FileId>> {
pub fn extract_as_single_hunk(&self, store: &Store, path: &RepoPath) -> ConflictHunk {
let removes_content = self
.removes()
.iter()
.map(|term| get_file_contents(store, path, term))
.collect_vec();
let adds_content = self
.adds()
.iter()
.map(|term| get_file_contents(store, path, term))
.collect_vec();
ConflictHunk {
removes: removes_content,
adds: adds_content,
}
}
}
fn describe_conflict_term(value: &TreeValue) -> String {
@@ -159,49 +337,6 @@ fn describe_conflict_term(value: &TreeValue) -> String {
}
}
/// Give a summary description of a conflict's "removes" and "adds"
pub fn describe_conflict(
conflict: &Conflict<Option<TreeValue>>,
file: &mut dyn Write,
) -> std::io::Result<()> {
file.write_all(b"Conflict:\n")?;
for term in conflict.removes().iter().flatten() {
file.write_all(format!(" Removing {}\n", describe_conflict_term(term)).as_bytes())?;
}
for term in conflict.adds().iter().flatten() {
file.write_all(format!(" Adding {}\n", describe_conflict_term(term)).as_bytes())?;
}
Ok(())
}
pub fn to_file_conflict(
conflict: &Conflict<Option<TreeValue>>,
) -> Option<Conflict<Option<FileId>>> {
fn collect_file_terms(terms: &[Option<TreeValue>]) -> Option<Vec<Option<FileId>>> {
let mut file_terms = vec![];
for term in terms {
match term {
None => {
file_terms.push(None);
}
Some(TreeValue::File {
id,
executable: false,
}) => {
file_terms.push(Some(id.clone()));
}
_ => {
return None;
}
}
}
Some(file_terms)
}
let file_removes = collect_file_terms(conflict.removes())?;
let file_adds = collect_file_terms(conflict.adds())?;
Some(Conflict::new(file_removes, file_adds))
}
fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> Vec<u8> {
match term {
Some(id) => {
@@ -243,44 +378,6 @@ fn write_diff_hunks(hunks: &[DiffHunk], file: &mut dyn Write) -> std::io::Result
Ok(())
}
pub fn materialize_conflict(
store: &Store,
path: &RepoPath,
conflict: &Conflict<Option<TreeValue>>,
output: &mut dyn Write,
) -> std::io::Result<()> {
if let Some(file_conflict) = to_file_conflict(conflict) {
let content = extract_file_conflict_as_single_hunk(store, path, &file_conflict);
materialize_merge_result(&content, output)
} else {
// Unless all terms are regular files, we can't do much better than to try to
// describe the conflict.
describe_conflict(conflict, output)
}
}
pub fn extract_file_conflict_as_single_hunk(
store: &Store,
path: &RepoPath,
conflict: &Conflict<Option<FileId>>,
) -> ConflictHunk {
let removes_content = conflict
.removes()
.iter()
.map(|term| get_file_contents(store, path, term))
.collect_vec();
let adds_content = conflict
.adds()
.iter()
.map(|term| get_file_contents(store, path, term))
.collect_vec();
ConflictHunk {
removes: removes_content,
adds: adds_content,
}
}
pub fn materialize_merge_result(
single_hunk: &ConflictHunk,
output: &mut dyn Write,
@@ -471,111 +568,6 @@ fn parse_conflict_hunk(input: &[u8]) -> MergeHunk {
MergeHunk::Conflict(ConflictHunk { removes, adds })
}
/// Returns `None` if there are no conflict markers in `content`.
pub fn update_conflict_from_content(
store: &Store,
path: &RepoPath,
conflict: &Conflict<Option<TreeValue>>,
content: &[u8],
) -> BackendResult<Option<Conflict<Option<TreeValue>>>> {
// TODO: Check that the conflict only involves files and convert it to a
// `Conflict<Option<FileId>>` so we can remove the wildcard pattern in the loops
// further down.
// First check if the new content is unchanged compared to the old content. If
it is, we don't need to parse the content or write any new objects to the
// store. This is also a way of making sure that unchanged tree/file
// conflicts (for example) are not converted to regular files in the working
// copy.
let mut old_content = Vec::with_capacity(content.len());
materialize_conflict(store, path, conflict, &mut old_content).unwrap();
if content == old_content {
return Ok(Some(conflict.clone()));
}
let mut removed_content = vec![vec![]; conflict.removes().len()];
let mut added_content = vec![vec![]; conflict.adds().len()];
// TODO: Change to let-else once our MSRV is above 1.65
let hunks = if let Some(hunks) =
parse_conflict(content, conflict.removes().len(), conflict.adds().len())
{
hunks
} else {
// Either there are no conflict markers or they don't have the expected arity
return Ok(None);
};
for hunk in hunks {
match hunk {
MergeHunk::Resolved(slice) => {
for buf in &mut removed_content {
buf.extend_from_slice(&slice);
}
for buf in &mut added_content {
buf.extend_from_slice(&slice);
}
}
MergeHunk::Conflict(ConflictHunk { removes, adds }) => {
for (i, buf) in removes.iter().enumerate() {
removed_content[i].extend_from_slice(buf);
}
for (i, buf) in adds.iter().enumerate() {
added_content[i].extend_from_slice(buf);
}
}
}
}
// Now write the new file contents we found by parsing the file
// with conflict markers. Update the Conflict object with the new
// FileIds.
let mut new_removes = vec![];
for (i, buf) in removed_content.iter().enumerate() {
match &conflict.removes()[i] {
Some(TreeValue::File { id: _, executable }) => {
let file_id = store.write_file(path, &mut buf.as_slice())?;
let new_value = TreeValue::File {
id: file_id,
executable: *executable,
};
new_removes.push(Some(new_value));
}
None if buf.is_empty() => {
// The missing side of a conflict is still represented by
// the empty string we materialized it as
new_removes.push(None);
}
_ => {
// The user edited a non-file side. This should never happen. We consider the
// conflict resolved for now.
return Ok(None);
}
}
}
let mut new_adds = vec![];
for (i, buf) in added_content.iter().enumerate() {
match &conflict.adds()[i] {
Some(TreeValue::File { id: _, executable }) => {
let file_id = store.write_file(path, &mut buf.as_slice())?;
let new_value = TreeValue::File {
id: file_id,
executable: *executable,
};
new_adds.push(Some(new_value));
}
None if buf.is_empty() => {
// The missing side of a conflict is still represented by
// the empty string we materialized it as => nothing to do
new_adds.push(None);
}
_ => {
// The user edited a non-file side. This should never happen. We consider the
// conflict resolved for now.
return Ok(None);
}
}
}
Ok(Some(Conflict::new(new_removes, new_adds)))
}
#[cfg(test)]
mod tests {
use super::*;


@@ -34,7 +34,6 @@ use thiserror::Error;
use crate::backend::{
BackendError, ConflictId, FileId, MillisSinceEpoch, ObjectId, SymlinkId, TreeId, TreeValue,
};
use crate::conflicts::{materialize_conflict, update_conflict_from_content};
use crate::gitignore::GitIgnoreFile;
use crate::lock::FileLock;
use crate::matchers::{DifferenceMatcher, Matcher, PrefixMatcher};
@@ -659,13 +658,9 @@ impl TreeState {
let mut content = vec![];
file.read_to_end(&mut content).unwrap();
let conflict = self.store.read_conflict(&repo_path, conflict_id)?;
if let Some(new_conflict) = update_conflict_from_content(
self.store.as_ref(),
&repo_path,
&conflict,
&content,
)
.unwrap()
if let Some(new_conflict) = conflict
.update_from_content(self.store.as_ref(), &repo_path, &content)
.unwrap()
{
new_file_state.file_type = FileType::Conflict;
*current_file_state = new_file_state;
@@ -792,7 +787,8 @@ impl TreeState {
err,
})?;
let mut conflict_data = vec![];
materialize_conflict(self.store.as_ref(), path, &conflict, &mut conflict_data)
conflict
.materialize(self.store.as_ref(), path, &mut conflict_data)
.expect("Failed to materialize conflict to in-memory buffer");
file.write_all(&conflict_data)
.map_err(|err| CheckoutError::IoError {


@@ -13,9 +13,7 @@
// limitations under the License.
use jujutsu_lib::backend::{FileId, TreeValue};
use jujutsu_lib::conflicts::{
materialize_conflict, parse_conflict, update_conflict_from_content, Conflict,
};
use jujutsu_lib::conflicts::{parse_conflict, Conflict};
use jujutsu_lib::repo::Repo;
use jujutsu_lib::repo_path::RepoPath;
use jujutsu_lib::store::Store;
@@ -290,7 +288,7 @@ line 5 right
vec![Some(file_value(&left_id)), Some(file_value(&right_id))],
);
let mut result: Vec<u8> = vec![];
materialize_conflict(store, &path, &conflict, &mut result).unwrap();
conflict.materialize(store, &path, &mut result).unwrap();
insta::assert_snapshot!(
String::from_utf8(result.clone()).unwrap(),
@r###"
@@ -648,24 +646,28 @@ fn test_update_conflict_from_content() {
// If the content is unchanged compared to the materialized value, we get the
// old conflict id back.
let mut materialized = vec![];
materialize_conflict(store, &path, &conflict, &mut materialized).unwrap();
let result = update_conflict_from_content(store, &path, &conflict, &materialized).unwrap();
conflict
.materialize(store, &path, &mut materialized)
.unwrap();
let result = conflict
.update_from_content(store, &path, &materialized)
.unwrap();
assert_eq!(result, Some(conflict.clone()));
// If the conflict is resolved, we get None back to indicate that.
let result =
update_conflict_from_content(store, &path, &conflict, b"resolved 1\nline 2\nresolved 3\n")
.unwrap();
let result = conflict
.update_from_content(store, &path, b"resolved 1\nline 2\nresolved 3\n")
.unwrap();
assert_eq!(result, None);
// If the conflict is partially resolved, we get a new conflict back.
let result = update_conflict_from_content(
store,
&path,
&conflict,
b"resolved 1\nline 2\n<<<<<<<\n%%%%%%%\n-line 3\n+left 3\n+++++++\nright 3\n>>>>>>>\n",
)
.unwrap();
let result = conflict
.update_from_content(
store,
&path,
b"resolved 1\nline 2\n<<<<<<<\n%%%%%%%\n-line 3\n+left 3\n+++++++\nright 3\n>>>>>>>\n",
)
.unwrap();
let new_conflict = result.unwrap();
assert_ne!(new_conflict, conflict);
// Calculate expected new FileIds
@@ -700,19 +702,24 @@ fn test_update_conflict_from_content_modify_delete() {
// If the content is unchanged compared to the materialized value, we get the
// old conflict id back.
let mut materialized = vec![];
materialize_conflict(store, &path, &conflict, &mut materialized).unwrap();
let result = update_conflict_from_content(store, &path, &conflict, &materialized).unwrap();
conflict
.materialize(store, &path, &mut materialized)
.unwrap();
let result = conflict
.update_from_content(store, &path, &materialized)
.unwrap();
assert_eq!(result, Some(conflict.clone()));
// If the conflict is resolved, we get None back to indicate that.
let result = update_conflict_from_content(store, &path, &conflict, b"resolved\n").unwrap();
let result = conflict
.update_from_content(store, &path, b"resolved\n")
.unwrap();
assert_eq!(result, None);
// If the conflict is modified, we get a new conflict back.
let result = update_conflict_from_content(
let result = conflict.update_from_content(
store,
&path,
&conflict,
b"<<<<<<<\n%%%%%%%\n line 1\n-line 2 before\n+line 2 modified after\n line 3\n+++++++\n>>>>>>>\n",
)
.unwrap();
@@ -737,6 +744,6 @@ fn materialize_conflict_string(
conflict: &Conflict<Option<TreeValue>>,
) -> String {
let mut result: Vec<u8> = vec![];
materialize_conflict(store, path, conflict, &mut result).unwrap();
conflict.materialize(store, path, &mut result).unwrap();
String::from_utf8(result).unwrap()
}


@@ -47,7 +47,7 @@ use jujutsu_lib::rewrite::{back_out_commit, merge_commit_trees, rebase_commit, D
use jujutsu_lib::settings::UserSettings;
use jujutsu_lib::tree::{merge_trees, Tree};
use jujutsu_lib::workspace::Workspace;
use jujutsu_lib::{conflicts, file_util, revset};
use jujutsu_lib::{file_util, revset};
use maplit::{hashmap, hashset};
use crate::cli_util::{
@@ -1290,7 +1290,9 @@ fn cmd_cat(ui: &mut Ui, command: &CommandHelper, args: &CatArgs) -> Result<(), C
Some(TreeValue::Conflict(id)) => {
let conflict = repo.store().read_conflict(&path, &id)?;
let mut contents = vec![];
conflicts::materialize_conflict(repo.store(), &path, &conflict, &mut contents).unwrap();
conflict
.materialize(repo.store(), &path, &mut contents)
.unwrap();
ui.request_pager();
ui.stdout_formatter().write_all(&contents)?;
}


@@ -27,7 +27,7 @@ use jujutsu_lib::repo::{ReadonlyRepo, Repo};
use jujutsu_lib::repo_path::RepoPath;
use jujutsu_lib::settings::UserSettings;
use jujutsu_lib::tree::{Tree, TreeDiffIterator};
use jujutsu_lib::{conflicts, diff, files, rewrite, tree};
use jujutsu_lib::{diff, files, rewrite, tree};
use crate::cli_util::{CommandError, WorkspaceCommandHelper};
use crate::formatter::Formatter;
@@ -307,7 +307,9 @@ fn diff_content(
TreeValue::Conflict(id) => {
let conflict = repo.store().read_conflict(path, id).unwrap();
let mut content = vec![];
conflicts::materialize_conflict(repo.store(), path, &conflict, &mut content).unwrap();
conflict
.materialize(repo.store(), path, &mut content)
.unwrap();
Ok(content)
}
}
@@ -456,7 +458,9 @@ fn git_diff_part(
mode = "100644".to_string();
hash = id.hex();
let conflict = repo.store().read_conflict(path, id).unwrap();
conflicts::materialize_conflict(repo.store(), path, &conflict, &mut content).unwrap();
conflict
.materialize(repo.store(), path, &mut content)
.unwrap();
}
}
let hash = hash[0..10].to_string();


@@ -22,10 +22,7 @@ use std::sync::Arc;
use config::ConfigError;
use itertools::Itertools;
use jujutsu_lib::backend::{TreeId, TreeValue};
use jujutsu_lib::conflicts::{
describe_conflict, extract_file_conflict_as_single_hunk, materialize_merge_result,
to_file_conflict, update_conflict_from_content,
};
use jujutsu_lib::conflicts::materialize_merge_result;
use jujutsu_lib::gitignore::GitIgnoreFile;
use jujutsu_lib::matchers::EverythingMatcher;
use jujutsu_lib::repo_path::RepoPath;
@@ -161,9 +158,10 @@ pub fn run_mergetool(
None => return Err(ConflictResolveError::PathNotFoundError(repo_path.clone())),
};
let conflict = tree.store().read_conflict(repo_path, &conflict_id)?;
let file_conflict = to_file_conflict(&conflict).ok_or_else(|| {
let file_conflict = conflict.to_file_conflict().ok_or_else(|| {
let mut summary_bytes: Vec<u8> = vec![];
describe_conflict(&conflict, &mut summary_bytes)
conflict
.describe(&mut summary_bytes)
.expect("Writing to an in-memory buffer should never fail");
ConflictResolveError::NotNormalFilesError(
repo_path.clone(),
@@ -177,7 +175,7 @@ pub fn run_mergetool(
sides: file_conflict.adds().len(),
});
};
let mut content = extract_file_conflict_as_single_hunk(tree.store(), repo_path, &file_conflict);
let mut content = file_conflict.extract_as_single_hunk(tree.store(), repo_path);
let editor = get_merge_tool_from_settings(ui, settings)?;
let initial_output_content: Vec<u8> = if editor.merge_tool_edits_conflict_markers {
@@ -246,10 +244,9 @@ pub fn run_mergetool(
let mut new_tree_value: Option<TreeValue> = None;
if editor.merge_tool_edits_conflict_markers {
if let Some(new_conflict) = update_conflict_from_content(
if let Some(new_conflict) = conflict.update_from_content(
tree.store(),
repo_path,
&conflict,
output_file_contents.as_slice(),
)? {
let new_conflict_id = tree.store().write_conflict(repo_path, &new_conflict)?;