
diff: simply pass tokenizer Fn by value

Yuya Nishihara 2024-06-19 16:48:38 +09:00
parent be411de8f6
commit 5f2f13a876
5 changed files with 13 additions and 13 deletions
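
The change itself is mechanical: Diff::for_tokenizer and Diff::refine_changed_regions now take the tokenizer as impl Fn(&[u8]) -> Vec<Range<usize>> by value instead of &impl Fn(...), so callers drop the leading &. Below is a minimal standalone sketch of the before/after signatures; it uses simplified stand-ins, not jj's actual code, and this find_line_ranges is a toy version of the real tokenizer.

use std::ops::Range;

// Toy stand-in for jj's line tokenizer: one range per line of input.
fn find_line_ranges(text: &[u8]) -> Vec<Range<usize>> {
    let mut ranges = vec![];
    let mut start = 0;
    for (i, b) in text.iter().enumerate() {
        if *b == b'\n' {
            ranges.push(start..i + 1);
            start = i + 1;
        }
    }
    if start < text.len() {
        ranges.push(start..text.len());
    }
    ranges
}

// Before: the tokenizer was taken by reference.
fn for_tokenizer_by_ref(input: &[u8], tokenizer: &impl Fn(&[u8]) -> Vec<Range<usize>>) -> usize {
    tokenizer(input).len()
}

// After: the tokenizer is taken by value, so a fn item or closure is passed directly.
fn for_tokenizer_by_value(input: &[u8], tokenizer: impl Fn(&[u8]) -> Vec<Range<usize>>) -> usize {
    tokenizer(input).len()
}

fn main() {
    let text: &[u8] = b"a\nb\nc";
    assert_eq!(for_tokenizer_by_ref(text, &find_line_ranges), 3);
    assert_eq!(for_tokenizer_by_value(text, find_line_ranges), 3);
    // A reference still satisfies the by-value bound, because `&F: Fn(...)`
    // whenever `F: Fn(...)`.
    assert_eq!(for_tokenizer_by_value(text, &find_line_ranges), 3);
}

The last call shows that the by-value signature is strictly more permissive: a borrowed function or closure still satisfies the bound.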


@@ -726,7 +726,7 @@ fn unified_diff_hunks<'content>(
         lines: vec![],
     };
     let mut show_context_after = false;
-    let diff = Diff::for_tokenizer(&[left_content, right_content], &diff::find_line_ranges);
+    let diff = Diff::for_tokenizer(&[left_content, right_content], diff::find_line_ranges);
     for hunk in diff.hunks() {
         match hunk {
             DiffHunk::Matching(content) => {


@@ -227,7 +227,7 @@ fn make_diff_sections(
 ) -> Result<Vec<scm_record::Section<'static>>, BuiltinToolError> {
     let diff = Diff::for_tokenizer(
         &[left_contents.as_bytes(), right_contents.as_bytes()],
-        &find_line_ranges,
+        find_line_ranges,
     );
     let mut sections = Vec::new();
     for hunk in diff.hunks() {


@@ -259,7 +259,7 @@ pub fn materialize_merge_result(
                 output.write_all(&left.0)?;
                 continue;
             };
-            let diff1 = Diff::for_tokenizer(&[&left.0, &right1.0], &find_line_ranges)
+            let diff1 = Diff::for_tokenizer(&[&left.0, &right1.0], find_line_ranges)
                 .hunks()
                 .collect_vec();
             // Check if the diff against the next positive term is better. Since
@@ -267,7 +267,7 @@ pub fn materialize_merge_result(
             // any later positive terms.
             if let Some(right2) = hunk.get_add(add_index + 1) {
                 let diff2 =
-                    Diff::for_tokenizer(&[&left.0, &right2.0], &find_line_ranges)
+                    Diff::for_tokenizer(&[&left.0, &right2.0], find_line_ranges)
                         .hunks()
                         .collect_vec();
                 if diff_size(&diff2) < diff_size(&diff1) {


@@ -395,7 +395,7 @@ fn intersect_regions(
 impl<'input> Diff<'input> {
     pub fn for_tokenizer(
         inputs: &[&'input [u8]],
-        tokenizer: &impl Fn(&[u8]) -> Vec<Range<usize>>,
+        tokenizer: impl Fn(&[u8]) -> Vec<Range<usize>>,
     ) -> Self {
         assert!(!inputs.is_empty());
         let base_input = inputs[0];
@@ -444,7 +444,7 @@ impl<'input> Diff<'input> {
     }
 
     pub fn unrefined(inputs: &[&'input [u8]]) -> Self {
-        Diff::for_tokenizer(inputs, &|_| vec![])
+        Diff::for_tokenizer(inputs, |_| vec![])
     }
 
     // TODO: At least when merging, it's wasteful to refine the diff if e.g. if 2
@@ -454,9 +454,9 @@ impl<'input> Diff<'input> {
     // probably mean that many callers repeat the same code. Perhaps it
     // should be possible to refine a whole diff *or* individual hunks.
     pub fn default_refinement(inputs: &[&'input [u8]]) -> Self {
-        let mut diff = Diff::for_tokenizer(inputs, &find_line_ranges);
-        diff.refine_changed_regions(&find_word_ranges);
-        diff.refine_changed_regions(&find_nonword_ranges);
+        let mut diff = Diff::for_tokenizer(inputs, find_line_ranges);
+        diff.refine_changed_regions(find_word_ranges);
+        diff.refine_changed_regions(find_nonword_ranges);
         diff
     }
@@ -475,7 +475,7 @@ impl<'input> Diff<'input> {
     /// Uses the given tokenizer to split the changed regions into smaller
     /// regions. Then tries to finds unchanged regions among them.
-    pub fn refine_changed_regions(&mut self, tokenizer: &impl Fn(&[u8]) -> Vec<Range<usize>>) {
+    pub fn refine_changed_regions(&mut self, tokenizer: impl Fn(&[u8]) -> Vec<Range<usize>>) {
         let mut previous = UnchangedRange {
             base_range: 0..0,
             offsets: vec![0; self.other_inputs.len()],
@@ -493,7 +493,7 @@ impl<'input> Diff<'input> {
                 slices.push(&self.other_inputs[i][changed_range]);
             }
 
-            let refined_diff = Diff::for_tokenizer(&slices, tokenizer);
+            let refined_diff = Diff::for_tokenizer(&slices, &tokenizer);
 
             for UnchangedRange {
                 base_range,
@@ -931,7 +931,7 @@ mod tests {
         // Tests that unchanged regions are compacted when using for_tokenizer()
         let diff = Diff::for_tokenizer(
             &[b"a\nb\nc\nd\ne\nf\ng", b"a\nb\nc\nX\ne\nf\ng"],
-            &find_line_ranges,
+            find_line_ranges,
         );
         assert_eq!(
             diff.hunks().collect_vec(),


@@ -165,7 +165,7 @@ pub fn merge(slices: &Merge<&[u8]>) -> MergeResult {
     let num_diffs = slices.removes().len();
     let diff_inputs = slices.removes().chain(slices.adds()).copied().collect_vec();
-    let diff = Diff::for_tokenizer(&diff_inputs, &diff::find_line_ranges);
+    let diff = Diff::for_tokenizer(&diff_inputs, diff::find_line_ranges);
     let mut resolved_hunk = ContentHunk(vec![]);
     let mut merge_hunks: Vec<Merge<ContentHunk>> = vec![];
     for diff_hunk in diff.hunks() {
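
Note the one call that gains a & rather than losing one: inside refine_changed_regions the tokenizer is now owned, but it is invoked once per changed region in a loop, so the inner Diff::for_tokenizer(&slices, &tokenizer) call borrows it instead of moving it. That compiles because &F implements Fn whenever F does. A small sketch of the pattern, with hypothetical names rather than jj's code:

use std::ops::Range;

// `for_tokenizer` consumes the Fn by value, while `refine` also receives it
// by value but needs it on every loop iteration, so it lends `&tokenizer`.
fn for_tokenizer(input: &[u8], tokenizer: impl Fn(&[u8]) -> Vec<Range<usize>>) -> usize {
    tokenizer(input).len()
}

fn refine(regions: &[&[u8]], tokenizer: impl Fn(&[u8]) -> Vec<Range<usize>>) -> usize {
    let mut total = 0;
    for &region in regions {
        // Passing `tokenizer` by value here would move it on the first
        // iteration; borrowing keeps it usable for the next one.
        total += for_tokenizer(region, &tokenizer);
    }
    total
}

fn main() {
    let regions: &[&[u8]] = &[b"left", b"right"];
    // Trivial tokenizer: the whole region is a single token.
    let total = refine(regions, |text| vec![0..text.len()]);
    assert_eq!(total, 2);
}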