repo_path: add stub RepoPathBuf type, update callers

Most RepoPath::from_internal_string() callers will be migrated to the function
that returns &RepoPath, and cloning &RepoPath won't work.
This commit is contained in:
Yuya Nishihara 2023-11-26 16:12:36 +09:00
parent f5938985f0
commit 0a1bc2ba42
29 changed files with 976 additions and 900 deletions

View file

@ -26,7 +26,7 @@ use jj_lib::local_working_copy::LocalWorkingCopy;
use jj_lib::merged_tree::MergedTree;
use jj_lib::op_store::{OperationId, WorkspaceId};
use jj_lib::repo::ReadonlyRepo;
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::RepoPathBuf;
use jj_lib::settings::UserSettings;
use jj_lib::store::Store;
use jj_lib::working_copy::{
@ -171,7 +171,7 @@ impl WorkingCopy for ConflictsWorkingCopy {
self.inner.tree_id()
}
fn sparse_patterns(&self) -> Result<&[RepoPath], WorkingCopyStateError> {
fn sparse_patterns(&self) -> Result<&[RepoPathBuf], WorkingCopyStateError> {
self.inner.sparse_patterns()
}
@ -225,13 +225,13 @@ impl LockedWorkingCopy for LockedConflictsWorkingCopy {
self.inner.reset(new_tree)
}
fn sparse_patterns(&self) -> Result<&[RepoPath], WorkingCopyStateError> {
fn sparse_patterns(&self) -> Result<&[RepoPathBuf], WorkingCopyStateError> {
self.inner.sparse_patterns()
}
fn set_sparse_patterns(
&mut self,
new_sparse_patterns: Vec<RepoPath>,
new_sparse_patterns: Vec<RepoPathBuf>,
) -> Result<CheckoutStats, CheckoutError> {
self.inner.set_sparse_patterns(new_sparse_patterns)
}

View file

@ -49,7 +49,7 @@ use jj_lib::repo::{
CheckOutCommitError, EditCommitError, MutableRepo, ReadonlyRepo, Repo, RepoLoader,
RepoLoaderError, RewriteRootCommit, StoreFactories, StoreLoadError,
};
use jj_lib::repo_path::{FsPathParseError, RepoPath};
use jj_lib::repo_path::{FsPathParseError, RepoPath, RepoPathBuf};
use jj_lib::revset::{
DefaultSymbolResolver, Revset, RevsetAliasesMap, RevsetCommitRef, RevsetEvaluationError,
RevsetExpression, RevsetIteratorExt, RevsetParseContext, RevsetParseError,
@ -904,8 +904,8 @@ impl WorkspaceCommandHelper {
/// Parses a path relative to cwd into a RepoPath, which is relative to the
/// workspace root.
pub fn parse_file_path(&self, input: &str) -> Result<RepoPath, FsPathParseError> {
RepoPath::parse_fs_path(&self.cwd, self.workspace_root(), input)
pub fn parse_file_path(&self, input: &str) -> Result<RepoPathBuf, FsPathParseError> {
RepoPathBuf::parse_fs_path(&self.cwd, self.workspace_root(), input)
}
pub fn matcher_from_values(&self, values: &[String]) -> Result<Box<dyn Matcher>, CommandError> {

View file

@ -1119,13 +1119,13 @@ fn cmd_git_submodule_print_gitmodules(
let repo = workspace_command.repo();
let commit = workspace_command.resolve_single_rev(&args.revisions, ui)?;
let tree = commit.tree()?;
let gitmodules_path = RepoPath::from_internal_string(".gitmodules");
let mut gitmodules_file = match tree.path_value(&gitmodules_path).into_resolved() {
let gitmodules_path = &RepoPath::from_internal_string(".gitmodules");
let mut gitmodules_file = match tree.path_value(gitmodules_path).into_resolved() {
Ok(None) => {
writeln!(ui.stderr(), "No submodules!")?;
return Ok(());
}
Ok(Some(TreeValue::File { id, .. })) => repo.store().read_file(&gitmodules_path, &id)?,
Ok(Some(TreeValue::File { id, .. })) => repo.store().read_file(gitmodules_path, &id)?,
_ => {
return Err(user_error(".gitmodules is not a file."));
}

View file

@ -18,7 +18,7 @@ use std::io::Write;
use itertools::Itertools;
use jj_lib::backend::{ObjectId, TreeValue};
use jj_lib::merge::MergedTreeValue;
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::RepoPathBuf;
use tracing::instrument;
use crate::cli_util::{CommandError, CommandHelper, WorkspaceCommandHelper};
@ -127,7 +127,7 @@ pub(crate) fn cmd_resolve(
#[instrument(skip_all)]
pub(crate) fn print_conflicted_paths(
conflicts: &[(RepoPath, MergedTreeValue)],
conflicts: &[(RepoPathBuf, MergedTreeValue)],
formatter: &mut dyn Formatter,
workspace_command: &WorkspaceCommandHelper,
) -> Result<(), CommandError> {

View file

@ -19,7 +19,7 @@ use std::path::Path;
use clap::Subcommand;
use itertools::Itertools;
use jj_lib::file_util;
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::RepoPathBuf;
use jj_lib::settings::UserSettings;
use tracing::instrument;
@ -122,7 +122,7 @@ fn cmd_sparse_set(
let (mut locked_ws, wc_commit) = workspace_command.start_working_copy_mutation()?;
let mut new_patterns = HashSet::new();
if args.reset {
new_patterns.insert(RepoPath::root());
new_patterns.insert(RepoPathBuf::root());
} else {
if !args.clear {
new_patterns.extend(locked_ws.locked_wc().sparse_patterns()?.iter().cloned());
@ -161,9 +161,9 @@ fn cmd_sparse_set(
fn edit_sparse(
workspace_root: &Path,
repo_path: &Path,
sparse: &[RepoPath],
sparse: &[RepoPathBuf],
settings: &UserSettings,
) -> Result<Vec<RepoPath>, CommandError> {
) -> Result<Vec<RepoPathBuf>, CommandError> {
let file = (|| -> Result<_, io::Error> {
let mut file = tempfile::Builder::new()
.prefix("editor-")
@ -216,7 +216,7 @@ fn edit_sparse(
path = file_path.display()
))
})?;
Ok::<_, CommandError>(RepoPath::parse_fs_path(
Ok::<_, CommandError>(RepoPathBuf::parse_fs_path(
workspace_root,
workspace_root,
line.trim(),

View file

@ -10,7 +10,7 @@ use jj_lib::files::{self, ContentHunk, MergeResult};
use jj_lib::matchers::Matcher;
use jj_lib::merge::Merge;
use jj_lib::merged_tree::{MergedTree, MergedTreeBuilder};
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::{RepoPath, RepoPathBuf};
use jj_lib::store::Store;
use pollster::FutureExt;
use thiserror::Error;
@ -23,7 +23,7 @@ pub enum BuiltinToolError {
ReadFileBackend(BackendError),
#[error("Failed to read file {path:?} with ID {id:?}: {source}")]
ReadFileIo {
path: RepoPath,
path: RepoPathBuf,
id: FileId,
source: std::io::Error,
},
@ -119,7 +119,7 @@ fn read_file_contents(
reader
.read_to_end(&mut buf)
.map_err(|err| BuiltinToolError::ReadFileIo {
path: path.clone(),
path: path.to_owned(),
id: id.clone(),
source: err,
})?;
@ -235,7 +235,7 @@ pub fn make_diff_files(
store: &Arc<Store>,
left_tree: &MergedTree,
right_tree: &MergedTree,
changed_files: &[RepoPath],
changed_files: &[RepoPathBuf],
) -> Result<Vec<scm_record::File<'static>>, BuiltinToolError> {
let mut files = Vec::new();
for changed_path in changed_files {
@ -361,7 +361,7 @@ pub fn apply_diff_builtin(
store: Arc<Store>,
left_tree: &MergedTree,
right_tree: &MergedTree,
changed_files: Vec<RepoPath>,
changed_files: Vec<RepoPathBuf>,
files: &[scm_record::File],
) -> Result<MergedTreeId, BackendError> {
let mut tree_builder = MergedTreeBuilder::new(left_tree.id().clone());
@ -537,7 +537,7 @@ pub fn edit_merge_builtin(
tree.store().clone(),
tree,
tree,
vec![path.clone()],
vec![path.to_owned()],
&[file],
)
.map_err(BuiltinToolError::BackendError)
@ -558,29 +558,33 @@ mod tests {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let unused_path = RepoPath::from_internal_string("unused");
let unchanged = RepoPath::from_internal_string("unchanged");
let changed_path = RepoPath::from_internal_string("changed");
let added_path = RepoPath::from_internal_string("added");
let unused_path = &RepoPath::from_internal_string("unused");
let unchanged = &RepoPath::from_internal_string("unchanged");
let changed_path = &RepoPath::from_internal_string("changed");
let added_path = &RepoPath::from_internal_string("added");
let left_tree = testutils::create_tree(
&test_repo.repo,
&[
(&unused_path, "unused\n"),
(&unchanged, "unchanged\n"),
(&changed_path, "line1\nline2\nline3\n"),
(unused_path, "unused\n"),
(unchanged, "unchanged\n"),
(changed_path, "line1\nline2\nline3\n"),
],
);
let right_tree = testutils::create_tree(
&test_repo.repo,
&[
(&unused_path, "unused\n"),
(&unchanged, "unchanged\n"),
(&changed_path, "line1\nchanged1\nchanged2\nline3\nadded1\n"),
(&added_path, "added\n"),
(unused_path, "unused\n"),
(unchanged, "unchanged\n"),
(changed_path, "line1\nchanged1\nchanged2\nline3\nadded1\n"),
(added_path, "added\n"),
],
);
let changed_files = vec![unchanged.clone(), changed_path.clone(), added_path.clone()];
let changed_files = vec![
unchanged.to_owned(),
changed_path.to_owned(),
added_path.to_owned(),
];
let files = make_diff_files(store, &left_tree, &right_tree, &changed_files).unwrap();
insta::assert_debug_snapshot!(files, @r###"
[
@ -712,18 +716,18 @@ mod tests {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let path = RepoPath::from_internal_string("file");
let path = &RepoPath::from_internal_string("file");
let base_tree = testutils::create_tree(
&test_repo.repo,
&[(&path, "base 1\nbase 2\nbase 3\nbase 4\nbase 5\n")],
&[(path, "base 1\nbase 2\nbase 3\nbase 4\nbase 5\n")],
);
let left_tree = testutils::create_tree(
&test_repo.repo,
&[(&path, "left 1\nbase 2\nbase 3\nbase 4\nleft 5\n")],
&[(path, "left 1\nbase 2\nbase 3\nbase 4\nleft 5\n")],
);
let right_tree = testutils::create_tree(
&test_repo.repo,
&[(&path, "right 1\nbase 2\nbase 3\nbase 4\nright 5\n")],
&[(path, "right 1\nbase 2\nbase 3\nbase 4\nright 5\n")],
);
fn to_file_id(tree_value: MergedTreeValue) -> Option<FileId> {
@ -735,11 +739,11 @@ mod tests {
}
}
let merge = Merge::from_vec(vec![
to_file_id(left_tree.path_value(&path)),
to_file_id(base_tree.path_value(&path)),
to_file_id(right_tree.path_value(&path)),
to_file_id(left_tree.path_value(path)),
to_file_id(base_tree.path_value(path)),
to_file_id(right_tree.path_value(path)),
]);
let content = extract_as_single_hunk(&merge, store, &path).block_on();
let content = extract_as_single_hunk(&merge, store, path).block_on();
let slices = content.map(|ContentHunk(buf)| buf.as_slice());
let merge_result = files::merge(&slices);
let sections = make_merge_sections(merge_result).unwrap();

View file

@ -15,7 +15,7 @@ use jj_lib::local_working_copy::{TreeState, TreeStateError};
use jj_lib::matchers::Matcher;
use jj_lib::merge::{Merge, MergedTreeValue};
use jj_lib::merged_tree::{MergedTree, MergedTreeBuilder};
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::{RepoPath, RepoPathBuf};
use jj_lib::settings::UserSettings;
use jj_lib::store::Store;
use jj_lib::working_copy::{CheckoutError, SnapshotOptions};
@ -189,7 +189,7 @@ fn check_out(
wc_dir: PathBuf,
state_dir: PathBuf,
tree: &MergedTree,
sparse_patterns: Vec<RepoPath>,
sparse_patterns: Vec<RepoPathBuf>,
) -> Result<TreeState, DiffCheckoutError> {
std::fs::create_dir(&wc_dir).map_err(DiffCheckoutError::SetUpDir)?;
std::fs::create_dir(&state_dir).map_err(DiffCheckoutError::SetUpDir)?;
@ -384,7 +384,7 @@ pub fn run_mergetool_external(
Err(new_file_ids) => conflict.with_new_file_ids(&new_file_ids),
};
let mut tree_builder = MergedTreeBuilder::new(tree.id());
tree_builder.set_or_remove(repo_path.clone(), new_tree_value);
tree_builder.set_or_remove(repo_path.to_owned(), new_tree_value);
let new_tree = tree_builder.write_tree(tree.store())?;
Ok(new_tree)
}

View file

@ -23,7 +23,7 @@ use jj_lib::conflicts::extract_as_single_hunk;
use jj_lib::gitignore::GitIgnoreFile;
use jj_lib::matchers::Matcher;
use jj_lib::merged_tree::MergedTree;
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::{RepoPath, RepoPathBuf};
use jj_lib::settings::{ConfigResultExt as _, UserSettings};
use jj_lib::working_copy::SnapshotError;
use pollster::FutureExt;
@ -66,16 +66,16 @@ pub enum ConflictResolveError {
#[error(transparent)]
ExternalTool(#[from] ExternalToolError),
#[error("Couldn't find the path {0:?} in this revision")]
PathNotFound(RepoPath),
PathNotFound(RepoPathBuf),
#[error("Couldn't find any conflicts at {0:?} in this revision")]
NotAConflict(RepoPath),
NotAConflict(RepoPathBuf),
#[error(
"Only conflicts that involve normal files (not symlinks, not executable, etc.) are \
supported. Conflict summary for {0:?}:\n{1}"
)]
NotNormalFiles(RepoPath, String),
NotNormalFiles(RepoPathBuf, String),
#[error("The conflict at {path:?} has {sides} sides. At most 2 sides are supported.")]
ConflictTooComplicated { path: RepoPath, sides: usize },
ConflictTooComplicated { path: RepoPathBuf, sides: usize },
#[error(
"The output file is either unchanged or empty after the editor quit (run with --verbose \
to see the exact invocation)."
@ -93,8 +93,8 @@ pub fn run_mergetool(
) -> Result<MergedTreeId, ConflictResolveError> {
let conflict = match tree.path_value(repo_path).into_resolved() {
Err(conflict) => conflict,
Ok(Some(_)) => return Err(ConflictResolveError::NotAConflict(repo_path.clone())),
Ok(None) => return Err(ConflictResolveError::PathNotFound(repo_path.clone())),
Ok(Some(_)) => return Err(ConflictResolveError::NotAConflict(repo_path.to_owned())),
Ok(None) => return Err(ConflictResolveError::PathNotFound(repo_path.to_owned())),
};
let file_merge = conflict.to_file_merge().ok_or_else(|| {
let mut summary_bytes: Vec<u8> = vec![];
@ -102,14 +102,14 @@ pub fn run_mergetool(
.describe(&mut summary_bytes)
.expect("Writing to an in-memory buffer should never fail");
ConflictResolveError::NotNormalFiles(
repo_path.clone(),
repo_path.to_owned(),
String::from_utf8_lossy(summary_bytes.as_slice()).to_string(),
)
})?;
// We only support conflicts with 2 sides (3-way conflicts)
if file_merge.num_sides() > 2 {
return Err(ConflictResolveError::ConflictTooComplicated {
path: repo_path.clone(),
path: repo_path.to_owned(),
sides: file_merge.num_sides(),
});
};

View file

@ -57,7 +57,7 @@ use crate::matchers::{
use crate::merge::{Merge, MergeBuilder, MergedTreeValue};
use crate::merged_tree::{MergedTree, MergedTreeBuilder};
use crate::op_store::{OperationId, WorkspaceId};
use crate::repo_path::{RepoPath, RepoPathComponent};
use crate::repo_path::{RepoPath, RepoPathBuf, RepoPathComponent};
use crate::settings::HumanByteSize;
use crate::store::Store;
use crate::tree::Tree;
@ -143,7 +143,7 @@ impl FileState {
/// build a loaded `BTreeMap<RepoPath, _>` at all.
#[derive(Clone, Debug)]
struct LazyFileStatesMap {
loaded: OnceLock<BTreeMap<RepoPath, FileState>>,
loaded: OnceLock<BTreeMap<RepoPathBuf, FileState>>,
proto: Option<Vec<crate::protos::working_copy::FileStateEntry>>,
}
@ -173,14 +173,14 @@ impl LazyFileStatesMap {
}
}
fn get_or_load(&self) -> &BTreeMap<RepoPath, FileState> {
fn get_or_load(&self) -> &BTreeMap<RepoPathBuf, FileState> {
self.loaded.get_or_init(|| {
let proto = self.proto.as_ref().expect("loaded or proto must exist");
file_states_from_proto(proto)
})
}
fn make_mut(&mut self) -> &mut BTreeMap<RepoPath, FileState> {
fn make_mut(&mut self) -> &mut BTreeMap<RepoPathBuf, FileState> {
self.get_or_load();
self.proto.take(); // mark dirty
self.loaded.get_mut().unwrap()
@ -194,7 +194,7 @@ pub struct TreeState {
tree_id: MergedTreeId,
file_states: LazyFileStatesMap,
// Currently only path prefixes
sparse_patterns: Vec<RepoPath>,
sparse_patterns: Vec<RepoPathBuf>,
own_mtime: MillisSinceEpoch,
/// The most recent clock value returned by Watchman. Will only be set if
@ -247,19 +247,19 @@ fn file_state_to_proto(file_state: &FileState) -> crate::protos::working_copy::F
#[instrument(skip(proto))]
fn file_states_from_proto(
proto: &[crate::protos::working_copy::FileStateEntry],
) -> BTreeMap<RepoPath, FileState> {
) -> BTreeMap<RepoPathBuf, FileState> {
tracing::debug!("loading file states from proto");
proto
.iter()
.map(|entry| {
let path = RepoPath::from_internal_string(&entry.path);
let path = RepoPathBuf::from_internal_string(&entry.path);
(path, file_state_from_proto(entry.state.as_ref().unwrap()))
})
.collect()
}
fn file_states_to_proto(
file_states: &BTreeMap<RepoPath, FileState>,
file_states: &BTreeMap<RepoPathBuf, FileState>,
) -> Vec<crate::protos::working_copy::FileStateEntry> {
file_states
.iter()
@ -274,16 +274,16 @@ fn file_states_to_proto(
fn sparse_patterns_from_proto(
proto: Option<&crate::protos::working_copy::SparsePatterns>,
) -> Vec<RepoPath> {
) -> Vec<RepoPathBuf> {
let mut sparse_patterns = vec![];
if let Some(proto_sparse_patterns) = proto {
for prefix in &proto_sparse_patterns.prefixes {
sparse_patterns.push(RepoPath::from_internal_string(prefix));
sparse_patterns.push(RepoPathBuf::from_internal_string(prefix));
}
} else {
// For compatibility with old working copies.
// TODO: Delete this in late 2022 or so.
sparse_patterns.push(RepoPath::root());
sparse_patterns.push(RepoPathBuf::root());
}
sparse_patterns
}
@ -382,7 +382,7 @@ struct FsmonitorMatcher {
}
struct DirectoryToVisit {
dir: RepoPath,
dir: RepoPathBuf,
disk_dir: PathBuf,
git_ignore: Arc<GitIgnoreFile>,
}
@ -422,11 +422,11 @@ impl TreeState {
&self.tree_id
}
pub fn file_states(&self) -> &BTreeMap<RepoPath, FileState> {
pub fn file_states(&self) -> &BTreeMap<RepoPathBuf, FileState> {
self.file_states.get_or_load()
}
pub fn sparse_patterns(&self) -> &Vec<RepoPath> {
pub fn sparse_patterns(&self) -> &Vec<RepoPathBuf> {
&self.sparse_patterns
}
@ -454,7 +454,7 @@ impl TreeState {
state_path,
tree_id,
file_states: LazyFileStatesMap::new(),
sparse_patterns: vec![RepoPath::root()],
sparse_patterns: vec![RepoPathBuf::root()],
own_mtime: MillisSinceEpoch(0),
watchman_clock: None,
}
@ -660,7 +660,7 @@ impl TreeState {
trace_span!("traverse filesystem").in_scope(|| -> Result<(), SnapshotError> {
let current_tree = self.current_tree()?;
let directory_to_visit = DirectoryToVisit {
dir: RepoPath::root(),
dir: RepoPathBuf::root(),
disk_dir: self.working_copy_path.clone(),
git_ignore: base_ignores,
};
@ -738,9 +738,9 @@ impl TreeState {
&self,
matcher: &dyn Matcher,
current_tree: &MergedTree,
tree_entries_tx: Sender<(RepoPath, MergedTreeValue)>,
file_states_tx: Sender<(RepoPath, FileState)>,
present_files_tx: Sender<RepoPath>,
tree_entries_tx: Sender<(RepoPathBuf, MergedTreeValue)>,
file_states_tx: Sender<(RepoPathBuf, FileState)>,
present_files_tx: Sender<RepoPathBuf>,
directory_to_visit: DirectoryToVisit,
progress: Option<&SnapshotProgress>,
max_new_file_size: u64,
@ -936,7 +936,7 @@ impl TreeState {
let repo_paths = trace_span!("processing fsmonitor paths").in_scope(|| {
changed_files
.into_iter()
.filter_map(RepoPath::from_relative_path)
.filter_map(RepoPathBuf::from_relative_path)
.collect_vec()
});
@ -1164,7 +1164,7 @@ impl TreeState {
pub fn set_sparse_patterns(
&mut self,
sparse_patterns: Vec<RepoPath>,
sparse_patterns: Vec<RepoPathBuf>,
) -> Result<CheckoutStats, CheckoutError> {
let tree = self.current_tree().map_err(|err| match err {
err @ BackendError::ObjectNotFound { .. } => CheckoutError::SourceNotFound {
@ -1176,7 +1176,7 @@ impl TreeState {
let new_matcher = PrefixMatcher::new(&sparse_patterns);
let added_matcher = DifferenceMatcher::new(&new_matcher, &old_matcher);
let removed_matcher = DifferenceMatcher::new(&old_matcher, &new_matcher);
let empty_tree = MergedTree::resolved(Tree::null(self.store.clone(), RepoPath::root()));
let empty_tree = MergedTree::resolved(Tree::null(self.store.clone(), RepoPathBuf::root()));
let added_stats = self.update(&empty_tree, &tree, &added_matcher).block_on()?;
let removed_stats = self
.update(&tree, &empty_tree, &removed_matcher)
@ -1379,7 +1379,7 @@ impl WorkingCopy for LocalWorkingCopy {
Ok(self.tree_state()?.current_tree_id())
}
fn sparse_patterns(&self) -> Result<&[RepoPath], WorkingCopyStateError> {
fn sparse_patterns(&self) -> Result<&[RepoPathBuf], WorkingCopyStateError> {
Ok(self.tree_state()?.sparse_patterns())
}
@ -1523,7 +1523,7 @@ impl LocalWorkingCopy {
Ok(self.tree_state.get_mut().unwrap())
}
pub fn file_states(&self) -> Result<&BTreeMap<RepoPath, FileState>, WorkingCopyStateError> {
pub fn file_states(&self) -> Result<&BTreeMap<RepoPathBuf, FileState>, WorkingCopyStateError> {
Ok(self.tree_state()?.file_states())
}
@ -1618,13 +1618,13 @@ impl LockedWorkingCopy for LockedLocalWorkingCopy {
Ok(())
}
fn sparse_patterns(&self) -> Result<&[RepoPath], WorkingCopyStateError> {
fn sparse_patterns(&self) -> Result<&[RepoPathBuf], WorkingCopyStateError> {
self.wc.sparse_patterns()
}
fn set_sparse_patterns(
&mut self,
new_sparse_patterns: Vec<RepoPath>,
new_sparse_patterns: Vec<RepoPathBuf>,
) -> Result<CheckoutStats, CheckoutError> {
// TODO: Write a "pending_checkout" file with new sparse patterns so we can
// continue an interrupted update if we find such a file.

View file

@ -569,7 +569,7 @@ where
if let Some(id) = id {
store.get_tree(dir, id)
} else {
Ok(Tree::null(store.clone(), dir.clone()))
Ok(Tree::null(store.clone(), dir.to_owned()))
}
};
Ok(Some(tree_id_merge.try_map(get_tree)?))

View file

@ -30,7 +30,7 @@ use pollster::FutureExt;
use crate::backend::{BackendError, BackendResult, ConflictId, MergedTreeId, TreeId, TreeValue};
use crate::matchers::{EverythingMatcher, Matcher};
use crate::merge::{Merge, MergeBuilder, MergedTreeValue};
use crate::repo_path::{RepoPath, RepoPathComponent, RepoPathComponentsIter};
use crate::repo_path::{RepoPath, RepoPathBuf, RepoPathComponent, RepoPathComponentsIter};
use crate::store::Store;
use crate::tree::{try_resolve_file_conflict, Tree, TreeMergeError};
use crate::tree_builder::TreeBuilder;
@ -69,11 +69,11 @@ impl MergedTreeVal<'_> {
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct DiffSummary {
/// Modified files
pub modified: Vec<RepoPath>,
pub modified: Vec<RepoPathBuf>,
/// Added files
pub added: Vec<RepoPath>,
pub added: Vec<RepoPathBuf>,
/// Removed files
pub removed: Vec<RepoPath>,
pub removed: Vec<RepoPathBuf>,
}
impl MergedTree {
@ -125,7 +125,7 @@ impl MergedTree {
// value, so we use that.
let terms_padded = conflict.into_iter().chain(iter::repeat(None));
for (builder, term) in zip(&mut tree_builders, terms_padded) {
builder.set_or_remove(path.clone(), term);
builder.set_or_remove(path.to_owned(), term);
}
}
@ -197,7 +197,7 @@ impl MergedTree {
/// all sides are trees, so tree/file conflicts will be reported as a single
/// conflict, not one for each path in the tree.
// TODO: Restrict this by a matcher (or add a separate method for that).
pub fn conflicts(&self) -> impl Iterator<Item = (RepoPath, MergedTreeValue)> {
pub fn conflicts(&self) -> impl Iterator<Item = (RepoPathBuf, MergedTreeValue)> {
ConflictIterator::new(self)
}
@ -411,7 +411,12 @@ impl MergedTree {
/// ones) can fetch trees asynchronously.
pub type TreeDiffStream<'matcher> = Pin<
Box<
dyn Stream<Item = (RepoPath, BackendResult<(MergedTreeValue, MergedTreeValue)>)> + 'matcher,
dyn Stream<
Item = (
RepoPathBuf,
BackendResult<(MergedTreeValue, MergedTreeValue)>,
),
> + 'matcher,
>,
>;
@ -548,7 +553,7 @@ pub struct TreeEntriesIterator<'matcher> {
struct TreeEntriesDirItem {
tree: MergedTree,
entries: Vec<(RepoPath, MergedTreeValue)>,
entries: Vec<(RepoPathBuf, MergedTreeValue)>,
}
impl TreeEntriesDirItem {
@ -583,7 +588,7 @@ impl<'matcher> TreeEntriesIterator<'matcher> {
}
impl Iterator for TreeEntriesIterator<'_> {
type Item = (RepoPath, MergedTreeValue);
type Item = (RepoPathBuf, MergedTreeValue);
fn next(&mut self) -> Option<Self::Item> {
while let Some(top) = self.stack.last_mut() {
@ -610,7 +615,7 @@ impl Iterator for TreeEntriesIterator<'_> {
/// The state for the non-recursive iteration over the conflicted entries in a
/// single directory.
struct ConflictsDirItem {
entries: Vec<(RepoPath, MergedTreeValue)>,
entries: Vec<(RepoPathBuf, MergedTreeValue)>,
}
impl From<&Merge<Tree>> for ConflictsDirItem {
@ -637,7 +642,7 @@ impl From<&Merge<Tree>> for ConflictsDirItem {
enum ConflictIterator {
Legacy {
store: Arc<Store>,
conflicts_iter: vec::IntoIter<(RepoPath, ConflictId)>,
conflicts_iter: vec::IntoIter<(RepoPathBuf, ConflictId)>,
},
Merge {
store: Arc<Store>,
@ -661,7 +666,7 @@ impl ConflictIterator {
}
impl Iterator for ConflictIterator {
type Item = (RepoPath, MergedTreeValue);
type Item = (RepoPathBuf, MergedTreeValue);
fn next(&mut self) -> Option<Self::Item> {
match self {
@ -741,7 +746,7 @@ pub struct TreeDiffIterator<'matcher> {
struct TreeDiffDirItem {
tree1: MergedTree,
tree2: MergedTree,
entries: Vec<(RepoPath, MergedTreeValue, MergedTreeValue)>,
entries: Vec<(RepoPathBuf, MergedTreeValue, MergedTreeValue)>,
}
enum TreeDiffItem {
@ -749,18 +754,18 @@ enum TreeDiffItem {
// This is used for making sure that when a directory gets replaced by a file, we
// yield the value for the addition of the file after we yield the values
// for removing files in the directory.
File(RepoPath, MergedTreeValue, MergedTreeValue),
File(RepoPathBuf, MergedTreeValue, MergedTreeValue),
}
impl<'matcher> TreeDiffIterator<'matcher> {
/// Creates an iterator over the differences between two trees. Generally
/// prefer `MergedTree::diff()` over calling this directly.
pub fn new(tree1: MergedTree, tree2: MergedTree, matcher: &'matcher dyn Matcher) -> Self {
let root_dir = RepoPath::root();
let root_dir = &RepoPath::root();
let mut stack = Vec::new();
if !matcher.visit(&root_dir).is_nothing() {
if !matcher.visit(root_dir).is_nothing() {
stack.push(TreeDiffItem::Dir(TreeDiffDirItem::from_trees(
&root_dir, tree1, tree2, matcher,
root_dir, tree1, tree2, matcher,
)));
};
Self { stack, matcher }
@ -773,7 +778,7 @@ impl<'matcher> TreeDiffIterator<'matcher> {
) -> BackendResult<Tree> {
match value {
Some(TreeValue::Tree(tree_id)) => store.get_tree_async(dir, tree_id).await,
_ => Ok(Tree::null(store.clone(), dir.clone())),
_ => Ok(Tree::null(store.clone(), dir.to_owned())),
}
}
@ -790,7 +795,7 @@ impl<'matcher> TreeDiffIterator<'matcher> {
.await?;
builder.build()
} else {
Merge::resolved(Tree::null(tree.store().clone(), dir.clone()))
Merge::resolved(Tree::null(tree.store().clone(), dir.to_owned()))
};
// Maintain the type of tree, so we resolve `TreeValue::Conflict` as necessary
// in the subtree
@ -846,7 +851,10 @@ impl TreeDiffDirItem {
}
impl Iterator for TreeDiffIterator<'_> {
type Item = (RepoPath, BackendResult<(MergedTreeValue, MergedTreeValue)>);
type Item = (
RepoPathBuf,
BackendResult<(MergedTreeValue, MergedTreeValue)>,
);
fn next(&mut self) -> Option<Self::Item> {
while let Some(top) = self.stack.last_mut() {
@ -926,7 +934,7 @@ pub struct TreeDiffStreamImpl<'matcher> {
// TODO: Is it better to combine this and `items` into a single map?
#[allow(clippy::type_complexity)]
pending_trees: VecDeque<(
RepoPath,
RepoPathBuf,
Pin<Box<dyn Future<Output = BackendResult<(MergedTree, MergedTree)>> + 'matcher>>,
)>,
/// The maximum number of trees to request concurrently. However, we do the
@ -946,19 +954,19 @@ pub struct TreeDiffStreamImpl<'matcher> {
/// directories that have the file as a prefix.
#[derive(PartialEq, Eq, Clone, Debug)]
struct DiffStreamKey {
path: RepoPath,
path: RepoPathBuf,
file_after_dir: bool,
}
impl DiffStreamKey {
fn normal(path: RepoPath) -> Self {
fn normal(path: RepoPathBuf) -> Self {
DiffStreamKey {
path,
file_after_dir: false,
}
}
fn file_after_dir(path: RepoPath) -> Self {
fn file_after_dir(path: RepoPathBuf) -> Self {
DiffStreamKey {
path,
file_after_dir: true,
@ -1004,7 +1012,7 @@ impl<'matcher> TreeDiffStreamImpl<'matcher> {
max_concurrent_reads,
max_queued_items: 10000,
};
stream.add_dir_diff_items(RepoPath::root(), Ok((tree1, tree2)));
stream.add_dir_diff_items(RepoPathBuf::root(), Ok((tree1, tree2)));
stream
}
@ -1012,7 +1020,7 @@ impl<'matcher> TreeDiffStreamImpl<'matcher> {
async fn tree(
store: Arc<Store>,
legacy_format: bool,
dir: RepoPath,
dir: RepoPathBuf,
values: MergedTreeValue,
) -> BackendResult<MergedTree> {
let trees = if values.is_tree() {
@ -1035,7 +1043,7 @@ impl<'matcher> TreeDiffStreamImpl<'matcher> {
fn add_dir_diff_items(
&mut self,
dir: RepoPath,
dir: RepoPathBuf,
tree_diff: BackendResult<(MergedTree, MergedTree)>,
) {
let (tree1, tree2) = match tree_diff {
@ -1135,7 +1143,10 @@ impl<'matcher> TreeDiffStreamImpl<'matcher> {
}
impl Stream for TreeDiffStreamImpl<'_> {
type Item = (RepoPath, BackendResult<(MergedTreeValue, MergedTreeValue)>);
type Item = (
RepoPathBuf,
BackendResult<(MergedTreeValue, MergedTreeValue)>,
);
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
// Go through all pending tree futures and poll them.
@ -1173,7 +1184,7 @@ impl Stream for TreeDiffStreamImpl<'_> {
/// (allowing path-level conflicts) or as multiple conflict-free trees.
pub struct MergedTreeBuilder {
base_tree_id: MergedTreeId,
overrides: BTreeMap<RepoPath, MergedTreeValue>,
overrides: BTreeMap<RepoPathBuf, MergedTreeValue>,
}
impl MergedTreeBuilder {
@ -1191,7 +1202,7 @@ impl MergedTreeBuilder {
/// `Merge::absent()` to remove a value from the tree. When the base tree is
/// a legacy tree, conflicts can be written either as a multi-way `Merge`
/// value or as a resolved `Merge` value using `TreeValue::Conflict`.
pub fn set_or_remove(&mut self, path: RepoPath, values: MergedTreeValue) {
pub fn set_or_remove(&mut self, path: RepoPathBuf, values: MergedTreeValue) {
if let MergedTreeId::Merge(_) = &self.base_tree_id {
assert!(!values
.iter()

View file

@ -120,8 +120,8 @@ pub struct RepoPathComponentsIter<'a> {
impl RepoPathComponentsIter<'_> {
// TODO: add borrowed RepoPath type and implement as_path() instead
fn to_path(&self) -> RepoPath {
RepoPath {
fn to_path(&self) -> RepoPathBuf {
RepoPathBuf {
value: self.value.to_owned(),
}
}
@ -159,6 +159,9 @@ impl DoubleEndedIterator for RepoPathComponentsIter<'_> {
impl FusedIterator for RepoPathComponentsIter<'_> {}
// TODO: make RepoPath a borrowed type
pub type RepoPathBuf = RepoPath;
#[derive(Clone, Eq, Hash, PartialEq)]
pub struct RepoPath {
value: String,
@ -170,24 +173,24 @@ impl Debug for RepoPath {
}
}
impl RepoPath {
impl RepoPathBuf {
pub const fn root() -> Self {
RepoPath {
RepoPathBuf {
value: String::new(),
}
}
/// Creates `RepoPath` from valid string representation.
/// Creates `RepoPathBuf` from valid string representation.
///
/// The input `value` must not contain empty path components. For example,
/// `"/"`, `"/foo"`, `"foo/"`, `"foo//bar"` are all invalid.
pub fn from_internal_string(value: impl Into<String>) -> Self {
let value = value.into();
assert!(is_valid_repo_path_str(&value));
RepoPath { value }
RepoPathBuf { value }
}
/// Converts repo-relative `Path` to `RepoPath`.
/// Converts repo-relative `Path` to `RepoPathBuf`.
///
/// The input path should not contain `.` or `..`.
pub fn from_relative_path(relative_path: impl AsRef<Path>) -> Option<Self> {
@ -208,10 +211,10 @@ impl RepoPath {
value.push('/');
value.push_str(name?);
}
Some(RepoPath { value })
Some(RepoPathBuf { value })
}
/// Parses an `input` path into a `RepoPath` relative to `base`.
/// Parses an `input` path into a `RepoPathBuf` relative to `base`.
///
/// The `cwd` and `base` paths are supposed to be absolute and normalized in
/// the same manner. The `input` path may be either relative to `cwd` or
@ -225,12 +228,14 @@ impl RepoPath {
let abs_input_path = file_util::normalize_path(&cwd.join(input));
let repo_relative_path = file_util::relative_path(base, &abs_input_path);
if repo_relative_path == Path::new(".") {
return Ok(RepoPath::root());
return Ok(Self::root());
}
Self::from_relative_path(repo_relative_path)
.ok_or_else(|| FsPathParseError::InputNotInRepo(input.to_owned()))
}
}
impl RepoPath {
/// The full string form used internally, not for presenting to users (where
/// we may want to use the platform's separator). This format includes a
/// trailing slash, unless this path represents the root directory. That
@ -279,11 +284,13 @@ impl RepoPath {
}
}
pub fn parent(&self) -> Option<RepoPath> {
// TODO: make it return borrowed RepoPath type
pub fn parent(&self) -> Option<RepoPathBuf> {
self.split().map(|(parent, _)| parent)
}
pub fn split(&self) -> Option<(RepoPath, &RepoPathComponent)> {
// TODO: make it return borrowed RepoPath type
pub fn split(&self) -> Option<(RepoPathBuf, &RepoPathComponent)> {
let mut components = self.components();
let basename = components.next_back()?;
Some((components.to_path(), basename))
@ -293,13 +300,13 @@ impl RepoPath {
RepoPathComponentsIter { value: &self.value }
}
pub fn join(&self, entry: &RepoPathComponent) -> RepoPath {
pub fn join(&self, entry: &RepoPathComponent) -> RepoPathBuf {
let value = if self.value.is_empty() {
entry.as_str().to_owned()
} else {
[&self.value, "/", entry.as_str()].concat()
};
RepoPath { value }
RepoPathBuf { value }
}
}
@ -513,20 +520,20 @@ mod tests {
let wc_path = &cwd_path;
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, wc_path, ""),
RepoPathBuf::parse_fs_path(&cwd_path, wc_path, ""),
Ok(RepoPath::root())
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, wc_path, "."),
RepoPathBuf::parse_fs_path(&cwd_path, wc_path, "."),
Ok(RepoPath::root())
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, wc_path, "file"),
RepoPathBuf::parse_fs_path(&cwd_path, wc_path, "file"),
Ok(repo_path("file"))
);
// Both slash and the platform's separator are allowed
assert_eq!(
RepoPath::parse_fs_path(
RepoPathBuf::parse_fs_path(
&cwd_path,
wc_path,
format!("dir{}file", std::path::MAIN_SEPARATOR)
@ -534,24 +541,24 @@ mod tests {
Ok(repo_path("dir/file"))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, wc_path, "dir/file"),
RepoPathBuf::parse_fs_path(&cwd_path, wc_path, "dir/file"),
Ok(repo_path("dir/file"))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, wc_path, ".."),
RepoPathBuf::parse_fs_path(&cwd_path, wc_path, ".."),
Err(FsPathParseError::InputNotInRepo("..".into()))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &cwd_path, "../repo"),
RepoPathBuf::parse_fs_path(&cwd_path, &cwd_path, "../repo"),
Ok(RepoPath::root())
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &cwd_path, "../repo/file"),
RepoPathBuf::parse_fs_path(&cwd_path, &cwd_path, "../repo/file"),
Ok(repo_path("file"))
);
// Input may be absolute path with ".."
assert_eq!(
RepoPath::parse_fs_path(
RepoPathBuf::parse_fs_path(
&cwd_path,
&cwd_path,
cwd_path.join("../repo").to_str().unwrap()
@ -567,31 +574,31 @@ mod tests {
let wc_path = cwd_path.parent().unwrap().to_path_buf();
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, ""),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, ""),
Ok(repo_path("dir"))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, "."),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, "."),
Ok(repo_path("dir"))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, "file"),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, "file"),
Ok(repo_path("dir/file"))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, "subdir/file"),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, "subdir/file"),
Ok(repo_path("dir/subdir/file"))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, ".."),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, ".."),
Ok(RepoPath::root())
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, "../.."),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, "../.."),
Err(FsPathParseError::InputNotInRepo("../..".into()))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, "../other-dir/file"),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, "../other-dir/file"),
Ok(repo_path("other-dir/file"))
);
}
@ -603,23 +610,23 @@ mod tests {
let wc_path = cwd_path.join("repo");
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, ""),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, ""),
Err(FsPathParseError::InputNotInRepo("".into()))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, "not-repo"),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, "not-repo"),
Err(FsPathParseError::InputNotInRepo("not-repo".into()))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, "repo"),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, "repo"),
Ok(RepoPath::root())
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, "repo/file"),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, "repo/file"),
Ok(repo_path("file"))
);
assert_eq!(
RepoPath::parse_fs_path(&cwd_path, &wc_path, "repo/dir/file"),
RepoPathBuf::parse_fs_path(&cwd_path, &wc_path, "repo/dir/file"),
Ok(repo_path("dir/file"))
);
}

View file

@ -38,7 +38,7 @@ use crate::hex_util::to_forward_hex;
use crate::index::{HexPrefix, PrefixResolution};
use crate::op_store::WorkspaceId;
use crate::repo::Repo;
use crate::repo_path::{FsPathParseError, RepoPath};
use crate::repo_path::{FsPathParseError, RepoPathBuf};
use crate::revset_graph::RevsetGraphEdge;
use crate::store::Store;
use crate::str_util::StringPattern;
@ -337,7 +337,7 @@ pub enum RevsetFilterPredicate {
/// Commits with committer's name or email containing the needle.
Committer(StringPattern),
/// Commits modifying the paths specified by the pattern.
File(Option<Vec<RepoPath>>), // TODO: embed matcher expression?
File(Option<Vec<RepoPathBuf>>), // TODO: embed matcher expression?
/// Commits with conflicts
HasConflict,
}
@ -1286,7 +1286,7 @@ static BUILTIN_FUNCTION_MAP: Lazy<HashMap<&'static str, RevsetFunction>> = Lazy:
.map(|arg| -> Result<_, RevsetParseError> {
let span = arg.as_span();
let needle = parse_function_argument_to_string(name, arg, state)?;
let path = RepoPath::parse_fs_path(ctx.cwd, ctx.workspace_root, needle)
let path = RepoPathBuf::parse_fs_path(ctx.cwd, ctx.workspace_root, needle)
.map_err(|e| {
RevsetParseError::with_span(
RevsetParseErrorKind::FsPathParseError(e),
@ -2870,8 +2870,8 @@ mod tests {
))
.minus(&RevsetExpression::filter(RevsetFilterPredicate::File(
Some(vec![
RepoPath::from_internal_string("arg1"),
RepoPath::from_internal_string("arg2"),
RepoPathBuf::from_internal_string("arg1"),
RepoPathBuf::from_internal_string("arg2"),
])
)))
.minus(&RevsetExpression::visible_heads()))
@ -3126,16 +3126,16 @@ mod tests {
assert_eq!(
parse_with_workspace("file(foo)", &WorkspaceId::default()),
Ok(RevsetExpression::filter(RevsetFilterPredicate::File(Some(
vec![RepoPath::from_internal_string("foo")]
vec![RepoPathBuf::from_internal_string("foo")]
))))
);
assert_eq!(
parse_with_workspace("file(foo, bar, baz)", &WorkspaceId::default()),
Ok(RevsetExpression::filter(RevsetFilterPredicate::File(Some(
vec![
RepoPath::from_internal_string("foo"),
RepoPath::from_internal_string("bar"),
RepoPath::from_internal_string("baz"),
RepoPathBuf::from_internal_string("foo"),
RepoPathBuf::from_internal_string("bar"),
RepoPathBuf::from_internal_string("baz"),
]
))))
);

View file

@ -29,7 +29,7 @@ use crate::backend::{
use crate::commit::Commit;
use crate::merge::{Merge, MergedTreeValue};
use crate::merged_tree::MergedTree;
use crate::repo_path::RepoPath;
use crate::repo_path::{RepoPath, RepoPathBuf};
use crate::tree::Tree;
use crate::tree_builder::TreeBuilder;
@ -38,7 +38,7 @@ use crate::tree_builder::TreeBuilder;
pub struct Store {
backend: Box<dyn Backend>,
commit_cache: RwLock<HashMap<CommitId, Arc<backend::Commit>>>,
tree_cache: RwLock<HashMap<(RepoPath, TreeId), Arc<backend::Tree>>>,
tree_cache: RwLock<HashMap<(RepoPathBuf, TreeId), Arc<backend::Tree>>>,
use_tree_conflict_format: bool,
}
@ -146,7 +146,7 @@ impl Store {
id: &TreeId,
) -> BackendResult<Tree> {
let data = self.get_backend_tree(dir, id).await?;
Ok(Tree::new(self.clone(), dir.clone(), id.clone(), data))
Ok(Tree::new(self.clone(), dir.to_owned(), id.clone(), data))
}
async fn get_backend_tree(
@ -154,7 +154,7 @@ impl Store {
dir: &RepoPath,
id: &TreeId,
) -> BackendResult<Arc<backend::Tree>> {
let key = (dir.clone(), id.clone());
let key = (dir.to_owned(), id.clone());
{
let read_locked_cache = self.tree_cache.read().unwrap();
if let Some(data) = read_locked_cache.get(&key).cloned() {
@ -190,10 +190,10 @@ impl Store {
let data = Arc::new(tree);
{
let mut write_locked_cache = self.tree_cache.write().unwrap();
write_locked_cache.insert((path.clone(), tree_id.clone()), data.clone());
write_locked_cache.insert((path.to_owned(), tree_id.clone()), data.clone());
}
Ok(Tree::new(self.clone(), path.clone(), tree_id, data))
Ok(Tree::new(self.clone(), path.to_owned(), tree_id, data))
}
pub fn read_file(&self, path: &RepoPath, id: &FileId) -> BackendResult<Box<dyn Read>> {

View file

@ -30,7 +30,7 @@ use crate::backend::{
use crate::files::MergeResult;
use crate::matchers::{EverythingMatcher, Matcher};
use crate::merge::{trivial_merge, Merge, MergedTreeValue};
use crate::repo_path::{RepoPath, RepoPathComponent, RepoPathComponentsIter};
use crate::repo_path::{RepoPath, RepoPathBuf, RepoPathComponent, RepoPathComponentsIter};
use crate::store::Store;
use crate::{backend, files};
@ -48,7 +48,7 @@ pub enum TreeMergeError {
#[derive(Clone)]
pub struct Tree {
store: Arc<Store>,
dir: RepoPath,
dir: RepoPathBuf,
id: TreeId,
data: Arc<backend::Tree>,
}
@ -78,7 +78,7 @@ impl Hash for Tree {
}
impl Tree {
pub fn new(store: Arc<Store>, dir: RepoPath, id: TreeId, data: Arc<backend::Tree>) -> Self {
pub fn new(store: Arc<Store>, dir: RepoPathBuf, id: TreeId, data: Arc<backend::Tree>) -> Self {
Tree {
store,
dir,
@ -87,7 +87,7 @@ impl Tree {
}
}
pub fn null(store: Arc<Store>, dir: RepoPath) -> Self {
pub fn null(store: Arc<Store>, dir: RepoPathBuf) -> Self {
Tree {
store,
dir,
@ -170,7 +170,7 @@ impl Tree {
}
}
pub fn conflicts_matching(&self, matcher: &dyn Matcher) -> Vec<(RepoPath, ConflictId)> {
pub fn conflicts_matching(&self, matcher: &dyn Matcher) -> Vec<(RepoPathBuf, ConflictId)> {
let mut conflicts = vec![];
for (name, value) in self.entries_matching(matcher) {
if let TreeValue::Conflict(id) = value {
@ -181,7 +181,7 @@ impl Tree {
}
#[instrument]
pub fn conflicts(&self) -> Vec<(RepoPath, ConflictId)> {
pub fn conflicts(&self) -> Vec<(RepoPathBuf, ConflictId)> {
self.conflicts_matching(&EverythingMatcher)
}
@ -197,7 +197,7 @@ pub struct TreeEntriesIterator<'matcher> {
struct TreeEntriesDirItem {
tree: Tree,
entries: Vec<(RepoPath, TreeValue)>,
entries: Vec<(RepoPathBuf, TreeValue)>,
}
impl From<Tree> for TreeEntriesDirItem {
@ -222,7 +222,7 @@ impl<'matcher> TreeEntriesIterator<'matcher> {
}
impl Iterator for TreeEntriesIterator<'_> {
type Item = (RepoPath, TreeValue);
type Item = (RepoPathBuf, TreeValue);
fn next(&mut self) -> Option<Self::Item> {
while let Some(top) = self.stack.last_mut() {

View file

@ -19,7 +19,7 @@ use std::sync::Arc;
use crate::backend;
use crate::backend::{TreeId, TreeValue};
use crate::repo_path::RepoPath;
use crate::repo_path::RepoPathBuf;
use crate::store::Store;
use crate::tree::Tree;
@ -33,7 +33,7 @@ enum Override {
pub struct TreeBuilder {
store: Arc<Store>,
base_tree_id: TreeId,
overrides: BTreeMap<RepoPath, Override>,
overrides: BTreeMap<RepoPathBuf, Override>,
}
impl TreeBuilder {
@ -50,17 +50,17 @@ impl TreeBuilder {
self.store.as_ref()
}
pub fn set(&mut self, path: RepoPath, value: TreeValue) {
pub fn set(&mut self, path: RepoPathBuf, value: TreeValue) {
assert!(!path.is_root());
self.overrides.insert(path, Override::Replace(value));
}
pub fn remove(&mut self, path: RepoPath) {
pub fn remove(&mut self, path: RepoPathBuf) {
assert!(!path.is_root());
self.overrides.insert(path, Override::Tombstone);
}
pub fn set_or_remove(&mut self, path: RepoPath, value: Option<TreeValue>) {
pub fn set_or_remove(&mut self, path: RepoPathBuf, value: Option<TreeValue>) {
assert!(!path.is_root());
if let Some(value) = value {
self.overrides.insert(path, Override::Replace(value));
@ -116,18 +116,18 @@ impl TreeBuilder {
unreachable!("trees_to_write must contain the root tree");
}
fn get_base_trees(&self) -> BTreeMap<RepoPath, backend::Tree> {
fn get_base_trees(&self) -> BTreeMap<RepoPathBuf, backend::Tree> {
let store = &self.store;
let mut tree_cache = {
let dir = RepoPath::root();
let dir = RepoPathBuf::root();
let tree = store.get_tree(&dir, &self.base_tree_id).unwrap();
BTreeMap::from([(dir, tree)])
};
fn populate_trees<'a>(
tree_cache: &'a mut BTreeMap<RepoPath, Tree>,
tree_cache: &'a mut BTreeMap<RepoPathBuf, Tree>,
store: &Arc<Store>,
dir: RepoPath,
dir: RepoPathBuf,
) -> &'a Tree {
// `if let Some(tree) = ...` doesn't pass lifetime check as of Rust 1.69.0
if tree_cache.contains_key(&dir) {

View file

@ -28,7 +28,7 @@ use crate::fsmonitor::FsmonitorKind;
use crate::gitignore::GitIgnoreFile;
use crate::merged_tree::MergedTree;
use crate::op_store::{OperationId, WorkspaceId};
use crate::repo_path::RepoPath;
use crate::repo_path::{RepoPath, RepoPathBuf};
use crate::settings::HumanByteSize;
/// The trait all working-copy implementations must implement.
@ -56,7 +56,7 @@ pub trait WorkingCopy {
/// out in the working copy. An empty list means that no paths should be
/// checked out in the working copy. A single `RepoPath::root()` entry means
/// that all files should be checked out.
fn sparse_patterns(&self) -> Result<&[RepoPath], WorkingCopyStateError>;
fn sparse_patterns(&self) -> Result<&[RepoPathBuf], WorkingCopyStateError>;
/// Locks the working copy and returns an instance with methods for updating
/// the working copy files and state.
@ -87,7 +87,7 @@ pub trait LockedWorkingCopy {
fn reset(&mut self, new_tree: &MergedTree) -> Result<(), ResetError>;
/// See `WorkingCopy::sparse_patterns()`
fn sparse_patterns(&self) -> Result<&[RepoPath], WorkingCopyStateError>;
fn sparse_patterns(&self) -> Result<&[RepoPathBuf], WorkingCopyStateError>;
/// Updates the patterns that decide which paths from the current tree
/// should be checked out in the working copy.
@ -97,7 +97,7 @@ pub trait LockedWorkingCopy {
// to use sparse).
fn set_sparse_patterns(
&mut self,
new_sparse_patterns: Vec<RepoPath>,
new_sparse_patterns: Vec<RepoPathBuf>,
) -> Result<CheckoutStats, CheckoutError>;
/// Finish the modifications to the working copy by writing the updated

View file

@ -16,11 +16,15 @@ use jj_lib::backend::{ChangeId, MillisSinceEpoch, ObjectId, Signature, Timestamp
use jj_lib::matchers::EverythingMatcher;
use jj_lib::merged_tree::DiffSummary;
use jj_lib::repo::Repo;
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::{RepoPath, RepoPathBuf};
use jj_lib::settings::UserSettings;
use test_case::test_case;
use testutils::{assert_rebased, create_tree, CommitGraphBuilder, TestRepo, TestRepoBackend};
/// Converts a slice of borrowed `RepoPath`s into a vector of owned
/// `RepoPathBuf`s, preserving order.
fn to_owned_path_vec(paths: &[&RepoPath]) -> Vec<RepoPathBuf> {
    let mut owned = Vec::with_capacity(paths.len());
    for &path in paths {
        owned.push(path.to_owned());
    }
    owned
}
#[test_case(TestRepoBackend::Local ; "local backend")]
#[test_case(TestRepoBackend::Git ; "git backend")]
fn test_initial(backend: TestRepoBackend) {
@ -29,13 +33,13 @@ fn test_initial(backend: TestRepoBackend) {
let repo = &test_repo.repo;
let store = repo.store();
let root_file_path = RepoPath::from_internal_string("file");
let dir_file_path = RepoPath::from_internal_string("dir/file");
let root_file_path = &RepoPath::from_internal_string("file");
let dir_file_path = &RepoPath::from_internal_string("dir/file");
let tree = create_tree(
repo,
&[
(&root_file_path, "file contents"),
(&dir_file_path, "dir/file contents"),
(root_file_path, "file contents"),
(dir_file_path, "dir/file contents"),
],
);
@ -87,8 +91,8 @@ fn test_initial(backend: TestRepoBackend) {
.unwrap(),
DiffSummary {
modified: vec![],
added: vec![dir_file_path, root_file_path],
removed: vec![]
added: to_owned_path_vec(&[dir_file_path, root_file_path]),
removed: vec![],
}
);
}
@ -101,13 +105,13 @@ fn test_rewrite(backend: TestRepoBackend) {
let repo = &test_repo.repo;
let store = repo.store().clone();
let root_file_path = RepoPath::from_internal_string("file");
let dir_file_path = RepoPath::from_internal_string("dir/file");
let root_file_path = &RepoPath::from_internal_string("file");
let dir_file_path = &RepoPath::from_internal_string("dir/file");
let initial_tree = create_tree(
repo,
&[
(&root_file_path, "file contents"),
(&dir_file_path, "dir/file contents"),
(root_file_path, "file contents"),
(dir_file_path, "dir/file contents"),
],
);
@ -126,8 +130,8 @@ fn test_rewrite(backend: TestRepoBackend) {
let rewritten_tree = create_tree(
&repo,
&[
(&root_file_path, "file contents"),
(&dir_file_path, "updated dir/file contents"),
(root_file_path, "file contents"),
(dir_file_path, "updated dir/file contents"),
],
);
@ -172,8 +176,8 @@ fn test_rewrite(backend: TestRepoBackend) {
.unwrap(),
DiffSummary {
modified: vec![],
added: vec![dir_file_path.clone(), root_file_path],
removed: vec![]
added: to_owned_path_vec(&[dir_file_path, root_file_path]),
removed: vec![],
}
);
assert_eq!(
@ -183,9 +187,9 @@ fn test_rewrite(backend: TestRepoBackend) {
.diff_summary(&rewritten_commit.tree().unwrap(), &EverythingMatcher)
.unwrap(),
DiffSummary {
modified: vec![dir_file_path],
modified: to_owned_path_vec(&[dir_file_path]),
added: vec![],
removed: vec![]
removed: vec![],
}
);
}

View file

@ -28,10 +28,10 @@ fn test_materialize_conflict_basic() {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let path = RepoPath::from_internal_string("file");
let path = &RepoPath::from_internal_string("file");
let base_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2
line 3
@ -41,7 +41,7 @@ line 5
);
let left_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2
left 3.1
@ -53,7 +53,7 @@ line 5
);
let right_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2
right 3.1
@ -69,7 +69,7 @@ line 5
vec![Some(left_id.clone()), Some(right_id.clone())],
);
insta::assert_snapshot!(
&materialize_conflict_string(store, &path, &conflict),
&materialize_conflict_string(store, path, &conflict),
@r###"
line 1
line 2
@ -93,7 +93,7 @@ line 5
vec![Some(right_id.clone()), Some(left_id.clone())],
);
insta::assert_snapshot!(
&materialize_conflict_string(store, &path, &conflict),
&materialize_conflict_string(store, path, &conflict),
@r###"
line 1
line 2
@ -118,10 +118,10 @@ fn test_materialize_conflict_multi_rebase_conflicts() {
let store = test_repo.repo.store();
// Create changes (a, b, c) on top of the base, and linearize them.
let path = RepoPath::from_internal_string("file");
let path = &RepoPath::from_internal_string("file");
let base_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2 base
line 3
@ -129,7 +129,7 @@ line 3
);
let a_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2 a.1
line 2 a.2
@ -139,7 +139,7 @@ line 3
);
let b_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2 b.1
line 2 b.2
@ -148,7 +148,7 @@ line 3
);
let c_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2 c.1
line 3
@ -162,7 +162,7 @@ line 3
vec![Some(a_id.clone()), Some(b_id.clone()), Some(c_id.clone())],
);
insta::assert_snapshot!(
&materialize_conflict_string(store, &path, &conflict),
&materialize_conflict_string(store, path, &conflict),
@r###"
line 1
<<<<<<<
@ -186,7 +186,7 @@ line 3
vec![Some(c_id.clone()), Some(b_id.clone()), Some(a_id.clone())],
);
insta::assert_snapshot!(
&materialize_conflict_string(store, &path, &conflict),
&materialize_conflict_string(store, path, &conflict),
@r###"
line 1
<<<<<<<
@ -210,7 +210,7 @@ line 3
vec![Some(c_id.clone()), Some(a_id.clone()), Some(b_id.clone())],
);
insta::assert_snapshot!(
&materialize_conflict_string(store, &path, &conflict),
&materialize_conflict_string(store, path, &conflict),
@r###"
line 1
<<<<<<<
@ -236,10 +236,10 @@ fn test_materialize_parse_roundtrip() {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let path = RepoPath::from_internal_string("file");
let path = &RepoPath::from_internal_string("file");
let base_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2
line 3
@ -249,7 +249,7 @@ line 5
);
let left_id = testutils::write_file(
store,
&path,
path,
"line 1 left
line 2 left
line 3
@ -259,7 +259,7 @@ line 5 left
);
let right_id = testutils::write_file(
store,
&path,
path,
"line 1 right
line 2
line 3
@ -272,7 +272,7 @@ line 5 right
vec![Some(base_id.clone())],
vec![Some(left_id.clone()), Some(right_id.clone())],
);
let materialized = materialize_conflict_string(store, &path, &conflict);
let materialized = materialize_conflict_string(store, path, &conflict);
insta::assert_snapshot!(
materialized,
@r###"
@ -331,10 +331,10 @@ fn test_materialize_conflict_modify_delete() {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let path = RepoPath::from_internal_string("file");
let path = &RepoPath::from_internal_string("file");
let base_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2
line 3
@ -344,7 +344,7 @@ line 5
);
let modified_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2
modified
@ -354,7 +354,7 @@ line 5
);
let deleted_id = testutils::write_file(
store,
&path,
path,
"line 1
line 2
line 4
@ -367,7 +367,7 @@ line 5
vec![Some(base_id.clone())],
vec![Some(modified_id.clone()), Some(deleted_id.clone())],
);
insta::assert_snapshot!(&materialize_conflict_string(store, &path, &conflict), @r###"
insta::assert_snapshot!(&materialize_conflict_string(store, path, &conflict), @r###"
line 1
line 2
<<<<<<<
@ -386,7 +386,7 @@ line 5
vec![Some(base_id.clone())],
vec![Some(deleted_id.clone()), Some(modified_id.clone())],
);
insta::assert_snapshot!(&materialize_conflict_string(store, &path, &conflict), @r###"
insta::assert_snapshot!(&materialize_conflict_string(store, path, &conflict), @r###"
line 1
line 2
<<<<<<<
@ -405,7 +405,7 @@ line 5
vec![Some(base_id.clone())],
vec![Some(modified_id.clone()), None],
);
insta::assert_snapshot!(&materialize_conflict_string(store, &path, &conflict), @r###"
insta::assert_snapshot!(&materialize_conflict_string(store, path, &conflict), @r###"
<<<<<<<
%%%%%%%
line 1
@ -595,10 +595,10 @@ fn test_update_conflict_from_content() {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let path = RepoPath::from_internal_string("dir/file");
let base_file_id = testutils::write_file(store, &path, "line 1\nline 2\nline 3\n");
let left_file_id = testutils::write_file(store, &path, "left 1\nline 2\nleft 3\n");
let right_file_id = testutils::write_file(store, &path, "right 1\nline 2\nright 3\n");
let path = &RepoPath::from_internal_string("dir/file");
let base_file_id = testutils::write_file(store, path, "line 1\nline 2\nline 3\n");
let left_file_id = testutils::write_file(store, path, "left 1\nline 2\nleft 3\n");
let right_file_id = testutils::write_file(store, path, "right 1\nline 2\nright 3\n");
let conflict = Merge::from_removes_adds(
vec![Some(base_file_id.clone())],
vec![Some(left_file_id.clone()), Some(right_file_id.clone())],
@ -606,16 +606,16 @@ fn test_update_conflict_from_content() {
// If the content is unchanged compared to the materialized value, we get the
// old conflict id back.
let materialized = materialize_conflict_string(store, &path, &conflict);
let materialized = materialize_conflict_string(store, path, &conflict);
let parse = |content| {
update_from_content(&conflict, store, &path, content)
update_from_content(&conflict, store, path, content)
.block_on()
.unwrap()
};
assert_eq!(parse(materialized.as_bytes()), conflict);
// If the conflict is resolved, we get None back to indicate that.
let expected_file_id = testutils::write_file(store, &path, "resolved 1\nline 2\nresolved 3\n");
let expected_file_id = testutils::write_file(store, path, "resolved 1\nline 2\nresolved 3\n");
assert_eq!(
parse(b"resolved 1\nline 2\nresolved 3\n"),
Merge::normal(expected_file_id)
@ -627,9 +627,9 @@ fn test_update_conflict_from_content() {
);
assert_ne!(new_conflict, conflict);
// Calculate expected new FileIds
let new_base_file_id = testutils::write_file(store, &path, "resolved 1\nline 2\nline 3\n");
let new_left_file_id = testutils::write_file(store, &path, "resolved 1\nline 2\nleft 3\n");
let new_right_file_id = testutils::write_file(store, &path, "resolved 1\nline 2\nright 3\n");
let new_base_file_id = testutils::write_file(store, path, "resolved 1\nline 2\nline 3\n");
let new_left_file_id = testutils::write_file(store, path, "resolved 1\nline 2\nleft 3\n");
let new_right_file_id = testutils::write_file(store, path, "resolved 1\nline 2\nright 3\n");
assert_eq!(
new_conflict,
Merge::from_removes_adds(
@ -647,24 +647,24 @@ fn test_update_conflict_from_content_modify_delete() {
let test_repo = TestRepo::init();
let store = test_repo.repo.store();
let path = RepoPath::from_internal_string("dir/file");
let before_file_id = testutils::write_file(store, &path, "line 1\nline 2 before\nline 3\n");
let after_file_id = testutils::write_file(store, &path, "line 1\nline 2 after\nline 3\n");
let path = &RepoPath::from_internal_string("dir/file");
let before_file_id = testutils::write_file(store, path, "line 1\nline 2 before\nline 3\n");
let after_file_id = testutils::write_file(store, path, "line 1\nline 2 after\nline 3\n");
let conflict =
Merge::from_removes_adds(vec![Some(before_file_id)], vec![Some(after_file_id), None]);
// If the content is unchanged compared to the materialized value, we get the
// old conflict id back.
let materialized = materialize_conflict_string(store, &path, &conflict);
let materialized = materialize_conflict_string(store, path, &conflict);
let parse = |content| {
update_from_content(&conflict, store, &path, content)
update_from_content(&conflict, store, path, content)
.block_on()
.unwrap()
};
assert_eq!(parse(materialized.as_bytes()), conflict);
// If the conflict is resolved, we get None back to indicate that.
let expected_file_id = testutils::write_file(store, &path, "resolved\n");
let expected_file_id = testutils::write_file(store, path, "resolved\n");
assert_eq!(parse(b"resolved\n"), Merge::normal(expected_file_id));
// If the conflict is modified, we get a new conflict back.
@ -672,9 +672,9 @@ fn test_update_conflict_from_content_modify_delete() {
b"<<<<<<<\n%%%%%%%\n line 1\n-line 2 before\n+line 2 modified after\n line 3\n+++++++\n>>>>>>>\n",
);
// Calculate expected new FileIds
let new_base_file_id = testutils::write_file(store, &path, "line 1\nline 2 before\nline 3\n");
let new_base_file_id = testutils::write_file(store, path, "line 1\nline 2 before\nline 3\n");
let new_left_file_id =
testutils::write_file(store, &path, "line 1\nline 2 modified after\nline 3\n");
testutils::write_file(store, path, "line 1\nline 2 modified after\nline 3\n");
assert_eq!(
new_conflict,

View file

@ -14,43 +14,47 @@
use jj_lib::matchers::{EverythingMatcher, FilesMatcher};
use jj_lib::merged_tree::DiffSummary;
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::{RepoPath, RepoPathBuf};
use testutils::{create_tree, TestRepo};
fn to_owned_path_vec(paths: &[&RepoPath]) -> Vec<RepoPathBuf> {
paths.iter().map(|&path| path.to_owned()).collect()
}
#[test]
fn test_types() {
let test_repo = TestRepo::init();
let repo = &test_repo.repo;
let clean_path = RepoPath::from_internal_string("clean");
let modified_path = RepoPath::from_internal_string("modified");
let added_path = RepoPath::from_internal_string("added");
let removed_path = RepoPath::from_internal_string("removed");
let clean_path = &RepoPath::from_internal_string("clean");
let modified_path = &RepoPath::from_internal_string("modified");
let added_path = &RepoPath::from_internal_string("added");
let removed_path = &RepoPath::from_internal_string("removed");
let tree1 = create_tree(
repo,
&[
(&clean_path, "clean"),
(&modified_path, "contents before"),
(&removed_path, "removed contents"),
(clean_path, "clean"),
(modified_path, "contents before"),
(removed_path, "removed contents"),
],
);
let tree2 = create_tree(
repo,
&[
(&clean_path, "clean"),
(&modified_path, "contents after"),
(&added_path, "added contents"),
(clean_path, "clean"),
(modified_path, "contents after"),
(added_path, "added contents"),
],
);
assert_eq!(
tree1.diff_summary(&tree2, &EverythingMatcher).unwrap(),
DiffSummary {
modified: vec![modified_path],
added: vec![added_path],
removed: vec![removed_path]
modified: to_owned_path_vec(&[modified_path]),
added: to_owned_path_vec(&[added_path]),
removed: to_owned_path_vec(&[removed_path]),
}
);
}
@ -60,26 +64,26 @@ fn test_tree_file_transition() {
let test_repo = TestRepo::init();
let repo = &test_repo.repo;
let dir_file_path = RepoPath::from_internal_string("dir/file");
let dir_path = RepoPath::from_internal_string("dir");
let dir_file_path = &RepoPath::from_internal_string("dir/file");
let dir_path = &RepoPath::from_internal_string("dir");
let tree1 = create_tree(repo, &[(&dir_file_path, "contents")]);
let tree2 = create_tree(repo, &[(&dir_path, "contents")]);
let tree1 = create_tree(repo, &[(dir_file_path, "contents")]);
let tree2 = create_tree(repo, &[(dir_path, "contents")]);
assert_eq!(
tree1.diff_summary(&tree2, &EverythingMatcher).unwrap(),
DiffSummary {
modified: vec![],
added: vec![dir_path.clone()],
removed: vec![dir_file_path.clone()]
added: to_owned_path_vec(&[dir_path]),
removed: to_owned_path_vec(&[dir_file_path]),
}
);
assert_eq!(
tree2.diff_summary(&tree1, &EverythingMatcher).unwrap(),
DiffSummary {
modified: vec![],
added: vec![dir_file_path],
removed: vec![dir_path]
added: to_owned_path_vec(&[dir_file_path]),
removed: to_owned_path_vec(&[dir_path]),
}
);
}
@ -89,61 +93,54 @@ fn test_sorting() {
let test_repo = TestRepo::init();
let repo = &test_repo.repo;
let a_path = RepoPath::from_internal_string("a");
let b_path = RepoPath::from_internal_string("b");
let f_a_path = RepoPath::from_internal_string("f/a");
let f_b_path = RepoPath::from_internal_string("f/b");
let f_f_a_path = RepoPath::from_internal_string("f/f/a");
let f_f_b_path = RepoPath::from_internal_string("f/f/b");
let n_path = RepoPath::from_internal_string("n");
let s_b_path = RepoPath::from_internal_string("s/b");
let z_path = RepoPath::from_internal_string("z");
let a_path = &RepoPath::from_internal_string("a");
let b_path = &RepoPath::from_internal_string("b");
let f_a_path = &RepoPath::from_internal_string("f/a");
let f_b_path = &RepoPath::from_internal_string("f/b");
let f_f_a_path = &RepoPath::from_internal_string("f/f/a");
let f_f_b_path = &RepoPath::from_internal_string("f/f/b");
let n_path = &RepoPath::from_internal_string("n");
let s_b_path = &RepoPath::from_internal_string("s/b");
let z_path = &RepoPath::from_internal_string("z");
let tree1 = create_tree(
repo,
&[
(&a_path, "before"),
(&f_a_path, "before"),
(&f_f_a_path, "before"),
(a_path, "before"),
(f_a_path, "before"),
(f_f_a_path, "before"),
],
);
let tree2 = create_tree(
repo,
&[
(&a_path, "after"),
(&b_path, "after"),
(&f_a_path, "after"),
(&f_b_path, "after"),
(&f_f_a_path, "after"),
(&f_f_b_path, "after"),
(&n_path, "after"),
(&s_b_path, "after"),
(&z_path, "after"),
(a_path, "after"),
(b_path, "after"),
(f_a_path, "after"),
(f_b_path, "after"),
(f_f_a_path, "after"),
(f_f_b_path, "after"),
(n_path, "after"),
(s_b_path, "after"),
(z_path, "after"),
],
);
assert_eq!(
tree1.diff_summary(&tree2, &EverythingMatcher).unwrap(),
DiffSummary {
modified: vec![a_path.clone(), f_a_path.clone(), f_f_a_path.clone()],
added: vec![
b_path.clone(),
f_b_path.clone(),
f_f_b_path.clone(),
n_path.clone(),
s_b_path.clone(),
z_path.clone(),
],
removed: vec![]
modified: to_owned_path_vec(&[a_path, f_a_path, f_f_a_path]),
added: to_owned_path_vec(&[b_path, f_b_path, f_f_b_path, n_path, s_b_path, z_path]),
removed: vec![],
}
);
assert_eq!(
tree2.diff_summary(&tree1, &EverythingMatcher).unwrap(),
DiffSummary {
modified: vec![a_path, f_a_path, f_f_a_path],
modified: to_owned_path_vec(&[a_path, f_a_path, f_f_a_path]),
added: vec![],
removed: vec![b_path, f_b_path, f_f_b_path, n_path, s_b_path, z_path]
removed: to_owned_path_vec(&[b_path, f_b_path, f_f_b_path, n_path, s_b_path, z_path]),
}
);
}
@ -153,11 +150,11 @@ fn test_matcher_dir_file_transition() {
let test_repo = TestRepo::init();
let repo = &test_repo.repo;
let a_path = RepoPath::from_internal_string("a");
let a_a_path = RepoPath::from_internal_string("a/a");
let a_path = &RepoPath::from_internal_string("a");
let a_a_path = &RepoPath::from_internal_string("a/a");
let tree1 = create_tree(repo, &[(&a_path, "before")]);
let tree2 = create_tree(repo, &[(&a_a_path, "after")]);
let tree1 = create_tree(repo, &[(a_path, "before")]);
let tree2 = create_tree(repo, &[(a_a_path, "after")]);
let matcher = FilesMatcher::new([&a_path]);
assert_eq!(
@ -165,25 +162,25 @@ fn test_matcher_dir_file_transition() {
DiffSummary {
modified: vec![],
added: vec![],
removed: vec![a_path.clone()]
removed: to_owned_path_vec(&[a_path]),
}
);
assert_eq!(
tree2.diff_summary(&tree1, &matcher).unwrap(),
DiffSummary {
modified: vec![],
added: vec![a_path.clone()],
removed: vec![]
added: to_owned_path_vec(&[a_path]),
removed: vec![],
}
);
let matcher = FilesMatcher::new([&a_a_path]);
let matcher = FilesMatcher::new([a_a_path]);
assert_eq!(
tree1.diff_summary(&tree2, &matcher).unwrap(),
DiffSummary {
modified: vec![],
added: vec![a_a_path.clone()],
removed: vec![]
added: to_owned_path_vec(&[a_a_path]),
removed: vec![],
}
);
assert_eq!(
@ -191,25 +188,25 @@ fn test_matcher_dir_file_transition() {
DiffSummary {
modified: vec![],
added: vec![],
removed: vec![a_a_path.clone()]
removed: to_owned_path_vec(&[a_a_path]),
}
);
let matcher = FilesMatcher::new([&a_path, &a_a_path]);
let matcher = FilesMatcher::new([a_path, a_a_path]);
assert_eq!(
tree1.diff_summary(&tree2, &matcher).unwrap(),
DiffSummary {
modified: vec![],
added: vec![a_a_path.clone()],
removed: vec![a_path.clone()]
added: to_owned_path_vec(&[a_a_path]),
removed: to_owned_path_vec(&[a_path]),
}
);
assert_eq!(
tree2.diff_summary(&tree1, &matcher).unwrap(),
DiffSummary {
modified: vec![],
added: vec![a_path],
removed: vec![a_a_path]
added: to_owned_path_vec(&[a_path]),
removed: to_owned_path_vec(&[a_a_path]),
}
);
}
@ -219,12 +216,12 @@ fn test_matcher_normal_cases() {
let test_repo = TestRepo::init();
let repo = &test_repo.repo;
let a_path = RepoPath::from_internal_string("a");
let dir1_a_path = RepoPath::from_internal_string("dir1/a");
let dir2_b_path = RepoPath::from_internal_string("dir2/b");
let z_path = RepoPath::from_internal_string("z");
let a_path = &RepoPath::from_internal_string("a");
let dir1_a_path = &RepoPath::from_internal_string("dir1/a");
let dir2_b_path = &RepoPath::from_internal_string("dir2/b");
let z_path = &RepoPath::from_internal_string("z");
let tree1 = create_tree(repo, &[(&a_path, "before"), (&dir1_a_path, "before")]);
let tree1 = create_tree(repo, &[(a_path, "before"), (dir1_a_path, "before")]);
// File "a" gets modified
// File "dir1/a" gets modified
// File "dir2/b" gets created
@ -232,46 +229,46 @@ fn test_matcher_normal_cases() {
let tree2 = create_tree(
repo,
&[
(&a_path, "after"),
(&dir1_a_path, "after"),
(&dir2_b_path, "after"),
(&z_path, "after"),
(a_path, "after"),
(dir1_a_path, "after"),
(dir2_b_path, "after"),
(z_path, "after"),
],
);
let matcher = FilesMatcher::new([&a_path, &z_path]);
let matcher = FilesMatcher::new([a_path, z_path]);
assert_eq!(
tree1.diff_summary(&tree2, &matcher).unwrap(),
DiffSummary {
modified: vec![a_path.clone()],
added: vec![z_path.clone()],
removed: vec![]
modified: to_owned_path_vec(&[a_path]),
added: to_owned_path_vec(&[z_path]),
removed: vec![],
}
);
assert_eq!(
tree2.diff_summary(&tree1, &matcher).unwrap(),
DiffSummary {
modified: vec![a_path],
modified: to_owned_path_vec(&[a_path]),
added: vec![],
removed: vec![z_path]
removed: to_owned_path_vec(&[z_path]),
}
);
let matcher = FilesMatcher::new([&dir1_a_path, &dir2_b_path]);
let matcher = FilesMatcher::new([dir1_a_path, dir2_b_path]);
assert_eq!(
tree1.diff_summary(&tree2, &matcher).unwrap(),
DiffSummary {
modified: vec![dir1_a_path.clone()],
added: vec![dir2_b_path.clone()],
removed: vec![]
modified: to_owned_path_vec(&[dir1_a_path]),
added: to_owned_path_vec(&[dir2_b_path]),
removed: vec![],
}
);
assert_eq!(
tree2.diff_summary(&tree1, &matcher).unwrap(),
DiffSummary {
modified: vec![dir1_a_path],
modified: to_owned_path_vec(&[dir1_a_path]),
added: vec![],
removed: vec![dir2_b_path]
removed: to_owned_path_vec(&[dir2_b_path]),
}
);
}

View file

@ -33,7 +33,7 @@ use jj_lib::merge::Merge;
use jj_lib::merged_tree::MergedTreeBuilder;
use jj_lib::op_store::{OperationId, WorkspaceId};
use jj_lib::repo::{ReadonlyRepo, Repo};
use jj_lib::repo_path::{RepoPath, RepoPathComponent};
use jj_lib::repo_path::{RepoPath, RepoPathBuf, RepoPathComponent};
use jj_lib::settings::UserSettings;
use jj_lib::working_copy::{CheckoutStats, SnapshotError, SnapshotOptions};
use jj_lib::workspace::LockedWorkspace;
@ -42,6 +42,10 @@ use testutils::{
commit_with_tree, create_tree, write_random_commit, TestRepoBackend, TestWorkspace,
};
fn to_owned_path_vec(paths: &[&RepoPath]) -> Vec<RepoPathBuf> {
paths.iter().map(|&path| path.to_owned()).collect()
}
#[test]
fn test_root() {
// Test that the working copy is clean and empty after init.
@ -49,7 +53,7 @@ fn test_root() {
let mut test_workspace = TestWorkspace::init(&settings);
let wc = test_workspace.workspace.working_copy();
assert_eq!(wc.sparse_patterns().unwrap(), vec![RepoPath::root()]);
assert_eq!(wc.sparse_patterns().unwrap(), vec![RepoPathBuf::root()]);
let new_tree = test_workspace.snapshot().unwrap();
let repo = &test_workspace.repo;
let wc_commit_id = repo
@ -161,7 +165,7 @@ fn test_checkout_file_transitions(backend: TestRepoBackend) {
Merge::normal(TreeValue::GitSubmodule(id))
}
};
tree_builder.set_or_remove(path.clone(), value);
tree_builder.set_or_remove(path.to_owned(), value);
}
let mut kinds = vec![
@ -182,10 +186,10 @@ fn test_checkout_file_transitions(backend: TestRepoBackend) {
let mut files = vec![];
for left_kind in &kinds {
for right_kind in &kinds {
let path = RepoPath::from_internal_string(format!("{left_kind:?}_{right_kind:?}"));
let path = RepoPathBuf::from_internal_string(format!("{left_kind:?}_{right_kind:?}"));
write_path(&settings, repo, &mut left_tree_builder, *left_kind, &path);
write_path(&settings, repo, &mut right_tree_builder, *right_kind, &path);
files.push((*left_kind, *right_kind, path));
files.push((*left_kind, *right_kind, path.to_owned()));
}
}
let left_tree_id = left_tree_builder.write_tree(&store).unwrap();
@ -271,10 +275,10 @@ fn test_conflict_subdirectory() {
let mut test_workspace = TestWorkspace::init(&settings);
let repo = &test_workspace.repo;
let path = RepoPath::from_internal_string("sub/file");
let path = &RepoPath::from_internal_string("sub/file");
let empty_tree = create_tree(repo, &[]);
let tree1 = create_tree(repo, &[(&path, "0")]);
let tree2 = create_tree(repo, &[(&path, "1")]);
let tree1 = create_tree(repo, &[(path, "0")]);
let tree2 = create_tree(repo, &[(path, "1")]);
let merged_tree = tree1.merge(&empty_tree, &tree2).unwrap();
let commit1 = commit_with_tree(repo.store(), tree1.id());
let merged_commit = commit_with_tree(repo.store(), merged_tree.id());
@ -299,12 +303,12 @@ fn test_tree_builder_file_directory_transition() {
ws.check_out(repo.op_id().clone(), None, &commit).unwrap();
};
let parent_path = RepoPath::from_internal_string("foo/bar");
let child_path = RepoPath::from_internal_string("foo/bar/baz");
let parent_path = &RepoPath::from_internal_string("foo/bar");
let child_path = &RepoPath::from_internal_string("foo/bar/baz");
// Add file at parent_path
let mut tree_builder = store.tree_builder(store.empty_tree_id().clone());
testutils::write_normal_file(&mut tree_builder, &parent_path, "");
testutils::write_normal_file(&mut tree_builder, parent_path, "");
let tree_id = tree_builder.write_tree();
check_out_tree(&tree_id);
assert!(parent_path.to_fs_path(&workspace_root).is_file());
@ -312,8 +316,8 @@ fn test_tree_builder_file_directory_transition() {
// Turn parent_path into directory, add file at child_path
let mut tree_builder = store.tree_builder(tree_id);
tree_builder.remove(parent_path.clone());
testutils::write_normal_file(&mut tree_builder, &child_path, "");
tree_builder.remove(parent_path.to_owned());
testutils::write_normal_file(&mut tree_builder, child_path, "");
let tree_id = tree_builder.write_tree();
check_out_tree(&tree_id);
assert!(parent_path.to_fs_path(&workspace_root).is_dir());
@ -321,8 +325,8 @@ fn test_tree_builder_file_directory_transition() {
// Turn parent_path back to file
let mut tree_builder = store.tree_builder(tree_id);
tree_builder.remove(child_path.clone());
testutils::write_normal_file(&mut tree_builder, &parent_path, "");
tree_builder.remove(child_path.to_owned());
testutils::write_normal_file(&mut tree_builder, parent_path, "");
let tree_id = tree_builder.write_tree();
check_out_tree(&tree_id);
assert!(parent_path.to_fs_path(&workspace_root).is_file());
@ -338,20 +342,20 @@ fn test_conflicting_changes_on_disk() {
let workspace_root = ws.workspace_root().clone();
// file on disk conflicts with file in target commit
let file_file_path = RepoPath::from_internal_string("file-file");
let file_file_path = &RepoPath::from_internal_string("file-file");
// file on disk conflicts with directory in target commit
let file_dir_path = RepoPath::from_internal_string("file-dir");
let file_dir_path = &RepoPath::from_internal_string("file-dir");
// directory on disk conflicts with file in target commit
let dir_file_path = RepoPath::from_internal_string("dir-file");
let dir_file_path = &RepoPath::from_internal_string("dir-file");
let tree = create_tree(
repo,
&[
(&file_file_path, "committed contents"),
(file_file_path, "committed contents"),
(
&file_dir_path.join(RepoPathComponent::new("file")),
"committed contents",
),
(&dir_file_path, "committed contents"),
(dir_file_path, "committed contents"),
],
);
let commit = commit_with_tree(repo.store(), tree.id());
@ -406,13 +410,13 @@ fn test_reset() {
let op_id = repo.op_id().clone();
let workspace_root = test_workspace.workspace.workspace_root().clone();
let ignored_path = RepoPath::from_internal_string("ignored");
let gitignore_path = RepoPath::from_internal_string(".gitignore");
let ignored_path = &RepoPath::from_internal_string("ignored");
let gitignore_path = &RepoPath::from_internal_string(".gitignore");
let tree_without_file = create_tree(repo, &[(&gitignore_path, "ignored\n")]);
let tree_without_file = create_tree(repo, &[(gitignore_path, "ignored\n")]);
let tree_with_file = create_tree(
repo,
&[(&gitignore_path, "ignored\n"), (&ignored_path, "code")],
&[(gitignore_path, "ignored\n"), (ignored_path, "code")],
);
let ws = &mut test_workspace.workspace;
@ -422,7 +426,7 @@ fn test_reset() {
// Test the setup: the file should exist on disk and in the tree state.
assert!(ignored_path.to_fs_path(&workspace_root).is_file());
let wc: &LocalWorkingCopy = ws.working_copy().as_any().downcast_ref().unwrap();
assert!(wc.file_states().unwrap().contains_key(&ignored_path));
assert!(wc.file_states().unwrap().contains_key(ignored_path));
// After we reset to the commit without the file, it should still exist on disk,
// but it should not be in the tree state, and it should not get added when we
@ -432,7 +436,7 @@ fn test_reset() {
locked_ws.finish(op_id.clone()).unwrap();
assert!(ignored_path.to_fs_path(&workspace_root).is_file());
let wc: &LocalWorkingCopy = ws.working_copy().as_any().downcast_ref().unwrap();
assert!(!wc.file_states().unwrap().contains_key(&ignored_path));
assert!(!wc.file_states().unwrap().contains_key(ignored_path));
let new_tree = test_workspace.snapshot().unwrap();
assert_eq!(new_tree.id(), tree_without_file.id());
@ -444,7 +448,7 @@ fn test_reset() {
locked_ws.finish(op_id.clone()).unwrap();
assert!(ignored_path.to_fs_path(&workspace_root).is_file());
let wc: &LocalWorkingCopy = ws.working_copy().as_any().downcast_ref().unwrap();
assert!(wc.file_states().unwrap().contains_key(&ignored_path));
assert!(wc.file_states().unwrap().contains_key(ignored_path));
let new_tree = test_workspace.snapshot().unwrap();
assert_eq!(new_tree.id(), tree_with_file.id());
}
@ -459,12 +463,12 @@ fn test_checkout_discard() {
let repo = test_workspace.repo.clone();
let workspace_root = test_workspace.workspace.workspace_root().clone();
let file1_path = RepoPath::from_internal_string("file1");
let file2_path = RepoPath::from_internal_string("file2");
let file1_path = &RepoPath::from_internal_string("file1");
let file2_path = &RepoPath::from_internal_string("file2");
let store = repo.store();
let tree1 = create_tree(&repo, &[(&file1_path, "contents")]);
let tree2 = create_tree(&repo, &[(&file2_path, "contents")]);
let tree1 = create_tree(&repo, &[(file1_path, "contents")]);
let tree2 = create_tree(&repo, &[(file2_path, "contents")]);
let commit1 = commit_with_tree(repo.store(), tree1.id());
let commit2 = commit_with_tree(repo.store(), tree2.id());
@ -476,7 +480,7 @@ fn test_checkout_discard() {
// Test the setup: the file should exist on disk and in the tree state.
assert!(file1_path.to_fs_path(&workspace_root).is_file());
let wc: &LocalWorkingCopy = ws.working_copy().as_any().downcast_ref().unwrap();
assert!(wc.file_states().unwrap().contains_key(&file1_path));
assert!(wc.file_states().unwrap().contains_key(file1_path));
// Start a checkout
let mut locked_ws = ws.start_working_copy_mutation().unwrap();
@ -486,19 +490,19 @@ fn test_checkout_discard() {
assert!(file2_path.to_fs_path(&workspace_root).is_file());
let reloaded_wc =
LocalWorkingCopy::load(store.clone(), workspace_root.clone(), state_path.clone());
assert!(reloaded_wc.file_states().unwrap().contains_key(&file1_path));
assert!(!reloaded_wc.file_states().unwrap().contains_key(&file2_path));
assert!(reloaded_wc.file_states().unwrap().contains_key(file1_path));
assert!(!reloaded_wc.file_states().unwrap().contains_key(file2_path));
drop(locked_ws);
// The change should remain in the working copy, but not in memory and not saved
let wc: &LocalWorkingCopy = ws.working_copy().as_any().downcast_ref().unwrap();
assert!(wc.file_states().unwrap().contains_key(&file1_path));
assert!(!wc.file_states().unwrap().contains_key(&file2_path));
assert!(wc.file_states().unwrap().contains_key(file1_path));
assert!(!wc.file_states().unwrap().contains_key(file2_path));
assert!(!file1_path.to_fs_path(&workspace_root).is_file());
assert!(file2_path.to_fs_path(&workspace_root).is_file());
let reloaded_wc = LocalWorkingCopy::load(store.clone(), workspace_root, state_path);
assert!(reloaded_wc.file_states().unwrap().contains_key(&file1_path));
assert!(!reloaded_wc.file_states().unwrap().contains_key(&file2_path));
assert!(reloaded_wc.file_states().unwrap().contains_key(file1_path));
assert!(!reloaded_wc.file_states().unwrap().contains_key(file2_path));
}
#[test]
@ -547,10 +551,10 @@ fn test_snapshot_special_file() {
let store = test_workspace.repo.store();
let ws = &mut test_workspace.workspace;
let file1_path = RepoPath::from_internal_string("file1");
let file1_path = &RepoPath::from_internal_string("file1");
let file1_disk_path = file1_path.to_fs_path(&workspace_root);
std::fs::write(&file1_disk_path, "contents".as_bytes()).unwrap();
let file2_path = RepoPath::from_internal_string("file2");
let file2_path = &RepoPath::from_internal_string("file2");
let file2_disk_path = file2_path.to_fs_path(&workspace_root);
std::fs::write(file2_disk_path, "contents".as_bytes()).unwrap();
let socket_disk_path = workspace_root.join("socket");
@ -570,12 +574,16 @@ fn test_snapshot_special_file() {
// Only the regular files should be in the tree
assert_eq!(
tree.entries().map(|(path, _value)| path).collect_vec(),
vec![file1_path.clone(), file2_path.clone()]
to_owned_path_vec(&[file1_path, file2_path])
);
let wc: &LocalWorkingCopy = ws.working_copy().as_any().downcast_ref().unwrap();
assert_eq!(
wc.file_states().unwrap().keys().cloned().collect_vec(),
vec![file1_path, file2_path.clone()]
wc.file_states()
.unwrap()
.keys()
.map(AsRef::as_ref)
.collect_vec(),
vec![file1_path, file2_path]
);
// Replace a regular file by a socket and snapshot the working copy again
@ -585,12 +593,16 @@ fn test_snapshot_special_file() {
// Only the regular file should be in the tree
assert_eq!(
tree.entries().map(|(path, _value)| path).collect_vec(),
vec![file2_path.clone()]
to_owned_path_vec(&[file2_path])
);
let ws = &mut test_workspace.workspace;
let wc: &LocalWorkingCopy = ws.working_copy().as_any().downcast_ref().unwrap();
assert_eq!(
wc.file_states().unwrap().keys().cloned().collect_vec(),
wc.file_states()
.unwrap()
.keys()
.map(AsRef::as_ref)
.collect_vec(),
vec![file2_path]
);
}
@ -603,54 +615,54 @@ fn test_gitignores() {
let mut test_workspace = TestWorkspace::init(&settings);
let workspace_root = test_workspace.workspace.workspace_root().clone();
let gitignore_path = RepoPath::from_internal_string(".gitignore");
let added_path = RepoPath::from_internal_string("added");
let modified_path = RepoPath::from_internal_string("modified");
let removed_path = RepoPath::from_internal_string("removed");
let ignored_path = RepoPath::from_internal_string("ignored");
let subdir_modified_path = RepoPath::from_internal_string("dir/modified");
let subdir_ignored_path = RepoPath::from_internal_string("dir/ignored");
let gitignore_path = &RepoPath::from_internal_string(".gitignore");
let added_path = &RepoPath::from_internal_string("added");
let modified_path = &RepoPath::from_internal_string("modified");
let removed_path = &RepoPath::from_internal_string("removed");
let ignored_path = &RepoPath::from_internal_string("ignored");
let subdir_modified_path = &RepoPath::from_internal_string("dir/modified");
let subdir_ignored_path = &RepoPath::from_internal_string("dir/ignored");
testutils::write_working_copy_file(&workspace_root, &gitignore_path, "ignored\n");
testutils::write_working_copy_file(&workspace_root, &modified_path, "1");
testutils::write_working_copy_file(&workspace_root, &removed_path, "1");
testutils::write_working_copy_file(&workspace_root, gitignore_path, "ignored\n");
testutils::write_working_copy_file(&workspace_root, modified_path, "1");
testutils::write_working_copy_file(&workspace_root, removed_path, "1");
std::fs::create_dir(workspace_root.join("dir")).unwrap();
testutils::write_working_copy_file(&workspace_root, &subdir_modified_path, "1");
testutils::write_working_copy_file(&workspace_root, subdir_modified_path, "1");
let tree1 = test_workspace.snapshot().unwrap();
let files1 = tree1.entries().map(|(name, _value)| name).collect_vec();
assert_eq!(
files1,
vec![
gitignore_path.clone(),
subdir_modified_path.clone(),
modified_path.clone(),
removed_path.clone(),
]
to_owned_path_vec(&[
gitignore_path,
subdir_modified_path,
modified_path,
removed_path,
])
);
testutils::write_working_copy_file(
&workspace_root,
&gitignore_path,
gitignore_path,
"ignored\nmodified\nremoved\n",
);
testutils::write_working_copy_file(&workspace_root, &added_path, "2");
testutils::write_working_copy_file(&workspace_root, &modified_path, "2");
testutils::write_working_copy_file(&workspace_root, added_path, "2");
testutils::write_working_copy_file(&workspace_root, modified_path, "2");
std::fs::remove_file(removed_path.to_fs_path(&workspace_root)).unwrap();
testutils::write_working_copy_file(&workspace_root, &ignored_path, "2");
testutils::write_working_copy_file(&workspace_root, &subdir_modified_path, "2");
testutils::write_working_copy_file(&workspace_root, &subdir_ignored_path, "2");
testutils::write_working_copy_file(&workspace_root, ignored_path, "2");
testutils::write_working_copy_file(&workspace_root, subdir_modified_path, "2");
testutils::write_working_copy_file(&workspace_root, subdir_ignored_path, "2");
let tree2 = test_workspace.snapshot().unwrap();
let files2 = tree2.entries().map(|(name, _value)| name).collect_vec();
assert_eq!(
files2,
vec![
to_owned_path_vec(&[
gitignore_path,
added_path,
subdir_modified_path,
modified_path,
]
])
);
}
@ -664,17 +676,17 @@ fn test_gitignores_in_ignored_dir() {
let op_id = test_workspace.repo.op_id().clone();
let workspace_root = test_workspace.workspace.workspace_root().clone();
let gitignore_path = RepoPath::from_internal_string(".gitignore");
let nested_gitignore_path = RepoPath::from_internal_string("ignored/.gitignore");
let ignored_path = RepoPath::from_internal_string("ignored/file");
let gitignore_path = &RepoPath::from_internal_string(".gitignore");
let nested_gitignore_path = &RepoPath::from_internal_string("ignored/.gitignore");
let ignored_path = &RepoPath::from_internal_string("ignored/file");
let tree1 = create_tree(&test_workspace.repo, &[(&gitignore_path, "ignored\n")]);
let tree1 = create_tree(&test_workspace.repo, &[(gitignore_path, "ignored\n")]);
let commit1 = commit_with_tree(test_workspace.repo.store(), tree1.id());
let ws = &mut test_workspace.workspace;
ws.check_out(op_id.clone(), None, &commit1).unwrap();
testutils::write_working_copy_file(&workspace_root, &nested_gitignore_path, "!file\n");
testutils::write_working_copy_file(&workspace_root, &ignored_path, "contents");
testutils::write_working_copy_file(&workspace_root, nested_gitignore_path, "!file\n");
testutils::write_working_copy_file(&workspace_root, ignored_path, "contents");
let new_tree = test_workspace.snapshot().unwrap();
assert_eq!(
@ -686,8 +698,8 @@ fn test_gitignores_in_ignored_dir() {
let tree2 = create_tree(
&test_workspace.repo,
&[
(&gitignore_path, "ignored\n"),
(&nested_gitignore_path, "!file\n"),
(gitignore_path, "ignored\n"),
(nested_gitignore_path, "!file\n"),
],
);
let mut locked_ws = test_workspace
@ -715,13 +727,13 @@ fn test_gitignores_checkout_never_overwrites_ignored() {
let workspace_root = test_workspace.workspace.workspace_root().clone();
// Write an ignored file called "modified" to disk
let gitignore_path = RepoPath::from_internal_string(".gitignore");
testutils::write_working_copy_file(&workspace_root, &gitignore_path, "modified\n");
let modified_path = RepoPath::from_internal_string("modified");
testutils::write_working_copy_file(&workspace_root, &modified_path, "garbage");
let gitignore_path = &RepoPath::from_internal_string(".gitignore");
testutils::write_working_copy_file(&workspace_root, gitignore_path, "modified\n");
let modified_path = &RepoPath::from_internal_string("modified");
testutils::write_working_copy_file(&workspace_root, modified_path, "garbage");
// Create a tree that adds the same file but with different contents
let tree = create_tree(repo, &[(&modified_path, "contents")]);
let tree = create_tree(repo, &[(modified_path, "contents")]);
let commit = commit_with_tree(repo.store(), tree.id());
// Now check out the tree that adds the file "modified" with contents
@ -746,17 +758,17 @@ fn test_gitignores_ignored_directory_already_tracked() {
let workspace_root = test_workspace.workspace.workspace_root().clone();
let repo = test_workspace.repo.clone();
let gitignore_path = RepoPath::from_internal_string(".gitignore");
let unchanged_path = RepoPath::from_internal_string("ignored/unchanged");
let modified_path = RepoPath::from_internal_string("ignored/modified");
let deleted_path = RepoPath::from_internal_string("ignored/deleted");
let gitignore_path = &RepoPath::from_internal_string(".gitignore");
let unchanged_path = &RepoPath::from_internal_string("ignored/unchanged");
let modified_path = &RepoPath::from_internal_string("ignored/modified");
let deleted_path = &RepoPath::from_internal_string("ignored/deleted");
let tree = create_tree(
&repo,
&[
(&gitignore_path, "/ignored/\n"),
(&unchanged_path, "contents"),
(&modified_path, "contents"),
(&deleted_path, "contents"),
(gitignore_path, "/ignored/\n"),
(unchanged_path, "contents"),
(modified_path, "contents"),
(deleted_path, "contents"),
],
);
let commit = commit_with_tree(repo.store(), tree.id());
@ -774,9 +786,9 @@ fn test_gitignores_ignored_directory_already_tracked() {
let expected_tree = create_tree(
&repo,
&[
(&gitignore_path, "/ignored/\n"),
(&unchanged_path, "contents"),
(&modified_path, "modified"),
(gitignore_path, "/ignored/\n"),
(unchanged_path, "contents"),
(modified_path, "modified"),
],
);
assert_eq!(
@ -831,14 +843,14 @@ fn test_gitsubmodule() {
let mut tree_builder = store.tree_builder(store.empty_tree_id().clone());
let added_path = RepoPath::from_internal_string("added");
let submodule_path = RepoPath::from_internal_string("submodule");
let added_submodule_path = RepoPath::from_internal_string("submodule/added");
let added_path = &RepoPath::from_internal_string("added");
let submodule_path = &RepoPath::from_internal_string("submodule");
let added_submodule_path = &RepoPath::from_internal_string("submodule/added");
tree_builder.set(
added_path.clone(),
added_path.to_owned(),
TreeValue::File {
id: testutils::write_file(repo.store(), &added_path, "added\n"),
id: testutils::write_file(repo.store(), added_path, "added\n"),
executable: false,
},
);
@ -848,7 +860,7 @@ fn test_gitsubmodule() {
tx.commit();
tree_builder.set(
submodule_path.clone(),
submodule_path.to_owned(),
TreeValue::GitSubmodule(submodule_id),
);
@ -862,7 +874,7 @@ fn test_gitsubmodule() {
testutils::write_working_copy_file(
&workspace_root,
&added_submodule_path,
added_submodule_path,
"i am a file in a submodule\n",
);
@ -890,8 +902,8 @@ fn test_existing_directory_symlink() {
// Creates a symlink in working directory, and a tree that will add a file under
// the symlinked directory.
std::os::unix::fs::symlink("..", workspace_root.join("parent")).unwrap();
let file_path = RepoPath::from_internal_string("parent/escaped");
let tree = create_tree(repo, &[(&file_path, "contents")]);
let file_path = &RepoPath::from_internal_string("parent/escaped");
let tree = create_tree(repo, &[(file_path, "contents")]);
let commit = commit_with_tree(repo.store(), tree.id());
// Checkout should fail because "parent" already exists and is a symlink.
@ -912,20 +924,20 @@ fn test_fsmonitor() {
let ws = &mut test_workspace.workspace;
assert_eq!(
ws.working_copy().sparse_patterns().unwrap(),
vec![RepoPath::root()]
vec![RepoPathBuf::root()]
);
let foo_path = RepoPath::from_internal_string("foo");
let bar_path = RepoPath::from_internal_string("bar");
let nested_path = RepoPath::from_internal_string("path/to/nested");
testutils::write_working_copy_file(&workspace_root, &foo_path, "foo\n");
testutils::write_working_copy_file(&workspace_root, &bar_path, "bar\n");
testutils::write_working_copy_file(&workspace_root, &nested_path, "nested\n");
let foo_path = &RepoPath::from_internal_string("foo");
let bar_path = &RepoPath::from_internal_string("bar");
let nested_path = &RepoPath::from_internal_string("path/to/nested");
testutils::write_working_copy_file(&workspace_root, foo_path, "foo\n");
testutils::write_working_copy_file(&workspace_root, bar_path, "bar\n");
testutils::write_working_copy_file(&workspace_root, nested_path, "nested\n");
let ignored_path = RepoPath::from_internal_string("path/to/ignored");
let gitignore_path = RepoPath::from_internal_string("path/.gitignore");
testutils::write_working_copy_file(&workspace_root, &ignored_path, "ignored\n");
testutils::write_working_copy_file(&workspace_root, &gitignore_path, "to/ignored\n");
let ignored_path = &RepoPath::from_internal_string("path/to/ignored");
let gitignore_path = &RepoPath::from_internal_string("path/.gitignore");
testutils::write_working_copy_file(&workspace_root, ignored_path, "ignored\n");
testutils::write_working_copy_file(&workspace_root, gitignore_path, "to/ignored\n");
let snapshot = |locked_ws: &mut LockedWorkspace, paths: &[&RepoPath]| {
let fs_paths = paths.iter().map(|p| p.to_fs_path(Path::new(""))).collect();
@ -948,7 +960,7 @@ fn test_fsmonitor() {
{
let mut locked_ws = ws.start_working_copy_mutation().unwrap();
let tree_id = snapshot(&mut locked_ws, &[&foo_path]);
let tree_id = snapshot(&mut locked_ws, &[foo_path]);
insta::assert_snapshot!(testutils::dump_tree(repo.store(), &tree_id), @r###"
tree d5e38c0a1b0ee5de47c5
file "foo" (e99c2057c15160add351): "foo\n"
@ -959,7 +971,7 @@ fn test_fsmonitor() {
let mut locked_ws = ws.start_working_copy_mutation().unwrap();
let tree_id = snapshot(
&mut locked_ws,
&[&foo_path, &bar_path, &nested_path, &ignored_path],
&[foo_path, bar_path, nested_path, ignored_path],
);
insta::assert_snapshot!(testutils::dump_tree(repo.store(), &tree_id), @r###"
tree f408c8d080414f8e90e1
@ -971,10 +983,10 @@ fn test_fsmonitor() {
}
{
testutils::write_working_copy_file(&workspace_root, &foo_path, "updated foo\n");
testutils::write_working_copy_file(&workspace_root, &bar_path, "updated bar\n");
testutils::write_working_copy_file(&workspace_root, foo_path, "updated foo\n");
testutils::write_working_copy_file(&workspace_root, bar_path, "updated bar\n");
let mut locked_ws = ws.start_working_copy_mutation().unwrap();
let tree_id = snapshot(&mut locked_ws, &[&foo_path]);
let tree_id = snapshot(&mut locked_ws, &[foo_path]);
insta::assert_snapshot!(testutils::dump_tree(repo.store(), &tree_id), @r###"
tree e994a93c46f41dc91704
file "bar" (94cc973e7e1aefb7eff6): "bar\n"
@ -986,7 +998,7 @@ fn test_fsmonitor() {
{
std::fs::remove_file(foo_path.to_fs_path(&workspace_root)).unwrap();
let mut locked_ws = ws.start_working_copy_mutation().unwrap();
let tree_id = snapshot(&mut locked_ws, &[&foo_path]);
let tree_id = snapshot(&mut locked_ws, &[foo_path]);
insta::assert_snapshot!(testutils::dump_tree(repo.store(), &tree_id), @r###"
tree 1df764981d4d74a4ecfa
file "bar" (94cc973e7e1aefb7eff6): "bar\n"
@ -1009,8 +1021,8 @@ fn test_snapshot_max_new_file_size() {
);
let mut test_workspace = TestWorkspace::init(&settings);
let workspace_root = test_workspace.workspace.workspace_root().clone();
let small_path = RepoPath::from_internal_string("small");
let large_path = RepoPath::from_internal_string("large");
let small_path = &RepoPath::from_internal_string("small");
let large_path = &RepoPath::from_internal_string("large");
std::fs::write(small_path.to_fs_path(&workspace_root), vec![0; 1024]).unwrap();
test_workspace
.snapshot()

View file

@ -17,7 +17,7 @@ use std::thread;
use assert_matches::assert_matches;
use jj_lib::repo::Repo;
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::{RepoPath, RepoPathBuf};
use jj_lib::working_copy::{CheckoutError, SnapshotOptions};
use jj_lib::workspace::{default_working_copy_factories, Workspace};
use testutils::{commit_with_tree, create_tree, write_working_copy_file, TestRepo, TestWorkspace};
@ -87,7 +87,7 @@ fn test_checkout_parallel() {
let num_threads = max(num_cpus::get(), 4);
let mut tree_ids = vec![];
for i in 0..num_threads {
let path = RepoPath::from_internal_string(format!("file{i}"));
let path = RepoPathBuf::from_internal_string(format!("file{i}"));
let tree = create_tree(repo, &[(&path, "contents")]);
tree_ids.push(tree.id());
}
@ -147,8 +147,8 @@ fn test_racy_checkout() {
let op_id = repo.op_id().clone();
let workspace_root = test_workspace.workspace.workspace_root().clone();
let path = RepoPath::from_internal_string("file");
let tree = create_tree(repo, &[(&path, "1")]);
let path = &RepoPath::from_internal_string("file");
let tree = create_tree(repo, &[(path, "1")]);
let commit = commit_with_tree(repo.store(), tree.id());
let mut num_matches = 0;
@ -161,13 +161,13 @@ fn test_racy_checkout() {
);
// A file written right after checkout (hopefully, from the test's perspective,
// within the file system timestamp granularity) is detected as changed.
write_working_copy_file(&workspace_root, &path, "x");
write_working_copy_file(&workspace_root, path, "x");
let modified_tree = test_workspace.snapshot().unwrap();
if modified_tree.id() == tree.id() {
num_matches += 1;
}
// Reset the state for the next round
write_working_copy_file(&workspace_root, &path, "1");
write_working_copy_file(&workspace_root, path, "1");
}
assert_eq!(num_matches, 0);
}

View file

@ -16,10 +16,14 @@ use itertools::Itertools;
use jj_lib::local_working_copy::LocalWorkingCopy;
use jj_lib::matchers::EverythingMatcher;
use jj_lib::repo::Repo;
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::{RepoPath, RepoPathBuf};
use jj_lib::working_copy::{CheckoutStats, WorkingCopy};
use testutils::{commit_with_tree, create_tree, TestWorkspace};
fn to_owned_path_vec(paths: &[&RepoPath]) -> Vec<RepoPathBuf> {
paths.iter().map(|&path| path.to_owned()).collect()
}
#[test]
fn test_sparse_checkout() {
let settings = testutils::user_settings();
@ -27,25 +31,25 @@ fn test_sparse_checkout() {
let repo = &test_workspace.repo;
let working_copy_path = test_workspace.workspace.workspace_root().clone();
let root_file1_path = RepoPath::from_internal_string("file1");
let root_file2_path = RepoPath::from_internal_string("file2");
let dir1_path = RepoPath::from_internal_string("dir1");
let dir1_file1_path = RepoPath::from_internal_string("dir1/file1");
let dir1_file2_path = RepoPath::from_internal_string("dir1/file2");
let dir1_subdir1_path = RepoPath::from_internal_string("dir1/subdir1");
let dir1_subdir1_file1_path = RepoPath::from_internal_string("dir1/subdir1/file1");
let dir2_path = RepoPath::from_internal_string("dir2");
let dir2_file1_path = RepoPath::from_internal_string("dir2/file1");
let root_file1_path = &RepoPath::from_internal_string("file1");
let root_file2_path = &RepoPath::from_internal_string("file2");
let dir1_path = &RepoPath::from_internal_string("dir1");
let dir1_file1_path = &RepoPath::from_internal_string("dir1/file1");
let dir1_file2_path = &RepoPath::from_internal_string("dir1/file2");
let dir1_subdir1_path = &RepoPath::from_internal_string("dir1/subdir1");
let dir1_subdir1_file1_path = &RepoPath::from_internal_string("dir1/subdir1/file1");
let dir2_path = &RepoPath::from_internal_string("dir2");
let dir2_file1_path = &RepoPath::from_internal_string("dir2/file1");
let tree = create_tree(
repo,
&[
(&root_file1_path, "contents"),
(&root_file2_path, "contents"),
(&dir1_file1_path, "contents"),
(&dir1_file2_path, "contents"),
(&dir1_subdir1_file1_path, "contents"),
(&dir2_file1_path, "contents"),
(root_file1_path, "contents"),
(root_file2_path, "contents"),
(dir1_file1_path, "contents"),
(dir1_file2_path, "contents"),
(dir1_subdir1_file1_path, "contents"),
(dir2_file1_path, "contents"),
],
);
let commit = commit_with_tree(repo.store(), tree.id());
@ -58,7 +62,7 @@ fn test_sparse_checkout() {
// Set sparse patterns to only dir1/
let mut locked_ws = ws.start_working_copy_mutation().unwrap();
let sparse_patterns = vec![dir1_path];
let sparse_patterns = to_owned_path_vec(&[dir1_path]);
let stats = locked_ws
.locked_wc()
.set_sparse_patterns(sparse_patterns.clone())
@ -89,8 +93,12 @@ fn test_sparse_checkout() {
locked_ws.finish(repo.op_id().clone()).unwrap();
let wc: &LocalWorkingCopy = ws.working_copy().as_any().downcast_ref().unwrap();
assert_eq!(
wc.file_states().unwrap().keys().collect_vec(),
vec![&dir1_file1_path, &dir1_file2_path, &dir1_subdir1_file1_path]
wc.file_states()
.unwrap()
.keys()
.map(AsRef::as_ref)
.collect_vec(),
vec![dir1_file1_path, dir1_file2_path, dir1_subdir1_file1_path]
);
assert_eq!(wc.sparse_patterns().unwrap(), sparse_patterns);
@ -101,14 +109,18 @@ fn test_sparse_checkout() {
wc.state_path().to_path_buf(),
);
assert_eq!(
wc.file_states().unwrap().keys().collect_vec(),
vec![&dir1_file1_path, &dir1_file2_path, &dir1_subdir1_file1_path]
wc.file_states()
.unwrap()
.keys()
.map(AsRef::as_ref)
.collect_vec(),
vec![dir1_file1_path, dir1_file2_path, dir1_subdir1_file1_path]
);
assert_eq!(wc.sparse_patterns().unwrap(), sparse_patterns);
// Set sparse patterns to file2, dir1/subdir1/ and dir2/
let mut locked_wc = wc.start_mutation().unwrap();
let sparse_patterns = vec![root_file1_path.clone(), dir1_subdir1_path, dir2_path];
let sparse_patterns = to_owned_path_vec(&[root_file1_path, dir1_subdir1_path, dir2_path]);
let stats = locked_wc
.set_sparse_patterns(sparse_patterns.clone())
.unwrap();
@ -133,8 +145,12 @@ fn test_sparse_checkout() {
let wc = locked_wc.finish(repo.op_id().clone()).unwrap();
let wc: &LocalWorkingCopy = wc.as_any().downcast_ref().unwrap();
assert_eq!(
wc.file_states().unwrap().keys().collect_vec(),
vec![&dir1_subdir1_file1_path, &dir2_file1_path, &root_file1_path]
wc.file_states()
.unwrap()
.keys()
.map(AsRef::as_ref)
.collect_vec(),
vec![dir1_subdir1_file1_path, dir2_file1_path, root_file1_path]
);
}
@ -147,18 +163,18 @@ fn test_sparse_commit() {
let op_id = repo.op_id().clone();
let working_copy_path = test_workspace.workspace.workspace_root().clone();
let root_file1_path = RepoPath::from_internal_string("file1");
let dir1_path = RepoPath::from_internal_string("dir1");
let dir1_file1_path = RepoPath::from_internal_string("dir1/file1");
let dir2_path = RepoPath::from_internal_string("dir2");
let dir2_file1_path = RepoPath::from_internal_string("dir2/file1");
let root_file1_path = &RepoPath::from_internal_string("file1");
let dir1_path = &RepoPath::from_internal_string("dir1");
let dir1_file1_path = &RepoPath::from_internal_string("dir1/file1");
let dir2_path = &RepoPath::from_internal_string("dir2");
let dir2_file1_path = &RepoPath::from_internal_string("dir2/file1");
let tree = create_tree(
repo,
&[
(&root_file1_path, "contents"),
(&dir1_file1_path, "contents"),
(&dir2_file1_path, "contents"),
(root_file1_path, "contents"),
(dir1_file1_path, "contents"),
(dir2_file1_path, "contents"),
],
);
@ -173,7 +189,7 @@ fn test_sparse_commit() {
.workspace
.start_working_copy_mutation()
.unwrap();
let sparse_patterns = vec![dir1_path.clone()];
let sparse_patterns = to_owned_path_vec(&[dir1_path]);
locked_ws
.locked_wc()
.set_sparse_patterns(sparse_patterns)
@ -192,14 +208,14 @@ fn test_sparse_commit() {
let modified_tree = test_workspace.snapshot().unwrap();
let diff = tree.diff(&modified_tree, &EverythingMatcher).collect_vec();
assert_eq!(diff.len(), 1);
assert_eq!(diff[0].0, dir1_file1_path);
assert_eq!(diff[0].0.as_ref(), dir1_file1_path);
// Set sparse patterns to also include dir2/
let mut locked_ws = test_workspace
.workspace
.start_working_copy_mutation()
.unwrap();
let sparse_patterns = vec![dir1_path, dir2_path];
let sparse_patterns = to_owned_path_vec(&[dir1_path, dir2_path]);
locked_ws
.locked_wc()
.set_sparse_patterns(sparse_patterns)
@ -211,8 +227,8 @@ fn test_sparse_commit() {
let modified_tree = test_workspace.snapshot().unwrap();
let diff = tree.diff(&modified_tree, &EverythingMatcher).collect_vec();
assert_eq!(diff.len(), 2);
assert_eq!(diff[0].0, dir1_file1_path);
assert_eq!(diff[1].0, dir2_file1_path);
assert_eq!(diff[0].0.as_ref(), dir1_file1_path);
assert_eq!(diff[1].0.as_ref(), dir2_file1_path);
}
#[test]
@ -223,16 +239,16 @@ fn test_sparse_commit_gitignore() {
let repo = &test_workspace.repo;
let working_copy_path = test_workspace.workspace.workspace_root().clone();
let dir1_path = RepoPath::from_internal_string("dir1");
let dir1_file1_path = RepoPath::from_internal_string("dir1/file1");
let dir1_file2_path = RepoPath::from_internal_string("dir1/file2");
let dir1_path = &RepoPath::from_internal_string("dir1");
let dir1_file1_path = &RepoPath::from_internal_string("dir1/file1");
let dir1_file2_path = &RepoPath::from_internal_string("dir1/file2");
// Set sparse patterns to only dir1/
let mut locked_ws = test_workspace
.workspace
.start_working_copy_mutation()
.unwrap();
let sparse_patterns = vec![dir1_path.clone()];
let sparse_patterns = to_owned_path_vec(&[dir1_path]);
locked_ws
.locked_wc()
.set_sparse_patterns(sparse_patterns)
@ -250,5 +266,5 @@ fn test_sparse_commit_gitignore() {
let modified_tree = test_workspace.snapshot().unwrap();
let entries = modified_tree.entries().collect_vec();
assert_eq!(entries.len(), 1);
assert_eq!(entries[0].0, dir1_file2_path);
assert_eq!(entries[0].0.as_ref(), dir1_file2_path);
}

View file

@ -195,11 +195,11 @@ fn test_executable() {
let write_tree = |files: &[(&str, bool)]| -> Tree {
let mut tree_builder = store.tree_builder(store.empty_tree_id().clone());
for &(path, executable) in files {
let repo_path = RepoPath::from_internal_string(path);
let repo_path = &RepoPath::from_internal_string(path);
if executable {
testutils::write_executable_file(&mut tree_builder, &repo_path, "contents");
testutils::write_executable_file(&mut tree_builder, repo_path, "contents");
} else {
testutils::write_normal_file(&mut tree_builder, &repo_path, "contents");
testutils::write_normal_file(&mut tree_builder, repo_path, "contents");
}
}
let tree_id = tree_builder.write_tree();
@ -457,8 +457,8 @@ fn test_simplify_conflict() {
let store = repo.store();
let component = RepoPathComponent::new("file");
let path = RepoPath::from_internal_string("file");
let write_tree = |contents: &str| -> Tree { create_single_tree(repo, &[(&path, contents)]) };
let path = &RepoPath::from_internal_string("file");
let write_tree = |contents: &str| -> Tree { create_single_tree(repo, &[(path, contents)]) };
let base_tree = write_tree("base contents");
let branch_tree = write_tree("branch contents");
@ -515,7 +515,7 @@ fn test_simplify_conflict() {
merge_trees(&upstream2_tree, &upstream1_tree, &rebased1_tree).unwrap();
match further_rebased_tree.value(component).unwrap() {
TreeValue::Conflict(id) => {
let conflict = store.read_conflict(&path, id).unwrap();
let conflict = store.read_conflict(path, id).unwrap();
assert_eq!(
conflict.removes().map(|v| v.as_ref()).collect_vec(),
vec![base_tree.value(component)]
@ -550,9 +550,9 @@ fn test_simplify_conflict_after_resolving_parent() {
// which creates a conflict. We resolve the conflict in the first line and
// rebase C2 (the rebased C) onto the resolved conflict. C3 should not have
// a conflict since it changed an unrelated line.
let path = RepoPath::from_internal_string("dir/file");
let path = &RepoPath::from_internal_string("dir/file");
let mut tx = repo.start_transaction(&settings, "test");
let tree_a = create_tree(repo, &[(&path, "abc\ndef\nghi\n")]);
let tree_a = create_tree(repo, &[(path, "abc\ndef\nghi\n")]);
let commit_a = tx
.mut_repo()
.new_commit(
@ -562,19 +562,19 @@ fn test_simplify_conflict_after_resolving_parent() {
)
.write()
.unwrap();
let tree_b = create_tree(repo, &[(&path, "Abc\ndef\nghi\n")]);
let tree_b = create_tree(repo, &[(path, "Abc\ndef\nghi\n")]);
let commit_b = tx
.mut_repo()
.new_commit(&settings, vec![commit_a.id().clone()], tree_b.id())
.write()
.unwrap();
let tree_c = create_tree(repo, &[(&path, "Abc\ndef\nGhi\n")]);
let tree_c = create_tree(repo, &[(path, "Abc\ndef\nGhi\n")]);
let commit_c = tx
.mut_repo()
.new_commit(&settings, vec![commit_b.id().clone()], tree_c.id())
.write()
.unwrap();
let tree_d = create_tree(repo, &[(&path, "abC\ndef\nghi\n")]);
let tree_d = create_tree(repo, &[(path, "abC\ndef\nghi\n")]);
let commit_d = tx
.mut_repo()
.new_commit(&settings, vec![commit_a.id().clone()], tree_d.id())
@ -588,11 +588,11 @@ fn test_simplify_conflict_after_resolving_parent() {
// Test the setup: Both B and C should have conflicts.
let tree_b2 = commit_b2.tree().unwrap();
let tree_c2 = commit_b2.tree().unwrap();
assert!(!tree_b2.path_value(&path).is_resolved());
assert!(!tree_c2.path_value(&path).is_resolved());
assert!(!tree_b2.path_value(path).is_resolved());
assert!(!tree_c2.path_value(path).is_resolved());
// Create the resolved B and rebase C on top.
let tree_b3 = create_tree(repo, &[(&path, "AbC\ndef\nghi\n")]);
let tree_b3 = create_tree(repo, &[(path, "AbC\ndef\nghi\n")]);
let commit_b3 = tx
.mut_repo()
.rewrite_commit(&settings, &commit_b2)
@ -605,14 +605,14 @@ fn test_simplify_conflict_after_resolving_parent() {
// The conflict should now be resolved.
let tree_c2 = commit_c3.tree().unwrap();
let resolved_value = tree_c2.path_value(&path);
let resolved_value = tree_c2.path_value(path);
match resolved_value.into_resolved() {
Ok(Some(TreeValue::File {
id,
executable: false,
})) => {
assert_eq!(
testutils::read_file(repo.store(), &path, &id),
testutils::read_file(repo.store(), path, &id),
b"AbC\ndef\nGhi\n"
);
}

File diff suppressed because it is too large Load diff

View file

@ -2583,30 +2583,30 @@ fn test_evaluate_expression_file() {
let mut tx = repo.start_transaction(&settings, "test");
let mut_repo = tx.mut_repo();
let added_clean_clean = RepoPath::from_internal_string("added_clean_clean");
let added_modified_clean = RepoPath::from_internal_string("added_modified_clean");
let added_modified_removed = RepoPath::from_internal_string("added_modified_removed");
let added_clean_clean = &RepoPath::from_internal_string("added_clean_clean");
let added_modified_clean = &RepoPath::from_internal_string("added_modified_clean");
let added_modified_removed = &RepoPath::from_internal_string("added_modified_removed");
let tree1 = create_tree(
repo,
&[
(&added_clean_clean, "1"),
(&added_modified_clean, "1"),
(&added_modified_removed, "1"),
(added_clean_clean, "1"),
(added_modified_clean, "1"),
(added_modified_removed, "1"),
],
);
let tree2 = create_tree(
repo,
&[
(&added_clean_clean, "1"),
(&added_modified_clean, "2"),
(&added_modified_removed, "2"),
(added_clean_clean, "1"),
(added_modified_clean, "2"),
(added_modified_removed, "2"),
],
);
let tree3 = create_tree(
repo,
&[
(&added_clean_clean, "1"),
(&added_modified_clean, "2"),
(added_clean_clean, "1"),
(added_modified_clean, "2"),
// added_modified_removed,
],
);
@ -2634,18 +2634,20 @@ fn test_evaluate_expression_file() {
let resolve = |file_path: &RepoPath| -> Vec<CommitId> {
let mut_repo = &*mut_repo;
let expression =
RevsetExpression::filter(RevsetFilterPredicate::File(Some(vec![file_path.clone()])));
RevsetExpression::filter(RevsetFilterPredicate::File(Some(
vec![file_path.to_owned()],
)));
let revset = expression.evaluate_programmatic(mut_repo).unwrap();
revset.iter().collect()
};
assert_eq!(resolve(&added_clean_clean), vec![commit1.id().clone()]);
assert_eq!(resolve(added_clean_clean), vec![commit1.id().clone()]);
assert_eq!(
resolve(&added_modified_clean),
resolve(added_modified_clean),
vec![commit2.id().clone(), commit1.id().clone()]
);
assert_eq!(
resolve(&added_modified_removed),
resolve(added_modified_removed),
vec![
commit3.id().clone(),
commit2.id().clone(),
@ -2690,11 +2692,11 @@ fn test_evaluate_expression_conflict() {
let mut_repo = tx.mut_repo();
// Create a few trees, including one with a conflict in `file1`
let file_path1 = RepoPath::from_internal_string("file1");
let file_path2 = RepoPath::from_internal_string("file2");
let tree1 = create_tree(repo, &[(&file_path1, "1"), (&file_path2, "1")]);
let tree2 = create_tree(repo, &[(&file_path1, "2"), (&file_path2, "2")]);
let tree3 = create_tree(repo, &[(&file_path1, "3"), (&file_path2, "1")]);
let file_path1 = &RepoPath::from_internal_string("file1");
let file_path2 = &RepoPath::from_internal_string("file2");
let tree1 = create_tree(repo, &[(file_path1, "1"), (file_path2, "1")]);
let tree2 = create_tree(repo, &[(file_path1, "2"), (file_path2, "2")]);
let tree3 = create_tree(repo, &[(file_path1, "3"), (file_path2, "1")]);
let tree4 = tree2.merge(&tree1, &tree3).unwrap();
let mut create_commit = |parent_ids, tree_id| {

View file

@ -34,17 +34,14 @@ fn test_restore_tree() {
let test_repo = TestRepo::init();
let repo = &test_repo.repo;
let path1 = RepoPath::from_internal_string("file1");
let path2 = RepoPath::from_internal_string("dir1/file2");
let path3 = RepoPath::from_internal_string("dir1/file3");
let path4 = RepoPath::from_internal_string("dir2/file4");
let left = create_tree(
repo,
&[(&path2, "left"), (&path3, "left"), (&path4, "left")],
);
let path1 = &RepoPath::from_internal_string("file1");
let path2 = &RepoPath::from_internal_string("dir1/file2");
let path3 = &RepoPath::from_internal_string("dir1/file3");
let path4 = &RepoPath::from_internal_string("dir2/file4");
let left = create_tree(repo, &[(path2, "left"), (path3, "left"), (path4, "left")]);
let right = create_tree(
repo,
&[(&path1, "right"), (&path2, "right"), (&path3, "right")],
&[(path1, "right"), (path2, "right"), (path3, "right")],
);
// Restore everything using EverythingMatcher
@ -61,8 +58,8 @@ fn test_restore_tree() {
assert_eq!(restored, left.id());
// Restore some files
let restored = restore_tree(&left, &right, &FilesMatcher::new([&path1, &path2])).unwrap();
let expected = create_tree(repo, &[(&path2, "left"), (&path3, "right")]);
let restored = restore_tree(&left, &right, &FilesMatcher::new([path1, path2])).unwrap();
let expected = create_tree(repo, &[(path2, "left"), (path3, "right")]);
assert_eq!(restored, expected.id());
}
@ -880,8 +877,8 @@ fn test_rebase_descendants_contents() {
// |/
// A
let mut tx = repo.start_transaction(&settings, "test");
let path1 = RepoPath::from_internal_string("file1");
let tree1 = create_tree(repo, &[(&path1, "content")]);
let path1 = &RepoPath::from_internal_string("file1");
let tree1 = create_tree(repo, &[(path1, "content")]);
let commit_a = tx
.mut_repo()
.new_commit(
@ -891,22 +888,22 @@ fn test_rebase_descendants_contents() {
)
.write()
.unwrap();
let path2 = RepoPath::from_internal_string("file2");
let tree2 = create_tree(repo, &[(&path2, "content")]);
let path2 = &RepoPath::from_internal_string("file2");
let tree2 = create_tree(repo, &[(path2, "content")]);
let commit_b = tx
.mut_repo()
.new_commit(&settings, vec![commit_a.id().clone()], tree2.id())
.write()
.unwrap();
let path3 = RepoPath::from_internal_string("file3");
let tree3 = create_tree(repo, &[(&path3, "content")]);
let path3 = &RepoPath::from_internal_string("file3");
let tree3 = create_tree(repo, &[(path3, "content")]);
let commit_c = tx
.mut_repo()
.new_commit(&settings, vec![commit_b.id().clone()], tree3.id())
.write()
.unwrap();
let path4 = RepoPath::from_internal_string("file4");
let tree4 = create_tree(repo, &[(&path4, "content")]);
let path4 = &RepoPath::from_internal_string("file4");
let tree4 = create_tree(repo, &[(path4, "content")]);
let commit_d = tx
.mut_repo()
.new_commit(&settings, vec![commit_a.id().clone()], tree4.id())
@ -933,9 +930,9 @@ fn test_rebase_descendants_contents() {
let tree_c = commit_c.tree().unwrap();
let tree_d = commit_d.tree().unwrap();
let new_tree_c = new_commit_c.tree().unwrap();
assert_eq!(new_tree_c.path_value(&path3), tree_c.path_value(&path3));
assert_eq!(new_tree_c.path_value(&path4), tree_d.path_value(&path4));
assert_ne!(new_tree_c.path_value(&path2), tree_b.path_value(&path2));
assert_eq!(new_tree_c.path_value(path3), tree_c.path_value(path3));
assert_eq!(new_tree_c.path_value(path4), tree_d.path_value(path4));
assert_ne!(new_tree_c.path_value(path2), tree_b.path_value(path2));
}
#[test]

View file

@ -29,7 +29,7 @@ use jj_lib::git_backend::GitBackend;
use jj_lib::local_backend::LocalBackend;
use jj_lib::merged_tree::MergedTree;
use jj_lib::repo::{MutableRepo, ReadonlyRepo, Repo, RepoLoader, StoreFactories};
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::{RepoPath, RepoPathBuf};
use jj_lib::rewrite::RebasedDescendant;
use jj_lib::settings::UserSettings;
use jj_lib::store::Store;
@ -256,7 +256,7 @@ pub fn write_normal_file(
) -> FileId {
let id = write_file(tree_builder.store(), path, contents);
tree_builder.set(
path.clone(),
path.to_owned(),
TreeValue::File {
id: id.clone(),
executable: false,
@ -268,7 +268,7 @@ pub fn write_normal_file(
pub fn write_executable_file(tree_builder: &mut TreeBuilder, path: &RepoPath, contents: &str) {
let id = write_file(tree_builder.store(), path, contents);
tree_builder.set(
path.clone(),
path.to_owned(),
TreeValue::File {
id,
executable: true,
@ -278,7 +278,7 @@ pub fn write_executable_file(tree_builder: &mut TreeBuilder, path: &RepoPath, co
pub fn write_symlink(tree_builder: &mut TreeBuilder, path: &RepoPath, target: &str) {
let id = tree_builder.store().write_symlink(path, target).unwrap();
tree_builder.set(path.clone(), TreeValue::Symlink(id));
tree_builder.set(path.to_owned(), TreeValue::Symlink(id));
}
pub fn create_single_tree(repo: &Arc<ReadonlyRepo>, path_contents: &[(&RepoPath, &str)]) -> Tree {
@ -298,7 +298,7 @@ pub fn create_tree(repo: &Arc<ReadonlyRepo>, path_contents: &[(&RepoPath, &str)]
#[must_use]
pub fn create_random_tree(repo: &Arc<ReadonlyRepo>) -> MergedTreeId {
let number = rand::random::<u32>();
let path = RepoPath::from_internal_string(format!("file{number}"));
let path = RepoPathBuf::from_internal_string(format!("file{number}"));
create_tree(repo, &[(&path, "contents")]).id()
}

View file

@ -24,7 +24,7 @@ use jj_lib::backend::{
make_root_commit, Backend, BackendError, BackendResult, ChangeId, Commit, CommitId, Conflict,
ConflictId, FileId, ObjectId, SecureSig, SigningFn, SymlinkId, Tree, TreeId,
};
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::{RepoPath, RepoPathBuf};
const HASH_LENGTH: usize = 10;
const CHANGE_ID_LENGTH: usize = 16;
@ -39,10 +39,10 @@ fn backend_data() -> &'static Mutex<HashMap<PathBuf, Arc<Mutex<TestBackendData>>
#[derive(Default)]
pub struct TestBackendData {
commits: HashMap<CommitId, Commit>,
trees: HashMap<RepoPath, HashMap<TreeId, Tree>>,
files: HashMap<RepoPath, HashMap<FileId, Vec<u8>>>,
symlinks: HashMap<RepoPath, HashMap<SymlinkId, String>>,
conflicts: HashMap<RepoPath, HashMap<ConflictId, Conflict>>,
trees: HashMap<RepoPathBuf, HashMap<TreeId, Tree>>,
files: HashMap<RepoPathBuf, HashMap<FileId, Vec<u8>>>,
symlinks: HashMap<RepoPathBuf, HashMap<SymlinkId, String>>,
conflicts: HashMap<RepoPathBuf, HashMap<ConflictId, Conflict>>,
}
fn get_hash(content: &(impl jj_lib::content_hash::ContentHash + ?Sized)) -> Vec<u8> {
@ -166,7 +166,7 @@ impl Backend for TestBackend {
let id = FileId::new(get_hash(&bytes));
self.locked_data()
.files
.entry(path.clone())
.entry(path.to_owned())
.or_default()
.insert(id.clone(), bytes);
Ok(id)
@ -193,7 +193,7 @@ impl Backend for TestBackend {
let id = SymlinkId::new(get_hash(target.as_bytes()));
self.locked_data()
.symlinks
.entry(path.clone())
.entry(path.to_owned())
.or_default()
.insert(id.clone(), target.to_string());
Ok(id)
@ -223,7 +223,7 @@ impl Backend for TestBackend {
let id = TreeId::new(get_hash(contents));
self.locked_data()
.trees
.entry(path.clone())
.entry(path.to_owned())
.or_default()
.insert(id.clone(), contents.clone());
Ok(id)
@ -250,7 +250,7 @@ impl Backend for TestBackend {
let id = ConflictId::new(get_hash(contents));
self.locked_data()
.conflicts
.entry(path.clone())
.entry(path.to_owned())
.or_default()
.insert(id.clone(), contents.clone());
Ok(id)