ok/jj (forked from mirrors/jj)

view: migrate in-memory structure to per-remote branches map

There's a subtle behavior change. Unlike the original remove_remote_branch(),
the remote_views entry is not discarded when its branches map becomes empty. The
reasoning here is that the remote view can be added/removed when the remote
is added/removed, respectively, though that's not implemented yet. Since the
serialized data cannot represent an empty remote, such a view may generate a
non-unique content hash.
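
For orientation, the structural change can be sketched with simplified stand-in types (not the actual jj_lib definitions, which live in lib/src/op_store.rs): the single branches map, whose BranchTarget bundled a local target with all remote targets, is split into a local_branches map plus a per-remote RemoteView whose branches map holds RemoteRefs.

use std::collections::BTreeMap;

// Simplified stand-ins for the op_store types touched by this commit.
#[derive(Clone, Default)]
struct RefTarget(Option<String>); // commit id(s) in the real type

// Before: one map keyed by branch name, each entry carrying the local
// target together with every remote's target for that branch.
#[derive(Clone, Default)]
struct BranchTarget {
    local_target: RefTarget,
    remote_targets: BTreeMap<String, RefTarget>,
}
#[derive(Default)]
struct ViewBefore {
    branches: BTreeMap<String, BranchTarget>,
}

// After: local branches and per-remote branches live in separate maps,
// so remote data is looked up by remote name first, then branch name.
#[derive(Clone, Default)]
struct RemoteRef {
    target: RefTarget,
}
#[derive(Clone, Default)]
struct RemoteView {
    branches: BTreeMap<String, RemoteRef>,
}
#[derive(Default)]
struct ViewAfter {
    local_branches: BTreeMap<String, RefTarget>,
    remote_views: BTreeMap<String, RemoteView>,
}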
Yuya Nishihara 2023-10-06 00:19:41 +09:00
parent 3ec3cac90b
commit b7c7b19eb8
15 changed files with 192 additions and 276 deletions

View file

@ -222,17 +222,15 @@ fn find_globs(
let glob = glob::Pattern::new(glob_str)?;
let names = view
.branches()
.iter()
.filter_map(|(branch_name, branch_target)| {
if glob.matches(branch_name)
&& (allow_deleted || branch_target.local_target.is_present())
{
Some(branch_name)
Some(branch_name.to_owned())
} else {
None
}
})
.cloned()
.collect_vec();
if names.is_empty() {
failed_globs.push(glob);
@ -385,10 +383,10 @@ fn cmd_branch_list(
let mut formatter = ui.stdout_formatter();
let formatter = formatter.as_mut();
let branches_to_list = view.branches().iter().filter(|&(name, _)| {
let branches_to_list = view.branches().filter(|&(name, _)| {
branch_names_to_list
.as_ref()
.map_or(true, |branch_names| branch_names.contains(name.as_str()))
.map_or(true, |branch_names| branch_names.contains(name))
});
for (name, branch_target) in branches_to_list {
let found_non_git_remote = {

View file

@ -894,7 +894,8 @@ fn cmd_git_push(
// Check if there are conflicts in any commits we're about to push that haven't
// already been pushed.
let mut old_heads = vec![];
for branch_target in repo.view().branches().values() {
// TODO: get list of branches by remote name instead of using .branches()
for (_, branch_target) in repo.view().branches() {
if let Some(old_head) = branch_target.remote_targets.get(&remote) {
old_heads.extend(old_head.added_ids().cloned());
}

View file

@ -1,8 +1,5 @@
use std::collections::{BTreeMap, BTreeSet};
use clap::Subcommand;
use jj_lib::backend::ObjectId;
use jj_lib::op_store::{BranchTarget, RefTarget};
use jj_lib::operation;
use jj_lib::repo::Repo;
@ -179,39 +176,11 @@ fn view_with_desired_portions_restored(
return new_view;
}
let all_branch_names: BTreeSet<_> = itertools::chain(
view_being_restored.branches.keys(),
current_view.branches.keys(),
)
.collect();
let branch_source_view = if what.contains(&UndoWhatToRestore::RemoteTracking) {
view_being_restored
if what.contains(&UndoWhatToRestore::RemoteTracking) {
new_view.remote_views = view_being_restored.remote_views.clone();
} else {
current_view
};
let mut new_branches = BTreeMap::default();
for branch_name in all_branch_names {
let local_target = new_view
.branches
.get(branch_name)
.map(|br| br.local_target.clone())
.unwrap_or_else(RefTarget::absent);
let remote_targets = branch_source_view
.branches
.get(branch_name)
.map(|br| br.remote_targets.clone())
.unwrap_or_default();
if local_target.is_present() || !remote_targets.is_empty() {
new_branches.insert(
branch_name.to_string(),
BranchTarget {
local_target,
remote_targets,
},
);
}
new_view.remote_views = current_view.remote_views.clone();
}
new_view.branches = new_branches;
new_view
}

View file

@ -371,7 +371,7 @@ fn build_branches_index(repo: &dyn Repo) -> RefNamesIndex {
} else if unsynced_remote_targets.peek().is_some() {
format!("{branch_name}*")
} else {
branch_name.clone()
branch_name.to_owned()
};
index.insert(local_target.added_ids(), decorated_name);
}

View file

@ -55,15 +55,15 @@ fn test_concurrent_operations_auto_rebase() {
test_env.jj_cmd_ok(&repo_path, &["describe", "-m", "initial"]);
let stdout = test_env.jj_cmd_success(&repo_path, &["op", "log"]);
insta::assert_snapshot!(stdout, @r###"
@ cfc96ff553b9 test-username@host.example.com 2001-02-03 04:05:08.000 +07:00 - 2001-02-03 04:05:08.000 +07:00
@ 8626e1f87784 test-username@host.example.com 2001-02-03 04:05:08.000 +07:00 - 2001-02-03 04:05:08.000 +07:00
describe commit 123ed18e4c4c0d77428df41112bc02ffc83fb935
args: jj describe -m initial
65a6c90b9544 test-username@host.example.com 2001-02-03 04:05:08.000 +07:00 - 2001-02-03 04:05:08.000 +07:00
80b17428029f test-username@host.example.com 2001-02-03 04:05:08.000 +07:00 - 2001-02-03 04:05:08.000 +07:00
snapshot working copy
args: jj describe -m initial
19b8089fc78b test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
d50e0e495b10 test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
add workspace 'default'
f1c462c494be test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
23b83cc0392f test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
initialize repo
"###);
let op_id_hex = stdout[3..15].to_string();
@ -168,21 +168,21 @@ fn test_concurrent_snapshot_wc_reloadable() {
let template = r#"id ++ "\n" ++ description ++ "\n" ++ tags"#;
let op_log_stdout = test_env.jj_cmd_success(&repo_path, &["op", "log", "-T", template]);
insta::assert_snapshot!(op_log_stdout, @r###"
@ 9be517934aaabc351597e88ed4119aa9454ae3588ab7f28646a810272c82f3dafb1deb20b3c978dbb58ba9abc8f08fe870fe3c7ce5f682411991e83eee40a77f
@ 98aae8ac6dabe0f4e97b0708afd5848f22f0831d8f5be3169063120efe0769b27d123b038f505f2a78d1cd92a0b8345eee2bd201ba6d687fb8c9326dac1fcfaa
commit 323b414dd255b51375d7f4392b7b2641ffe4289f
args: jj commit -m 'new child1'
d967c09eb12b38dad2065a0bc9e251824247f9f84ba406a7356f5405e4c93c21562178a3f00cafedfa1df1435ba496265f39da9d1ccebaccb78bdcb4bd7031e1
13ce1be149ff5fe4386896a753f9321e1826cb6603a6310e50eb778aacbaf7080ca50e7eb3c3ddcacb3e433c619e222fe0f7637dec14551350a03b8a6959f739
snapshot working copy
args: jj commit -m 'new child1'
b6d168ba4fb4534257b6e58d53eb407582567342358eab07cf5a01a7e4d797313b692f27664c2fb7935b2380d398d0298233c9732f821b8c687e35607ea08a55
21126efd140dc442d7663ac963912f664dcde62b397e71bcc1628d914f454c3767a55ae16756a47793303b68099150d4a86344129e32e561f4853f283e745814
commit 3d918700494a9895696e955b85fa05eb0d314cc6
args: jj commit -m initial
5e9e3f82fc14750ff985c5a39f1935ed8876b973b8800b56bc03d1c9754795e724956d862d1fcb2c533d06ca36abc9fa9f7cb7d3b2b64e993e9a87f80d5af670
f29c2903a0a18deefc8323c6e1ac8c1165c238f74ed508180a7ae77b46ba1c1be0ff13d7112d59368d56a3552564476b8595f7c2b3a6dc26aec1e926d0f280e6
snapshot working copy
args: jj commit -m initial
19b8089fc78b7c49171f3c8934248be6f89f52311005e961cab5780f9f138b142456d77b27d223d7ee84d21d8c30c4a80100eaf6735b548b1acd0da688f94c80
d50e0e495b10b2575e2a9b914fed7045e427254baec051bc769d22f607f2f55f09788e6f6c9e12a4e56ec994ac7a63decb1fa7e353b2c8fd20ce77760777283d
add workspace 'default'
f1c462c494be39f6690928603c5393f908866bc8d81d8cd1ae0bb2ea02cb4f78cafa47165fa5b7cda258e2178f846881de199066991960a80954ba6066ba0821
23b83cc0392f51ef6b548af289e54efbe8661fe88ab5d8c036444d8c4ec798fb9d4d6063f02c4db768b17bde4608bcae2d75ba79da91ddd59dc57a23d45b5c57
initialize repo
"###);
let op_log_lines = op_log_stdout.lines().collect_vec();

View file

@ -127,7 +127,7 @@ fn test_debug_operation_id() {
let stdout =
test_env.jj_cmd_success(&workspace_path, &["debug", "operation", "--display", "id"]);
assert_snapshot!(filter_index_stats(&stdout), @r###"
19b8089fc78b7c49171f3c8934248be6f89f52311005e961cab5780f9f138b142456d77b27d223d7ee84d21d8c30c4a80100eaf6735b548b1acd0da688f94c80
d50e0e495b10b2575e2a9b914fed7045e427254baec051bc769d22f607f2f55f09788e6f6c9e12a4e56ec994ac7a63decb1fa7e353b2c8fd20ce77760777283d
"###
);
}

View file

@ -38,12 +38,12 @@ fn test_op_log() {
],
);
insta::assert_snapshot!(&stdout, @r###"
@ 98f7262e4a06 test-username@host.example.com 2001-02-03 04:05:08.000 +07:00 - 2001-02-03 04:05:08.000 +07:00
@ 745ab9998f2f test-username@host.example.com 2001-02-03 04:05:08.000 +07:00 - 2001-02-03 04:05:08.000 +07:00
describe commit 230dd059e1b059aefc0da06a2e5a7dbf22362f22
args: jj describe -m 'description 0'
19b8089fc78b test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
d50e0e495b10 test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
add workspace 'default'
f1c462c494be test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
23b83cc0392f test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
initialize repo
"###);
let op_log_lines = stdout.lines().collect_vec();
@ -139,9 +139,9 @@ fn test_op_log_no_graph() {
let stdout =
test_env.jj_cmd_success(&repo_path, &["op", "log", "--no-graph", "--color=always"]);
insta::assert_snapshot!(stdout, @r###"
19b8089fc78b test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
d50e0e495b10 test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
add workspace 'default'
f1c462c494be test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
23b83cc0392f test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
initialize repo
"###);
}
@ -164,7 +164,7 @@ fn test_op_log_no_graph_null_terminated() {
r#"id.short(4) ++ "\0""#,
],
);
insta::assert_debug_snapshot!(stdout, @r###""c8b0\07277\019b8\0f1c4\0""###);
insta::assert_debug_snapshot!(stdout, @r###""16e6\0fb05\0d50e\023b8\0""###);
}
#[test]
@ -175,14 +175,14 @@ fn test_op_log_template() {
let render = |template| test_env.jj_cmd_success(&repo_path, &["op", "log", "-T", template]);
insta::assert_snapshot!(render(r#"id ++ "\n""#), @r###"
@ 19b8089fc78b7c49171f3c8934248be6f89f52311005e961cab5780f9f138b142456d77b27d223d7ee84d21d8c30c4a80100eaf6735b548b1acd0da688f94c80
f1c462c494be39f6690928603c5393f908866bc8d81d8cd1ae0bb2ea02cb4f78cafa47165fa5b7cda258e2178f846881de199066991960a80954ba6066ba0821
@ d50e0e495b10b2575e2a9b914fed7045e427254baec051bc769d22f607f2f55f09788e6f6c9e12a4e56ec994ac7a63decb1fa7e353b2c8fd20ce77760777283d
23b83cc0392f51ef6b548af289e54efbe8661fe88ab5d8c036444d8c4ec798fb9d4d6063f02c4db768b17bde4608bcae2d75ba79da91ddd59dc57a23d45b5c57
"###);
insta::assert_snapshot!(
render(r#"separate(" ", id.short(5), current_operation, user,
time.start(), time.end(), time.duration()) ++ "\n""#), @r###"
@ 19b80 true test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 2001-02-03 04:05:07.000 +07:00 less than a microsecond
f1c46 false test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 2001-02-03 04:05:07.000 +07:00 less than a microsecond
@ d50e0 true test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 2001-02-03 04:05:07.000 +07:00 less than a microsecond
23b83 false test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 2001-02-03 04:05:07.000 +07:00 less than a microsecond
"###);
// Negative length shouldn't cause panic (and is clamped.)
@ -204,9 +204,9 @@ fn test_op_log_template() {
let regex = Regex::new(r"\d\d years").unwrap();
let stdout = test_env.jj_cmd_success(&repo_path, &["op", "log"]);
insta::assert_snapshot!(regex.replace_all(&stdout, "NN years"), @r###"
@ 19b8089fc78b test-username@host.example.com NN years ago, lasted less than a microsecond
@ d50e0e495b10 test-username@host.example.com NN years ago, lasted less than a microsecond
add workspace 'default'
f1c462c494be test-username@host.example.com NN years ago, lasted less than a microsecond
23b83cc0392f test-username@host.example.com NN years ago, lasted less than a microsecond
initialize repo
"###);
}
@ -220,24 +220,24 @@ fn test_op_log_builtin_templates() {
test_env.jj_cmd_ok(&repo_path, &["describe", "-m", "description 0"]);
insta::assert_snapshot!(render(r#"builtin_op_log_compact"#), @r###"
@ 98f7262e4a06 test-username@host.example.com 2001-02-03 04:05:08.000 +07:00 - 2001-02-03 04:05:08.000 +07:00
@ 745ab9998f2f test-username@host.example.com 2001-02-03 04:05:08.000 +07:00 - 2001-02-03 04:05:08.000 +07:00
describe commit 230dd059e1b059aefc0da06a2e5a7dbf22362f22
args: jj describe -m 'description 0'
19b8089fc78b test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
d50e0e495b10 test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
add workspace 'default'
f1c462c494be test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
23b83cc0392f test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
initialize repo
"###);
insta::assert_snapshot!(render(r#"builtin_op_log_comfortable"#), @r###"
@ 98f7262e4a06 test-username@host.example.com 2001-02-03 04:05:08.000 +07:00 - 2001-02-03 04:05:08.000 +07:00
@ 745ab9998f2f test-username@host.example.com 2001-02-03 04:05:08.000 +07:00 - 2001-02-03 04:05:08.000 +07:00
describe commit 230dd059e1b059aefc0da06a2e5a7dbf22362f22
args: jj describe -m 'description 0'
19b8089fc78b test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
d50e0e495b10 test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
add workspace 'default'
f1c462c494be test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
23b83cc0392f test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
initialize repo
"###);
@ -264,18 +264,18 @@ fn test_op_log_word_wrap() {
// ui.log-word-wrap option works
insta::assert_snapshot!(render(&["op", "log"], 40, false), @r###"
@ 19b8089fc78b test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
@ d50e0e495b10 test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
add workspace 'default'
f1c462c494be test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
23b83cc0392f test-username@host.example.com 2001-02-03 04:05:07.000 +07:00 - 2001-02-03 04:05:07.000 +07:00
initialize repo
"###);
insta::assert_snapshot!(render(&["op", "log"], 40, true), @r###"
@ 19b8089fc78b
@ d50e0e495b10
test-username@host.example.com
2001-02-03 04:05:07.000 +07:00 -
2001-02-03 04:05:07.000 +07:00
add workspace 'default'
f1c462c494be
23b83cc0392f
test-username@host.example.com
2001-02-03 04:05:07.000 +07:00 -
2001-02-03 04:05:07.000 +07:00

View file

@ -174,14 +174,14 @@ fn test_workspaces_conflicting_edits() {
"###);
let stderr = test_env.jj_cmd_failure(&secondary_path, &["st"]);
insta::assert_snapshot!(stderr, @r###"
Error: The working copy is stale (not updated since operation 5c95db542ebd).
Error: The working copy is stale (not updated since operation d47ba8bebf5d).
Hint: Run `jj workspace update-stale` to update it.
See https://github.com/martinvonz/jj/blob/main/docs/working-copy.md#stale-working-copy for more information.
"###);
// Same error on second run, and from another command
let stderr = test_env.jj_cmd_failure(&secondary_path, &["log"]);
insta::assert_snapshot!(stderr, @r###"
Error: The working copy is stale (not updated since operation 5c95db542ebd).
Error: The working copy is stale (not updated since operation d47ba8bebf5d).
Hint: Run `jj workspace update-stale` to update it.
See https://github.com/martinvonz/jj/blob/main/docs/working-copy.md#stale-working-copy for more information.
"###);
@ -261,7 +261,7 @@ fn test_workspaces_updated_by_other() {
"###);
let stderr = test_env.jj_cmd_failure(&secondary_path, &["st"]);
insta::assert_snapshot!(stderr, @r###"
Error: The working copy is stale (not updated since operation 5c95db542ebd).
Error: The working copy is stale (not updated since operation d47ba8bebf5d).
Hint: Run `jj workspace update-stale` to update it.
See https://github.com/martinvonz/jj/blob/main/docs/working-copy.md#stale-working-copy for more information.
"###);

View file

@ -189,8 +189,9 @@ content_hash! {
pub head_ids: HashSet<CommitId>,
/// Heads of the set of public commits.
pub public_head_ids: HashSet<CommitId>,
pub branches: BTreeMap<String, BranchTarget>,
pub local_branches: BTreeMap<String, RefTarget>,
pub tags: BTreeMap<String, RefTarget>,
pub remote_views: BTreeMap<String, RemoteView>,
pub git_refs: BTreeMap<String, RefTarget>,
/// The commit the Git HEAD points to.
// TODO: Support multiple Git worktrees?
@ -213,7 +214,6 @@ content_hash! {
}
/// Iterates pair of local and remote branches by branch name.
#[allow(dead_code)] // TODO
pub(crate) fn merge_join_branch_views<'a>(
local_branches: &'a BTreeMap<String, RefTarget>,
remote_views: &'a BTreeMap<String, RemoteView>,
@ -251,7 +251,6 @@ pub(crate) fn merge_join_branch_views<'a>(
}
/// Iterates branch `((name, remote_name), target)`s in lexicographical order.
#[allow(dead_code)] // TODO
pub(crate) fn flatten_remote_branches(
remote_views: &BTreeMap<String, RemoteView>,
) -> impl Iterator<Item = ((&str, &str), &RefTarget)> {

View file

@ -34,7 +34,7 @@ pub fn diff_named_refs<'a, 'b, K: Ord>(
/// Iterates `refs1` and `refs2` target pairs by name.
///
/// `refs1` and `refs2` must be sorted by `K`.
fn iter_named_ref_pairs<'a, 'b, K: Ord>(
pub fn iter_named_ref_pairs<'a, 'b, K: Ord>(
refs1: impl IntoIterator<Item = (K, &'a RefTarget)>,
refs2: impl IntoIterator<Item = (K, &'b RefTarget)>,
) -> impl Iterator<Item = (K, (&'a RefTarget, &'b RefTarget))> {

View file

@ -2028,14 +2028,13 @@ fn resolve_remote_branch(repo: &dyn Repo, name: &str, remote: &str) -> Option<Ve
fn collect_branch_symbols(repo: &dyn Repo, include_synced_remotes: bool) -> Vec<String> {
let view = repo.view();
view.branches()
.iter()
.flat_map(|(name, branch_target)| {
let local_target = &branch_target.local_target;
let local_target = branch_target.local_target;
let local_symbol = local_target.is_present().then(|| name.to_owned());
let remote_symbols = branch_target
.remote_targets
.iter()
.filter(move |&(_, target)| include_synced_remotes || target != local_target)
.into_iter()
.filter(move |(_, target)| include_synced_remotes || *target != local_target)
.map(move |(remote_name, _)| format!("{name}@{remote_name}"));
local_symbol.into_iter().chain(remote_symbols)
})
@ -2191,8 +2190,8 @@ fn resolve_commit_ref(
RevsetCommitRef::Root => Ok(vec![repo.store().root_commit_id().clone()]),
RevsetCommitRef::Branches(pattern) => {
let view_data = repo.view().store_view();
let commit_ids = filter_map_values_by_key_pattern(&view_data.branches, pattern)
.flat_map(|branch_target| branch_target.local_target.added_ids())
let commit_ids = filter_map_values_by_key_pattern(&view_data.local_branches, pattern)
.flat_map(|target| target.added_ids())
.cloned()
.collect();
Ok(commit_ids)
@ -2202,13 +2201,14 @@ fn resolve_commit_ref(
remote_pattern,
} => {
let view_data = repo.view().store_view();
let commit_ids = filter_map_values_by_key_pattern(&view_data.branches, branch_pattern)
.flat_map(|branch_target| {
filter_map_values_by_key_pattern(&branch_target.remote_targets, remote_pattern)
})
.flat_map(|remote_target| remote_target.added_ids())
.cloned()
.collect();
let commit_ids =
filter_map_values_by_key_pattern(&view_data.remote_views, remote_pattern)
.flat_map(|remote_view| {
filter_map_values_by_key_pattern(&remote_view.branches, branch_pattern)
})
.flat_map(|remote_ref| remote_ref.target.added_ids())
.cloned()
.collect();
Ok(commit_ids)
}
RevsetCommitRef::Tags => {

View file

@ -29,8 +29,8 @@ use crate::content_hash::blake2b_hash;
use crate::file_util::persist_content_addressed_temp_file;
use crate::merge::Merge;
use crate::op_store::{
BranchTarget, OpStore, OpStoreError, OpStoreResult, Operation, OperationId, OperationMetadata,
RefTarget, RemoteRef, RemoteView, View, ViewId, WorkspaceId,
OpStore, OpStoreError, OpStoreResult, Operation, OperationId, OperationMetadata, RefTarget,
RemoteRef, RemoteView, View, ViewId, WorkspaceId,
};
use crate::{git, op_store};
@ -259,23 +259,7 @@ fn view_to_proto(view: &View) -> crate::protos::op_store::View {
proto.public_head_ids.push(head_id.to_bytes());
}
for (name, target) in &view.branches {
let mut branch_proto = crate::protos::op_store::Branch {
name: name.clone(),
..Default::default()
};
branch_proto.name = name.clone();
branch_proto.local_target = ref_target_to_proto(&target.local_target);
for (remote_name, target) in &target.remote_targets {
branch_proto
.remote_branches
.push(crate::protos::op_store::RemoteBranch {
remote_name: remote_name.clone(),
target: ref_target_to_proto(target),
});
}
proto.branches.push(branch_proto);
}
proto.branches = branch_views_to_proto_legacy(&view.local_branches, &view.remote_views);
for (name, target) in &view.tags {
proto.tags.push(crate::protos::op_store::Tag {
@ -317,25 +301,9 @@ fn view_from_proto(proto: crate::protos::op_store::View) -> View {
view.public_head_ids.insert(CommitId::new(head_id_bytes));
}
for branch_proto in proto.branches {
let local_target = ref_target_from_proto(branch_proto.local_target);
let mut remote_targets = BTreeMap::new();
for remote_branch in branch_proto.remote_branches {
remote_targets.insert(
remote_branch.remote_name,
ref_target_from_proto(remote_branch.target),
);
}
view.branches.insert(
branch_proto.name.clone(),
BranchTarget {
local_target,
remote_targets,
},
);
}
let (local_branches, remote_views) = branch_views_from_proto_legacy(proto.branches);
view.local_branches = local_branches;
view.remote_views = remote_views;
for tag_proto in proto.tags {
view.tags
@ -366,7 +334,6 @@ fn view_from_proto(proto: crate::protos::op_store::View) -> View {
view
}
#[allow(dead_code)] // TODO
fn branch_views_to_proto_legacy(
local_branches: &BTreeMap<String, RefTarget>,
remote_views: &BTreeMap<String, RemoteView>,
@ -393,7 +360,6 @@ fn branch_views_to_proto_legacy(
.collect()
}
#[allow(dead_code)] // TODO
fn branch_views_from_proto_legacy(
branches_legacy: Vec<crate::protos::op_store::Branch>,
) -> (BTreeMap<String, RefTarget>, BTreeMap<String, RemoteView>) {
@ -424,21 +390,18 @@ fn migrate_git_refs_to_remote(view: &mut View) {
}
tracing::info!("migrating Git-tracking branches");
for branch_target in view.branches.values_mut() {
branch_target
.remote_targets
.remove(git::REMOTE_NAME_FOR_LOCAL_GIT_REPO);
}
let mut git_view = RemoteView::default();
for (full_name, target) in &view.git_refs {
if let Some(name) = full_name.strip_prefix("refs/heads/") {
assert!(!name.is_empty());
let branch_target = view.branches.entry(name.to_owned()).or_default();
branch_target.remote_targets.insert(
git::REMOTE_NAME_FOR_LOCAL_GIT_REPO.to_owned(),
target.clone(),
);
let remote_ref = RemoteRef {
target: target.clone(),
};
git_view.branches.insert(name.to_owned(), remote_ref);
}
}
view.remote_views
.insert(git::REMOTE_NAME_FOR_LOCAL_GIT_REPO.to_owned(), git_view);
// jj < 0.9 might have imported refs from remote named "git"
let reserved_git_ref_prefix = format!("refs/remotes/{}/", git::REMOTE_NAME_FOR_LOCAL_GIT_REPO);
@ -529,9 +492,12 @@ mod tests {
use super::*;
use crate::backend::{CommitId, MillisSinceEpoch, ObjectId, Timestamp};
use crate::content_hash::blake2b_hash;
use crate::op_store::{BranchTarget, OperationMetadata, RefTarget, WorkspaceId};
use crate::op_store::{OperationMetadata, RefTarget, WorkspaceId};
fn create_view() -> View {
let remote_ref = |target: &RefTarget| RemoteRef {
target: target.clone(),
};
let head_id1 = CommitId::from_hex("aaa111");
let head_id2 = CommitId::from_hex("aaa222");
let public_head_id1 = CommitId::from_hex("bbb444");
@ -550,23 +516,20 @@ mod tests {
View {
head_ids: hashset! {head_id1, head_id2},
public_head_ids: hashset! {public_head_id1, public_head_id2},
branches: btreemap! {
"main".to_string() => BranchTarget {
local_target: branch_main_local_target,
remote_targets: btreemap! {
"origin".to_string() => branch_main_origin_target,
},
},
"deleted".to_string() => BranchTarget {
local_target: RefTarget::absent(),
remote_targets: btreemap! {
"origin".to_string() => branch_deleted_origin_target,
},
},
local_branches: btreemap! {
"main".to_string() => branch_main_local_target,
},
tags: btreemap! {
"v1.0".to_string() => tag_v1_target,
},
remote_views: btreemap! {
"origin".to_string() => RemoteView {
branches: btreemap! {
"main".to_string() => remote_ref(&branch_main_origin_target),
"deleted".to_string() => remote_ref(&branch_deleted_origin_target),
},
},
},
git_refs: btreemap! {
"refs/heads/main".to_string() => git_refs_main_target,
"refs/heads/feature".to_string() => git_refs_feature_target,
@ -611,7 +574,7 @@ mod tests {
// Test exact output so we detect regressions in compatibility
assert_snapshot!(
ViewId::new(blake2b_hash(&create_view()).to_vec()).hex(),
@"3c1c6efecfc0809130a5bf139aec77e6299cd7d5985b95c01a29318d40a5e2defc9bd12329e91511e545fbad065f60ce5da91f5f0368c9bf549ca761bb047f7e"
@"6ef5f01bb0bd239670c79966753c6af9ce18694bba1d5dbd1aa82de7f5c421dfc3bf91c1608eec480ae8e244093485bcff3e95db7acdc3958c7a8ead7a453b54"
);
}
@ -697,6 +660,9 @@ mod tests {
#[test]
fn test_migrate_git_refs_remote_named_git() {
let normal_ref_target = |id_hex: &str| RefTarget::normal(CommitId::from_hex(id_hex));
let normal_remote_ref = |id_hex: &str| RemoteRef {
target: normal_ref_target(id_hex),
};
let branch_to_proto =
|name: &str, local_ref_target, remote_branches| crate::protos::op_store::Branch {
name: name.to_owned(),
@ -734,13 +700,22 @@ mod tests {
let view = view_from_proto(proto);
assert_eq!(
view.branches,
view.local_branches,
btreemap! {
"main".to_owned() => BranchTarget {
local_target: normal_ref_target("111111"),
remote_targets: btreemap! {
"git".to_owned() => normal_ref_target("444444"), // refs/heads/main
"gita".to_owned() => normal_ref_target("333333"),
"main".to_owned() => normal_ref_target("111111"),
},
);
assert_eq!(
view.remote_views,
btreemap! {
"git".to_owned() => RemoteView {
branches: btreemap! {
"main".to_owned() => normal_remote_ref("444444"), // refs/heads/main
},
},
"gita".to_owned() => RemoteView {
branches: btreemap! {
"main".to_owned() => normal_remote_ref("333333"),
},
},
},
@ -758,7 +733,7 @@ mod tests {
assert!(proto.has_git_refs_migrated_to_remote);
proto.branches.clear();
let view = view_from_proto(proto);
assert!(view.branches.is_empty());
assert!(!view.remote_views.contains_key("git"));
}
#[test]

View file

@ -22,8 +22,8 @@ use itertools::Itertools;
use crate::backend::CommitId;
use crate::index::Index;
use crate::op_store;
use crate::op_store::{BranchTarget, RefTarget, RefTargetOptionExt as _, WorkspaceId};
use crate::refs::{merge_ref_targets, TrackingRefPair};
use crate::op_store::{BranchTarget, RefTarget, RefTargetOptionExt as _, RemoteRef, WorkspaceId};
use crate::refs::{iter_named_ref_pairs, merge_ref_targets, TrackingRefPair};
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash, Debug)]
pub enum RefName {
@ -86,8 +86,9 @@ impl View {
&self.data.public_head_ids
}
pub fn branches(&self) -> &BTreeMap<String, BranchTarget> {
&self.data.branches
/// Iterates pair of local and remote branches by branch name.
pub fn branches(&self) -> impl Iterator<Item = (&str, BranchTarget)> {
op_store::merge_join_branch_views(&self.data.local_branches, &self.data.remote_views)
}
pub fn tags(&self) -> &BTreeMap<String, RefTarget> {
@ -151,68 +152,58 @@ impl View {
/// Returns true if any local or remote branch of the given `name` exists.
#[must_use]
pub fn has_branch(&self, name: &str) -> bool {
self.data.branches.contains_key(name)
self.data.local_branches.contains_key(name)
|| self
.data
.remote_views
.values()
.any(|remote_view| remote_view.branches.contains_key(name))
}
// TODO: maybe rename to forget_branch() because this seems unusual operation?
pub fn remove_branch(&mut self, name: &str) {
self.data.branches.remove(name);
self.data.local_branches.remove(name);
for remote_view in self.data.remote_views.values_mut() {
remote_view.branches.remove(name);
}
}
/// Iterates local branch `(name, target)`s in lexicographical order.
pub fn local_branches(&self) -> impl Iterator<Item = (&str, &RefTarget)> {
self.data
.branches
.local_branches
.iter()
.map(|(name, branch_target)| (name.as_ref(), &branch_target.local_target))
.filter(|(_, target)| target.is_present())
.map(|(name, target)| (name.as_ref(), target))
}
pub fn get_local_branch(&self, name: &str) -> &RefTarget {
if let Some(branch_target) = self.data.branches.get(name) {
&branch_target.local_target
} else {
RefTarget::absent_ref()
}
self.data.local_branches.get(name).flatten()
}
/// Sets local branch to point to the given target. If the target is absent,
/// and if no associated remote branches exist, the branch will be removed.
pub fn set_local_branch_target(&mut self, name: &str, target: RefTarget) {
if target.is_present() {
self.insert_local_branch(name.to_owned(), target);
self.data.local_branches.insert(name.to_owned(), target);
} else {
self.remove_local_branch(name);
}
}
fn insert_local_branch(&mut self, name: String, target: RefTarget) {
assert!(target.is_present());
self.data.branches.entry(name).or_default().local_target = target;
}
fn remove_local_branch(&mut self, name: &str) {
if let Some(branch) = self.data.branches.get_mut(name) {
branch.local_target = RefTarget::absent();
if branch.remote_targets.is_empty() {
self.remove_branch(name);
}
self.data.local_branches.remove(name);
}
}
/// Iterates remote branch `((name, remote_name), target)`s in
/// lexicographical order.
pub fn remote_branches(&self) -> impl Iterator<Item = ((&str, &str), &RefTarget)> {
self.data.branches.iter().flat_map(|(name, branch_target)| {
branch_target
.remote_targets
.iter()
.map(|(remote_name, target)| ((name.as_ref(), remote_name.as_ref()), target))
})
// TODO: maybe yield RemoteRef instead of RefTarget?
op_store::flatten_remote_branches(&self.data.remote_views)
}
pub fn get_remote_branch(&self, name: &str, remote_name: &str) -> &RefTarget {
if let Some(branch_target) = self.data.branches.get(name) {
let maybe_target = branch_target.remote_targets.get(remote_name);
// TODO: maybe return RemoteRef instead of RefTarget?
if let Some(remote_view) = self.data.remote_views.get(remote_name) {
let maybe_target = remote_view
.branches
.get(name)
.map(|remote_ref| &remote_ref.target);
maybe_target.flatten()
} else {
RefTarget::absent_ref()
@ -223,28 +214,15 @@ impl View {
/// is absent, the branch will be removed.
pub fn set_remote_branch_target(&mut self, name: &str, remote_name: &str, target: RefTarget) {
if target.is_present() {
self.insert_remote_branch(name.to_owned(), remote_name.to_owned(), target);
} else {
self.remove_remote_branch(name, remote_name);
}
}
fn insert_remote_branch(&mut self, name: String, remote_name: String, target: RefTarget) {
assert!(target.is_present());
self.data
.branches
.entry(name)
.or_default()
.remote_targets
.insert(remote_name, target);
}
fn remove_remote_branch(&mut self, name: &str, remote_name: &str) {
if let Some(branch) = self.data.branches.get_mut(name) {
branch.remote_targets.remove(remote_name);
if branch.remote_targets.is_empty() && branch.local_target.is_absent() {
self.remove_branch(name);
}
let remote_ref = RemoteRef { target }; // TODO: preserve or reset tracking flag?
let remote_view = self
.data
.remote_views
.entry(remote_name.to_owned())
.or_default();
remote_view.branches.insert(name.to_owned(), remote_ref);
} else if let Some(remote_view) = self.data.remote_views.get_mut(remote_name) {
remote_view.branches.remove(name);
}
}
@ -252,44 +230,38 @@ impl View {
/// in lexicographical order.
pub fn local_remote_branches<'a>(
&'a self,
remote_name: &'a str, // TODO: migrate to per-remote view and remove 'a
remote_name: &str,
) -> impl Iterator<Item = (&'a str, TrackingRefPair<'a>)> + 'a {
// TODO: maybe untracked remote_target can be translated to absent, and rename
// TODO: maybe untracked remote target can be translated to absent, and rename
// the method accordingly.
self.data
.branches
.iter()
.filter_map(move |(name, branch_target)| {
let local_target = &branch_target.local_target;
let remote_target = branch_target.remote_targets.get(remote_name).flatten();
(local_target.is_present() || remote_target.is_present()).then_some((
name.as_ref(),
TrackingRefPair {
local_target,
remote_target,
},
))
let maybe_remote_view = self.data.remote_views.get(remote_name);
let remote_branches = maybe_remote_view
.map(|remote_view| {
remote_view
.branches
.iter()
.map(|(name, remote_ref)| (name, &remote_ref.target))
})
.into_iter()
.flatten();
iter_named_ref_pairs(&self.data.local_branches, remote_branches).map(
|(name, (local_target, remote_target))| {
let targets = TrackingRefPair {
local_target,
remote_target,
};
(name.as_ref(), targets)
},
)
}
pub fn remove_remote(&mut self, remote_name: &str) {
let mut branches_to_delete = vec![];
for (branch, target) in &self.data.branches {
if target.remote_targets.contains_key(remote_name) {
branches_to_delete.push(branch.clone());
}
}
for branch in branches_to_delete {
self.remove_remote_branch(&branch, remote_name);
}
self.data.remote_views.remove(remote_name);
}
pub fn rename_remote(&mut self, old: &str, new: &str) {
for branch in self.data.branches.values_mut() {
let target = branch.remote_targets.remove(old).flatten();
if target.is_present() {
branch.remote_targets.insert(new.to_owned(), target);
}
if let Some(remote_view) = self.data.remote_views.remove(old) {
self.data.remote_views.insert(new.to_owned(), remote_view);
}
}
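
To make the behavior change from the commit message concrete, here is a rough standalone sketch using the same simplified stand-ins (not the real View API): setting the last remote branch of a remote to an absent target now removes only the branch entry, leaving an empty RemoteView behind, whereas the old remove_remote_branch() dropped the whole BranchTarget once both sides were gone.

use std::collections::BTreeMap;

#[derive(Clone, Default)]
struct RemoteRef {
    target: String, // a RefTarget in the real code
}

#[derive(Clone, Default)]
struct RemoteView {
    branches: BTreeMap<String, RemoteRef>,
}

fn main() {
    let mut remote_views: BTreeMap<String, RemoteView> = BTreeMap::new();

    // Track one branch on "origin", as set_remote_branch_target() does for
    // a present target.
    remote_views
        .entry("origin".to_owned())
        .or_default()
        .branches
        .insert("main".to_owned(), RemoteRef { target: "abc123".to_owned() });

    // Set the same branch to an absent target: only the branch entry goes
    // away; the now-empty "origin" view is intentionally kept.
    if let Some(remote_view) = remote_views.get_mut("origin") {
        remote_view.branches.remove("main");
    }

    assert!(remote_views.contains_key("origin"));
    assert!(remote_views["origin"].branches.is_empty());
}

Two in-memory views that differ only by such an empty entry would serialize identically but content-hash differently, which appears to be the non-unique content hash caveat noted in the commit message.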

View file

@ -120,7 +120,7 @@ fn test_import_refs() {
};
assert_eq!(*view.heads(), expected_heads);
assert_eq!(view.branches().len(), 4);
assert_eq!(view.branches().count(), 4);
assert_eq!(
view.get_local_branch("main"),
&RefTarget::normal(jj_id(&commit2))
@ -257,7 +257,7 @@ fn test_import_refs_reimport() {
};
assert_eq!(*view.heads(), expected_heads);
assert_eq!(view.branches().len(), 2);
assert_eq!(view.branches().count(), 2);
let commit1_target = RefTarget::normal(jj_id(&commit1));
let commit2_target = RefTarget::normal(jj_id(&commit2));
assert_eq!(
@ -453,7 +453,7 @@ fn test_import_refs_reimport_with_deleted_remote_ref() {
};
let view = repo.view();
assert_eq!(*view.heads(), expected_heads);
assert_eq!(view.branches().len(), 3);
assert_eq!(view.branches().count(), 3);
// Even though the git repo does not have a local branch for
// `feature-remote-only`, jj creates one. This follows the model explained
// in docs/branches.md.
@ -495,7 +495,7 @@ fn test_import_refs_reimport_with_deleted_remote_ref() {
let view = repo.view();
// The local branches were indeed deleted
assert_eq!(view.branches().len(), 2);
assert_eq!(view.branches().count(), 2);
assert!(view.has_branch("main"));
assert!(!view.has_branch("feature-remote-only"));
assert!(view
@ -557,7 +557,7 @@ fn test_import_refs_reimport_with_moved_remote_ref() {
};
let view = repo.view();
assert_eq!(*view.heads(), expected_heads);
assert_eq!(view.branches().len(), 3);
assert_eq!(view.branches().count(), 3);
// Even though the git repo does not have a local branch for
// `feature-remote-only`, jj creates one. This follows the model explained
// in docs/branches.md.
@ -608,7 +608,7 @@ fn test_import_refs_reimport_with_moved_remote_ref() {
let repo = tx.commit();
let view = repo.view();
assert_eq!(view.branches().len(), 3);
assert_eq!(view.branches().count(), 3);
// The local branches are moved
assert_eq!(
view.get_local_branch("feature-remote-only"),
@ -813,7 +813,7 @@ fn test_import_some_refs() {
// Check that branches feature[1-4] have been locally imported and are known to
// be present on origin as well.
assert_eq!(view.branches().len(), 4);
assert_eq!(view.branches().count(), 4);
let commit_feat1_target = RefTarget::normal(jj_id(&commit_feat1));
let commit_feat2_target = RefTarget::normal(jj_id(&commit_feat2));
let commit_feat3_target = RefTarget::normal(jj_id(&commit_feat3));
@ -871,7 +871,7 @@ fn test_import_some_refs() {
// feature2 and feature4 will still be heads, and all four branches should be
// present.
let view = repo.view();
assert_eq!(view.branches().len(), 4);
assert_eq!(view.branches().count(), 4);
assert_eq!(*view.heads(), expected_heads);
// Import feature1: this should cause the branch to be deleted, but the
@ -888,7 +888,7 @@ fn test_import_some_refs() {
// feature2 and feature4 should still be the heads, and all three branches
// feature2, feature3, and feature4 should exist.
let view = repo.view();
assert_eq!(view.branches().len(), 3);
assert_eq!(view.branches().count(), 3);
assert_eq!(*view.heads(), expected_heads);
// Import feature3: this should cause the branch to be deleted, but
@ -905,7 +905,7 @@ fn test_import_some_refs() {
// feature2 and feature4 should still be the heads, and both branches
// should exist.
let view = repo.view();
assert_eq!(view.branches().len(), 2);
assert_eq!(view.branches().count(), 2);
assert_eq!(*view.heads(), expected_heads);
// Import feature4: both the head and the branch will disappear.
@ -920,7 +920,7 @@ fn test_import_some_refs() {
// feature2 should now be the only head and only branch.
let view = repo.view();
assert_eq!(view.branches().len(), 1);
assert_eq!(view.branches().count(), 1);
let expected_heads = hashset! {
jj_id(&commit_feat2),
};
@ -993,7 +993,7 @@ fn test_import_refs_empty_git_repo() {
.unwrap();
let repo = tx.commit();
assert_eq!(*repo.view().heads(), heads_before);
assert_eq!(repo.view().branches().len(), 0);
assert_eq!(repo.view().branches().count(), 0);
assert_eq!(repo.view().tags().len(), 0);
assert_eq!(repo.view().git_refs().len(), 0);
assert_eq!(repo.view().git_head(), RefTarget::absent_ref());
@ -1757,7 +1757,7 @@ fn test_fetch_empty_repo() {
assert_eq!(stats.default_branch, None);
assert!(stats.import_stats.abandoned_commits.is_empty());
assert_eq!(*tx.mut_repo().view().git_refs(), btreemap! {});
assert_eq!(*tx.mut_repo().view().branches(), btreemap! {});
assert_eq!(tx.mut_repo().view().branches().count(), 0);
}
#[test]
@ -1793,9 +1793,9 @@ fn test_fetch_initial_commit() {
}
);
assert_eq!(
*view.branches(),
view.branches().collect::<BTreeMap<_, _>>(),
btreemap! {
"main".to_string() => BranchTarget {
"main" => BranchTarget {
local_target: initial_commit_target.clone(),
remote_targets: btreemap! {
"origin".to_string() => initial_commit_target,
@ -1864,9 +1864,9 @@ fn test_fetch_success() {
}
);
assert_eq!(
*view.branches(),
view.branches().collect::<BTreeMap<_, _>>(),
btreemap! {
"main".to_string() => BranchTarget {
"main" => BranchTarget {
local_target: new_commit_target.clone(),
remote_targets: btreemap! {
"origin".to_string() => new_commit_target.clone(),

View file

@ -12,6 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use jj_lib::op_store::{BranchTarget, RefTarget, WorkspaceId};
use jj_lib::repo::Repo;
use maplit::{btreemap, hashset};
@ -305,10 +307,10 @@ fn test_merge_views_branches() {
remote_targets: btreemap! {},
};
assert_eq!(
repo.view().branches(),
&btreemap! {
"main".to_string() => expected_main_branch,
"feature".to_string() => expected_feature_branch,
repo.view().branches().collect::<BTreeMap<_, _>>(),
btreemap! {
"main" => expected_main_branch,
"feature" => expected_feature_branch,
}
);
}